-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
3 changed files
with
279 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,2 +1,32 @@ | ||
# nano-stats-recorder | ||
Recorder Scripts for Nanocurrency Node Stats | ||
# Nano Stats Recorder | ||
Python scripts to record Nanocurrency node stats - used by https://nano-faucet.org/beta/chart/ | ||
- Confirmation History | ||
- Block Count | ||
- Unchecked Count | ||
- Cemented Count | ||
- Active Confirmation Count | ||
- Confirmation Height Processor Count | ||
- Active Difficulty Multiplier | ||
|
||
# Install | ||
These scripts use the requests library as well as a few others. Install any missing libraries | ||
|
||
`pip install requests` | ||
|
||
# Usage | ||
Confirmation History is pulled from the confirmation_history RPC. The script by default runs every 10 seconds and saves to a file every 60 seconds. The file save amount should be in multiples of the RPC delay. The process will save to a file with the date in the filename (eg. confirmation_history_2019-06-29.json). | ||
|
||
It will read the contents of any file that matches the filename and combine the results for that day. | ||
|
||
After quitting the execution (eg. Ctrl-C) it will save the current run to confirmation_history.json. When the script is started again it will automatically rename confirmation_history.json to include the timestamp at the end to preserve prior attempts. | ||
|
||
`confirmation_history.py` | ||
|
||
Node Stats will collect other statistical measures to help with plotting the node performance. The script by default checks every 15 seconds and saves to a file every 60 seconds. The file save amount should be in multiples of the RPC delay. The process will save to a file with the date in the filename (eg. stats_2019-06-29.json). | ||
|
||
It will read the contents of any file that matches the filename and combine the results for that day. | ||
|
||
After quitting the execution (eg. Ctrl-C) it will save the current run to stats.json. When the script is started again it will automatically rename stats.json to include the timestamp at the end to preserve prior attempts. | ||
|
||
`node_stats.py` | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,121 @@ | ||
#!/usr/bin/env python3 | ||
|
||
import asyncio | ||
import argparse | ||
import json | ||
import csv | ||
import os | ||
# importing the requests library | ||
# pip install requests | ||
import requests | ||
import json | ||
import time | ||
import datetime | ||
from collections import defaultdict | ||
from sys import exit | ||
from time import sleep | ||
from datetime import datetime | ||
|
||
# Command-line configuration for the node-stats recorder.
parser = argparse.ArgumentParser()
parser.add_argument('-host', '--node_url', type=str, help='Nano node url', default='localhost')
parser.add_argument('-port', '--node_port', type=str, help='Nano node port', default='55000')
parser.add_argument('-save', '--save', type=int, help='Save blocks to disk how often (in seconds) should be multiple of --delay', default=60)
parser.add_argument('-delay', '--delay', type=int, help='recorder delay (in seconds)', default=15)
args = parser.parse_args()

json_data = []  # in-memory list of stats records for the current UTC day
timeString = datetime.utcnow().strftime("%Y-%m-%d")
filename = 'stats_'+timeString+'.json'
# Archive the output of the previous run: stats.json (written on shutdown)
# gets a timestamp suffix so it is never overwritten by the next shutdown.
try:
    os.rename('stats.json', 'stats.json.'+datetime.utcnow().strftime("%Y%m%d%H%M%S"))
    print('Renaming stats.json ...')
except OSError:
    # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt/
    # SystemExit). Covers FileNotFoundError (no prior run) and permission errors.
    print('stats.json does not exist, create new file ...')
|
||
def writeBkup():
    """Serialize the accumulated records (module global ``json_data``) to the
    current day's file (module global ``filename``).

    Failures are reported and ignored so the recorder keeps collecting in
    memory even when the disk is temporarily unavailable.
    """
    global json_data
    global filename
    print('Writing to '+filename+' ...')
    try:
        with open(filename, 'w') as jsonfile:
            jsonfile.write(json.dumps(json_data))
    except OSError:
        # Narrowed from a bare `except:`: only I/O problems (disk full,
        # permissions, ...) should be swallowed here, not e.g. KeyboardInterrupt.
        print('unable to write to file ...')
|
||
async def main():
    """Poll the node RPC every ``args.delay`` seconds and append one stats
    record per iteration to the module-global ``json_data``.

    Records are flushed to the dated file every ``args.save`` seconds and the
    output file rolls over when the UTC date changes.
    """
    global json_data
    global filename
    global timeString
    # Resume today's data if a file for the current date already exists.
    try:
        with open(filename) as jsonfile:
            json_data = json.load(jsonfile)
    except (OSError, ValueError):
        print(filename+' does not exist, create new file ...')
    loop_count = 0
    # api-endpoint
    URL = "http://"+args.node_url+":"+args.node_port
    print("Connecting to: "+URL)

    # RPC request payloads sent each iteration.
    data1 = {'action': 'active_difficulty'}
    data2 = {'action': 'confirmation_active'}
    data3 = {'action': 'stats', 'type': 'objects'}
    data4 = {'action': 'block_count', 'include_cemented': 'true'}

    # Flush to disk every N loops. Guard with max(1, ...): a --save smaller
    # than --delay previously made the modulus zero -> ZeroDivisionError.
    save_every = max(1, round(args.save / args.delay))

    while 1:
        # Roll the output file over when the UTC date changes.
        filename2 = 'stats_'+datetime.utcnow().strftime("%Y-%m-%d")+'.json'
        if filename2 != filename:
            writeBkup()
            timeString = datetime.utcnow().strftime("%Y-%m-%d")
            json_data = []
            filename = filename2
        loop_count += 1
        currentTime = time.time()
        # sending the requests and saving the decoded JSON responses
        try:
            r = requests.post(url=URL, json=data1)
            r2 = requests.post(url=URL, json=data2)
            r3 = requests.post(url=URL, json=data3)
            r4 = requests.post(url=URL, json=data4)
            response = r.json()
            response2 = r2.json()
            response3 = r3.json()
            response4 = r4.json()
        except (requests.RequestException, ValueError):
            print("Error connecting to RPC server. Make sure you have enabled it in ~/Nano/config.json and check "
                  "./sample_client.py --help")
            # BUG FIX: the original fell through here and then referenced the
            # undefined response objects (NameError on first failure, or stale
            # data on later ones). Skip this iteration instead.
            sleep(args.delay)
            continue
        try:
            data = {}
            data['timestamp'] = str(time.time())
            data['confirmation_active'] = str(len(response2['confirmations']))
            data['network_minimum'] = response['network_minimum']
            data['network_current'] = response['network_current']
            data['multiplier'] = response['multiplier']
            data['confirmation_height_count'] = response3['node']['pending_confirmation_height']['pending']['count']
            data['block_count'] = response4['count']
            data['unchecked_count'] = response4['unchecked']
            data['cemented_count'] = response4['cemented']
            json_data.append(data)
        except Exception as e:
            # An RPC error response lacks the expected keys; report and retry.
            print(e)
        if loop_count % save_every == 0:
            writeBkup()
        endTime = time.time()
        print('{} records - '.format(len(json_data))+'Time to Process: '+str(endTime-currentTime)+' - Active Confirmations: '+str(len(response2['confirmations'])))
        # Sleep the remainder of the delay interval (never a negative amount).
        sleep(max(0, args.delay-(endTime - currentTime)))
|
||
# Run the recorder until interrupted. Ctrl-C raises KeyboardInterrupt inside
# the blocking sleep; it is swallowed here so the shutdown save below runs.
try:
    asyncio.get_event_loop().run_until_complete(main())
except KeyboardInterrupt:
    pass

# Final save to the well-known filename; the next run renames this file with
# a timestamp suffix (see the startup os.rename) before starting fresh.
print('\nWriting to stats.json .....')
with open('stats.json', 'w') as jsonfile:
    jsonfile.write(json.dumps(json_data))
print('Done')
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,126 @@ | ||
#!/usr/bin/env python3 | ||
|
||
import asyncio | ||
import argparse | ||
import json | ||
import csv | ||
import os | ||
# importing the requests library | ||
# pip install requests | ||
import requests | ||
import json | ||
import time | ||
import datetime | ||
from collections import defaultdict | ||
from sys import exit | ||
from time import sleep | ||
from datetime import datetime | ||
|
||
# Command-line configuration for the confirmation-history recorder.
parser = argparse.ArgumentParser()
parser.add_argument('-host', '--node_url', type=str, help='Nano node url', default='localhost')
parser.add_argument('-port', '--node_port', type=str, help='Nano node port', default='55000')
parser.add_argument('-save', '--save', type=int, help='Save blocks to disk how often (in seconds) should be multiple of --delay', default=60)
parser.add_argument('-delay', '--delay', type=int, help='recorder delay (in seconds)', default=10)
args = parser.parse_args()

json_data = []  # deduplicated confirmation records for the current UTC day
hashes = []     # NOTE(review): declared global in main() but never used
distinct = []   # NOTE(review): declared global in main() but never used
timeString = datetime.utcnow().strftime("%Y-%m-%d")
filename = 'confirmation_history_'+timeString+'.json'
# Archive the output of the previous run: confirmation_history.json (written
# on shutdown) gets a timestamp suffix so it is never overwritten.
try:
    os.rename('confirmation_history.json', 'confirmation_history.json.'+datetime.utcnow().strftime("%Y%m%d%H%M%S"))
    print('Renaming confirmation_history.json ...')
except OSError:
    # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt/
    # SystemExit). Covers FileNotFoundError (no prior run) and permission errors.
    print('confirmation_history.json does not exist, create new file ...')
|
||
def writeBkup():
    """Serialize the accumulated records (module global ``json_data``) to the
    current day's file (module global ``filename``).

    Failures are reported and ignored so the recorder keeps collecting in
    memory even when the disk is temporarily unavailable.
    """
    global json_data
    global filename
    print('Writing to '+filename+' ...')
    try:
        with open(filename, 'w') as jsonfile:
            jsonfile.write(json.dumps(json_data))
    except OSError:
        # Narrowed from a bare `except:`: only I/O problems (disk full,
        # permissions, ...) should be swallowed here, not e.g. KeyboardInterrupt.
        print('unable to write to file ...')
|
||
async def main():
    """Poll the node's ``confirmation_history`` RPC every ``args.delay``
    seconds, merging and deduplicating results into the module-global
    ``json_data`` list.

    Records are flushed to the dated file every ``args.save`` seconds and the
    output file rolls over when the UTC date changes.
    """
    global json_data
    global hashes
    global distinct
    global filename
    global timeString
    # Resume today's data if a file for the current date already exists.
    try:
        with open(filename) as jsonfile:
            json_data = json.load(jsonfile)
    except (OSError, ValueError):
        print(filename+' does not exist, create new file ...')
    loop_count = 0
    # api-endpoint
    URL = "http://"+args.node_url+":"+args.node_port
    print("Connecting to: "+URL)

    # RPC request payload sent each iteration.
    data = {'action': 'confirmation_history'}

    # Flush to disk every N loops. Guard with max(1, ...): a --save smaller
    # than --delay previously made the modulus zero -> ZeroDivisionError.
    save_every = max(1, round(args.save / args.delay))

    while 1:
        # Roll the output file over when the UTC date changes.
        filename2 = 'confirmation_history_'+datetime.utcnow().strftime("%Y-%m-%d")+'.json'
        if filename2 != filename:
            writeBkup()
            # Record the finished day's final count in files.txt for bookkeeping.
            writeString = timeString+'|'+str(len(json_data))+'\n'
            with open('files.txt', 'a') as files:
                files.write(writeString)
            timeString = datetime.utcnow().strftime("%Y-%m-%d")
            json_data = []
            filename = filename2
        loop_count += 1
        currentTime = time.time()
        # sending the request and saving the decoded JSON response
        try:
            r = requests.post(url=URL, json=data)
            response = r.json()
        except (requests.RequestException, ValueError):
            print("Error connecting to RPC server. Make sure you have enabled it in ~/Nano/config.json and check "
                  "./sample_client.py --help")
            # BUG FIX: the original fell through here and then used the
            # undefined/stale `response` object. Skip this iteration instead.
            sleep(args.delay)
            continue
        try:
            json_data += response['confirmations']
            # Newest first, then dedupe by hash: the dict comprehension keeps
            # the first (i.e. most recent) entry seen for each hash.
            json_data.sort(key=lambda x: x.get('time', 0), reverse=True)
            json_data = list({each['hash']: each for each in json_data}.values())
        except (KeyError, TypeError):
            # An RPC error response lacks the 'confirmations' key.
            print('\nAn error occurred getting data')
        print('{} blocks confirmed - '.format(len(json_data))+"execution: %s seconds" % (time.time() - currentTime))
        if loop_count % save_every == 0:
            writeBkup()
        endTime = time.time()
        # Sleep the remainder of the delay interval (never a negative amount).
        sleep(max(0, args.delay-(endTime - currentTime)))
|
||
# Run the recorder until interrupted. Ctrl-C raises KeyboardInterrupt inside
# the blocking sleep; it is swallowed here so the shutdown save below runs.
try:
    asyncio.get_event_loop().run_until_complete(main())
except KeyboardInterrupt:
    pass

# One final sort + dedupe pass over whatever is in memory.
json_data.sort(key=lambda x: x.get('time', 0), reverse=True)
json_data = list({each['hash']: each for each in json_data}.values())
# reformat data — this hash-keyed structure is only used for the distinct-count
# report below; the flat list is what actually gets written to disk.
confirmations = {'hashes': {}}
for item in json_data:
    block_hash = item['hash']  # renamed from `hash`, which shadowed the builtin
    confirmations['hashes'][block_hash] = item
print('{} distinct confirmations'.format(len(confirmations['hashes'])))
# Final save to the well-known filename; the next run renames this file with
# a timestamp suffix (see the startup os.rename) before starting fresh.
print('\nWriting to confirmation_history.json .....')
with open('confirmation_history.json', 'w') as jsonfile:
    jsonfile.write(json.dumps(json_data))
print('Done')