Skip to content

Commit

Permalink
Merge pull request #92 from fabric-testbed/dev
Browse files Browse the repository at this point in the history
Dev
  • Loading branch information
cscarp authored Apr 21, 2023
2 parents e5c5678 + 7394589 commit a115e83
Show file tree
Hide file tree
Showing 20 changed files with 257 additions and 84 deletions.
69 changes: 48 additions & 21 deletions user_services/elk/service_commands/info.py
Original file line number Diff line number Diff line change
@@ -1,34 +1,61 @@
import os
import json
import subprocess

import elk_utilities as eu

def main():
    """Report ELK service information as a JSON string on stdout.

    Reads a request dict via ``eu.get_data()``.  When ``data["get"]`` is
    present it is treated as a collection of info-type names:
    ``nginx_password``, ``nginx_id``, ``index_names``, ``index_downloads``.
    With no ``"get"`` key, the nginx credentials plus a usage hint are
    returned.  The result dict always carries a ``"success"`` flag.
    """
    ret_val = {"success": True}
    data = eu.get_data()

    if "get" in data:
        requested = data["get"]

        # --- nginx credentials -------------------------------------------
        if "nginx_password" in requested or "nginx_id" in requested:
            try:
                with open(eu.nginx_password_filename, 'r') as f:
                    nginx_password = f.read().strip()
                if "nginx_password" in requested:
                    ret_val["nginx_password"] = nginx_password
                if "nginx_id" in requested:
                    # NOTE(review): the id appears to be a fixed value — confirm.
                    ret_val["nginx_id"] = "fabric"
            except IOError:
                ret_val["error"] = "Nginx credential file does not appear to exist."

        # --- index names --------------------------------------------------
        if "index_names" in requested:
            try:
                # Calls the Elasticsearch index-name API on the local node.
                r = subprocess.run(
                    ["curl", "-XGET", "http://localhost:9200/_cat/indices/?h=index"],
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
                # Gather external user indices, ignoring internal "."-prefixed
                # ones.  The emptiness guard fixes an IndexError the previous
                # `line[0]` check raised on blank output lines.
                ret_val["index_names"] = tuple(
                    line for line in r.stdout.splitlines()
                    if line and not line.startswith(".")
                )
            except (OSError, subprocess.SubprocessError):
                # curl binary missing or subprocess launch failure; was a
                # bare `except:` that also swallowed KeyboardInterrupt.
                ret_val = {"success": False, "ERROR": "Failed to fetch ELK index data."}

        # --- export status ------------------------------------------------
        if "index_downloads" in requested:
            # Completed exports are plain files; in-progress exports carry a
            # ".download" suffix (see the export path in update.py).  Listing
            # via os.listdir replaces shelling out to `ls`; sorted() matches
            # ls's alphabetical ordering.
            try:
                entries = sorted(os.listdir("/home/mfuser/services/elk/files/indices"))
            except OSError:
                entries = []  # directory absent -> nothing exported yet
            full = [name for name in entries if ".download" not in name]
            partial = [name for name in entries if ".download" in name]
            print("Files finished exporting")
            print("------------------------")
            for file in full:
                print(file)
            print()
            print("Export in progress")
            print("------------------------")
            for file in partial:
                print(file)
            print()
            ret_val["Export Complete"] = full
            ret_val["Export In Progress"] = partial

    else:
        # No explicit request: return the nginx credentials and usage help.
        try:
            with open(eu.nginx_password_filename, 'r') as f:
                ret_val["nginx_password"] = f.read().strip()
            ret_val["nginx_id"] = "fabric"
        except IOError:
            ret_val["error"] = "File does not appear to exist."
        ret_val["info"] = "Pass in a dictionary with the info you want to get. For example: data['get'] = ['info_type']. info types include nginx_id, nginx_password, and index_names"

    print(eu.get_json_string(ret_val))
    # print(json.dumps(ret_val))
#print(json.dumps(ret_val))
Expand Down
180 changes: 147 additions & 33 deletions user_services/elk/service_commands/update.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,20 @@
from gettext import install  # NOTE(review): unused accidental auto-import — confirm before removing

import datetime
import json
import logging
import os
import shutil
import subprocess
import time

import elk_utilities as eu
import custom_dashboards


def copy_files(src_dir, dst_dir):
    """Recursively copy the contents of *src_dir* into *dst_dir*.

    Replaces ``os.system(f"cp -r {src_dir}/* {dst_dir}")``, which broke on
    paths containing spaces or shell metacharacters and silently ignored
    failures.  ``dirs_exist_ok=True`` mirrors ``cp``'s merge-into-existing
    behaviour and also creates *dst_dir* when it does not exist.
    """
    shutil.copytree(src_dir, dst_dir, dirs_exist_ok=True)


def copy_file(src_file, dst_file):
    """Copy *src_file* to *dst_file*; directories are copied recursively.

    Replaces ``os.system(f"cp -r {src_file} {dst_file}")``, which broke on
    paths with spaces/shell metacharacters and ignored errors.  The isdir
    branch preserves ``cp -r``'s ability to copy a directory source.
    """
    if os.path.isdir(src_file):
        shutil.copytree(src_file, dst_file, dirs_exist_ok=True)
    else:
        shutil.copy2(src_file, dst_file)

Expand All @@ -23,23 +26,143 @@ def get_file_basenames(files):
return basenames


def dependency_check():
    """Verify that everything needed for elasticdump import/export exists.

    Ensures the indices staging directory is present and that the ``npm``
    and ``elasticdump`` CLIs can be launched.  Diagnostic messages are
    emitted with ``os.system("echo ...")`` to match the rest of this
    script's output style.

    Returns:
        bool: True when all dependencies are satisfied.
    """
    ready = True
    os.system("echo Checking dependencies...")

    # Checking for indices directory.  makedirs(exist_ok=True) also creates
    # missing parents — plain os.mkdir raised FileNotFoundError on a fresh
    # host where /home/mfuser/services/elk/files did not yet exist.
    # NOTE(review): path is hard-coded; elsewhere eu.files_dir is used —
    # confirm the two agree.
    indices_dir = "/home/mfuser/services/elk/files/indices"
    if not os.path.isdir(indices_dir):
        os.makedirs(indices_dir, exist_ok=True)

    # Checking for NPM package.  A missing binary raises FileNotFoundError
    # (an OSError); the previous bare `except:` also swallowed
    # KeyboardInterrupt/SystemExit.
    try:
        subprocess.run(["npm", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
    except (OSError, subprocess.SubprocessError):
        ready = False
        os.system("echo - Missing NPM package. Install info: https://docs.npmjs.com/cli/v9/configuring-npm/install")

    # Checking for elastic_dump package
    try:
        subprocess.run(["elasticdump", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
    except (OSError, subprocess.SubprocessError):
        ready = False
        os.system("echo - Missing ElasticDump package - Install info: https://github.com/elasticsearch-dump/elasticsearch-dump")

    if ready:
        os.system("echo All dependencies are satisfied.")
    return ready


def main():
logFilePath = os.path.join(eu.log_dir, "update.log")
logging.basicConfig(filename=logFilePath, format='%(asctime)s %(name)-8s %(levelname)-8s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level="INFO")
logging.info("-----Start Update Script.-----")


ret_val = { "success":True, "msg":"" }
ret_val = {"success": True, "msg": ""}
data = eu.get_data()

#############
# testing dashboard single loading

command_found = False
if "commands" in data:
# Ensure certain commands are run in the needed order

for cmd in data["commands"]:

if "cmd" in cmd and cmd["cmd"] == "import_index":
if "indices" in cmd and cmd["indices"]:
# Stopping import if dependencies are not there.
if not dependency_check():
ret_val['success'] = False
ret_val['msg'] = "Install dependencies and try again."
break
# Ready to import indices
os.system("echo")
os.system('echo Import started.')
os.system(
"echo ------------------------------------------------------------------------------------")
os.system("echo")

# Loop through and import each index
for file in cmd["indices"]:
command = ['sudo', 'elasticdump', '--bulk=true',
'--input=/home/mfuser/services/elk/files/' + file,
'--output=http://localhost:9200/']

os.system("echo")
os.system("echo -n Importing " + file)

export_process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, text=True)
while export_process.poll() is None:
os.system("echo -n .")
time.sleep(5)
os.system("echo")
if export_process.poll() == 0:
os.system("echo Imported successfully")
else:
os.system("echo Import failed. Dumping output for troubleshooting:")
os.system("echo " + str(export_process.communicate()))
ret_val['success'] = False
os.system("echo \n")
else:
ret_val['success'] = False
ret_val['msg'] = "Failed to import any indices: Missing index file names."

if "cmd" in cmd and cmd["cmd"] == "export_index":
# Confirming all dependencies are install before export
if "indices" in cmd and cmd["indices"]:
# Stopping export if dependencies are not there.
if not dependency_check():
ret_val['success'] = False
ret_val['msg'] = "Install dependencies and try again."
break

# Ready to export indices
os.system("echo")
os.system('echo Data export started. Files will be placed in ' + eu.files_dir + '/indices')
os.system(
"echo ------------------------------------------------------------------------------------")
os.system("echo")

# Creates timestamp for exported file name
timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H:%M:%S')

# Loop through and export each index
total, successful = 0, 0
for index in cmd["indices"]:
total += 1
file_name = index + "_exported_" + timestamp + ".json"
output_dir = eu.files_dir + '/indices/'
command = ['sudo', 'elasticdump', '--input=http://localhost:9200/' + index, '--output=' + output_dir + file_name + '.download', '--type=data']

os.system("echo -n Exporting " + index)
export_process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)

# Check if export process is finished every second.
while export_process.poll() is None:
os.system("echo -n .")
time.sleep(5)
os.system("echo")

# Once finished, check if succeeded or failed
if export_process.poll() == 0:
os.system("echo Exported successfully as " + file_name)
os.system("sudo mv " + output_dir + file_name + ".download " + output_dir + file_name)
successful += 1
else:
os.system("echo Export failed. Dumping output for troubleshooting:")
os.system("echo " + str(export_process.communicate()))
os.system("echo \n")

os.system(
"echo ------------------------------------------------------------------------------------")
# Returning results
ret_val['export_results'] = str(successful) + "/" + str(total) + " indices exported successfully."
ret_val['export_location'] = eu.files_dir + "/indices"

else:
ret_val['success'] = False
ret_val['msg'] = "Failed to export any indices: Missing index names."

for cmd in data["commands"]:
if "cmd" in cmd and cmd["cmd"] == "upload_dashboards":
Expand All @@ -49,14 +172,14 @@ def main():
# move files from files dir to dashboards dir
if "dashboard_filenames" in cmd:
logging.info("found dashboard_filenames")
for dashboard_filename in get_file_basenames( cmd["dashboard_filenames"] ):
for dashboard_filename in get_file_basenames(cmd["dashboard_filenames"]):
logging.info(f" Dashboard {dashboard_filename}")
src_dashboard_filename = os.path.join(eu.files_dir, dashboard_filename )
dst_dashboard_filename = os.path.join(eu.dashboards_dir, dashboard_filename )
src_dashboard_filename = os.path.join(eu.files_dir, dashboard_filename)
dst_dashboard_filename = os.path.join(eu.dashboards_dir, dashboard_filename)
logging.info(f" Copy {src_dashboard_filename} to {dst_dashboard_filename}")
copy_file(src_dashboard_filename, dst_dashboard_filename)
ret_val['uploaded_dashboards'][dashboard_filename] = {}

ret_val['uploaded_dashboards'][dashboard_filename] = {}
ret_val['uploaded_dashboards'][dashboard_filename]['success'] = True

for cmd in data["commands"]:
Expand All @@ -74,19 +197,19 @@ def main():

# Get list of installed dashboards to prevent double installing.
installed_dashboards = eu.read_installed_dashboards()

# import the dashboard into kibana
if "dashboard_filenames" in cmd:
logging.info("found dashboard_filenames")
for dashboard_filename in get_file_basenames( cmd["dashboard_filenames"] ):
logging.info(f" Dashboard {dashboard_filename}" )
for dashboard_filename in get_file_basenames(cmd["dashboard_filenames"]):
logging.info(f" Dashboard {dashboard_filename}")

# Only install the dashboard if it has not been installed or if user wants to force reinstall
if do_force or dashboard_filename not in installed_dashboards:
logging.info( f" Importing {os.path.join(eu.dashboards_dir, dashboard_filename )} to kibana" )
logging.info(f" Importing {os.path.join(eu.dashboards_dir, dashboard_filename)} to kibana")
result = custom_dashboards.import_dashboard(dashboard_filename)
logging.info(result)
#ret_val["msg"] += f'Added dashboard {dashboard_filename}\n'
# ret_val["msg"] += f'Added dashboard {dashboard_filename}\n'
ret_val["added_dashboards"][dashboard_filename] = {}
ret_val["added_dashboards"][dashboard_filename]["success"] = result["success"]
ret_val["added_dashboards"][dashboard_filename]["msg"] = result["msg"]
Expand All @@ -101,7 +224,7 @@ def main():
ret_val["added_dashboards"][dashboard_filename]["success"] = False
ret_val["added_dashboards"][dashboard_filename]["msg"] = "Already installed."

#else: Do nothing dashboard alread exists
# else: Do nothing dashboard already exists

# Version without force
# if dashboard_filename in installed_dashboards:
Expand All @@ -117,31 +240,21 @@ def main():
# if (result["success"]):
# installed_dashboards.append(dashboard_filename)
# eu.write_installed_dashboards(installed_dashboards)


#result["data"] is not dependable json serializable

# result["data"] is not dependable json serializable

# if not command_found:
# # Command not recognized
# ret_val['msg'] += f"No recognized command found."




####################






if "cmd" in data:
if "upload_custom_dashboards" in data["cmd"]:
# get list of filenames
if "dashboard_filenames" in data:
# Dashboards should have been uploaded to the files directory.
#os.chdir(ansible_dir)
# os.chdir(ansible_dir)
for dfilename in data["dashboard_filenames"]:
src_dashboard_filename = os.path.join(eu.files_dir, dfilename)
dst_dashboard_filename = os.path.join(eu.dashboards_dir, dfilename)
Expand All @@ -151,10 +264,11 @@ def main():
# do something with dashboard file
# maybe move them to the Dashboards dir
if "add_custom_dashboards" in data["cmd"]:
ret_val['msg'] += custom_dashboards.import_dashboards()
ret_val['msg'] += custom_dashboards.import_dashboards()

print(eu.get_json_string(ret_val))
#print(json.dumps(ret_val))
# print(json.dumps(ret_val))


# Script entry point: run the update command handler when executed directly.
# (The diff rendering duplicated the main() call; exactly one call belongs here.)
if __name__ == "__main__":
    main()
2 changes: 1 addition & 1 deletion user_services/influxdb/playbooks/remove_influxdb.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,5 @@

- name: Recursively remove volume directory
ansible.builtin.file:
path: /home/mfuser/influxdb
path: /home/ubuntu/influxdb
state: absent
2 changes: 1 addition & 1 deletion user_services/influxdb/playbooks/start_influxdb.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,4 @@
network_mode: host
detach: true
volumes:
- /home/mfuser/influxdb:/var/lib/influxdb2
- /home/ubuntu/influxdb:/var/lib/influxdb2
3 changes: 3 additions & 0 deletions user_services/overview/service_commands/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@ def main():
if data and "readme_format" in data:
readme_format = data["readme_format"]

if data and "get_service_list" in data:
retVal["services"] = ou.get_services_list()
return
print(readme_format)
try:
retVal["data_recieved"] = ou.get_data()
Expand Down
Loading

0 comments on commit a115e83

Please sign in to comment.