# RDG2CSV.py
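# Convert an RDCMan (.rdg) XML export into CSV, JSON, and AutoHotkey hotstring
# files. The export is read from ./input/servers.rdg and the results are
# written to ./output/servers.csv, servers.json, and servers.ahk.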
import xml.etree.ElementTree as ET
import xmltodict
import json
import os
import csv
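# Prompt for a filename until an existing RDCMan export is found in the input
# folder, creating the folder on first run so there is somewhere to drop it.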
def validate_file(filename, folder):
    while True:
        filepath = os.path.join(folder, filename)
        if not os.path.isdir(folder):
            os.mkdir(folder)
        if not os.path.isfile(filepath):
            print(f"{filename} not found in {folder}, enter the RDCMan exported XML filename:")
            filename = input()
        else:
            return filepath
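# Join folder and filename into a single path.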
def build_filepath(filename, folder):
    return os.path.join(folder, filename)
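# Create the output folder if it does not exist yet.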
def mkdir_output(folder):
    if not os.path.isdir(folder):
        os.mkdir(folder)
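# Delete any previously generated output files before writing fresh ones.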
def clear_output_folder(filepaths, folder):
    if os.path.isdir(folder):
        for filepath in filepaths:
            if os.path.isfile(filepath):
                print(f"Deleting old file {filepath}")
                os.remove(filepath)
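# Parse the .rdg file and return its XML as a UTF-8 byte string for xmltodict.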
def import_xml(filepath):
    print(f"Importing {filepath}")
    tree = ET.parse(filepath)
    root = tree.getroot()
    return ET.tostring(root, encoding='utf-8', method='xml')
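# Write the JSON dump of the parsed .rdg tree to disk.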
def export_json(json_str, filepath):
    print(f"Exporting {filepath}")
    with open(filepath, "w") as json_file:
        json_file.write(json_str)
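# Collect the properties of a single server entry. When a server has no
# properties or logonCredentials of its own, fall back to those of its
# enclosing group, and record which group/subgroup it came from.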
def pull_server_properties(server, group, group_name, subgroup_name=""):
    properties = {}
    logon = {"userName": "", "domain": ""}
    try:
        properties = server["properties"]
    except (KeyError, TypeError):
        properties = group["properties"]
    properties["group"] = group_name
    properties["subgroup"] = subgroup_name
    try:
        logon["userName"] = server["logonCredentials"]["userName"]
        logon["domain"] = server["logonCredentials"]["domain"]
    except (KeyError, TypeError):
        try:
            logon["userName"] = group["logonCredentials"]["userName"]
            logon["domain"] = group["logonCredentials"]["domain"]
        except (KeyError, TypeError):
            pass
    properties["userName"] = logon["userName"]
    properties["domain"] = logon["domain"]
    return properties
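# Map every collected property dict onto the fixed set of output columns,
# filling missing keys with empty strings so the CSV rows line up.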
def standardize_properties(property_list):
    print("Standardizing data")
    std_property_list = []
    for props in property_list:
        std_property = {"displayName": "", "name": "", "group": "", "subgroup": "", "userName": "", "domain": ""}
        for key in std_property.keys():
            if key in props:
                std_property[key] = props[key]
        std_property_list.append(std_property)
    return std_property_list
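# Write the standardized rows to a comma-separated file with a header row.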
def export_csv(filepath, headers, dictionary_list):
    print(f"Exporting {filepath}")
    with open(filepath, "w", newline="") as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=headers, delimiter=',', quotechar='"', lineterminator="\n", quoting=csv.QUOTE_MINIMAL)
        writer.writeheader()
        writer.writerows(dictionary_list)
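# Return the value for key, or the literal string "None" when it is empty,
# so the generated hotstrings never contain a blank trigger or expansion.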
def validate_non_empty(entry, key):
    if entry[key] == "":
        return "None"
    else:
        return entry[key]
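# Emit an AutoHotkey script with one set of hotstrings per server. For a
# hypothetical entry with name "web01", displayName "Web Server", group
# "Prod" and no subgroup, the generated lines would look roughly like:
#   ::$web01!::Web Server / Prod  @ web01
#   ::$nweb01!::Web Server
#   ::$uweb01!::<userName>
#   ::$u@web01!::<userName>@web01
#   ::$Web Server!::Web Server / Prod  @ web01
#   ::$ipWeb Server!::web01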
def export_ahk(filepath, dictionary_list):
    print(f"Exporting {filepath}")
    ahk = [
        "#NoEnv", "; #Warn", "SendMode Input", "SetWorkingDir %A_ScriptDir%", ""
    ]
    for entry in dictionary_list:
        ahk.append(f'::${validate_non_empty(entry, "name")}!::{validate_non_empty(entry, "displayName")} / {entry["group"]} {entry["subgroup"]} @ {validate_non_empty(entry, "name")}')
        ahk.append(f'::$n{validate_non_empty(entry, "name")}!::{validate_non_empty(entry, "displayName")}')
        ahk.append(f'::$u{validate_non_empty(entry, "name")}!::{entry["userName"]}')
        ahk.append(f'::$u@{validate_non_empty(entry, "name")}!::{entry["userName"]}@{validate_non_empty(entry, "name")}')
        ahk.append(f'::${validate_non_empty(entry, "displayName")}!::{validate_non_empty(entry, "displayName")} / {entry["group"]} {entry["subgroup"]} @ {validate_non_empty(entry, "name")}')
        ahk.append(f'::$ip{validate_non_empty(entry, "displayName")}!::{validate_non_empty(entry, "name")}')
    with open(filepath, "w") as ahk_file:
        for element in ahk:
            ahk_file.write(f"{element}\n")
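# Orchestrate the conversion: locate the export, dump it to JSON, flatten the
# group/subgroup tree into rows, then write the CSV and AHK outputs.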
def main():
    folder_in = "./input/"
    folder_out = "./output/"
    rdg_filepath_in = validate_file("servers.rdg", folder_in)
    csv_filepath_out = build_filepath("servers.csv", folder_out)
    json_filepath_out = build_filepath("servers.json", folder_out)
    ahk_filepath_out = build_filepath("servers.ahk", folder_out)
    mkdir_output(folder_out)
    clear_output_folder([csv_filepath_out, json_filepath_out, ahk_filepath_out], folder_out)
    rdg_xml = import_xml(rdg_filepath_in)
    # force_list keeps single-child <group>/<server> elements as lists, so the
    # loops below behave the same whether a group holds one server or many
    rdg_dict = xmltodict.parse(rdg_xml, force_list=("group", "server"))
    rdg_json = json.dumps(rdg_dict, indent=2)
    export_json(rdg_json, json_filepath_out)
    rdg_group_list = rdg_dict['RDCMan']['file']['group']
    output_header = ["displayName", "name", "group", "subgroup", "userName", "domain"]
    output_list = []
    # This loop only traverses a single sub-level (subgroup)
    print("Processing data")
    for group in rdg_group_list:
        group_name = group["properties"]["name"]
        if "server" in group.keys():
            for server in group["server"]:
                output_list.append(pull_server_properties(server, group, group_name))
        if "group" in group.keys():
            for subgroup in group["group"]:
                subgroup_name = subgroup["properties"]["name"]
                if "server" in subgroup.keys():
                    for server in subgroup["server"]:
                        output_list.append(pull_server_properties(server, subgroup, group_name, subgroup_name))
    output_list = standardize_properties(output_list)
    export_csv(csv_filepath_out, output_header, output_list)
    export_ahk(ahk_filepath_out, output_list)
    print("Finished")
if __name__ == "__main__":
    main()
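# Usage sketch (assumes Python 3 with the xmltodict package installed and an
# RDCMan export saved as ./input/servers.rdg):
#   pip install xmltodict
#   python RDG2CSV.py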