Skip to content

Commit

Permalink
Fix for SENSE/AWS provider when deleting resources; fabfed now exits with code 1 on failure
Browse files Browse the repository at this point in the history
  • Loading branch information
abessiari committed Mar 14, 2024
1 parent c528c4e commit 98804e9
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 8 deletions.
5 changes: 5 additions & 0 deletions fabfed/provider/sense/sense_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ def setup_environment(self):
if not can_read(pkey) or not is_private_key(pkey):
raise ProviderException(f"{self.name}: unable to read/parse ssh key in {pkey}")

self.config[SENSE_SLICE_PRIVATE_KEY_LOCATION] = pkey

@property
def private_key_file_location(self):
from .sense_constants import SENSE_SLICE_PRIVATE_KEY_LOCATION
Expand Down Expand Up @@ -60,9 +62,11 @@ def _handle_peering_config(self, resource):

def _init_client(self):
if not self.initialized:
self.logger.info(f"{self.name}: Initializing sense client")
from .sense_client import init_client

init_client(self.config)
self.logger.info(f"{self.name}: Initialized sense client")
self.initialized = True

def do_add_resource(self, *, resource: dict):
Expand Down Expand Up @@ -188,6 +192,7 @@ def do_create_resource(self, *, resource: dict):
self.logger.debug(f"Created network: {vars(net)}")

def do_delete_resource(self, *, resource: dict):
self._init_client()
rtype = resource.get(Constants.RES_TYPE)
assert rtype in self.supported_resources
label = resource.get(Constants.LABEL)
Expand Down
19 changes: 11 additions & 8 deletions tools/fabfed.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,9 @@ def manage_workflow(args):
except ControllerException as ce:
logger.error(f"Exceptions while creating resources ... {ce}")
workflow_failed = True
except Exception as e:
logger.error(f"Unknown error while creating resources ... {e}")
workflow_failed = True

controller_duration = time.time() - controller_duration_start
providers_duration = 0
Expand All @@ -105,9 +108,7 @@ def manage_workflow(args):

states = controller.get_states()
nodes, networks, services, pending, failed = utils.get_counters(states=states)

if pending or failed:
workflow_failed = True
workflow_failed = workflow_failed or pending or failed

if Constants.RECONCILE_STATES:
states = sutil.reconcile_states(states, args.session)
Expand All @@ -134,7 +135,7 @@ def manage_workflow(args):
logger.info(f"STATS:duration_in_seconds={workflow_duration}")
logger.info(f"nodes={nodes}, networks={networks}, services={services}, pending={pending}, failed={failed}")
sutil.save_stats(dict(comment="all durations are in seconds", stats=fabfed_stats), args.session)
return
sys.exit(1 if workflow_failed else 0)

if args.init:
config = WorkflowConfig.parse(dir_path=config_dir, var_dict=var_dict)
Expand Down Expand Up @@ -168,7 +169,7 @@ def manage_workflow(args):
stitch_info=network.attributes.get(Constants.RES_STITCH_INFO))
stitch_info_details.append(details)

stitch_info_details = dict(StitchNetworkDetails = stitch_info_details )
stitch_info_details = dict(StitchNetworkDetails=stitch_info_details)
sutil.dump_objects(objects=stitch_info_details, to_json=args.json)

NetworkInfo = namedtuple("NetworkInfo", "label provider_label")
Expand All @@ -178,7 +179,6 @@ def manage_workflow(args):
stitch_info_map = {}
stitch_info_network_info_map = {}


for network in filter(lambda n: n.is_network and n.attributes.get(Constants.RES_STITCH_INFO), resources):
stitch_info = network.attributes.get(Constants.RES_STITCH_INFO)

Expand All @@ -196,7 +196,7 @@ def manage_workflow(args):
stitch_info_summary = StitchInfoSummary(network_infos=v, stitch_info=stitch_info_map[k])
stitch_info_summaries.append(stitch_info_summary)

stitch_info_summaries = dict(StitchInfoSummary = stitch_info_summaries)
stitch_info_summaries = dict(StitchInfoSummary=stitch_info_summaries)
sutil.dump_objects(objects=stitch_info_summaries, to_json=args.json)
return

Expand Down Expand Up @@ -251,10 +251,13 @@ def manage_workflow(args):
logger.error(f"Exceptions while initializing controller .... {e}")
sys.exit(1)

destroy_failed = False

try:
controller.destroy(provider_states=states)
except ControllerException as e:
logger.error(f"Exceptions while deleting resources ...{e}")
destroy_failed = True
except KeyboardInterrupt as kie:
logger.error(f"Keyboard Interrupt while deleting resources ... {kie}")
sys.exit(1)
Expand Down Expand Up @@ -293,7 +296,7 @@ def manage_workflow(args):
provider_stats=provider_stats)
logger.info(f"STATS:duration_in_seconds={workflow_duration}")
sutil.save_stats(dict(comment="all durations are in seconds", stats=fabfed_stats), args.session)
return
sys.exit(1 if destroy_failed else 0)


def manage_sessions(args):
Expand Down

0 comments on commit 98804e9

Please sign in to comment.