Skip to content

Commit

Permalink
Merge pull request #10 from morenod/aws_role_fix
Browse files Browse the repository at this point in the history
AWS role patch required by OCM-3187
  • Loading branch information
dry923 authored Sep 27, 2023
2 parents d993eaa + f77e165 commit 975b85c
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 5 deletions.
22 changes: 22 additions & 0 deletions libs/platforms/rosa/hypershift/hypershift.py
Original file line number Diff line number Diff line change
Expand Up @@ -410,6 +410,16 @@ def delete_cluster(self, platform, cluster_name):
cluster_info["timestamp"] = datetime.datetime.utcnow().isoformat()
self.es.index_metadata(cluster_info)

def _get_aws_role_name(self, cluster_name):
    """Return the IAM role name of the cluster's kube-controller-manager operator role.

    Required by OCM-3187 (https://issues.redhat.com/browse/OCM-3187), remove when fixed.

    Runs ``rosa describe cluster -c <cluster_name> -o json`` and scans the STS
    operator IAM roles for the one named ``kube-controller-manager``, returning
    the final path segment of its role ARN (the bare role name, as accepted by
    ``aws iam`` commands).

    :param cluster_name: name of the ROSA cluster to inspect
    :return: role name string, or None when the describe call fails or no
             matching role (with a role_arn) is found
    """
    (role_policy_code, role_policy_out, role_policy_err) = self.utils.subprocess_exec("rosa describe cluster -c " + cluster_name + " -o json")
    if role_policy_code != 0:
        # Report a failed `rosa describe` distinctly instead of falling through
        # to the misleading "No Role named ..." message below
        self.logging.error(f"Failed to describe cluster {cluster_name}: {role_policy_err}")
        return None
    for role in json.loads(role_policy_out.decode("utf-8")).get("aws", {}).get("sts", {}).get("operator_iam_roles", []):
        if role.get("name", "") == "kube-controller-manager":
            # Guard: a role entry without a role_arn would otherwise raise
            # AttributeError on None.split("/")
            role_arn = role.get("role_arn")
            if role_arn:
                return role_arn.split("/")[-1]
    self.logging.error(f"No Role named kube-controller-manager found on Cluster {cluster_name}")
    return None

def create_cluster(self, platform, cluster_name):
super().create_cluster(platform, cluster_name)
cluster_info = platform.environment["clusters"][cluster_name]
Expand Down Expand Up @@ -462,6 +472,18 @@ def create_cluster(self, platform, cluster_name):
return 1
else:
break

# Required by OCM-3187 (https://issues.redhat.com/browse/OCM-3187), remove when fixed
self.logging.info(f"Getting kube-controller-manager role for cluster {cluster_name}")
aws_role_name = self._get_aws_role_name(cluster_name)
self.logging.info(f"Found kube-controller-manager role {aws_role_name} for cluster {cluster_name}")
(aws_policy_code, aws_policy_out, aws_policy_err) = self.utils.subprocess_exec("aws iam attach-role-policy --role-name " + aws_role_name + " --policy-arn arn:aws:iam::415909267177:policy/hack-414-custom-policy")
if aws_policy_code != 0:
cluster_info['status'] = "aws policy failed"
return 1
else:
self.logging.info(f"Patched kube-controller-manager role {aws_role_name} for cluster {cluster_name} with policy arn:aws:iam::415909267177:policy/hack-414-custom-policy")

cluster_info['status'] = "Installing"
self.logging.info(f"Cluster {cluster_name} installation started on the {trying} try")
cluster_info["metadata"] = self.get_metadata(cluster_name)
Expand Down
2 changes: 1 addition & 1 deletion libs/platforms/rosa/rosa.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def __init__(self, arguments, logging, utils, es):
aws.set_aws_envvars(arguments['aws_profile'], arguments['aws_region'])
self.environment['aws'] = aws.set_aws_environment(arguments['aws_profile'], arguments['aws_region'])
self.environment["commands"].append("rosa")
# self.environment["commands"].append("aws")
self.environment["commands"].append("aws")

self.environment["rosa_env"] = arguments["rosa_env"]

Expand Down
7 changes: 4 additions & 3 deletions libs/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,10 +50,11 @@ def generate_cluster_name_seed(self, seed):
return cluster_name_seed

def verify_cmnd(self, command):
    """Verify that *command* is runnable by invoking its help output.

    Most CLIs used here accept a ``help`` subcommand; terraform only supports
    the ``-h`` flag, so it is special-cased.

    Exits the whole process via ``sys.exit`` when the help invocation fails,
    logging the command's stdout and stderr first.

    :param command: name of the binary to validate (e.g. "rosa", "aws")
    :return: None
    """
    help_command = command + " help" if command != "terraform" else command + " -h"
    (cmd_code, cmd_out, cmd_err) = self.subprocess_exec(help_command)
    if cmd_code != 0:
        self.logging.error(cmd_out)
        self.logging.error(cmd_err)
        sys.exit("Exiting...")
    else:
        # Log the command actually executed ("<cmd> help" or "<cmd> -h")
        # rather than unconditionally claiming "-h" was used
        self.logging.info(f"{command} command validated with '{help_command}'")
Expand Down
2 changes: 1 addition & 1 deletion rosa-burner.ini
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[Defaults]
install_clusters = True
cluster_count = 1
cluster_name_seed = gitci
cluster_name_seed = rbur
workers = 3
workers_wait_time = 60
wait_for_workers = True
Expand Down

0 comments on commit 975b85c

Please sign in to comment.