diff --git a/Support/Open-Api-Condense/Pipfile b/Support/Open-Api-Condense/Pipfile
new file mode 100644
index 0000000..cf00258
--- /dev/null
+++ b/Support/Open-Api-Condense/Pipfile
@@ -0,0 +1,13 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+pyyaml = "*"
+"ruamel.yaml" = "*"
+
+[dev-packages]
+
+[requires]
+python_version = "3.11"
diff --git a/Support/Open-Api-Condense/Pipfile.lock b/Support/Open-Api-Condense/Pipfile.lock
new file mode 100644
index 0000000..d1295e5
--- /dev/null
+++ b/Support/Open-Api-Condense/Pipfile.lock
@@ -0,0 +1,144 @@
+{
+    "_meta": {
+        "hash": {
+            "sha256": "b7ac51a42de731081a8478d5f6d5d72dab7bdf99cb47e475541bfcf5f73ab74a"
+        },
+        "pipfile-spec": 6,
+        "requires": {
+            "python_version": "3.11"
+        },
+        "sources": [
+            {
+                "name": "pypi",
+                "url": "https://pypi.org/simple",
+                "verify_ssl": true
+            }
+        ]
+    },
+    "default": {
+        "pyyaml": {
+            "hashes": [
+                "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5",
+                "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc",
+                "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df",
+                "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741",
+                "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206",
+                "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27",
+                "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595",
+                "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62",
+                "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98",
+                "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696",
+                "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290",
+                "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9",
+                "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d",
+                "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6",
+                "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867",
+                "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47",
+                "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486",
+                "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6",
+                "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3",
+                "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007",
+                "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938",
+                "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0",
+                "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c",
+                "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735",
+                "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d",
+                "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28",
+                "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4",
+                "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba",
+                "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8",
+                "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef",
+                "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5",
+                "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd",
+                "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3",
+                "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0",
+                "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515",
+                "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c",
+                "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c",
+                "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924",
+                "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34",
+                "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43",
+                "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859",
+                "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673",
+                "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54",
+                "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a",
+                "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b",
+                "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab",
+                "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa",
+                "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c",
+                "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585",
+                "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d",
+                "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"
+            ],
+            "index": "pypi",
+            "markers": "python_version >= '3.6'",
+            "version": "==6.0.1"
+        },
+        "ruamel.yaml": {
+            "hashes": [
+                "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e",
+                "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"
+            ],
+            "index": "pypi",
+            "markers": "python_version >= '3.7'",
+            "version": "==0.18.5"
+        },
+        "ruamel.yaml.clib": {
+            "hashes": [
+                "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d",
+                "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001",
+                "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462",
+                "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9",
+                "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe",
+                "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b",
+                "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b",
+                "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615",
+                "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62",
+                "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15",
+                "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b",
+                "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1",
+                "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9",
+                "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675",
+                "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899",
+                "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7",
+                "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7",
+                "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312",
+                "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa",
+                "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91",
+                "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b",
+                "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6",
+                "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3",
"sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334", + "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5", + "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3", + "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe", + "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c", + "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed", + "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337", + "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880", + "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f", + "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d", + "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248", + "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d", + "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf", + "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512", + "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069", + "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb", + "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942", + "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d", + "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31", + "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92", + "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5", + "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28", + "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d", + "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1", + "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2", + "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875", + "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412" + ], + "markers": "platform_python_implementation == 'CPython' and python_version < '3.13'", + "version": "==0.2.8" + } + }, + "develop": {} +} diff --git a/Support/Open-Api-Condense/openApiCondense.py b/Support/Open-Api-Condense/openApiCondense.py new file mode 100644 index 0000000..3ecd633 --- /dev/null +++ b/Support/Open-Api-Condense/openApiCondense.py @@ -0,0 +1,381 @@ +""" Condense app for open api apps +""" +import json +import os +import ruamel.yaml +import shutil +import traceback +import yaml +from pathlib import Path + + +class ReadWorkflowData: + """Read workflow data.""" + + def __init__(self, workflow_dict, filepath): + self.workflow_dict = workflow_dict + self.filepath = filepath + self._set_first_job() + self._read_params() + + def _read_params(self): + self.trigger_branch = self._get_branch_name() + self.trigger_path = self._get_trigger_path() + self.region = self._get_region() + self.app_name = self._get_app_name() + self.cluster_name = self._get_cluster_name() + self.env_name = self._get_env_name() + self.task_defintion_file = self._get_task_def_file_path() + self.account_number = self._get_account_number() + self.docker_file = self._get_docker_file() + self.workflow_name = self._get_workflow_name() + + def _set_first_job(self): + self.job = "" + if self.workflow_dict.get("jobs", {}): + self.job = list(self.workflow_dict["jobs"].keys())[0] + + def _get_branch_name(self): + return self.workflow_dict.get(True, {}).get("push", 
{}).get("branches", [""]) + + def _get_trigger_path(self): + trigger_folder = self.workflow_dict.get(True, {}).get("push", {}).get("paths") + if len(trigger_folder) == 1: + folder_taskdef = trigger_folder[0].strip("/**") + taskdef_folder = os.sep.join([folder_taskdef, "taskdef"]) + trigger_folder.append("!" + taskdef_folder + "/**") + github_workflow_file = self.filepath.split(".github/workflows", 1)[1] + trigger_folder.append(".github/workflows" + github_workflow_file) + return trigger_folder + + def _get_region(self): + return self.workflow_dict.get("env", {}).get("AWS_REGION") + + def _get_app_name(self): + return self.workflow_dict.get("env", {}).get("CONTAINER_NAME") + + def _get_cluster_name(self): + return self.workflow_dict.get("env", {}).get("ECS_CLUSTER") + + def _get_env_name(self): + return ( + self.workflow_dict.get("jobs", {}).get(self.job, {}).get("environment", "") + ) + + def _get_task_def_file_path(self): + ecs_task_definition = self.workflow_dict.get("env", {}).get( + "ECS_TASK_DEFINITION" + ) + self.trigger_path.append( + ecs_task_definition.replace(".json", ".yaml") + ) # Trigger on taskdef file changes + return ecs_task_definition + + def _get_account_number(self): + steps = self.workflow_dict.get("jobs", {}).get(self.job, {}).get("steps", []) + account = "" + for step in steps: + if "configure-aws-credentials" in step.get("uses", ""): + role_assumed = step.get("with", {}).get("role-to-assume") + account = ( + role_assumed.strip().split("arn:aws:iam::", 1)[1].split(":", 1)[0] + ) + break + return account + + def _get_docker_file(self): + steps = self.workflow_dict.get("jobs", {}).get(self.job, {}).get("steps", []) + dockerfile = "" + for step in steps: + if "docker build -t" in step.get("run", ""): + commands = step.get("run").split("\n") + dockerfile = ( + commands[0].split(":$IMAGE_TAG -f ", 1)[1].rsplit(" .", 1)[0] + ) + break + return dockerfile + + def _get_workflow_name(self): + return self.workflow_dict.get("name", "").strip() + + +class ReadTaskdefData: + def __init__(self, workflow_read_obj): + self.relative_taskdef_file = workflow_read_obj.task_defintion_file + self.repo_path = Path(workflow_read_obj.filepath).parent.parent.parent + self._read_taskdef_file() + self._read_app_container_def(workflow_read_obj.app_name) + self._read_params() + + def _read_taskdef_file(self): + self.task_def_file = os.sep.join( + [str(self.repo_path), self.relative_taskdef_file] + ) + with open(self.task_def_file, "r", encoding="utf-8") as file_obj: + self.taskdef_data = json.load(file_obj) + + def _read_params(self): + self.host_port, self.container_port = self._read_host_port() + self.role = self._read_role() + self.iac = self._read_iac() + self.team_name = self._read_team_name() + self.environment = self._read_environment() + self.memory = self._read_memory() + self.cpu = self._read_cpu() + self.env_vars = self._read_env_vars() + self.secrets = self._read_secrets() + + def _read_app_container_def(self, appname): + containerDefs = self.taskdef_data.get("containerDefinitions", {}) + for container_def in containerDefs: + if container_def.get("name") == appname: + self.app_container_def = container_def + + def _read_host_port(self): + port_mappings = self.app_container_def.get("portMappings", [{}])[0] + host_port = port_mappings.get("hostPort") + container_port = port_mappings.get("containerPort") + return host_port, container_port + + def _read_role(self): + tags = self.taskdef_data.get("tags", []) + role = "" + for tag in tags: + if tag["key"] == "Role": + role = tag["value"] + 
+
+    def _read_role(self):
+        tags = self.taskdef_data.get("tags", [])
+        role = ""
+        for tag in tags:
+            if tag["key"] == "Role":
+                role = tag["value"]
+                break
+        return role
+
+    def _read_team_name(self):
+        tags = self.taskdef_data.get("tags", [])
+        team_name = ""
+        for tag in tags:
+            if tag["key"] == "Team":
+                team_name = tag["value"]
+                break
+        return team_name
+
+    def _read_environment(self):
+        tags = self.taskdef_data.get("tags", [])
+        environment = ""
+        for tag in tags:
+            if tag["key"] == "Environment":
+                environment = tag["value"]
+                break
+        return environment
+
+    def _read_iac(self):
+        tags = self.taskdef_data.get("tags", [])
+        iac = ""
+        for tag in tags:
+            if tag["key"] == "IAC":
+                iac = tag["value"]
+                break
+        return iac
+
+    def _read_memory(self):
+        return self.taskdef_data.get("memory")
+
+    def _read_cpu(self):
+        return self.taskdef_data.get("cpu")
+
+    def _read_env_vars(self):
+        return self.app_container_def.get("environment", [])
+
+    def _read_secrets(self):
+        secret_list = self.app_container_def.get("secrets", [])
+        return [element.get("name") for element in secret_list]
+
+
+class OpenApiCondense:
+    """Condense a GitHub workflow and its related taskdef file."""
+
+    def __init__(self):
+        self._total_lines_saved = 0
+        self.total_files = 0
+
+    def _read_yaml_file_data(self, filepath):
+        with open(filepath, "r", encoding="utf-8") as file_obj:
+            return yaml.safe_load(file_obj)
+
+    def _count_lines(self, file_path):
+        with open(file_path, "r", encoding="utf-8") as file:
+            return sum(1 for _ in file)
+
+    def _check_if_updated(self, workflow_dict: dict) -> bool:
+        """Check whether the workflow is already condensed, i.e. its single
+        job already calls a 0xPolygon/pipelines reusable workflow via `uses:`.
+
+        Args:
+            workflow_dict (dict): Data read from the yaml file in dict format
+
+        Returns:
+            bool: True if the file is already condensed
+        """
+        updated = False
+        if workflow_dict.get("jobs", {}) and len(workflow_dict["jobs"].keys()) == 1:
+            job = list(workflow_dict["jobs"].keys())[0]
+            if (
+                workflow_dict["jobs"][job]
+                .get("uses", "")
+                .startswith("0xPolygon/pipelines")
+            ):
+                updated = True
+        return updated
+
+    def _create_github_workflow_file(self, workflow_read_obj):
+        workflow_dict = {}
+        workflow_dict["name"] = workflow_read_obj.workflow_name
+        workflow_dict["on"] = {
+            "push": {
+                "branches": workflow_read_obj.trigger_branch,
+                "paths": workflow_read_obj.trigger_path,
+            },
+            "workflow_dispatch": None,  # dumps as a bare `workflow_dispatch:` key
+        }
+        taskdef_file_vars = workflow_read_obj.task_definition_file.replace(
+            ".json", ".yaml"
+        )
+        # `with` and `secrets` belong under the job that calls the reusable
+        # workflow, not as siblings of it under `jobs`; keeping them there also
+        # keeps `jobs` a single-key mapping, which is what _check_if_updated
+        # expects on a re-run.
+        workflow_dict["jobs"] = {
+            "deploy": {
+                "uses": "0xPolygon/pipelines/.github/workflows/ecs_deploy_docker_taskdef.yaml@main",
+                "with": {
+                    "app_name": workflow_read_obj.app_name,
+                    "taskdef_file_vars": taskdef_file_vars,
+                    "account_number": workflow_read_obj.account_number,
+                    "aws_region": workflow_read_obj.region,
+                    "environment": workflow_read_obj.env_name,
+                    "docker_file": workflow_read_obj.docker_file,
+                    "cluster_name": workflow_read_obj.cluster_name,
+                },
+                "secrets": "inherit",
+            }
+        }
+        desired_key_order = ["name", "on", "jobs"]
+        with open(workflow_read_obj.filepath, "w", encoding="utf-8") as file_obj:
+            yaml_writer = ruamel.yaml.YAML()
+            yaml_writer.dump(
+                {key: workflow_dict[key] for key in desired_key_order}, file_obj
+            )
+        return workflow_read_obj.filepath
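+
+    # For reference, the condensed workflow written above has roughly this
+    # shape (values illustrative):
+    #
+    #   name: my-app deploy
+    #   on:
+    #     push:
+    #       branches: [main]
+    #       paths: [apps/my-app/**, ...]
+    #     workflow_dispatch:
+    #   jobs:
+    #     deploy:
+    #       uses: 0xPolygon/pipelines/.github/workflows/ecs_deploy_docker_taskdef.yaml@main
+    #       with:
+    #         app_name: my-app
+    #         ...
+    #       secrets: inherit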
+
+    def _create_taskdef_file(self, workflow_read_obj, taskdef_read_obj):
+        taskdef_dict = {
+            "region": workflow_read_obj.region,
+            "account_number": workflow_read_obj.account_number,
+            "hostport": taskdef_read_obj.host_port,
+            "containerport": taskdef_read_obj.container_port,
+            "app_name": workflow_read_obj.app_name,
+            "role": taskdef_read_obj.role,
+            "environment": taskdef_read_obj.environment,
+            "iac": taskdef_read_obj.iac,
+            "team_name": taskdef_read_obj.team_name,
+            "memory": int(taskdef_read_obj.memory),
+            "cpu": int(taskdef_read_obj.cpu),
+            "secret_vars": taskdef_read_obj.secrets,
+        }
+        if taskdef_read_obj.env_vars:
+            taskdef_dict["env_vars"] = []
+            for env in taskdef_read_obj.env_vars:
+                taskdef_dict["env_vars"].append(
+                    {"name": env.get("name"), "value": env.get("value")}
+                )
+        new_file_path = os.path.splitext(taskdef_read_obj.task_def_file)[0] + ".yaml"
+
+        desired_key_order = [
+            "region",
+            "account_number",
+            "hostport",
+            "containerport",
+            "app_name",
+            "role",
+            "environment",
+            "iac",
+            "team_name",
+            "memory",
+            "cpu",
+            "env_vars",
+            "secret_vars",
+        ]
+        # Write the YAML to the original .json path, then rename it to .yaml.
+        # The `if key in taskdef_dict` guard skips env_vars when the container
+        # defines none, instead of raising KeyError.
+        with open(taskdef_read_obj.task_def_file, "w", encoding="utf-8") as file_obj:
+            yaml_writer = ruamel.yaml.YAML()
+            yaml_writer.dump(
+                {
+                    key: taskdef_dict[key]
+                    for key in desired_key_order
+                    if key in taskdef_dict
+                },
+                file_obj,
+            )
+        shutil.move(taskdef_read_obj.task_def_file, new_file_path)
+        return new_file_path
+
+    def _compare_file_lines(
+        self, old_workflow_lines, old_taskdef_lines, new_workflow, new_taskdef
+    ):
+        new_workflow_lines = self._count_lines(new_workflow)
+        new_taskdef_lines = self._count_lines(new_taskdef)
+        app_lines_saved = (
+            old_workflow_lines
+            + old_taskdef_lines
+            - new_workflow_lines
+            - new_taskdef_lines
+        )
+        app_name = os.path.basename(new_workflow)
+        print(f"{app_name} lines saved: {app_lines_saved}")
+        self._total_lines_saved += app_lines_saved
+
+    def process_github_workflow(self, filepath: str):
+        """Read a GitHub workflow and rewrite it and its taskdef in the
+        condensed format.
+
+        Args:
+            filepath (str): Github workflow file path
+        """
+        workflow_dict = self._read_yaml_file_data(filepath)
+        if self._check_if_updated(workflow_dict):
+            return
+        try:
+            workflow_read_obj = ReadWorkflowData(workflow_dict, filepath)
+        except TypeError:
+            # Raised by _get_trigger_path when the workflow has no push paths.
+            print(f"Issue with current github workflow {filepath}")
+            return
+        if not workflow_read_obj.account_number:
+            print(f"Issue with current github workflow {filepath}")
+            return
+        taskdef_read_obj = ReadTaskdefData(workflow_read_obj)
+        old_workflow_lines = self._count_lines(filepath)
+        old_taskdef_lines = self._count_lines(taskdef_read_obj.task_def_file)
+
+        new_workflow_file = self._create_github_workflow_file(workflow_read_obj)
+        new_taskdef_file = self._create_taskdef_file(
+            workflow_read_obj, taskdef_read_obj
+        )
+        # _create_taskdef_file returns the renamed .yaml path directly.
+        self._compare_file_lines(
+            old_workflow_lines,
+            old_taskdef_lines,
+            new_workflow_file,
+            new_taskdef_file,
+        )
+        self.total_files += 1
+
+    def process_all_files(self, directory):
+        all_files = [
+            os.sep.join([directory, filepath])
+            for filepath in os.listdir(directory)
+            if filepath.endswith((".yaml", ".yml"))
+        ]
+        for filepath in all_files:
+            try:
+                self.process_github_workflow(filepath)
+            except Exception:
+                # Stop at the first unexpected failure so it can be inspected.
+                print(f"Error {traceback.format_exc()}")
+                break
+        print(f"Total lines saved {self._total_lines_saved}")
+        print(f"Total files processed {self.total_files}")
+
+
+if __name__ == "__main__":
+    CONDENSER = OpenApiCondense()
+    CONDENSER.process_all_files("/open-api/.github/workflows")
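+
+# Typical invocation, assuming the open-api repo is checked out at /open-api
+# (the path hard-coded above) and pipenv is available:
+#
+#   cd Support/Open-Api-Condense
+#   pipenv install
+#   pipenv run python openApiCondense.py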