def on_check_suite(request: CheckRunCompleted):
    """Handle a completed GitHub Actions check run for a PR.

    Downloads and cleans the action logs, extracts the problematic portion,
    posts it as a PR comment, and hands it to the comment-driven fix flow.

    Returns None when the run is skipped (GHA disabled, too many commits,
    no logs), otherwise {"success": True}.
    Raises Exception when the same error logs repeat three times in a row.
    """
    logger.info(f"Received check run completed event for {request.repository.full_name}")
    g = get_github_client(request.installation.id)
    repo = g.get_repo(request.repository.full_name)
    if not get_gha_enabled(repo):
        logger.info(f"Skipping github action for {request.repository.full_name} because it is not enabled")
        return None
    pr = repo.get_pull(request.check_run.pull_requests[0].number)
    num_pr_commits = len(list(pr.get_commits()))
    # Very large PRs are too risky to auto-fix; bail out early.
    if num_pr_commits > 20:
        logger.info(f"Skipping github action for PR with {num_pr_commits} commits")
        return None
    logger.info(f"Running github action for PR with {num_pr_commits} commits")
    logs = download_logs(
        request.repository.full_name, request.check_run.run_id, request.installation.id
    )
    if not logs:
        return None
    logs = clean_logs(logs)
    extractor = GHAExtractor()
    logger.info(f"Extracting logs from {request.repository.full_name}, logs: {logs}")
    problematic_logs = extractor.gha_extract(logs)
    # Many error lines usually indicates a structural problem with the PR,
    # not a small lint/type fix.
    # NOTE(review): the mangled source read count(" ") here, which would be
    # true for almost any log; restored to count("\n") to match the sibling
    # copy of this handler — confirm against upstream.
    if problematic_logs.count("\n") > 15:
        problematic_logs += "\n========================================\nThere are a lot of errors. This is likely a larger issue with the PR and not a small linting/type-checking issue."
    comments = list(pr.get_issue_comments())
    # Give up if the exact same error logs were already posted twice in a row.
    if len(comments) >= 2 and problematic_logs == comments[-1].body and comments[-2].body == comments[-1].body:
        comment = pr.as_issue().create_comment(log_message.format(error_logs=problematic_logs) + "\nI'm getting the same errors 3 times in a row, so I will stop working on fixing this PR.")
        logger.warning("Skipping logs because it is duplicated")
        raise Exception("Duplicate error logs")
    comment = pr.as_issue().create_comment(log_message.format(error_logs=problematic_logs))
    on_comment(
        repo_full_name=request.repository.full_name,
        repo_description=request.repository.description,
        comment=problematic_logs,
        pr_path=None,
        pr_line_position=None,
        username=request.sender.login,
        installation_id=request.installation.id,
        pr_number=request.check_run.pull_requests[0].number,
        comment_id=comment.id,
        repo=repo,
    )
    return {"success": True}
def on_check_suite(request: CheckRunCompleted):
    """Handle a completed GitHub Actions check run for a PR.

    Downloads and cleans the action logs, extracts the problematic portion,
    posts it as a PR comment, and hands it to the comment-driven fix flow.

    Returns None when the run is skipped (GHA disabled, too many commits,
    no logs), otherwise falls through after dispatching on_comment.
    Raises Exception when the same error logs repeat three times in a row.

    NOTE(review): this file contains two near-identical copies of this
    handler; consider deduplicating.
    """
    logger.info(f"Received check run completed event for {request.repository.full_name}")
    g = get_github_client(request.installation.id)
    repo = g.get_repo(request.repository.full_name)
    if not get_gha_enabled(repo):
        logger.info(f"Skipping github action for {request.repository.full_name} because it is not enabled")
        return None
    pr = repo.get_pull(request.check_run.pull_requests[0].number)
    num_pr_commits = len(list(pr.get_commits()))
    # Very large PRs are too risky to auto-fix; bail out early.
    if num_pr_commits > 20:
        logger.info(f"Skipping github action for PR with {num_pr_commits} commits")
        return None
    logger.info(f"Running github action for PR with {num_pr_commits} commits")
    logs = download_logs(
        request.repository.full_name, request.check_run.run_id, request.installation.id
    )
    if not logs:
        return None
    logs = clean_logs(logs)
    extractor = GHAExtractor()
    logger.info(f"Extracting logs from {request.repository.full_name}, logs: {logs}")
    problematic_logs = extractor.gha_extract(logs)
    # Many error lines usually indicates a structural problem with the PR,
    # not a small lint/type fix.
    if problematic_logs.count("\n") > 15:
        problematic_logs += "\n\nThere are a lot of errors. This is likely a larger issue with the PR and not a small linting/type-checking issue."
    comments = list(pr.get_issue_comments())
    # Give up if the exact same error logs were already posted twice in a row.
    if len(comments) >= 2 and problematic_logs == comments[-1].body and comments[-2].body == comments[-1].body:
        comment = pr.as_issue().create_comment(log_message.format(error_logs=problematic_logs) + "\n\nI'm getting the same errors 3 times in a row, so I will stop working on fixing this PR.")
        logger.warning("Skipping logs because it is duplicated")
        raise Exception("Duplicate error logs")
    comment = pr.as_issue().create_comment(log_message.format(error_logs=problematic_logs))
    on_comment(
        repo_full_name=request.repository.full_name,
        repo_description=request.repository.description,
        comment=problematic_logs,
        pr_path=None,
        pr_line_position=None,
        username=request.sender.login,
        installation_id=request.installation.id,
        pr_number=request.check_run.pull_requests[0].number,
        comment_id=comment.id,
        repo=repo,
    )
# Languages we attempt to parse with tree-sitter grammars.
# NOTE(review): the installation loop below iterates LANGUAGE_NAMES
# (upper-case) — presumably the same list; confirm which name is canonical.
language_names = ["python", "java", "cpp", "go", "rust", "ruby", "php"] # and more
# Installing the parsers: clone each tree-sitter grammar repo into cache/,
# build it into a shared library, then load one Language handle per grammar.
# (Removed an unused local `languages = {}` — self.languages is built fresh
# from the dict comprehension below.)
for language in LANGUAGE_NAMES:
    # List-form argv instead of shell=True: same git invocation without
    # routing the command line through a shell. Failures (e.g. the clone
    # directory already exists) are deliberately not raised, as before.
    subprocess.run(
        ["git", "clone", f"https://github.com/tree-sitter/tree-sitter-{language}",
         f"cache/tree-sitter-{language}"]
    )
for language in LANGUAGE_NAMES:
    Language.build_library(f"cache/build/{language}.so", [f"cache/tree-sitter-{language}"])
self.languages = {language: Language(f"cache/build/{language}.so", language) for language in LANGUAGE_NAMES}
defchunk(text: str, MAX_CHARS: int = 1500) -> list[str]: # Determining the language for language_name in language_names: language = languages[language_name] parser = Parser() parser.set_language(language) tree = parser.parse(bytes(text, "utf-8")) ifnot tree.root_node.children or tree.root_node.children[0].type != "ERROR": file_language = language break logger.warning(f"Not language {language_name}")
# Smart chunker if file_language: return chunk_node(tree.root_node, text, max_chunk_size)