# github_searcher/github_searcher.py
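
"""Command-line tool that searches GitHub repositories and users (e.g. for PoC
exploits and CVEs) via PyGithub, optionally collecting each matching repository's
open issues, open pull requests, and top-level file listing."""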


import os
import argparse
import logging
import json
from github import Github, Auth, GithubException

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


def auth():
    """Return an authenticated PyGithub client built from the GITHUB_ACCESS_TOKEN env var."""
    access_token = os.getenv("GITHUB_ACCESS_TOKEN")
    if not access_token:
        raise ValueError("GITHUB_ACCESS_TOKEN environment variable not set")
    token = Auth.Token(access_token)
    g = Github(auth=token)
    return g

class GithubSearcher:
    """Wraps PyGithub search calls and per-repository lookups; stores the latest search result."""

    def __init__(self, query):
        self.g = auth()
        self.query = query
        self.result = None

    def search_repo(self):
        try:
            self.result = self.g.search_repositories(self.query)
        except GithubException as e:
            logging.error(f"Error searching repositories: {e}")
            self.result = None

    def search_users(self):
        try:
            self.result = self.g.search_users(self.query)
        except GithubException as e:
            logging.error(f"Error searching users: {e}")
            self.result = None

    def search_in_repo_name(self):
        try:
            # Restrict the match to repository names via the 'in:name' qualifier.
            self.result = self.g.search_repositories('in:name ' + self.query)
        except GithubException as e:
            logging.error(f"Error searching in name: {e}")
            self.result = None

    def get_repo_open_issues(self, repo):
        try:
            return repo.get_issues(state='open')
        except GithubException as e:
            logging.error(f"Error getting open issues: {e}")
            return None

    def get_repo_open_pull_requests(self, repo):
        try:
            return repo.get_pulls(state='open')
        except GithubException as e:
            logging.error(f"Error getting open pull requests: {e}")
            return None

    def get_repo_contents(self, repo):
        try:
            # An empty path returns the repository's top-level (root) contents.
            return repo.get_contents("")
        except GithubException as e:
            logging.error(f"Error getting repository contents: {e}")
            return None

    def get_result(self):
        return self.result

def main():
    parser = argparse.ArgumentParser(description="Search GitHub repositories and users for PoC exploits and CVEs.")
    parser.add_argument("--query", type=str, required=True, help="The search query.")
    parser.add_argument("--search_type", type=str, required=True, choices=["repo", "users", "in-repo-name"], help="The type of search to perform: 'repo', 'users', or 'in-repo-name'.")
parser.add_argument("--get_file_contents", action="store_true", help="Get the contents of repo results.")
parser.add_argument("--get_open_issues", action="store_true", help="Get the open issues of repo results.")
parser.add_argument("--get_open_pull_requests", action="store_true", help="Get the open pull requests of repo results.")
parser.add_argument("--json", action="store_true", help="Output results in JSON format.")
    args = parser.parse_args()
    searcher = GithubSearcher(args.query)

    if args.search_type == "repo":
        searcher.search_repo()
    elif args.search_type == "users":
        searcher.search_users()
    elif args.search_type == "in-repo-name":
        searcher.search_in_repo_name()

    result = searcher.get_result()
    if result is None:
        print("No results found.")
        return

    output = []

    for item in result:
        if args.search_type == "users":
            # For user searches, list each user's repositories.
            user_repos = item.get_repos()
            for repo in user_repos:
                repo_info = {"repo_url": repo.html_url, "repo_name": repo.name, "repo_description": repo.description}
                output.append(repo_info)
        else:
            repo_info = {"repo_url": item.html_url, "repo_name": item.name, "repo_description": item.description}
            if args.get_open_issues:
                open_issues = searcher.get_repo_open_issues(item)
                if open_issues:
                    repo_info["open_issues"] = [{"title": issue.title, "url": issue.html_url} for issue in open_issues]

            if args.get_open_pull_requests:
                open_prs = searcher.get_repo_open_pull_requests(item)
                if open_prs:
                    repo_info["open_pull_requests"] = [{"title": pr.title, "url": pr.html_url} for pr in open_prs]

            if args.get_file_contents:
                contents = searcher.get_repo_contents(item)
                if contents:
                    repo_info["contents"] = [
                        {
                            "name": content_file.name,
                            "sha": content_file.sha,
                            "size": content_file.size,
                            "encoding": content_file.encoding,
                            "html_url": content_file.html_url
                        } for content_file in contents
                    ]

            output.append(repo_info)

    if args.json:
        print(json.dumps(output, indent=4))
    else:
        for item in output:
            print(item)

if __name__ == "__main__":
    main()
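
# Example invocation (illustrative only; assumes a GitHub personal access token is
# exported as GITHUB_ACCESS_TOKEN and that a CVE identifier is used as the query):
#   python github_searcher.py --query "CVE-2024-3094 PoC" --search_type repo --get_open_issues --json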