aboutsummaryrefslogtreecommitdiff
path: root/tools/github-projects.py
blob: ff890f6b792bb8fad8754440c53deaf1c45fd086 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
"""Fetches my public repositories and downloads tagged versions."""
from datetime import datetime
import sys
import json
import requests

# GitHub account whose public repositories are listed and mirrored.
USERNAME = "mitjafelicijan"
# Appended as a query parameter to every API URL to defeat intermediate caches.
CACHE_BUSTER = int(datetime.now().timestamp())
# Paragraph separator used between every generated Markdown fragment.
DOUBLE_NL = "\n\n"

# Request headers sent with every GitHub API call; pins the v3 JSON media type.
headers = {
    "Accept": "application/vnd.github.v3+json"
}

def generate_markdown_file(include_repositories, output_path="../content/pages/projects.github.md.part"):
    """Render the included repositories as a Markdown page fragment.

    Args:
        include_repositories: list of GitHub repository dicts; each must carry
            ``name`` and ``description``, and may carry a ``releases`` list whose
            entries have ``name``, ``created_at`` (ISO-8601 Zulu) and ``filename``.
        output_path: destination file path; defaults to the site's content tree
            so existing callers are unaffected.
    """
    # ``with`` guarantees the handle is closed even if a write raises;
    # explicit encoding avoids platform-dependent defaults.
    with open(output_path, "w", encoding="utf-8") as file:
        file.write(DOUBLE_NL)
        file.write("# GitHub repositories")
        file.write(DOUBLE_NL)

        # Index of anchor links pointing at the per-repository sections below.
        file.write("<div class='project-list'>")
        file.write(DOUBLE_NL)

        for repo in include_repositories:
            file.write(f"- [{repo['name']}](#{repo['name'].lower()}) \n")

        file.write(DOUBLE_NL)
        file.write("</div>")
        file.write(DOUBLE_NL)

        for repo in include_repositories:
            print(f"> {repo['name']}")

            file.write(f"## {repo['name']}\n")
            file.write(f"{repo['description']}\n")

            file.write(DOUBLE_NL)
            file.write("<div class='project-release'>")
            file.write(DOUBLE_NL)
            file.write("|Released|Description|Download|\n")
            file.write("|--------|-----------|--------|\n")

            # ``get`` keeps rendering alive for repos whose releases request failed
            # upstream and therefore carry no "releases" key.
            for release in repo.get('releases', []):
                print(f"   - {release['name']} - {release['created_at']}")
                dt = datetime.strptime(release["created_at"], "%Y-%m-%dT%H:%M:%SZ")
                file.write(f"|{dt.strftime('%Y-%m-%d')}|{release['name']}| [{release['filename']}](/projects/{release['filename']}) |\n")

            file.write(DOUBLE_NL)
            file.write("</div>")
            file.write(DOUBLE_NL)

            file.write("<div class='github-link'>")
            file.write(DOUBLE_NL)
            file.write("![](/assets/general/github.svg)")
            file.write(f"[{USERNAME}/{repo['name']}](https://github.com/{USERNAME}/{repo['name']})")
            file.write(DOUBLE_NL)
            file.write("</div>")
            file.write(DOUBLE_NL)

        # Page-local styling for the generated tables, link list and GitHub badge.
        file.write(DOUBLE_NL)
        file.write("<style>\n")
        file.write(".project-release table tr td:last-child { text-align: right; }\n")
        file.write(".project-release table tr th:last-child { text-align: right; }\n")
        file.write(".project-list ul { column-count: 3; column-gap: 3em; }\n")
        file.write(".github-link p { display: flex; align-items: center; gap: 0.3em; }\n")
        file.write(".github-link p img { border: 0; padding: 0; height: 15px; }\n")
        file.write("</style>")
        file.write(DOUBLE_NL)

def download_tarball(url, filepath):
    """Stream the archive at *url* to *filepath* in 8 KiB chunks."""
    with requests.get(url, stream=True, timeout=30) as resp:
        # Fail fast on HTTP errors before creating anything on disk.
        resp.raise_for_status()

        with open(filepath, "wb") as out:
            # writelines accepts any iterable of bytes, so the chunk
            # iterator can be consumed directly without an explicit loop.
            out.writelines(resp.iter_content(chunk_size=8192))

def assert_rate_limit(response):
    """Log GitHub API rate-limit state and abort when the quota is exhausted.

    Args:
        response: a response object exposing a ``headers`` mapping with
            GitHub's ``x-ratelimit-*`` headers.

    Raises:
        SystemExit: with status 1 when no requests remain in the quota.
    """
    # Headers can be absent on some error/proxy responses; the original
    # int(None) would raise TypeError, so fall back to 0 ("exhausted/unknown").
    rate_limit_limit = int(response.headers.get("x-ratelimit-limit") or 0)
    rate_limit_remaining = int(response.headers.get("x-ratelimit-remaining") or 0)
    rate_limit_reset = int(response.headers.get("x-ratelimit-reset") or 0)
    print(f"Rate limit: {rate_limit_remaining}/{rate_limit_limit}")
    print(f"Reset time: {datetime.fromtimestamp(rate_limit_reset)}")

    if rate_limit_remaining == 0:
        # Say why we are bailing out instead of exiting silently.
        print("Rate limit exhausted, aborting.")
        sys.exit(1)

def fetch_github_data():
    """Fetch repositories tagged "winc", attach their releases, download tarballs.

    Returns:
        List of GitHub repository dicts, each augmented with a ``releases``
        list (possibly empty) whose entries carry a computed ``filename``.

    Raises:
        SystemExit: when the repository listing request fails.
    """
    include_repositories = []
    print(headers)
    response = requests.get(f"https://api.github.com/users/{USERNAME}/repos?ts={CACHE_BUSTER}&per_page=100",
                            headers=headers,
                            timeout=10)

    assert_rate_limit(response)

    if response.status_code == 200:
        repos = response.json()
        for repo in repos:
            # Check if repository has "winc" topic. This means I want to include
            # this repository on this page.
            if "winc" in repo["topics"]:
                include_repositories.append(repo)
    else:
        print(f"Failed to retrieve repositories: {response.status_code}")
        sys.exit(1)

    for repo in include_repositories:
        print(f"Name: {repo['name']}, URL: {repo['html_url']}")

        response = requests.get(f"https://api.github.com/repos/{USERNAME}/{repo['name']}/releases?ts={CACHE_BUSTER}",
                                headers=headers,
                                timeout=10)

        assert_rate_limit(response)

        if response.status_code == 200:
            repo["releases"] = response.json()
            for release in repo["releases"]:
                # Filename under which the tarball is stored and later linked
                # from the generated Markdown.
                release["filename"] = f"{repo['name']}-{release['tag_name']}.tar.gz"
                print(f"  > {release['tag_name']}, {release['name']}, {release['filename']}")
                download_tarball(release["tarball_url"], f"../static/projects/{release['filename']}")
        else:
            # Bug fix: without this default, downstream rendering would
            # KeyError on repo["releases"] for any repo whose request failed.
            repo["releases"] = []
            print(f"Failed to retrieve releases for {repo['name']}: {response.status_code}")

    return include_repositories

# Entry point: pull everything from the GitHub API, then render the page fragment.
include_repositories = fetch_github_data()

# Debug aid: dump the fetched payload so the render step can be re-run offline.
# with open("out.json", "w") as json_file:
#     json.dump(include_repositories, json_file, indent=4)

generate_markdown_file(include_repositories)

# Offline render path: reuse a previous dump instead of hitting the API again.
# with open("out.json", "r") as fp:
#     include_repositories = json.load(fp)
#     generate_markdown_file(include_repositories)