modified script to adapt for sync script

Jonas_Jones 2025-04-02 21:31:32 +02:00
parent 065aa592f9
commit 4fd2404f36

@@ -101,65 +101,70 @@ def get_languagages(repo, access_token):
print(f"[{repo}] Error fetching languages ", end="\n")
return None
# Path to the projects.json file
projects_json_path = os.path.expanduser("~/.cache/gh-projects/projects.json")
# create the directory if it doesn't exist
os.makedirs(os.path.dirname(projects_json_path), exist_ok=True)
def main():
# Path to the projects.json file
projects_json_path = os.path.expanduser("~/.cache/gh-projects/projects.json")
# create the directory if it doesn't exist
os.makedirs(os.path.dirname(projects_json_path), exist_ok=True)
if not DONTDOWNLOAD:
# fetch the projects.json file from https://cdn.jonasjones.dev/api/projects/projects.json
projects_json_url = "https://cdn.jonasjones.dev/api/projects/projects.json"
projects_json = requests.get(projects_json_url)
with open(projects_json_path, "wb") as file:
file.write(projects_json.content)
if not DONTDOWNLOAD:
# fetch the projects.json file from https://cdn.jonasjones.dev/api/projects/projects.json
projects_json_url = "https://cdn.jonasjones.dev/api/projects/projects.json"
projects_json = requests.get(projects_json_url)
with open(projects_json_path, "wb") as file:
file.write(projects_json.content)
verboseprint(f"Fetched projects.json file")
elif not os.path.exists("~/.cache/gh-projects/projects.json"):
FileNotFoundError("File 'projects.json' not found. Cannot proceed without \
downloading it. Remove '-dd' or '--dontdownload' from the launch arguments.")
else:
print("Skipping download of 'projects.json'")
verboseprint(f"Fetched projects.json file")
elif not os.path.exists("~/.cache/gh-projects/projects.json"):
FileNotFoundError("File 'projects.json' not found. Cannot proceed without \
downloading it. Remove '-dd' or '--dontdownload' from the launch arguments.")
else:
print("Skipping download of 'projects.json'")
# Load the existing projects.json file
with open(projects_json_path, "r") as file:
projects_data = json.load(file)
# Load the existing projects.json file
with open(projects_json_path, "r") as file:
projects_data = json.load(file)
if not DONTUPDATEGH:
print("Fetching Repo data...")
if not DONTUPDATEGH:
print("Fetching Repo data...")
# Update the last_update (Unix timestamp) for each project
for project in projects_data:
gh_api = project.get("gh_api")
if gh_api:
last_commit_timestamp = get_last_commit_timestamp(gh_api, GITHUB_API_TOKEN)
last_release_version = get_last_release_version(gh_api, GITHUB_API_TOKEN)
if last_commit_timestamp:
project["last_update"] = last_commit_timestamp
else:
project["last_update"] = 0
if last_release_version:
project["version"] = last_release_version.replace("v", "")
languages = get_languagages(gh_api, GITHUB_API_TOKEN)
if languages:
project["languages"] = languages
else:
print("Skipping Github updates...")
# Update the last_update (Unix timestamp) for each project
for project in projects_data:
gh_api = project.get("gh_api")
if gh_api:
last_commit_timestamp = get_last_commit_timestamp(gh_api, GITHUB_API_TOKEN)
last_release_version = get_last_release_version(gh_api, GITHUB_API_TOKEN)
if last_commit_timestamp:
project["last_update"] = last_commit_timestamp
else:
project["last_update"] = 0
if last_release_version:
project["version"] = last_release_version.replace("v", "")
languages = get_languagages(gh_api, GITHUB_API_TOKEN)
if languages:
project["languages"] = languages
else:
print("Skipping Github updates...")
# remove first element
projects_data.pop(0)
# remove first element
projects_data.pop(0)
# sort projects alphabetically
projects_data = sorted(projects_data, key=lambda x: x['last_update'], reverse=True)
# sort projects alphabetically
projects_data = sorted(projects_data, key=lambda x: x['last_update'], reverse=True)
# add a first element to the list that holds the date of the last update
projects_data.insert(0, {"last_update": time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()) + " UTC"})
# add a first element to the list that holds the date of the last update
projects_data.insert(0, {"last_update": time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()) + " UTC"})
# Save the updated data back to the projects.json file
with open(projects_json_path, "w") as file:
json.dump(projects_data, file, indent=2)
# Save the updated data back to the projects.json file
with open(projects_json_path, "w") as file:
json.dump(projects_data, file, indent=2)
print("Updated projects.json\nUploading to cdn...")
print("Updated projects.json\nUploading to cdn...")
os.system(f"rclone copy {projects_json_path} cdn:cdn/api/projects/")
os.system(f"rclone copy {projects_json_path} cdn:cdn/api/projects/")
print("Uploaded projects.json to cdn")
print("Uploaded projects.json to cdn")
if __name__ == "__main__":
main()
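
With the update logic wrapped in main(), the script can now be imported and driven from the sync script instead of only being executed directly. A minimal sketch of what that caller might look like, assuming the file above is importable as update_projects; the module name and run_sync() are assumptions for illustration and are not part of this commit:

# hypothetical sync script -- "update_projects" is an assumed module name
import update_projects

def run_sync():
    # refresh projects.json and push it to the cdn before any other sync steps
    update_projects.main()

if __name__ == "__main__":
    run_sync()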