"""Collect GitHub repository traffic statistics via the REST API.

Intended to run inside a GitHub Actions workflow: reads GITHUB_TOKEN and
GITHUB_REPOSITORY from the environment, fetches clone/view/referrer/path
traffic data, saves raw JSON snapshots under traffic/<YYYY-MM-DD>/ and
appends a one-line summary to traffic/summary.csv.
"""

import json
import os
import sys
from datetime import datetime

API_BASE = "https://api.github.com"
OUTPUT_DIR = "traffic"


def build_endpoints(owner, repo):
    """Return a mapping of dataset name -> GitHub API path for *owner*/*repo*.

    The keys double as the output JSON file names (e.g. ``clones.json``).
    """
    base = f"/repos/{owner}/{repo}/traffic"
    return {
        "clones": f"{base}/clones",
        "paths": f"{base}/popular/paths",
        "referrers": f"{base}/popular/referrers",
        "views": f"{base}/views",
    }


def format_summary_row(date, clones, views):
    """Build one CSV summary row (with trailing newline).

    Missing keys default to 0 so a repo with no traffic (or a failed
    fetch, leaving an empty dict) still produces a valid row.
    """
    return (
        f"{date},"
        f"{clones.get('count', 0)},"
        f"{clones.get('uniques', 0)},"
        f"{views.get('count', 0)},"
        f"{views.get('uniques', 0)}\n"
    )


def _fetch_json(url, token):
    """GET *url* with token auth; return parsed JSON, or None on any error.

    Errors are reported to stdout rather than raised so that one failing
    endpoint does not abort the remaining fetches (best-effort behavior).
    """
    # Imported lazily so the pure helpers above remain importable/testable
    # without the third-party dependency installed.
    import requests

    headers = {
        "Accept": "application/vnd.github.v3+json",
        "Authorization": f"token {token}",
    }
    try:
        response = requests.get(url, headers=headers)
        response.raise_for_status()  # Raises for 4xx/5xx responses
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Error fetching data from {url}: {e}")
    except ValueError:
        # requests raises a ValueError subclass (json.JSONDecodeError)
        # when the body is not valid JSON.
        print(f"Error decoding JSON from {url}. Response: {response.text}")
    return None


def main():
    """Entry point: validate environment, fetch all datasets, write summary."""
    # --- Configuration: set by the GitHub Actions workflow ---
    token = os.getenv("GITHUB_TOKEN")
    repo_full_name = os.getenv("GITHUB_REPOSITORY")

    if not token or not repo_full_name:
        print("Error: GITHUB_TOKEN and GITHUB_REPOSITORY environment variables are required.")
        print("Please run this script within a GitHub Actions workflow.")
        sys.exit(1)

    # GITHUB_REPOSITORY is "owner/repo"; reject anything else.
    try:
        owner, repo = repo_full_name.split("/")
    except ValueError:
        print(f"Error: GITHUB_REPOSITORY format is incorrect: '{repo_full_name}'")
        sys.exit(1)

    # --- Directories and date ---
    current_date = datetime.today().strftime("%Y-%m-%d")
    date_dir = os.path.join(OUTPUT_DIR, current_date)
    os.makedirs(date_dir, exist_ok=True)  # creates OUTPUT_DIR too

    # --- Fetch each dataset and snapshot it to JSON ---
    # clones/views are kept in memory for the summary row below, avoiding
    # a re-read of the files we just wrote.
    clones_data = {}
    views_data = {}
    for name, endpoint in build_endpoints(owner, repo).items():
        data = _fetch_json(API_BASE + endpoint, token)
        if data is None:
            continue  # error already reported; keep going
        if name == "clones":
            clones_data = data
        elif name == "views":
            views_data = data
        file_path = os.path.join(date_dir, f"{name}.json")
        with open(file_path, "w") as f:
            json.dump(data, f, indent=4)
        print(f"Successfully saved {name} data to {file_path}")

    # --- Append a row to the running CSV summary ---
    summary_csv_path = os.path.join(OUTPUT_DIR, "summary.csv")
    file_exists = os.path.isfile(summary_csv_path)
    try:
        with open(summary_csv_path, "a", newline="") as summary_file:
            # Header only when the file is first created.
            if not file_exists:
                summary_file.write("date,clones_count,clones_uniques,views_count,views_uniques\n")
            summary_file.write(format_summary_row(current_date, clones_data, views_data))
        print(f"Successfully updated summary at {summary_csv_path}")
    except OSError as e:
        # Narrowed from a bare Exception: file writes fail with OSError.
        print(f"Could not write to summary file: {e}")


if __name__ == "__main__":
    main()