Skip to content

Track Downloads & Clones #48

Track Downloads & Clones

Track Downloads & Clones #48

# Workflow: collects repo traffic stats (release downloads, clones, views)
# and publishes shields.io endpoint-badge JSON to a gist.
name: Track Downloads & Clones
on:
  schedule:
    - cron: "0 3 * * *" # Daily at 3am UTC
  workflow_dispatch: # Manual trigger
  push: # Auto-refresh on push (rate-limited, see freshness check below)
    branches: [main]
  # workflow_run: # Uncomment and set your CI workflow name to run after CI
  # workflows: ["CI"]
  # types: [completed]
# One collection at a time; a newer trigger cancels an in-flight run.
concurrency:
  group: traffic-collection
  cancel-in-progress: true
jobs:
  update-badges:
    runs-on: ubuntu-latest
    steps:
      - name: Fetch stats and update gist
        uses: actions/github-script@v7
        env:
          # Badge gist ID (repo variable); when unset, the script creates a gist.
          GIST_ID: ${{ vars.TRAFFIC_GIST_ID }}
          # Optional separate gist for monthly archives.
          ARCHIVE_GIST_ID: ${{ vars.TRAFFIC_ARCHIVE_GIST_ID }}
        with:
          # Token must have "gist" scope to read/update the badge gist.
          github-token: ${{ secrets.TRAFFIC_GIST_TOKEN }}
          script: |
// Script context: actions/github-script@v7 injects `github` (an
// authenticated Octokit client) and `context` (run metadata) as globals;
// the body runs inside an async function, so top-level `await` and
// `return` are available.
const owner = context.repo.owner;
const repo = context.repo.repo;
const gistId = process.env.GIST_ID;
// 0. Freshness check — skip if last run was < 1 hour ago
// Only applies to push-triggered runs; schedule and manual always run
const eventName = context.eventName;
const FRESHNESS_MINUTES = 60;
if (eventName === 'push' && gistId) {
  try {
    const gist = await github.rest.gists.get({ gist_id: gistId });
    const stateFile = gist.data.files["state.json"];
    if (stateFile) {
      const prevState = JSON.parse(stateFile.content);
      // The newest dailyHistory entry records when the last collection ran.
      const lastHistory = (prevState.dailyHistory || []).slice(-1)[0];
      if (lastHistory && lastHistory.capturedAt) {
        const lastRun = new Date(lastHistory.capturedAt);
        const minutesAgo = (Date.now() - lastRun.getTime()) / 60000;
        if (minutesAgo < FRESHNESS_MINUTES) {
          console.log(`Skipping push-triggered run: last collection was ${Math.round(minutesAgo)}m ago (threshold: ${FRESHNESS_MINUTES}m)`);
          // Returning here aborts the entire step early.
          return;
        }
        console.log(`Last collection: ${Math.round(minutesAgo)}m ago — proceeding`);
      }
    }
  } catch (e) {
    // Best-effort gate: an unreadable gist must not block collection.
    console.log(`Freshness check failed (proceeding anyway): ${e.message}`);
  }
}
// 1. Total release asset downloads
// Paginate: listReleases returns at most 100 releases per page, so a
// plain call undercounts repos with more than 100 releases.
// `github.paginate` (provided by github-script's Octokit) walks every
// page and returns a flat array of releases.
let totalDownloads = 0;
const releases = await github.paginate(github.rest.repos.listReleases, { owner, repo, per_page: 100 });
for (const release of releases) {
  for (const asset of release.assets) {
    totalDownloads += asset.download_count;
  }
}
console.log(`Release downloads: ${totalDownloads}`);
// 2. Load previous state from gist
// Start from a fresh default; replace it with the persisted state when
// the gist exists and contains a parseable state.json.
let state = { totalClones: 0, lastSeenDates: [], totalDownloads: 0, dailyHistory: [], totalViews: 0, lastSeenViewDates: [] };
if (gistId) {
  try {
    const response = await github.rest.gists.get({ gist_id: gistId });
    const saved = response.data.files["state.json"];
    if (saved) {
      state = JSON.parse(saved.content);
      // Older state files may predate these fields — ensure they exist.
      if (!state.dailyHistory) state.dailyHistory = [];
      if (!state.lastSeenViewDates) state.lastSeenViewDates = [];
    }
  } catch (e) {
    // Proceed with defaults; a transient gist failure must not abort.
    console.log(`Could not load gist state: ${e.message}`);
  }
}
// 2.5 Schema migration
// Schema versions: 1 = boolean dedup, 2 = delta maps, 3 = totalOrganicClones
const CURRENT_SCHEMA = 3;
const stateVersion = state.schemaVersion || 1;
// v1 → v2: Migrate boolean lastSeenDates to delta-based lastSeenCloneCounts
if (stateVersion < 2) {
  if (!state.lastSeenCloneCounts) {
    state.lastSeenCloneCounts = {};
    if (Array.isArray(state.lastSeenDates) && state.lastSeenDates.length > 0) {
      // Index dailyHistory by YYYY-MM-DD for constant-time lookups.
      const histByDate = {};
      for (const entry of (state.dailyHistory || [])) {
        histByDate[entry.date.slice(0, 10)] = entry;
      }
      // Seed each previously-seen date with its recorded clone count so
      // the delta logic doesn't re-accumulate already-counted traffic.
      for (const ts of state.lastSeenDates) {
        const d = ts.slice(0, 10);
        const hist = histByDate[d];
        state.lastSeenCloneCounts[d] = hist ? (hist.clones || 0) : 0;
      }
    }
  }
  if (!state.lastSeenViewCounts) {
    state.lastSeenViewCounts = {};
    if (Array.isArray(state.lastSeenViewDates) && state.lastSeenViewDates.length > 0) {
      const histByDate = {};
      for (const entry of (state.dailyHistory || [])) {
        histByDate[entry.date.slice(0, 10)] = entry;
      }
      for (const ts of state.lastSeenViewDates) {
        const d = ts.slice(0, 10);
        const hist = histByDate[d];
        state.lastSeenViewCounts[d] = hist ? (hist.views || 0) : 0;
      }
    }
  }
  console.log(`Schema migration v1 → v2: delta-based dedup maps`);
}
// v2 → v3: Compute totalOrganicClones from dailyHistory
// Previously badges used totalClones - totalCiCheckouts (global subtraction)
// which allows phantom CI on zero-clone days to reduce organic below
// individual day values. Now we accumulate per-day organic instead.
if (stateVersion < 3) {
  // ?? (not ||): an organicClones of 0 is a real value (all clones that
  // day were CI) and must not fall back to the raw clone count, which
  // would inflate the seeded organic total.
  const histOrganic = (state.dailyHistory || []).reduce(
    (sum, d) => sum + (d.organicClones ?? d.clones ?? 0), 0
  );
  // For repos tracked longer than dailyHistory window, preserve
  // the higher estimate from global subtraction
  const globalOrganic = Math.max(0,
    (state.totalClones || 0) - (state.totalCiCheckouts || 0)
  );
  state.totalOrganicClones = Math.max(histOrganic, globalOrganic);
  console.log(`Schema migration v2 → v3: totalOrganicClones = ${state.totalOrganicClones}`);
}
state.schemaVersion = CURRENT_SCHEMA;
// 3. Accumulate clones (delta-based: track last-seen counts per date)
// The Traffic API returns 14 days of data, including zero-count days.
// We track the last-seen count for each date so we only accumulate the
// increase (delta) when counts change. This handles both:
// - Zero-count dates that gain traffic later (first-run seeding)
// - Counts that increase within the 14-day window (API updates)
let newClones = 0;
let newUniqueClones = 0;
let clonesByDate = {};
let uniqueClonesByDate = {};
// Per-date dedup maps persisted in state (underscore marks internal fields).
if (!state.lastSeenCloneCounts) state.lastSeenCloneCounts = {};
if (!state._lastSeenUniqueClones) state._lastSeenUniqueClones = {};
try {
  const traffic = await github.rest.repos.getClones({ owner, repo, per: "day" });
  for (const day of traffic.data.clones) {
    const dateKey = day.timestamp.slice(0, 10);
    const previousCount = state.lastSeenCloneCounts[dateKey] || 0;
    // Only the increase since the last observation counts as new traffic.
    const delta = Math.max(0, day.count - previousCount);
    newClones += delta;
    // NOTE(review): unique deltas are only accumulated when the raw count
    // also rose — assumes uniques cannot rise while count stays flat
    // (each new unique cloner adds at least one clone); confirm.
    if (delta > 0) {
      const prevUnique = state._lastSeenUniqueClones[dateKey] || 0;
      newUniqueClones += Math.max(0, day.uniques - prevUnique);
    }
    clonesByDate[dateKey] = day.count;
    uniqueClonesByDate[dateKey] = day.uniques;
    state.lastSeenCloneCounts[dateKey] = day.count;
    state._lastSeenUniqueClones[dateKey] = day.uniques;
  }
  // Trim old entries (keep 30 days)
  const cloneDateKeys = Object.keys(state.lastSeenCloneCounts).sort();
  if (cloneDateKeys.length > 30) {
    for (const old of cloneDateKeys.slice(0, -30)) {
      delete state.lastSeenCloneCounts[old];
      delete state._lastSeenUniqueClones[old];
    }
  }
  // Keep legacy field for backward compat with older dashboard versions
  state.lastSeenDates = traffic.data.clones.map(d => d.timestamp).slice(-30);
  console.log(`Recent clones (14d): ${traffic.data.count} (${traffic.data.uniques} unique)`);
} catch (e) {
  // Log and continue — deltas stay zero for this run.
  console.log(`Clone accumulation error: ${e.message}`);
}
state.totalClones = (state.totalClones || 0) + newClones;
state.totalUniqueClones = (state.totalUniqueClones || 0) + newUniqueClones;
state.totalDownloads = totalDownloads;
console.log(`Accumulated clones: ${state.totalClones} (+${newClones} new, ${newUniqueClones} unique)`);
// 4. Accumulate views (delta-based, same approach as clones)
// Track the last-seen count per date and add only the increase, so
// repeated runs within the API's 14-day window never double-count.
let newViews = 0;
let newUniqueViews = 0;
let viewsByDate = {};
let uniqueViewsByDate = {};
if (!state.lastSeenViewCounts) state.lastSeenViewCounts = {};
if (!state._lastSeenUniqueViews) state._lastSeenUniqueViews = {};
try {
  const viewTraffic = await github.rest.repos.getViews({ owner, repo, per: "day" });
  for (const sample of viewTraffic.data.views) {
    const key = sample.timestamp.slice(0, 10);
    const seenBefore = state.lastSeenViewCounts[key] || 0;
    const gained = Math.max(0, sample.count - seenBefore);
    newViews += gained;
    if (gained > 0) {
      const uniqueSeen = state._lastSeenUniqueViews[key] || 0;
      newUniqueViews += Math.max(0, sample.uniques - uniqueSeen);
    }
    viewsByDate[key] = sample.count;
    uniqueViewsByDate[key] = sample.uniques;
    state.lastSeenViewCounts[key] = sample.count;
    state._lastSeenUniqueViews[key] = sample.uniques;
  }
  // Drop tracking entries beyond the 30 most recent dates.
  const trackedDates = Object.keys(state.lastSeenViewCounts).sort();
  if (trackedDates.length > 30) {
    for (const stale of trackedDates.slice(0, -30)) {
      delete state.lastSeenViewCounts[stale];
      delete state._lastSeenUniqueViews[stale];
    }
  }
  // Legacy field kept for older dashboard versions.
  state.lastSeenViewDates = viewTraffic.data.views.map(d => d.timestamp).slice(-30);
  console.log(`Recent views (14d): ${viewTraffic.data.count} (${viewTraffic.data.uniques} unique)`);
} catch (e) {
  console.log(`View accumulation error: ${e.message}`);
}
state.totalViews = (state.totalViews || 0) + newViews;
state.totalUniqueViews = (state.totalUniqueViews || 0) + newUniqueViews;
console.log(`Accumulated views: ${state.totalViews} (+${newViews} new, ${newUniqueViews} unique)`);
// 5. Repo metadata snapshot (stars, forks, issues, creation date)
try {
  const repoInfo = await github.rest.repos.get({ owner, repo });
  state.stars = repoInfo.data.stargazers_count;
  state.forks = repoInfo.data.forks_count;
  state.openIssues = repoInfo.data.open_issues_count;
  // Set-once: repo creation date from GitHub. History entries before
  // this are pre-repo noise from the Traffic API's 14-day backfill.
  if (!state.repoCreatedAt) {
    state.repoCreatedAt = repoInfo.data.created_at;
  }
  // Set-once: when collection first started. Distinct from repoCreatedAt —
  // the repo may predate tracking, and days between the two are "not
  // collected" (a gap), not "zero traffic".
  if (!state.trackingSince) {
    state.trackingSince = new Date().toISOString();
  }
  console.log(`Repo: ${state.stars} stars, ${state.forks} forks, ${state.openIssues} open issues`);
} catch (e) {
  console.log(`Repo metadata error: ${e.message}`);
}
// 6. Referrers snapshot (top 10, refreshed daily)
try {
  const top = await github.rest.repos.getTopReferrers({ owner, repo });
  state.referrers = top.data.map((r) => ({
    source: r.referrer,
    count: r.count,
    uniques: r.uniques
  }));
  console.log(`Referrers: ${state.referrers.length} sources`);
} catch (e) {
  console.log(`Referrers error: ${e.message}`);
}
// 6.5. Popular paths snapshot (top 10 pages visited)
try {
  const paths = await github.rest.repos.getTopPaths({ owner, repo });
  state.popularPaths = paths.data.map(p => ({
    path: p.path,
    // Repair mojibake in titles: the first pattern replaces a
    // double-encoded UTF-8 middle dot with a spaced "·"; the second
    // collapses a singly mis-encoded "Â·" to a plain "·".
    // NOTE(review): assumes titles can arrive mis-encoded from the API —
    // confirm against observed payloads before changing.
    title: (p.title || '').replace(/\u00c3\u201a\u00c2\u00b7/g, ' \u00b7 ').replace(/\u00c2\u00b7/g, '\u00b7'),
    count: p.count, uniques: p.uniques
  }));
  console.log(`Popular paths: ${state.popularPaths.length} paths`);
} catch (e) {
  console.log(`Popular paths error: ${e.message}`);
}
// 7. Record daily history using API timestamps (not wall clock)
const capturedAt = new Date().toISOString();
const todayStr = capturedAt.slice(0, 10);
// Baseline for today's download delta. Use ?? (not ||): a stored
// previousTotalDownloads of 0 is a valid baseline; with || it fell
// through to state.totalDownloads — already overwritten with the
// CURRENT total earlier in this script — zeroing out the first
// downloads a repo ever receives. ?? only falls back when the field
// has never been set (true first run, where counting all historical
// downloads as "today" would be wrong).
const previousDownloads = state.previousTotalDownloads ?? state.totalDownloads;
const todayDownloads = Math.max(0, totalDownloads - previousDownloads);
// Build a lookup of existing entries by date
const existingByDate = {};
for (const entry of state.dailyHistory) {
  existingByDate[entry.date.slice(0, 10)] = entry;
}
// Update or create entries for each day the Traffic API reports
// Merge strategy: always improve, never erase. Traffic counts can
// only increase within a day, so use Math.max for monotonic fields.
const allApiDates = new Set([
  ...Object.keys(clonesByDate),
  ...Object.keys(viewsByDate)
]);
for (const dateStr of allApiDates) {
  // Create a skeleton entry for dates we have never recorded.
  if (!existingByDate[dateStr]) {
    const newEntry = { date: dateStr + "T00:00:00Z" };
    state.dailyHistory.push(newEntry);
    existingByDate[dateStr] = newEntry;
  }
  const entry = existingByDate[dateStr];
  entry.date = dateStr + "T00:00:00Z"; // Normalize timestamp
  entry.capturedAt = capturedAt;
  if (clonesByDate[dateStr] !== undefined) {
    entry.clones = Math.max(entry.clones || 0, clonesByDate[dateStr]);
    // Write uniqueClones when API returned data for this date.
    // The API includes both count and uniques for every date in
    // its 14-day window, so if clonesByDate has this date,
    // uniqueClonesByDate does too. Accept zeros — they mean
    // "no unique cloners today", not "expired data". Math.max
    // protects against overwriting a real value with a stale zero.
    if (uniqueClonesByDate[dateStr] !== undefined) {
      entry.uniqueClones = Math.max(entry.uniqueClones || 0, uniqueClonesByDate[dateStr]);
    }
  }
  if (viewsByDate[dateStr] !== undefined) {
    entry.views = Math.max(entry.views || 0, viewsByDate[dateStr]);
    if (uniqueViewsByDate[dateStr] !== undefined) {
      entry.uniqueViews = Math.max(entry.uniqueViews || 0, uniqueViewsByDate[dateStr]);
    }
  }
  // Only set stars/forks/openIssues for today — these are
  // point-in-time snapshots, not historical values. Writing
  // today's count onto past entries overwrites their real values.
  if (dateStr === todayStr) {
    entry.stars = state.stars || 0;
    entry.forks = state.forks || 0;
    entry.openIssues = state.openIssues || 0;
  }
}
// Always ensure today's entry exists to prove the script ran.
if (existingByDate[todayStr]) {
  // Entry already present — refresh snapshot metadata in case it was
  // created by an older workflow version that lacked these fields.
  const current = existingByDate[todayStr];
  current.capturedAt = capturedAt;
  current.stars = state.stars || 0;
  current.forks = state.forks || 0;
  current.openIssues = state.openIssues || 0;
} else {
  // No traffic reported today yet — record an explicit all-zero entry.
  const fresh = {
    date: todayStr + "T00:00:00Z",
    capturedAt: capturedAt,
    clones: 0,
    downloads: 0,
    views: 0,
    total: 0,
    ciCheckouts: 0,
    organicClones: 0,
    stars: state.stars || 0,
    forks: state.forks || 0,
    openIssues: state.openIssues || 0
  };
  state.dailyHistory.push(fresh);
  existingByDate[todayStr] = fresh;
}
// Apply downloads to today's entry
existingByDate[todayStr].downloads = todayDownloads;
// Backfill zeros for dates inside the API's 14-day window that were
// absent from the API response: the Traffic API only returns dates with
// count > 0, so absence within the window means zero traffic, not
// "no data". Dates outside the window stay undefined (gap detection).
{
  const windowStart = new Date();
  windowStart.setDate(windowStart.getDate() - 13);
  const earliest = windowStart.toISOString().slice(0, 10);
  for (const entry of state.dailyHistory) {
    const day = entry.date.slice(0, 10);
    if (day < earliest || day > todayStr) continue;
    if (entry.uniqueClones === undefined) entry.uniqueClones = 0;
    if (entry.uniqueViews === undefined) entry.uniqueViews = 0;
  }
}
// Recompute totals for all entries that were updated
for (const entry of state.dailyHistory) {
  entry.total = (entry.clones || 0) + (entry.downloads || 0);
}
// Deduplicate by UTC date (keep last entry per day) and normalize dates
const byDate = {};
for (const entry of state.dailyHistory) {
  const key = entry.date.slice(0, 10);
  entry.date = key + "T00:00:00Z"; // Normalize all to midnight UTC
  byDate[key] = entry;
}
state.dailyHistory = Object.values(byDate).sort(
  (a, b) => new Date(a.date) - new Date(b.date)
);
// Remove pre-repo entries (Traffic API reports 14 days of zeros
// that predate the repo). Keep only entries from repo creation onward.
if (state.repoCreatedAt) {
  const repoCreatedDate = state.repoCreatedAt.slice(0, 10);
  const beforeCount = state.dailyHistory.length;
  state.dailyHistory = state.dailyHistory.filter(
    e => e.date.slice(0, 10) >= repoCreatedDate
  );
  const removed = beforeCount - state.dailyHistory.length;
  if (removed > 0) {
    console.log(`Removed ${removed} pre-repo entries (before ${repoCreatedDate})`);
  }
}
// Trim to 31 days
if (state.dailyHistory.length > 31) {
  state.dailyHistory = state.dailyHistory.slice(-31);
}
// Remember current download total as the baseline for the next run's delta.
state.previousTotalDownloads = totalDownloads;
// NOTE(review): if dailyHistory contained duplicate entries for today,
// dedupe keeps the LAST duplicate while existingByDate[todayStr] may
// reference an earlier object no longer in dailyHistory — subsequent
// writes through todayEntry would then miss the stored history. Verify
// duplicates cannot occur at this point.
const todayEntry = existingByDate[todayStr];
console.log(`Today (${todayStr}): ${todayEntry.total} installs (${todayEntry.downloads} dls + ${todayEntry.clones} clones), ${todayEntry.views || 0} views`);
console.log(`Daily history: ${state.dailyHistory.length} entries`);
// 7.5 Count CI checkouts for today (dynamic per-run detection)
// A workflow job that ran a checkout step counts as one CI checkout —
// a clone attributable to CI rather than a human.
let ciCheckoutsToday = 0;
const ciDetail = {};
try {
  // `created: todayStr` filters to runs created on today's UTC date.
  const runs = await github.rest.actions.listWorkflowRunsForRepo({
    owner, repo, created: todayStr, per_page: 100
  });
  for (const run of runs.data.workflow_runs) {
    const wfName = run.name;
    if (!ciDetail[wfName]) {
      ciDetail[wfName] = { runs: 0, checkoutsPerRun: [] };
    }
    // One jobs request per run, awaited sequentially.
    // NOTE(review): assumes daily run counts stay small enough for API
    // rate limits — confirm for busy repos.
    const jobs = await github.rest.actions.listJobsForWorkflowRun({
      owner, repo, run_id: run.id, per_page: 100
    });
    let runCheckouts = 0;
    for (const job of jobs.data.jobs) {
      // Step names like "Run actions/checkout...", "actions/checkout...",
      // or exactly "Checkout" (case-insensitive) count as a checkout.
      const hasCheckout = (job.steps || []).some(s =>
        /^(Run )?actions\/checkout|^Checkout$/i.test(s.name)
      );
      if (hasCheckout) runCheckouts++;
    }
    ciDetail[wfName].runs++;
    ciDetail[wfName].checkoutsPerRun.push(runCheckouts);
    ciCheckoutsToday += runCheckouts;
  }
  console.log(`CI checkouts today: ${ciCheckoutsToday} (${runs.data.total_count} runs)`);
} catch (e) {
  // Best-effort: without this data today's clones all count as organic.
  console.log(`CI checkout counting error: ${e.message}`);
}
// Count distinct CI runs that performed at least one checkout
let ciRunsWithCheckouts = 0;
for (const wf of Object.values(ciDetail)) {
  for (const perRun of wf.checkoutsPerRun) {
    if (perRun > 0) ciRunsWithCheckouts++;
  }
}
// Store CI checkout data, keyed by date
if (!state.ciCheckouts) state.ciCheckouts = {};
const previousCiToday = (state.ciCheckouts[todayStr] && state.ciCheckouts[todayStr].total) || 0;
state.ciCheckouts[todayStr] = { total: ciCheckoutsToday, runs: ciRunsWithCheckouts, byWorkflow: ciDetail };
// Cumulative CI checkout counter (never trimmed, unlike the per-day map).
// Only the change since what was already recorded for today is added.
state.totalCiCheckouts = (state.totalCiCheckouts || 0) + (ciCheckoutsToday - previousCiToday);
// Trim the per-day map to the 31 most recent dates
const ciDates = Object.keys(state.ciCheckouts).sort();
if (ciDates.length > 31) {
  for (const stale of ciDates.slice(0, -31)) {
    delete state.ciCheckouts[stale];
  }
}
// Apply CI checkouts to today's dailyHistory entry
todayEntry.ciCheckouts = ciCheckoutsToday;
todayEntry.ciRuns = ciRunsWithCheckouts;
todayEntry.organicClones = Math.max(0, (todayEntry.clones || 0) - ciCheckoutsToday);
// Backfill CI fields for the other history days from the stored per-day map
for (const entry of state.dailyHistory) {
  const day = entry.date.slice(0, 10);
  const stored = state.ciCheckouts[day];
  if (stored) {
    entry.ciCheckouts = stored.total;
    entry.ciRuns = stored.runs || 0;
    entry.organicClones = Math.max(0, (entry.clones || 0) - entry.ciCheckouts);
  } else if (entry.ciCheckouts === undefined) {
    // No CI data for this day — treat every clone as organic.
    entry.ciCheckouts = 0;
    entry.ciRuns = 0;
    entry.organicClones = entry.clones || 0;
  }
}
// Compute organicUniqueClones for all daily entries
// Formula: organicUnique = unique - MIN(round(unique * ciRate), ciRuns)
// Entries without uniqueClones are skipped — no false zeros created.
for (const entry of state.dailyHistory) {
  if (entry.uniqueClones == null) continue;
  const unique = entry.uniqueClones;
  const dayClones = entry.clones || 0;
  // Fraction of the day's clones attributable to CI.
  const ciShare = dayClones > 0 ? entry.ciCheckouts / dayClones : 0;
  // Estimate CI uniques proportionally, capped at the number of CI runs.
  const estimatedCiUnique = Math.min(Math.round(unique * ciShare), entry.ciRuns || 0);
  entry.organicUniqueClones = Math.max(0, unique - estimatedCiUnique);
}
// Accumulate totalOrganicClones (per-day organic, avoids phantom CI subtraction)
// Uses delta: today's organic - previously stored organic for today
const previousOrganicToday = state._previousOrganicToday;
const todayOrganic = todayEntry.organicClones || 0;
if (previousOrganicToday === undefined) {
  // First run after v2→v3 migration — totalOrganicClones was already
  // seeded from dailyHistory which may include today. Don't double-count.
  state._previousOrganicToday = todayOrganic;
} else {
  // Delta can be negative (CI checkouts grew faster than clones since the
  // previous run today), which reverses earlier organic credit.
  state.totalOrganicClones = (state.totalOrganicClones || 0) + (todayOrganic - previousOrganicToday);
  state._previousOrganicToday = todayOrganic;
}
console.log(`Organic clones: today=${todayOrganic}, total=${state.totalOrganicClones}`);
// Accumulate totalOrganicUniqueClones
// Uses same delta approach as totalUniqueClones: track CI unique adjustment for today,
// then subtract cumulative CI uniques from cumulative raw uniques
const todayCiUnique = (todayEntry.uniqueClones || 0) - (todayEntry.organicUniqueClones || 0);
const previousCiUniqueToday = state._previousCiUniqueToday || 0;
state.totalCiUniqueClones = (state.totalCiUniqueClones || 0) + (todayCiUnique - previousCiUniqueToday);
state._previousCiUniqueToday = todayCiUnique;
// Organic uniques = cumulative raw uniques minus cumulative CI uniques.
state.totalOrganicUniqueClones = Math.max(0,
  (state.totalUniqueClones || 0) - (state.totalCiUniqueClones || 0)
);
console.log(`Organic unique clones: today=${todayEntry.organicUniqueClones}, total=${state.totalOrganicUniqueClones} (raw ${state.totalUniqueClones} - CI ${state.totalCiUniqueClones})`);
// 7.7 Totals sanity check — repair under-counted totals from
// the first-run seeding bug (zero-count dates marked as "seen",
// causing real traffic to be skipped on subsequent runs).
// The dailyHistory entries are correct (Math.max merge), so
// if totals are lower than the sum of dailyHistory, fix them.
// Only raises totals, never lowers (safe for long-running installs
// where totalClones > last-31-day sum).
const histClones = state.dailyHistory.reduce((s, d) => s + (d.clones || 0), 0);
const histViews = state.dailyHistory.reduce((s, d) => s + (d.views || 0), 0);
if (state.totalClones < histClones) {
  console.log(`Totals repair: totalClones ${state.totalClones} -> ${histClones} (from dailyHistory)`);
  state.totalClones = histClones;
}
if (state.totalViews < histViews) {
  console.log(`Totals repair: totalViews ${state.totalViews} -> ${histViews} (from dailyHistory)`);
  state.totalViews = histViews;
}
const histUniqueClones = state.dailyHistory.reduce((s, d) => s + (d.uniqueClones || 0), 0);
const histUniqueViews = state.dailyHistory.reduce((s, d) => s + (d.uniqueViews || 0), 0);
if ((state.totalUniqueClones || 0) < histUniqueClones) {
  console.log(`Totals repair: totalUniqueClones ${state.totalUniqueClones} -> ${histUniqueClones}`);
  state.totalUniqueClones = histUniqueClones;
}
if ((state.totalUniqueViews || 0) < histUniqueViews) {
  console.log(`Totals repair: totalUniqueViews ${state.totalUniqueViews} -> ${histUniqueViews}`);
  state.totalUniqueViews = histUniqueViews;
}
// Sanity check totalOrganicClones
// ?? (not ||): an organicClones of 0 on an all-CI day is a real value and
// must not fall back to the raw clone count, which would inflate the
// repair target. (The || form over-counted such days.)
const histOrganic = state.dailyHistory.reduce((s, d) => s + (d.organicClones ?? d.clones ?? 0), 0);
if ((state.totalOrganicClones || 0) < histOrganic) {
  console.log(`Totals repair: totalOrganicClones ${state.totalOrganicClones} -> ${histOrganic}`);
  state.totalOrganicClones = histOrganic;
}
// Recompute organic unique clones after any totals repair
state.totalOrganicUniqueClones = Math.max(0,
  (state.totalUniqueClones || 0) - (state.totalCiUniqueClones || 0)
);
// 8. Compute cascading recency suffix (organic only)
// Use yesterday's complete 24h clone data for the "24h" badge.
// Yesterday is always a full day regardless of when the workflow
// runs (3am cron vs commit trigger), so the label is accurate.
const yesterday = new Date();
yesterday.setUTCDate(yesterday.getUTCDate() - 1);
const yesterdayStr = yesterday.toISOString().slice(0, 10);
let last24hClones = 0;
if (clonesByDate[yesterdayStr] !== undefined) {
  const raw = clonesByDate[yesterdayStr];
  const ci = (state.ciCheckouts[yesterdayStr] && state.ciCheckouts[yesterdayStr].total) || 0;
  last24hClones = Math.max(0, raw - ci);
}
const last24h = last24hClones + todayDownloads;
// Sum organic clones + downloads over the trailing n history days.
// ?? (not ||): a day whose organicClones is legitimately 0 (every clone
// was CI) must not fall back to its raw clone count — this block is
// explicitly "organic only".
const sumWindow = (n) => {
  const window = state.dailyHistory.slice(-n);
  return window.reduce((sum, d) => sum + (d.organicClones ?? d.clones ?? 0) + (d.downloads || 0), 0);
};
const lastWeek = sumWindow(7);
const lastMonth = sumWindow(31);
// Cascade: prefer the freshest non-zero window for the badge suffix.
let suffix = "";
if (last24h > 0) suffix = ` (+${last24h} 24h)`;
else if (lastWeek > 0) suffix = ` (+${lastWeek} wk)`;
else if (lastMonth > 0) suffix = ` (+${lastMonth} mo)`;
console.log(`Recency suffix: "${suffix || "(none)"}" (24h organic clones: ${last24hClones}, downloads: ${todayDownloads})`);
// 9. Monthly archive (runs on 1st of each month)
// On the first UTC day of the month, snapshot cumulative totals plus the
// trailing dailyHistory window into a separate archive gist, one file
// per period (archive-YYYY-MM.json).
const archiveGistId = process.env.ARCHIVE_GIST_ID;
const now = new Date();
if (now.getUTCDate() === 1 && archiveGistId && state.dailyHistory.length > 0) {
  const prevMonth = new Date(now);
  prevMonth.setUTCMonth(prevMonth.getUTCMonth() - 1);
  const period = prevMonth.toISOString().slice(0, 7); // "YYYY-MM"
  const archiveOrganicClones = state.totalOrganicClones || 0;
  const archive = {
    repo: `${owner}/${repo}`,
    period: period,
    generatedAt: now.toISOString(),
    version: "0.1.0",
    cumulativeTotals: {
      downloads: state.totalDownloads,
      clones: state.totalClones,
      uniqueClones: state.totalUniqueClones || 0,
      organicClones: archiveOrganicClones,
      organicUniqueClones: state.totalOrganicUniqueClones || 0,
      ciCheckouts: state.totalCiCheckouts || 0,
      views: state.totalViews,
      uniqueViews: state.totalUniqueViews || 0,
      combined: state.totalDownloads + archiveOrganicClones
    },
    // Month summary sums the (≤31-day) dailyHistory window.
    // ?? (not ||) for organic fields: a legitimate organic count of 0 on
    // an all-CI day must not fall back to the raw (inflated) count.
    monthSummary: {
      downloads: state.dailyHistory.reduce((s, d) => s + (d.downloads || 0), 0),
      clones: state.dailyHistory.reduce((s, d) => s + (d.clones || 0), 0),
      uniqueClones: state.dailyHistory.reduce((s, d) => s + (d.uniqueClones || 0), 0),
      organicClones: state.dailyHistory.reduce((s, d) => s + (d.organicClones ?? d.clones ?? 0), 0),
      organicUniqueClones: state.dailyHistory.reduce((s, d) => s + (d.organicUniqueClones ?? d.uniqueClones ?? 0), 0),
      views: state.dailyHistory.reduce((s, d) => s + (d.views || 0), 0),
      uniqueViews: state.dailyHistory.reduce((s, d) => s + (d.uniqueViews || 0), 0),
      combined: state.dailyHistory.reduce((s, d) => s + (d.organicClones ?? d.clones ?? 0) + (d.downloads || 0), 0)
    },
    dailyHistory: [...state.dailyHistory]
  };
  const archiveFile = `archive-${period}.json`;
  try {
    const archiveFiles = {};
    archiveFiles[archiveFile] = { content: JSON.stringify(archive, null, 2) };
    await github.rest.gists.update({ gist_id: archiveGistId, files: archiveFiles });
    console.log(`Archived ${period} to gist (${state.dailyHistory.length} days)`);
  } catch (e) {
    // Archive failure is non-fatal; badges still update below.
    console.log(`Archive failed: ${e.message}`);
  }
}
// 10. Build shields.io endpoint JSON badges (organic clones only)
// Use per-day accumulated organic (avoids phantom CI subtraction)
const organicTotalClones = state.totalOrganicClones || 0;
const combined = state.totalDownloads + organicTotalClones;
// All badges share the shields.io endpoint schema; only label/message vary.
const makeBadge = (label, message) => ({
  schemaVersion: 1,
  label,
  message,
  color: "blue"
});
const dlsClonesBadge = makeBadge("installs", `${combined}${suffix}`);
const downloadsBadge = makeBadge("downloads", `${state.totalDownloads}`);
const clonesBadge = makeBadge("clones", `${organicTotalClones}`);
const viewsBadge = makeBadge("views", `${state.totalViews}`);
// 11. Update badge gist
// One file per badge plus the full state blob for the next run.
const files = {
  "installs.json": { content: JSON.stringify(dlsClonesBadge, null, 2) },
  "downloads.json": { content: JSON.stringify(downloadsBadge, null, 2) },
  "clones.json": { content: JSON.stringify(clonesBadge, null, 2) },
  "views.json": { content: JSON.stringify(viewsBadge, null, 2) },
  "state.json": { content: JSON.stringify(state, null, 2) }
};
if (!gistId) {
  // First run: create the gist and tell the operator to save its ID.
  const created = await github.rest.gists.create({
    description: `${owner}/${repo} traffic badges`,
    public: true,
    files
  });
  console.log(`Created new gist: ${created.data.id}`);
  console.log(`⚠️ Set TRAFFIC_GIST_ID repo variable to: ${created.data.id}`);
} else {
  await github.rest.gists.update({ gist_id: gistId, files });
  console.log(`Updated gist: ${gistId}`);
}
console.log(`\nBadge URLs (after setting GIST_ID):`);
const gid = gistId || "<GIST_ID>";
console.log(`Installs: https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/${owner}/${gid}/raw/installs.json`);