Add tasks for segment sync and posthog sync
This commit is contained in:
parent
9dd955524b
commit
cce2e386be
3 changed files with 4 additions and 93 deletions
|
|
@ -1,8 +1,8 @@
|
|||
# Daily sync of cognee analytics stats (scheduled).
name: analytics | Update Cognee Stats Daily

# Fixed: the file previously declared `on:` twice — a leftover
# `on: pull_request` testing trigger plus the real schedule block.
# Duplicate top-level mapping keys are invalid YAML and GitHub
# rejects the workflow; only the scheduled trigger is kept.
on:
  schedule:
    - cron: '0 1 * * *' # Runs every day at 01:00 UTC

jobs:
  update_stats:
|
||||
|
|
|
|||
|
|
@ -1,31 +0,0 @@
|
|||
# One-off backfill of historical PyPI download stats for cognee into PostHog.
name: analytics | Historical Import of Cognee Stats

# Fixed: trigger was `on: pull_request` — a leftover from testing the
# workflow — while the intended manual trigger sat commented out.
# A historical backfill should run on demand, not on every PR.
on:
  workflow_dispatch:

jobs:
  import_stats:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install Dependencies
        run: |
          pip install requests posthog

      - name: Run Historical Import Script
        env:
          # Both values come from repository secrets; the script reads
          # them from the environment.
          POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }}
          POSTHOG_API_HOST: ${{ secrets.POSTHOG_API_HOST }}
        run: |
          cd tools # Change to the 'tools' directory
          python historical_import_cognee_stats.py
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
"""Backfill historical PyPI download counts for the `cognee` package into PostHog.

Fetches per-day download totals from pypistats.org and emits one
`cognee_downloads` event per completed day (today and yesterday are
skipped because their counts are still changing).
"""
import os
import sys
import uuid
from datetime import datetime, timezone, timedelta

import posthog
import requests

# PostHog project credentials. The CI workflow exports both values as
# secrets; the host falls back to the EU cloud endpoint when unset
# (previously it was hard-coded, silently ignoring the workflow's
# POSTHOG_API_HOST secret).
POSTHOG_API_KEY = os.getenv('POSTHOG_API_KEY')
POSTHOG_API_HOST = os.getenv('POSTHOG_API_HOST', 'https://eu.i.posthog.com')

# Initialize the module-level PostHog client.
posthog.project_api_key = POSTHOG_API_KEY
posthog.host = POSTHOG_API_HOST

# Fetch overall (historical) download data for the package.
package = 'cognee'
url = f'https://pypistats.org/api/packages/{package}/overall'

# Timeout added so a stalled connection cannot hang the CI job forever.
response = requests.get(url, timeout=30)
if response.status_code != 200:
    print(f"Failed to fetch data: {response.status_code}")
    sys.exit(1)

data = response.json()

# Exclude today and yesterday: pypistats figures for the most recent
# days are incomplete. Timezone-aware "now" replaces the deprecated
# naive datetime.utcnow().
today = datetime.now(timezone.utc).date()
yesterday = today - timedelta(days=1)

# Process and send one event per completed day to PostHog.
for entry in data['data']:
    date_str = entry['date']
    date_obj = datetime.strptime(date_str, '%Y-%m-%d').date()
    downloads = entry['downloads']

    # Skip today and yesterday (counts still in flux).
    if date_obj >= yesterday:
        continue

    # NOTE(review): a random distinct_id means re-running this script
    # duplicates events — a deterministic id per date (the removed,
    # unused f"cognee_downloads_{date_str}") may have been intended
    # for deduplication; confirm desired idempotency semantics.
    distinct_id = str(uuid.uuid4())

    # Send an event to PostHog.
    posthog.capture(
        distinct_id=distinct_id,
        event='cognee_downloads',
        properties={
            'date': date_str,
            'downloads': downloads,
        }
    )

    print(f"Data for {date_str} imported successfully.")

print("Historical data import completed.")
|
||||
Loading…
Add table
Reference in a new issue