eventflows 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eventflows-0.1.0/.gitignore +32 -0
- eventflows-0.1.0/PKG-INFO +105 -0
- eventflows-0.1.0/README.md +89 -0
- eventflows-0.1.0/demo.py +83 -0
- eventflows-0.1.0/pyproject.toml +38 -0
- eventflows-0.1.0/setup.cfg +4 -0
- eventflows-0.1.0/src/eventflow/__init__.py +23 -0
- eventflows-0.1.0/src/eventflow/client.py +66 -0
- eventflows-0.1.0/src/eventflow/core/events.py +82 -0
- eventflows-0.1.0/src/eventflow/core/queue.py +44 -0
- eventflows-0.1.0/src/eventflow/core/worker.py +72 -0
- eventflows-0.1.0/src/eventflow/providers/base.py +15 -0
- eventflows-0.1.0/src/eventflow/providers/console.py +29 -0
- eventflows-0.1.0/src/eventflow/providers/google.py +59 -0
- eventflows-0.1.0/src/eventflow/providers/marsenai.py +3 -0
- eventflows-0.1.0/src/eventflow/providers/meta.py +72 -0
- eventflows-0.1.0/src/eventflow/providers/reddit.py +105 -0
- eventflows-0.1.0/src/eventflow/providers/tiktok.py +77 -0
- eventflows-0.1.0/src/eventflow/utils/http.py +42 -0
- eventflows-0.1.0/src/eventflows.egg-info/PKG-INFO +105 -0
- eventflows-0.1.0/src/eventflows.egg-info/SOURCES.txt +24 -0
- eventflows-0.1.0/src/eventflows.egg-info/dependency_links.txt +1 -0
- eventflows-0.1.0/src/eventflows.egg-info/requires.txt +8 -0
- eventflows-0.1.0/src/eventflows.egg-info/top_level.txt +1 -0
- eventflows-0.1.0/tests/__init__.py +0 -0
- eventflows-0.1.0/tests/unit/test_events.py +42 -0
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
# Python
|
|
2
|
+
__pycache__/
|
|
3
|
+
*.py[cod]
|
|
4
|
+
*$py.class
|
|
5
|
+
*.so
|
|
6
|
+
|
|
7
|
+
# Environments
|
|
8
|
+
.env
|
|
9
|
+
.venv
|
|
10
|
+
env/
|
|
11
|
+
venv/
|
|
12
|
+
ENV/
|
|
13
|
+
env.bak/
|
|
14
|
+
venv.bak/
|
|
15
|
+
|
|
16
|
+
# Poetry
|
|
17
|
+
poetry.lock
|
|
18
|
+
|
|
19
|
+
# VS Code / IDEs
|
|
20
|
+
.vscode/
|
|
21
|
+
.idea/
|
|
22
|
+
*.swp
|
|
23
|
+
|
|
24
|
+
# Testing & Coverage
|
|
25
|
+
.pytest_cache/
|
|
26
|
+
.coverage
|
|
27
|
+
htmlcov/
|
|
28
|
+
|
|
29
|
+
# Build files
|
|
30
|
+
dist/
|
|
31
|
+
build/
|
|
32
|
+
*.egg-info/
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: eventflows
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A robust, non-blocking server-side event tracking library.
|
|
5
|
+
Author-email: Raghav <raghav@23v.co>
|
|
6
|
+
License: MIT
|
|
7
|
+
Requires-Python: >=3.10
|
|
8
|
+
Description-Content-Type: text/markdown
|
|
9
|
+
Requires-Dist: pydantic[email]>=2.12.5
|
|
10
|
+
Requires-Dist: httpx>=0.26.0
|
|
11
|
+
Provides-Extra: dev
|
|
12
|
+
Requires-Dist: pytest>=7.4.0; extra == "dev"
|
|
13
|
+
Requires-Dist: pytest-asyncio>=0.23.0; extra == "dev"
|
|
14
|
+
Requires-Dist: ruff>=0.1.9; extra == "dev"
|
|
15
|
+
Requires-Dist: mypy>=1.8.0; extra == "dev"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
---
|
|
20
|
+
|
|
21
|
+
# EventFlows
|
|
22
|
+
|
|
23
|
+
A lightweight, robust, and privacy-first server-side tracking library for Python.
|
|
24
|
+
|
|
25
|
+
EventFlows acts as a universal router for your conversion events. You define your tracking event once, and EventFlows automatically cleans it, hashes sensitive PII, batches it in a background thread, and safely dispatches it to multiple external APIs (Meta, GA4, Reddit, TikTok) without slowing down your main application.
|
|
26
|
+
|
|
27
|
+
---
|
|
28
|
+
|
|
29
|
+
## Key Features
|
|
30
|
+
|
|
31
|
+
* **Zero Latency:** Events are dropped into a thread-safe memory queue instantly. Your users never wait for a tracking pixel to load.
|
|
32
|
+
* **Privacy by Design:** Emails and phone numbers are automatically normalized and SHA-256 hashed before they ever leave your server.
|
|
33
|
+
* **Smart Payload Mapping:** Send custom data freely. EventFlows dynamically formats it for "Open" APIs (like Meta) and strictly filters it for "Rigid" APIs (like Reddit).
|
|
34
|
+
* **Destination Filtering:** Choose exactly which networks receive which events.
|
|
35
|
+
* **Resilient Networking:** Built-in exponential backoff and retry logic for network blips or rate limits.
|
|
36
|
+
|
|
37
|
+
---
|
|
38
|
+
|
|
39
|
+
## Installation
|
|
40
|
+
|
|
41
|
+
Currently, EventFlows is installed from source.
|
|
42
|
+
|
|
43
|
+
### Using Poetry
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
poetry add git+https://github.com/YOUR_USERNAME/EventFlows.git
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
### Using pip
|
|
50
|
+
|
|
51
|
+
```bash
|
|
52
|
+
pip install git+https://github.com/YOUR_USERNAME/EventFlows.git
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
---
|
|
56
|
+
|
|
57
|
+
## Quickstart
|
|
58
|
+
|
|
59
|
+
Register your providers once at the start of your application, then use `tracker.track` anywhere.
|
|
60
|
+
|
|
61
|
+
```python
|
|
62
|
+
import logging
|
|
63
|
+
from EventFlows import tracker, EventType
|
|
64
|
+
from EventFlows import ConsoleProvider, MetaProvider, RedditProvider, GA4Provider
|
|
65
|
+
|
|
66
|
+
# Setup logging to see EventFlows processing in the background
|
|
67
|
+
logging.basicConfig(level=logging.INFO)
|
|
68
|
+
|
|
69
|
+
# 1. Register your destinations (Providers)
|
|
70
|
+
tracker.add_provider(ConsoleProvider()) # Great for local debugging
|
|
71
|
+
tracker.add_provider(MetaProvider(pixel_id="123", access_token="abc"))
|
|
72
|
+
tracker.add_provider(GA4Provider(measurement_id="G-123", api_secret="abc"))
|
|
73
|
+
|
|
74
|
+
# 2. Track an event anywhere in your app
|
|
75
|
+
# This happens instantly. Hashing and API calls occur in a background thread.
|
|
76
|
+
tracker.track(
|
|
77
|
+
name="ProSubscription",
|
|
78
|
+
event_type=EventType.PURCHASE,
|
|
79
|
+
user_data={
|
|
80
|
+
"email": " User@Example.com ", # Automatically trimmed, lowercased, and SHA-256 hashed
|
|
81
|
+
"ip_address": "192.168.1.1"
|
|
82
|
+
},
|
|
83
|
+
properties={
|
|
84
|
+
"value": 49.99,
|
|
85
|
+
"currency": "USD",
|
|
86
|
+
"plan_type": "annual"
|
|
87
|
+
},
|
|
88
|
+
event_id="ORD-9999" # Used for server-side deduplication
|
|
89
|
+
)
|
|
90
|
+
|
|
91
|
+
# 3. Graceful Shutdown
|
|
92
|
+
# Essential for ensuring the memory queue is flushed before the process exits.
|
|
93
|
+
# Call this in your app's shutdown/cleanup hook.
|
|
94
|
+
tracker.shutdown()
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
---
|
|
98
|
+
|
|
99
|
+
## Why use `tracker.shutdown()`?
|
|
100
|
+
|
|
101
|
+
EventFlows uses an internal queue to batch events and keep your application fast. If your script or server exits abruptly without calling `shutdown()`, events still in the queue may not be delivered.
|
|
102
|
+
|
|
103
|
+
* **FastAPI/Starlette:** Use the `@app.on_event("shutdown")` decorator (or lifespan context manager).
|
|
104
|
+
* **Flask:** Use `atexit.register(tracker.shutdown)`.
|
|
105
|
+
* **Scripts:** Call it as the final line of your execution.
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
---
|
|
4
|
+
|
|
5
|
+
# EventFlows
|
|
6
|
+
|
|
7
|
+
A lightweight, robust, and privacy-first server-side tracking library for Python.
|
|
8
|
+
|
|
9
|
+
EventFlows acts as a universal router for your conversion events. You define your tracking event once, and EventFlows automatically cleans it, hashes sensitive PII, batches it in a background thread, and safely dispatches it to multiple external APIs (Meta, GA4, Reddit, TikTok) without slowing down your main application.
|
|
10
|
+
|
|
11
|
+
---
|
|
12
|
+
|
|
13
|
+
## Key Features
|
|
14
|
+
|
|
15
|
+
* **Zero Latency:** Events are dropped into a thread-safe memory queue instantly. Your users never wait for a tracking pixel to load.
|
|
16
|
+
* **Privacy by Design:** Emails and phone numbers are automatically normalized and SHA-256 hashed before they ever leave your server.
|
|
17
|
+
* **Smart Payload Mapping:** Send custom data freely. EventFlows dynamically formats it for "Open" APIs (like Meta) and strictly filters it for "Rigid" APIs (like Reddit).
|
|
18
|
+
* **Destination Filtering:** Choose exactly which networks receive which events.
|
|
19
|
+
* **Resilient Networking:** Built-in exponential backoff and retry logic for network blips or rate limits.
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## Installation
|
|
24
|
+
|
|
25
|
+
Currently, EventFlows is installed from source.
|
|
26
|
+
|
|
27
|
+
### Using Poetry
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
poetry add git+https://github.com/YOUR_USERNAME/EventFlows.git
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
### Using pip
|
|
34
|
+
|
|
35
|
+
```bash
|
|
36
|
+
pip install git+https://github.com/YOUR_USERNAME/EventFlows.git
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
---
|
|
40
|
+
|
|
41
|
+
## Quickstart
|
|
42
|
+
|
|
43
|
+
Register your providers once at the start of your application, then use `tracker.track` anywhere.
|
|
44
|
+
|
|
45
|
+
```python
|
|
46
|
+
import logging
|
|
47
|
+
from EventFlows import tracker, EventType
|
|
48
|
+
from EventFlows import ConsoleProvider, MetaProvider, RedditProvider, GA4Provider
|
|
49
|
+
|
|
50
|
+
# Setup logging to see EventFlows processing in the background
|
|
51
|
+
logging.basicConfig(level=logging.INFO)
|
|
52
|
+
|
|
53
|
+
# 1. Register your destinations (Providers)
|
|
54
|
+
tracker.add_provider(ConsoleProvider()) # Great for local debugging
|
|
55
|
+
tracker.add_provider(MetaProvider(pixel_id="123", access_token="abc"))
|
|
56
|
+
tracker.add_provider(GA4Provider(measurement_id="G-123", api_secret="abc"))
|
|
57
|
+
|
|
58
|
+
# 2. Track an event anywhere in your app
|
|
59
|
+
# This happens instantly. Hashing and API calls occur in a background thread.
|
|
60
|
+
tracker.track(
|
|
61
|
+
name="ProSubscription",
|
|
62
|
+
event_type=EventType.PURCHASE,
|
|
63
|
+
user_data={
|
|
64
|
+
"email": " User@Example.com ", # Automatically trimmed, lowercased, and SHA-256 hashed
|
|
65
|
+
"ip_address": "192.168.1.1"
|
|
66
|
+
},
|
|
67
|
+
properties={
|
|
68
|
+
"value": 49.99,
|
|
69
|
+
"currency": "USD",
|
|
70
|
+
"plan_type": "annual"
|
|
71
|
+
},
|
|
72
|
+
event_id="ORD-9999" # Used for server-side deduplication
|
|
73
|
+
)
|
|
74
|
+
|
|
75
|
+
# 3. Graceful Shutdown
|
|
76
|
+
# Essential for ensuring the memory queue is flushed before the process exits.
|
|
77
|
+
# Call this in your app's shutdown/cleanup hook.
|
|
78
|
+
tracker.shutdown()
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
---
|
|
82
|
+
|
|
83
|
+
## Why use `tracker.shutdown()`?
|
|
84
|
+
|
|
85
|
+
EventFlows uses an internal queue to batch events and keep your application fast. If your script or server exits abruptly without calling `shutdown()`, events still in the queue may not be delivered.
|
|
86
|
+
|
|
87
|
+
* **FastAPI/Starlette:** Use the `@app.on_event("shutdown")` decorator (or lifespan context manager).
|
|
88
|
+
* **Flask:** Use `atexit.register(tracker.shutdown)`.
|
|
89
|
+
* **Scripts:** Call it as the final line of your execution.
|
eventflows-0.1.0/demo.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"""Smoke-test script: register all providers and fire a destination-filtered event."""

import time
import logging

from eventflow import (
    tracker,
    EventType,
    RedditProvider,
    MetaProvider,
    GA4Provider,
    ConsoleProvider,
)

# Show the background worker's log output while the demo runs.
logging.basicConfig(level=logging.INFO)

# Register destinations. Credentials are fakes, so the real ad-network
# providers will fail (and log) harmlessly while ConsoleProvider prints
# the event locally for inspection.
tracker.add_provider(ConsoleProvider())
tracker.add_provider(GA4Provider(measurement_id="G-123", api_secret="fake"))
tracker.add_provider(MetaProvider(pixel_id="123", access_token="fake"))
tracker.add_provider(RedditProvider(pixel_id="t2_123", access_token="fake"))

print("🚀 Firing restricted event...")

# Track an internal event while keeping it away from the ad networks.
tracker.track(
    name="Internal_Password_Reset",
    user_data={"email": "admin@example.com"},
    exclude_providers=["meta_capi", "reddit", "tiktok"],  # Block these!
)

# Give the daemon worker a moment to flush the batch before the script exits.
time.sleep(3)
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61.0", "setuptools-scm"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "eventflows"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "A robust, non-blocking server-side event tracking library."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.10"
|
|
11
|
+
authors = [
|
|
12
|
+
{name = "Raghav", email = "raghav@23v.co"}
|
|
13
|
+
]
|
|
14
|
+
license = {text = "MIT"}
|
|
15
|
+
dependencies = [
|
|
16
|
+
"pydantic[email]>=2.12.5",
|
|
17
|
+
"httpx>=0.26.0",
|
|
18
|
+
]
|
|
19
|
+
|
|
20
|
+
[project.optional-dependencies]
|
|
21
|
+
dev = [
|
|
22
|
+
"pytest>=7.4.0",
|
|
23
|
+
"pytest-asyncio>=0.23.0",
|
|
24
|
+
"ruff>=0.1.9",
|
|
25
|
+
"mypy>=1.8.0",
|
|
26
|
+
]
|
|
27
|
+
|
|
28
|
+
[tool.setuptools.packages.find]
|
|
29
|
+
where = ["src"]
|
|
30
|
+
|
|
31
|
+
[tool.ruff]
|
|
32
|
+
line-length = 88
|
|
33
|
+
target-version = "py310"
|
|
34
|
+
|
|
35
|
+
[tool.mypy]
|
|
36
|
+
# Strict mode: require annotations and full type checking across the package.
|
|
37
|
+
strict = true
|
|
38
|
+
ignore_missing_imports = true
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
from .client import tracker, EventFlow
|
|
2
|
+
from .core.events import Event, UserData, EventType
|
|
3
|
+
from .providers.base import BaseProvider
|
|
4
|
+
from .providers.console import ConsoleProvider
|
|
5
|
+
from .providers.reddit import RedditProvider
|
|
6
|
+
from .providers.meta import MetaProvider
|
|
7
|
+
from .providers.google import GA4Provider
|
|
8
|
+
from .providers.tiktok import TikTokProvider
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
# Public API: the names importable directly from the `eventflow` package.
__all__ = [
    "tracker",
    "EventFlow",
    "Event",
    "UserData",
    "EventType",
    "BaseProvider",
    "RedditProvider",
    "MetaProvider",
    "GA4Provider",
    "TikTokProvider",
    "ConsoleProvider",
]
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from typing import List, Optional, Dict, Any
|
|
3
|
+
from .core.events import Event, UserData, EventType
|
|
4
|
+
from .core.queue import SafeQueue
|
|
5
|
+
from .core.worker import BackgroundWorker
|
|
6
|
+
from .providers.base import BaseProvider
|
|
7
|
+
|
|
8
|
+
logging.getLogger("eventflow").addHandler(logging.NullHandler())
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class EventFlow:
    """Singleton facade for the library: validates, queues, and routes events.

    A single background worker drains the queue and dispatches batches to
    every registered provider, so ``track()`` never blocks the caller.
    """

    _instance = None

    def __new__(cls, *args, **kwargs):
        # Classic singleton: every EventFlow() call shares one instance.
        if cls._instance is None:
            cls._instance = super(EventFlow, cls).__new__(cls)
            cls._instance._initialized = False
        return cls._instance

    def __init__(self, batch_size: int = 10, max_queue_size: int = 1000):
        # Guard: __init__ runs on every EventFlow() call, but the shared
        # singleton must only be configured once.
        if self._initialized:
            return

        self._providers: List[BaseProvider] = []
        self._queue = SafeQueue(max_size=max_queue_size)
        self._worker: Optional[BackgroundWorker] = None
        self._batch_size = batch_size
        self._initialized = True

    def add_provider(self, provider: BaseProvider):
        """Register a destination; lazily starts the worker on first use."""
        self._providers.append(provider)
        if not self._worker:
            self._worker = BackgroundWorker(
                self._queue, self._providers, self._batch_size
            )

    def track(
        self,
        name: str,
        user_data: Optional[Dict[str, Any]] = None,
        properties: Optional[Dict[str, Any]] = None,
        event_type: EventType = EventType.CUSTOM,
        exclude_providers: Optional[List[str]] = None,
        event_id: Optional[str] = None,
    ) -> bool:
        """Validate and enqueue an event; returns False instead of raising.

        Args:
            name: Event name, e.g. "ProSubscription".
            user_data: Raw user fields; PII is hashed by UserData validators.
            properties: Arbitrary metadata (value, currency, ...).
            event_type: Standard category used for per-network name mapping.
            exclude_providers: Provider names that must NOT receive this event.
            event_id: Deduplication ID (e.g. an order id). Previously the
                README documented this parameter but track() did not accept it.
        """
        try:
            user = UserData(**(user_data or {}))

            event = Event(
                name=name,
                type=event_type,
                user=user,
                properties=properties or {},
                exclude_providers=exclude_providers or [],
                event_id=event_id,
            )

            return self._queue.put(event)

        # Deliberately broad: tracking must never crash the host application.
        except Exception as e:
            logging.getLogger("eventflow").error(f"[EventFlow] Tracking failed: {e}")
            return False

    def shutdown(self):
        """Stop the background worker; the ``tracker.shutdown()`` entry point
        documented in the README (previously missing from this class)."""
        if self._worker:
            self._worker.shutdown()


tracker = EventFlow()
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
import hashlib
|
|
2
|
+
from datetime import datetime, timezone
|
|
3
|
+
from enum import Enum
|
|
4
|
+
from typing import Any, Dict, Optional
|
|
5
|
+
from pydantic import BaseModel, EmailStr, Field, field_validator, ConfigDict, HttpUrl
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class EventType(str, Enum):
    """Standard, network-agnostic event categories.

    Providers translate these into their own naming schemes (e.g. GA4 maps
    LEAD to "generate_lead", Meta maps it to "Lead"); CUSTOM falls back to
    the event's own name in those mappings.
    """

    PAGE_VIEW = "page_view"
    SIGN_UP = "sign_up"
    PURCHASE = "purchase"
    ADD_TO_CART = "add_to_cart"
    LEAD = "lead"
    SEARCH = "search"
    CUSTOM = "custom"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class UserData(BaseModel):
    """
    Represents the user triggering the event.
    CRITICAL: Automatically hashes PII (Email/Phone) upon creation.
    """

    user_id: Optional[str] = Field(
        None, description="Internal User ID from your database"
    )
    email: Optional[str] = Field(None, description="User email address (Hashed)")
    phone: Optional[str] = Field(None, description="User phone number (Hashed)")
    ip_address: Optional[str] = Field(None, description="Client IP Address")
    user_agent: Optional[str] = Field(None, description="Client User Agent")
    click_id: Optional[str] = Field(None, description="Click ID (fbc, gclid, etc.)")

    # frozen: instances are immutable once built; extra="forbid" rejects
    # unknown keys instead of silently accepting typos.
    model_config = ConfigDict(frozen=True, extra="forbid")

    @field_validator("email", mode="before")
    @classmethod
    def hash_email(cls, v: Any) -> Optional[str]:
        """Trim + lowercase the email, then SHA-256 hash it.

        mode="before" runs prior to field parsing, so the raw input string
        is what gets normalized and hashed.
        """
        if v and isinstance(v, str):
            raw_value = v
            # Normalize before hashing so addresses differing only in case
            # or surrounding whitespace produce the same digest.
            clean_value = raw_value.strip().lower()

            return hashlib.sha256(clean_value.encode("utf-8")).hexdigest()
        return v

    @field_validator("phone", mode="before")
    @classmethod
    def hash_phone(cls, v: Any) -> Optional[str]:
        """Normalize (digits only) and SHA-256 hash the phone number."""
        if v and isinstance(v, str):
            # Remove spaces, dashes, parentheses
            clean = "".join(filter(str.isdigit, v))
            return hashlib.sha256(clean.encode("utf-8")).hexdigest()
        return v
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class Event(BaseModel):
    """A single, validated tracking event.

    Frozen (immutable) so the background worker can share one instance
    across providers without defensive copies.
    """

    name: str = Field(
        ..., min_length=1, description="The name of the event (e.g., 'UserRegistered')"
    )
    type: EventType = Field(default=EventType.CUSTOM)
    user: UserData = Field(default_factory=UserData)
    properties: Dict[str, Any] = Field(
        default_factory=dict, description="Custom metadata (price, currency, etc.)"
    )
    url: Optional[HttpUrl] = Field(
        None, description="The page URL where event occurred"
    )
    # Timezone-aware UTC timestamp captured when the event object is created.
    timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))

    event_id: Optional[str] = Field(
        None, description="Deduplication ID(e.g., order_id)"
    )

    # Checked by the worker against each provider's .name before dispatch.
    exclude_providers: list[str] = Field(
        default_factory=list, description="Name of providers to exclude"
    )

    model_config = ConfigDict(frozen=True, extra="forbid")
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import queue
|
|
2
|
+
import asyncio
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Optional, Union
|
|
5
|
+
from .events import Event
|
|
6
|
+
|
|
7
|
+
logger = logging.getLogger("eventflow.queue")
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class SafeQueue:
    """Bounded, thread-safe buffer between ``track()`` callers and the worker.

    Producers never block: when the buffer is full the event is dropped
    and counted rather than stalling the caller.
    """

    def __init__(self, max_size: int = 1000):
        self._queue: queue.Queue = queue.Queue(maxsize=max_size)
        self._dropped_events: int = 0

    def put(self, event: "Event") -> bool:
        """Enqueue without blocking; returns False when the buffer is full."""
        try:
            self._queue.put_nowait(event)
        except queue.Full:
            self._dropped_events += 1
            # Throttle: warn on drop 1, 101, 201, ... rather than every drop.
            if self._dropped_events % 100 == 1:
                logger.warning(
                    f"EventFlow Queue is full! Dropped {self._dropped_events} events so far. "
                    "Check your network connection or increase buffer size."
                )
            return False
        else:
            return True

    def get(self, timeout: float = 0.1) -> Optional["Event"]:
        """Wait up to *timeout* seconds; returns None when nothing arrives."""
        try:
            item = self._queue.get(timeout=timeout)
        except queue.Empty:
            return None
        return item

    def task_done(self):
        """Mark the most recently fetched event as processed."""
        self._queue.task_done()

    def qsize(self) -> int:
        """Approximate number of buffered events."""
        return self._queue.qsize()

    def empty(self) -> bool:
        """True when no events are currently buffered."""
        return self._queue.empty()
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
import time
|
|
3
|
+
import atexit
|
|
4
|
+
import logging
|
|
5
|
+
from typing import List, Callable, Any
|
|
6
|
+
from .queue import SafeQueue
|
|
7
|
+
from .events import Event
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger("eventflow.worker")
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class BackgroundWorker:
    """Daemon thread that drains the event queue and fans batches out to providers.

    Starts immediately on construction and registers itself with atexit so a
    best-effort shutdown happens even if the caller forgets.
    """

    def __init__(self, queue: "SafeQueue", providers: List[Any], batch_size: int = 10):
        self._queue = queue
        self._providers = providers
        self._batch_size = batch_size
        self._stop_event = threading.Event()

        # Daemon thread: never keeps the interpreter alive on its own.
        self._thread = threading.Thread(
            target=self._run_loop, name="EventFlow-Worker", daemon=True
        )

        self._thread.start()
        atexit.register(self.shutdown)

    def _run_loop(self):
        logger.debug("EventFlow background worker started.")

        while not self._stop_event.is_set():
            batch = self._collect_batch()
            if batch:
                self._dispatch_batch(batch)

        # BUGFIX: previously the loop exited immediately on stop, silently
        # discarding queued events. Drain what remains so a graceful
        # shutdown actually flushes the queue as documented.
        while True:
            batch = self._drain_batch()
            if not batch:
                break
            self._dispatch_batch(batch)

    def _collect_batch(self) -> List["Event"]:
        """Gather up to batch_size events, waiting at most ~2s per batch."""
        batch = []
        start_time = time.time()

        while len(batch) < self._batch_size and (time.time() - start_time) < 2.0:
            if self._stop_event.is_set():
                break

            event = self._queue.get(timeout=0.1)
            if event:
                batch.append(event)
                self._queue.task_done()

        return batch

    def _drain_batch(self) -> List["Event"]:
        """Collect whatever is immediately available (shutdown flush path)."""
        batch = []
        while len(batch) < self._batch_size:
            event = self._queue.get(timeout=0.05)
            if event is None:
                break
            batch.append(event)
            self._queue.task_done()
        return batch

    def _dispatch_batch(self, batch: List["Event"]):
        """Send the batch to each provider, honoring per-event exclusions."""
        for provider in self._providers:

            provider_batch = [
                event for event in batch if provider.name not in event.exclude_providers
            ]

            if not provider_batch:
                continue

            try:
                # BUGFIX: send the filtered provider_batch, not the full
                # batch — otherwise exclude_providers was silently ignored.
                success = provider.send_batch(provider_batch)
                if not success:
                    logger.warning(f"Provider {provider.name} failed to process batch.")
            except Exception as e:
                # One misbehaving provider must not take down the worker.
                logger.error(f"Critical error in provider {provider.name}: {e}")

    def shutdown(self):
        """Signal the worker to stop and wait (bounded) for the final flush."""
        if not self._thread.is_alive():
            return
        logger.info("EventFlow shutting down...")
        self._stop_event.set()
        self._thread.join(timeout=5.0)
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
import logging
|
|
3
|
+
from typing import List
|
|
4
|
+
from ..core.events import Event
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger("eventflow.providers")
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class BaseProvider(ABC):
    """Abstract destination for event batches.

    Concrete providers (Meta, GA4, Reddit, ...) subclass this; *name* is the
    identifier matched against Event.exclude_providers by the worker.
    """

    def __init__(self, name: str):
        self.name = name

    @abstractmethod
    def send_batch(self, events: List["Event"]) -> bool:
        """Deliver *events* to the destination; return True on success."""
        ...
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import json
|
|
3
|
+
from typing import List
|
|
4
|
+
from .base import BaseProvider
|
|
5
|
+
from ..core.events import Event
|
|
6
|
+
|
|
7
|
+
logger = logging.getLogger("eventflow.console")
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ConsoleProvider(BaseProvider):
    """
    A debug provider that pretty-prints events to the terminal.
    Use this in your local development environment.
    """

    def __init__(self):
        super().__init__(name="console")

    def send_batch(self, events: List["Event"]) -> bool:
        """Print every event as indented JSON; always reports success."""
        print(f"\n[{self.name.upper()}] Dispatching Batch of {len(events)} events:")

        for current in events:
            payload = current.model_dump()
            # datetime is not JSON-serializable; render it as ISO-8601 text.
            payload["timestamp"] = payload["timestamp"].isoformat()

            print(f"Event: {current.name}")
            print(json.dumps(payload, indent=2))

        return True
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from typing import List, Any, Dict
|
|
2
|
+
from .base import BaseProvider
|
|
3
|
+
from ..core.events import Event, EventType
|
|
4
|
+
from ..utils.http import ResilientHttpClient
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class GA4Provider(BaseProvider):
    """
    Sends events to Google Analytics 4 via Measurement Protocol.
    """

    def __init__(self, measurement_id: str, api_secret: str):
        super().__init__(name="ga4")
        self.http = ResilientHttpClient()
        self.url = f"https://www.google-analytics.com/mp/collect?measurement_id={measurement_id}&api_secret={api_secret}"

    def send_batch(self, events: List["Event"]) -> bool:
        """Group events per GA4 client_id and POST one payload per client."""
        grouped: Dict[str, list] = {}
        for ev in events:
            # GA4 requires a client_id; fall back to IP, then a fixed label.
            client_id = ev.user.user_id or ev.user.ip_address or "server_side_user"
            grouped.setdefault(client_id, []).append(self._map_event(ev))

        all_ok = True
        for client_id, mapped in grouped.items():
            body = {"client_id": client_id, "events": mapped}

            resp = self.http.post(self.url, json=body)
            if not resp or not resp.is_success:
                all_ok = False

        return all_ok

    def _map_event(self, event: "Event") -> Dict[str, Any]:
        """Translate an Event into a Measurement Protocol event dict."""
        params = dict(event.properties)

        # GA4 reports monetary amounts under 'value'.
        if "revenue" in params:
            params["value"] = params.pop("revenue")
        # A value without a currency is rejected; default to USD.
        if "value" in params and "currency" not in params:
            params["currency"] = "USD"

        return {"name": self._get_ga_event_name(event), "params": params}

    def _get_ga_event_name(self, event: "Event") -> str:
        """Map standard EventTypes to GA4 recommended event names,
        falling back to a snake_cased version of the event's own name."""
        standard = {
            EventType.PURCHASE: "purchase",
            EventType.SIGN_UP: "sign_up",
            EventType.LEAD: "generate_lead",
            EventType.ADD_TO_CART: "add_to_cart",
            EventType.PAGE_VIEW: "page_view",
            EventType.SEARCH: "search",
        }

        fallback = event.name.lower().replace(" ", "_")
        return standard.get(event.type, fallback)
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
from typing import List, Any, Dict
|
|
2
|
+
from .base import BaseProvider
|
|
3
|
+
from ..core.events import Event, EventType
|
|
4
|
+
from ..utils.http import ResilientHttpClient
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class MetaProvider(BaseProvider):
    """
    Sends events to the Meta (Facebook) Conversions API.
    """

    def __init__(self, pixel_id: str, access_token: str, api_version: str = "v19.0"):
        super().__init__(name="meta_capi")
        self.pixel_id = pixel_id
        self.access_token = access_token
        self.http = ResilientHttpClient()

        # NOTE(review): the access token rides in the query string here, so it
        # may appear in URL logs; Meta also accepts it in the POST body.
        self.url = f"https://graph.facebook.com/{api_version}/{pixel_id}/events?access_token={access_token}"

    def send_batch(self, events: List[Event]) -> bool:
        """POST the whole batch in one CAPI request; True on HTTP success."""
        payload = {"data": [self._map_event(e) for e in events]}
        response = self.http.post(self.url, json=payload)
        return response is not None and response.is_success

    def _map_event(self, event: Event) -> Dict[str, Any]:
        """
        Maps an EventFlow standard event to Meta's CAPI schema.
        """

        # PII fields are already SHA-256 hashed by UserData's validators.
        user_data = {}
        if event.user.email:
            user_data["em"] = [event.user.email]
        if event.user.phone:
            user_data["ph"] = [event.user.phone]
        if event.user.ip_address:
            user_data["client_ip_address"] = event.user.ip_address
        if event.user.user_agent:
            user_data["client_user_agent"] = event.user.user_agent
        if event.user.click_id:
            user_data["fbc"] = event.user.click_id
        if event.user.user_id:
            user_data["external_id"] = [event.user.user_id]

        custom_data = event.properties.copy()

        # Meta rejects a value without a currency; default to USD.
        if "value" in custom_data and "currency" not in custom_data:
            custom_data["currency"] = "USD"

        return {
            "event_name": self._get_meta_event_name(event),
            "event_time": int(event.timestamp.timestamp()),
            "action_source": "server",
            "user_data": user_data,
            "custom_data": custom_data,
            "event_id": event.event_id,  # enables dedup against a browser pixel
        }

    def _get_meta_event_name(self, event: Event) -> str:
        """
        Translates standard EventType to Meta's Standard Events;
        unmapped types fall back to the event's own name.
        """

        mapping = {
            EventType.PURCHASE: "Purchase",
            # BUGFIX: was misspelled "CompleteResgistration", which Meta would
            # have treated as a non-standard custom event.
            EventType.SIGN_UP: "CompleteRegistration",
            EventType.LEAD: "Lead",
            EventType.ADD_TO_CART: "AddToCart",
            EventType.PAGE_VIEW: "PageView",
            EventType.SEARCH: "Search",
        }

        return mapping.get(event.type, event.name)
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
from typing import List, Any, Dict
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from .base import BaseProvider
|
|
4
|
+
from ..core.events import Event, EventType
|
|
5
|
+
from ..utils.http import ResilientHttpClient
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class RedditProvider(BaseProvider):
    """
    Sends events to the Reddit Conversions API (CAPI).
    Docs: https://business.reddithelp.com/s/article/Conversions-API
    """

    def __init__(self, pixel_id: str, access_token: str, test_mode: bool = False):
        """Configure a Reddit CAPI destination for the given pixel."""
        super().__init__(name="reddit")
        self.pixel_id = pixel_id
        self.access_token = access_token
        self.test_mode = test_mode
        self.http = ResilientHttpClient()
        self.url = f"https://ads-api.reddit.com/api/v2.0/conversions/events/{pixel_id}"

    def send_batch(self, events: List[Event]) -> bool:
        """Translate and POST a batch of events; True iff Reddit accepted it."""
        body = {
            "test_mode": self.test_mode,
            "events": [self._map_event(evt) for evt in events],
        }
        auth_headers = {
            "Authorization": f"Bearer {self.access_token}",
            "Content-Type": "application/json",
        }
        resp = self.http.post(self.url, json=body, headers=auth_headers)
        if resp is None:
            return False
        return bool(resp.is_success)

    def _map_event(self, event: Event) -> Dict[str, Any]:
        """Translate one generic Event into Reddit's CAPI event shape."""
        profile = event.user
        identity: Dict[str, Any] = {}
        for reddit_key, value in (
            ("email", profile.email),
            ("ip_address", profile.ip_address),
            ("user_agent", profile.user_agent),
            ("click_id", profile.click_id),
            ("uuid", profile.user_id),
        ):
            if value:
                identity[reddit_key] = value

        # Reddit strictly rejects unknown fields, so metadata is assembled
        # field-by-field from a whitelist instead of passing properties through.
        props = dict(event.properties)
        meta: Dict[str, Any] = {}

        # Monetary value: prefer 'value', fall back to 'revenue'.
        if "value" in props:
            meta["value_decimal"] = props["value"]
        elif "revenue" in props:
            meta["value_decimal"] = props["revenue"]

        meta["currency"] = props.get("currency", "USD")

        # Deduplication key: an explicit order_id wins, else the event's own id.
        if "order_id" in props:
            meta["conversion_id"] = str(props["order_id"])
        elif event.event_id:
            meta["conversion_id"] = str(event.event_id)

        # Optional whitelisted extras.
        if "item_count" in props:
            meta["item_count"] = int(props["item_count"])
        if "products" in props:
            meta["products"] = props["products"]

        return {
            "event_at": event.timestamp.isoformat(),
            "event_type": {"tracking_type": self._get_reddit_event_name(event)},
            "user": identity,
            "event_metadata": meta,
        }

    def _get_reddit_event_name(self, event: Event) -> str:
        """Map the EventType enum onto Reddit tracking-type strings."""
        translations = {
            EventType.PURCHASE: "Purchase",
            EventType.SIGN_UP: "SignUp",
            EventType.LEAD: "Lead",
            EventType.ADD_TO_CART: "AddToCart",
            EventType.PAGE_VIEW: "PageVisit",
            EventType.SEARCH: "Search",
            EventType.CUSTOM: event.name,
        }
        return translations.get(event.type, event.name)
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from typing import List, Any, Dict
|
|
2
|
+
from .base import BaseProvider
|
|
3
|
+
from ..core.events import Event, EventType
|
|
4
|
+
from ..utils.http import ResilientHttpClient
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class TikTokProvider(BaseProvider):
    """
    Sends events to the TikTok Events API.
    Docs: https://ads.tiktok.com/marketing_api/docs?id=1701890979375106
    """

    def __init__(self, pixel_code: str, access_token: str):
        """
        Args:
            pixel_code: TikTok pixel identifier included in every payload.
            access_token: Events API token, sent via the Access-Token header.
        """
        super().__init__(name="tiktok")
        self.pixel_code = pixel_code
        self.access_token = access_token
        self.http = ResilientHttpClient()
        self.url = "https://business-api.tiktok.com/open_api/v1.3/pixel/track"

    def send_batch(self, events: List[Event]) -> bool:
        """POST a batch of mapped events; True iff TikTok returned success."""
        payload = {
            "pixel_code": self.pixel_code,
            "data": [self._map_event(e) for e in events],
        }

        headers = {
            "Access-Token": self.access_token,
            "Content-Type": "application/json",
        }

        # Fixed: previously posted to the non-existent attribute `self.irl`,
        # which raised AttributeError on every batch and delivered nothing.
        response = self.http.post(self.url, json=payload, headers=headers)
        return response is not None and response.is_success

    def _map_event(self, event: Event) -> Dict[str, Any]:
        """Map an EventFlow event onto TikTok's Events API schema."""
        user_data = {}
        if event.user.email:
            user_data["email"] = event.user.email
        if event.user.phone:
            user_data["phone_number"] = event.user.phone
        if event.user.ip_address:
            user_data["ip"] = event.user.ip_address
        if event.user.user_agent:
            user_data["user_agent"] = event.user.user_agent
        if event.user.click_id:
            user_data["ttclid"] = event.user.click_id
        if event.user.user_id:
            user_data["external_id"] = event.user.user_id

        properties = event.properties.copy()

        # A monetary value without an explicit currency defaults to USD.
        if "value" in properties and "currency" not in properties:
            properties["currency"] = "USD"

        return {
            "event": self._get_tiktok_event_name(event),
            "event_time": int(event.timestamp.timestamp()),
            "user": user_data,
            "properties": properties,
            "event_id": event.event_id,  # used by TikTok for deduplication
        }

    def _get_tiktok_event_name(self, event: Event) -> str:
        """
        Converts standard EventType to TikTok Standard Events.

        Falls back to the raw event name for custom/unmapped types.
        """
        mapping = {
            EventType.PURCHASE: "CompletePayment",
            EventType.SIGN_UP: "CompleteRegistration",
            EventType.LEAD: "SubmitForm",
            EventType.ADD_TO_CART: "AddToCart",
            # NOTE(review): "PageItemView" does not appear in TikTok's
            # documented standard events — verify against the Events API docs
            # (likely "Pageview" or "ViewContent").
            EventType.PAGE_VIEW: "PageItemView",
            EventType.SEARCH: "Search",
        }
        return mapping.get(event.type, event.name)
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import httpx
|
|
2
|
+
import time
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Optional, Dict, Any
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger("eventflow.http")
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class ResilientHttpClient:
    """Thin wrapper around httpx.Client with bounded retries.

    Transient failures (HTTP 429 / 5xx and network-level errors) are retried
    with a linearly increasing back-off; non-retryable responses are returned
    immediately so callers can inspect them.
    """

    # Statuses worth retrying: rate limiting and transient server errors.
    RETRYABLE_STATUSES = frozenset({429, 500, 502, 503, 504})

    def __init__(self, timeout: float = 10.0, max_retries: int = 3):
        """
        Args:
            timeout: Per-request timeout in seconds.
            max_retries: Total number of attempts (not additional retries).
        """
        self.timeout = timeout
        self.max_retries = max_retries
        self.client = httpx.Client(timeout=timeout)

    def post(
        self, url: str, json: Any, headers: Optional[Dict[str, str]] = None
    ) -> Optional[httpx.Response]:
        """POST ``json`` to ``url``, retrying transient failures.

        Returns:
            The successful (or permanently failed) response, or None when
            every attempt hit a retryable error.
        """
        for attempt in range(self.max_retries):
            # Don't sleep after the final attempt — there is nothing left to retry.
            last_attempt = attempt == self.max_retries - 1
            try:
                response = self.client.post(url, json=json, headers=headers)
            except httpx.RequestError as exc:
                # Connection/timeout-level failure; pause briefly and retry.
                logger.error("Network error on attempt %d: %s", attempt + 1, exc)
                if not last_attempt:
                    time.sleep(1)
                continue

            if response.is_success:
                return response

            if response.status_code in self.RETRYABLE_STATUSES:
                wait = (attempt + 1) * 2  # linear back-off: 2s, 4s, 6s, ...
                logger.warning(
                    "API Error %d. Retrying in %ds...", response.status_code, wait
                )
                if not last_attempt:
                    time.sleep(wait)
                continue

            # Non-retryable error (e.g. 4xx): surface the response to the caller.
            logger.error(
                "Permanent API Error %d: %s", response.status_code, response.text
            )
            return response

        return None
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: eventflows
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A robust, non-blocking server-side event tracking library.
|
|
5
|
+
Author-email: Raghav <raghav@23v.co>
|
|
6
|
+
License: MIT
|
|
7
|
+
Requires-Python: >=3.10
|
|
8
|
+
Description-Content-Type: text/markdown
|
|
9
|
+
Requires-Dist: pydantic[email]>=2.12.5
|
|
10
|
+
Requires-Dist: httpx>=0.26.0
|
|
11
|
+
Provides-Extra: dev
|
|
12
|
+
Requires-Dist: pytest>=7.4.0; extra == "dev"
|
|
13
|
+
Requires-Dist: pytest-asyncio>=0.23.0; extra == "dev"
|
|
14
|
+
Requires-Dist: ruff>=0.1.9; extra == "dev"
|
|
15
|
+
Requires-Dist: mypy>=1.8.0; extra == "dev"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
---
|
|
20
|
+
|
|
21
|
+
# EventFlows
|
|
22
|
+
|
|
23
|
+
A lightweight, robust, and privacy-first server-side tracking library for Python.
|
|
24
|
+
|
|
25
|
+
EventFlows acts as a universal router for your conversion events. You define your tracking event once, and EventFlows automatically cleans it, hashes sensitive PII, batches it in a background thread, and safely dispatches it to multiple external APIs (Meta, GA4, Reddit, TikTok) without slowing down your main application.
|
|
26
|
+
|
|
27
|
+
---
|
|
28
|
+
|
|
29
|
+
## Key Features
|
|
30
|
+
|
|
31
|
+
* **Zero Latency:** Events are dropped into a thread-safe memory queue instantly. Your users never wait for a tracking pixel to load.
|
|
32
|
+
* **Privacy by Design:** Emails and phone numbers are automatically normalized and SHA-256 hashed before they ever leave your server.
|
|
33
|
+
* **Smart Payload Mapping:** Send custom data freely. EventFlows dynamically formats it for "Open" APIs (like Meta) and strictly filters it for "Rigid" APIs (like Reddit).
|
|
34
|
+
* **Destination Filtering:** Choose exactly which networks receive which events.
|
|
35
|
+
* **Resilient Networking:** Built-in exponential backoff and retry logic for network blips or rate limits.
|
|
36
|
+
|
|
37
|
+
---
|
|
38
|
+
|
|
39
|
+
## Installation
|
|
40
|
+
|
|
41
|
+
Currently, EventFlows is installed from source.
|
|
42
|
+
|
|
43
|
+
### Using Poetry
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
poetry add git+https://github.com/YOUR_USERNAME/EventFlows.git
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
### Using pip
|
|
50
|
+
|
|
51
|
+
```bash
|
|
52
|
+
pip install git+https://github.com/YOUR_USERNAME/EventFlows.git
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
---
|
|
56
|
+
|
|
57
|
+
## Quickstart
|
|
58
|
+
|
|
59
|
+
Register your providers once at the start of your application, then use `tracker.track` anywhere.
|
|
60
|
+
|
|
61
|
+
```python
|
|
62
|
+
import logging
|
|
63
|
+
from eventflow import tracker, EventType
|
|
64
|
+
from eventflow import ConsoleProvider, MetaProvider, RedditProvider, GA4Provider
|
|
65
|
+
|
|
66
|
+
# Setup logging to see EventFlows processing in the background
|
|
67
|
+
logging.basicConfig(level=logging.INFO)
|
|
68
|
+
|
|
69
|
+
# 1. Register your destinations (Providers)
|
|
70
|
+
tracker.add_provider(ConsoleProvider()) # Great for local debugging
|
|
71
|
+
tracker.add_provider(MetaProvider(pixel_id="123", access_token="abc"))
|
|
72
|
+
tracker.add_provider(GA4Provider(measurement_id="G-123", api_secret="abc"))
|
|
73
|
+
|
|
74
|
+
# 2. Track an event anywhere in your app
|
|
75
|
+
# This happens instantly. Hashing and API calls occur in a background thread.
|
|
76
|
+
tracker.track(
|
|
77
|
+
name="ProSubscription",
|
|
78
|
+
event_type=EventType.PURCHASE,
|
|
79
|
+
user_data={
|
|
80
|
+
"email": " User@Example.com ", # Automatically trimmed, lowercased, and SHA-256 hashed
|
|
81
|
+
"ip_address": "192.168.1.1"
|
|
82
|
+
},
|
|
83
|
+
properties={
|
|
84
|
+
"value": 49.99,
|
|
85
|
+
"currency": "USD",
|
|
86
|
+
"plan_type": "annual"
|
|
87
|
+
},
|
|
88
|
+
event_id="ORD-9999" # Used for server-side deduplication
|
|
89
|
+
)
|
|
90
|
+
|
|
91
|
+
# 3. Graceful Shutdown
|
|
92
|
+
# Essential for ensuring the memory queue is flushed before the process exits.
|
|
93
|
+
# Call this in your app's shutdown/cleanup hook.
|
|
94
|
+
tracker.shutdown()
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
---
|
|
98
|
+
|
|
99
|
+
## Why use `tracker.shutdown()`?
|
|
100
|
+
|
|
101
|
+
EventFlows uses an internal queue to batch events and keep your application fast. If your script or server exits abruptly without calling `shutdown()`, events still in the queue may not be delivered.
|
|
102
|
+
|
|
103
|
+
* **FastAPI/Starlette:** Use the `@app.on_event("shutdown")` decorator (or lifespan context manager).
|
|
104
|
+
* **Flask:** Use `atexit.register(tracker.shutdown)`.
|
|
105
|
+
* **Scripts:** Call it as the final line of your execution.
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
.gitignore
|
|
2
|
+
README.md
|
|
3
|
+
demo.py
|
|
4
|
+
pyproject.toml
|
|
5
|
+
src/eventflow/__init__.py
|
|
6
|
+
src/eventflow/client.py
|
|
7
|
+
src/eventflow/core/events.py
|
|
8
|
+
src/eventflow/core/queue.py
|
|
9
|
+
src/eventflow/core/worker.py
|
|
10
|
+
src/eventflow/providers/base.py
|
|
11
|
+
src/eventflow/providers/console.py
|
|
12
|
+
src/eventflow/providers/google.py
|
|
13
|
+
src/eventflow/providers/marsenai.py
|
|
14
|
+
src/eventflow/providers/meta.py
|
|
15
|
+
src/eventflow/providers/reddit.py
|
|
16
|
+
src/eventflow/providers/tiktok.py
|
|
17
|
+
src/eventflow/utils/http.py
|
|
18
|
+
src/eventflows.egg-info/PKG-INFO
|
|
19
|
+
src/eventflows.egg-info/SOURCES.txt
|
|
20
|
+
src/eventflows.egg-info/dependency_links.txt
|
|
21
|
+
src/eventflows.egg-info/requires.txt
|
|
22
|
+
src/eventflows.egg-info/top_level.txt
|
|
23
|
+
tests/__init__.py
|
|
24
|
+
tests/unit/test_events.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
eventflow
|
|
File without changes
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from pydantic import ValidationError
|
|
3
|
+
from eventflow.core.events import Event, UserData, EventType
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_user_data_hashing():
    """PII (email / phone) must be normalized and SHA-256 hashed on creation."""
    raw_email = " Test.User@Gmail.COM "  # messy input: whitespace + mixed case
    user = UserData(email=raw_email, phone="+1-555-0199")

    # The email must be replaced by a SHA-256 hex digest, never stored raw.
    # (Removed a dead, commented-out exact-hash assertion and its unused
    # expected-value variable; shape checks below actually run.)
    assert user.email is not None
    assert user.email != raw_email
    assert len(user.email) == 64  # SHA-256 hex digest is always 64 chars

    # The phone number gets the same treatment.
    assert user.phone is not None
    assert len(user.phone) == 64
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def test_event_immutability():
    """Events are frozen: mutating a field after creation must raise."""
    evt = Event(name="Login", type=EventType.CUSTOM)

    with pytest.raises(ValidationError):
        evt.name = "Hacked"  # frozen=True forbids assignment
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def test_invalid_event_type():
    """A type string outside the EventType enum is rejected by validation."""
    with pytest.raises(ValidationError):
        Event(name="Bad", type="INVALID_TYPE")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def test_extra_fields_forbidden():
    """Unknown constructor kwargs (typos) must raise immediately."""
    with pytest.raises(ValidationError):
        # 'revenue' is not an Event field — it belongs in `properties`.
        Event(name="Sale", revenue=100.0)
|