@daemux/store-automator 0.10.5 → 0.10.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +2 -2
- package/package.json +1 -1
- package/plugins/store-automator/.claude-plugin/plugin.json +1 -1
- package/templates/fastlane/android/Fastfile.template +24 -7
- package/templates/fastlane/android/Pluginfile.template +2 -4
- package/templates/fastlane/ios/Fastfile.template +3 -5
- package/templates/fastlane/ios/Pluginfile.template +2 -4
- package/templates/scripts/asc_iap_api.py +215 -0
- package/templates/scripts/check_google_play.py +1 -1
- package/templates/scripts/ci/android/sync-iap.sh +14 -10
- package/templates/scripts/ci/ios/sync-iap.sh +16 -20
- package/templates/scripts/gplay_iap_api.py +189 -0
- package/templates/scripts/sync_iap_android.py +219 -0
- package/templates/scripts/sync_iap_ios.py +163 -0
|
@@ -5,14 +5,14 @@
|
|
|
5
5
|
},
|
|
6
6
|
"metadata": {
|
|
7
7
|
"description": "App Store & Google Play automation for Flutter apps",
|
|
8
|
-
"version": "0.10.
|
|
8
|
+
"version": "0.10.7"
|
|
9
9
|
},
|
|
10
10
|
"plugins": [
|
|
11
11
|
{
|
|
12
12
|
"name": "store-automator",
|
|
13
13
|
"source": "./plugins/store-automator",
|
|
14
14
|
"description": "3 agents for app store publishing: reviewer, meta-creator, media-designer",
|
|
15
|
-
"version": "0.10.
|
|
15
|
+
"version": "0.10.7",
|
|
16
16
|
"keywords": [
|
|
17
17
|
"flutter",
|
|
18
18
|
"app-store",
|
package/package.json
CHANGED
|
@@ -7,6 +7,23 @@ APP_DIR = ENV.fetch("APP_ROOT", "#{ROOT_DIR}/app")
|
|
|
7
7
|
APP_DIR = "#{ROOT_DIR}/#{APP_DIR}" unless APP_DIR.start_with?("/")
|
|
8
8
|
|
|
9
9
|
AAB_PATH = "#{APP_DIR}/build/app/outputs/bundle/release/app-release.aab"
|
|
10
|
+
METADATA_DIR = "#{ROOT_DIR}/fastlane/metadata/android"
|
|
11
|
+
SCREENSHOT_DIR = "#{ROOT_DIR}/fastlane/screenshots/android"
|
|
12
|
+
|
|
13
|
+
# Fastlane supply expects screenshots at {metadata_path}/{locale}/images/.
|
|
14
|
+
# Our screenshots live in a separate directory (fastlane/screenshots/android/{locale}/).
|
|
15
|
+
# Bridge the gap by symlinking each locale's screenshot folder into the metadata tree.
|
|
16
|
+
def link_screenshots_into_metadata
|
|
17
|
+
return unless File.directory?(SCREENSHOT_DIR)
|
|
18
|
+
Dir.glob("#{SCREENSHOT_DIR}/*/").each do |locale_dir|
|
|
19
|
+
locale = File.basename(locale_dir)
|
|
20
|
+
images_link = "#{METADATA_DIR}/#{locale}/images"
|
|
21
|
+
next if File.exist?(images_link)
|
|
22
|
+
FileUtils.mkdir_p("#{METADATA_DIR}/#{locale}")
|
|
23
|
+
FileUtils.ln_s(locale_dir.chomp("/"), images_link)
|
|
24
|
+
Fastlane::UI.message("Linked screenshots: #{images_link} -> #{locale_dir.chomp('/')}")
|
|
25
|
+
end
|
|
26
|
+
end
|
|
10
27
|
|
|
11
28
|
def metadata_changed?(path)
|
|
12
29
|
!sh("git diff --name-only HEAD~1 -- #{path}").strip.empty?
|
|
@@ -32,6 +49,7 @@ end
|
|
|
32
49
|
|
|
33
50
|
platform :android do
|
|
34
51
|
lane :deploy_android do
|
|
52
|
+
link_screenshots_into_metadata
|
|
35
53
|
status = ENV.fetch("RELEASE_STATUS", "draft")
|
|
36
54
|
opts = base_play_store_options.merge(
|
|
37
55
|
aab: AAB_PATH,
|
|
@@ -40,7 +58,7 @@ platform :android do
|
|
|
40
58
|
skip_upload_screenshots: !metadata_changed?("fastlane/screenshots/android/"),
|
|
41
59
|
skip_upload_images: !metadata_changed?("fastlane/screenshots/android/"),
|
|
42
60
|
skip_upload_changelogs: false,
|
|
43
|
-
metadata_path:
|
|
61
|
+
metadata_path: METADATA_DIR
|
|
44
62
|
)
|
|
45
63
|
opts.merge!(rollout_options) unless status == "draft"
|
|
46
64
|
upload_to_play_store(opts)
|
|
@@ -63,6 +81,7 @@ platform :android do
|
|
|
63
81
|
# this keeps working after the first production release.
|
|
64
82
|
status = ENV.fetch("RELEASE_STATUS", "draft")
|
|
65
83
|
|
|
84
|
+
link_screenshots_into_metadata
|
|
66
85
|
upload_to_play_store(
|
|
67
86
|
base_play_store_options.merge(
|
|
68
87
|
version_code: latest_code,
|
|
@@ -73,7 +92,7 @@ platform :android do
|
|
|
73
92
|
skip_upload_screenshots: false,
|
|
74
93
|
skip_upload_images: false,
|
|
75
94
|
skip_upload_changelogs: true,
|
|
76
|
-
metadata_path:
|
|
95
|
+
metadata_path: METADATA_DIR
|
|
77
96
|
)
|
|
78
97
|
)
|
|
79
98
|
end
|
|
@@ -94,11 +113,9 @@ platform :android do
|
|
|
94
113
|
end
|
|
95
114
|
|
|
96
115
|
lane :sync_google_iap do
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
config_path: "#{ROOT_DIR}/fastlane/iap_config.json"
|
|
101
|
-
)
|
|
116
|
+
config_path = "#{ROOT_DIR}/fastlane/iap_config.json"
|
|
117
|
+
script = "#{ROOT_DIR}/scripts/sync_iap_android.py"
|
|
118
|
+
sh("python3", script, config_path) if File.exist?(config_path) && File.exist?(script)
|
|
102
119
|
end
|
|
103
120
|
|
|
104
121
|
lane :update_data_safety do
|
|
@@ -1,4 +1,2 @@
|
|
|
1
|
-
#
|
|
2
|
-
#
|
|
3
|
-
# Uncomment when the plugin is available:
|
|
4
|
-
# gem "fastlane-plugin-iap", git: "https://github.com/daemux/fastlane-plugin-iap"
|
|
1
|
+
# IAP sync is handled by scripts/sync_iap_android.py (direct Google Play API).
|
|
2
|
+
# No fastlane plugins required for IAP management.
|
|
@@ -110,10 +110,8 @@ platform :ios do
|
|
|
110
110
|
end
|
|
111
111
|
|
|
112
112
|
lane :sync_iap do
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
config_path: "#{ROOT_DIR}/fastlane/iap_config.json"
|
|
117
|
-
)
|
|
113
|
+
config_path = "#{ROOT_DIR}/fastlane/iap_config.json"
|
|
114
|
+
script = "#{ROOT_DIR}/scripts/sync_iap_ios.py"
|
|
115
|
+
sh("python3", script, config_path) if File.exist?(config_path) && File.exist?(script)
|
|
118
116
|
end
|
|
119
117
|
end
|
|
@@ -1,4 +1,2 @@
|
|
|
1
|
-
#
|
|
2
|
-
#
|
|
3
|
-
# Uncomment when the plugin is available:
|
|
4
|
-
# gem "fastlane-plugin-iap", git: "https://github.com/daemux/fastlane-plugin-iap"
|
|
1
|
+
# IAP sync is handled by scripts/sync_iap_ios.py (direct App Store Connect API).
|
|
2
|
+
# No fastlane plugins required for IAP management.
|
|
@@ -0,0 +1,215 @@
|
|
|
1
|
+
"""
|
|
2
|
+
App Store Connect IAP API layer.
|
|
3
|
+
|
|
4
|
+
Low-level functions for interacting with the App Store Connect REST API
|
|
5
|
+
for subscription groups and subscriptions.
|
|
6
|
+
"""
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
|
|
10
|
+
try:
|
|
11
|
+
import jwt
|
|
12
|
+
import requests
|
|
13
|
+
except ImportError:
|
|
14
|
+
import subprocess
|
|
15
|
+
subprocess.check_call(
|
|
16
|
+
[sys.executable, "-m", "pip", "install", "--break-system-packages", "PyJWT", "cryptography", "requests"],
|
|
17
|
+
stdout=subprocess.DEVNULL,
|
|
18
|
+
)
|
|
19
|
+
import jwt
|
|
20
|
+
import requests
|
|
21
|
+
|
|
22
|
+
BASE_URL = "https://api.appstoreconnect.apple.com/v1"
|
|
23
|
+
TIMEOUT = (10, 30)
|
|
24
|
+
|
|
25
|
+
# ISO 8601 duration to App Store Connect subscription period mapping
|
|
26
|
+
DURATION_MAP = {
|
|
27
|
+
"P1W": "ONE_WEEK",
|
|
28
|
+
"P1M": "ONE_MONTH",
|
|
29
|
+
"P2M": "TWO_MONTHS",
|
|
30
|
+
"P3M": "THREE_MONTHS",
|
|
31
|
+
"P6M": "SIX_MONTHS",
|
|
32
|
+
"P1Y": "ONE_YEAR",
|
|
33
|
+
"ONE_WEEK": "ONE_WEEK",
|
|
34
|
+
"ONE_MONTH": "ONE_MONTH",
|
|
35
|
+
"TWO_MONTHS": "TWO_MONTHS",
|
|
36
|
+
"THREE_MONTHS": "THREE_MONTHS",
|
|
37
|
+
"SIX_MONTHS": "SIX_MONTHS",
|
|
38
|
+
"ONE_YEAR": "ONE_YEAR",
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def get_jwt_token(key_id: str, issuer_id: str, private_key: str) -> str:
|
|
43
|
+
"""Generate a signed JWT for App Store Connect API authentication."""
|
|
44
|
+
payload = {
|
|
45
|
+
"iss": issuer_id,
|
|
46
|
+
"iat": int(time.time()),
|
|
47
|
+
"exp": int(time.time()) + 1200,
|
|
48
|
+
"aud": "appstoreconnect-v1",
|
|
49
|
+
}
|
|
50
|
+
return jwt.encode(payload, private_key, algorithm="ES256", headers={"kid": key_id})
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def get_app_id(headers: dict, bundle_id: str) -> str:
|
|
54
|
+
"""Look up the App Store Connect app ID for the given bundle identifier."""
|
|
55
|
+
resp = requests.get(
|
|
56
|
+
f"{BASE_URL}/apps",
|
|
57
|
+
params={"filter[bundleId]": bundle_id},
|
|
58
|
+
headers=headers,
|
|
59
|
+
timeout=TIMEOUT,
|
|
60
|
+
)
|
|
61
|
+
resp.raise_for_status()
|
|
62
|
+
data = resp.json().get("data", [])
|
|
63
|
+
if not data:
|
|
64
|
+
print(f"ERROR: No app found for bundle ID '{bundle_id}'", file=sys.stderr)
|
|
65
|
+
sys.exit(1)
|
|
66
|
+
return data[0]["id"]
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def list_subscription_groups(headers: dict, app_id: str) -> list:
|
|
70
|
+
"""List all existing subscription groups for the app."""
|
|
71
|
+
resp = requests.get(
|
|
72
|
+
f"{BASE_URL}/apps/{app_id}/subscriptionGroups",
|
|
73
|
+
headers=headers,
|
|
74
|
+
timeout=TIMEOUT,
|
|
75
|
+
)
|
|
76
|
+
resp.raise_for_status()
|
|
77
|
+
return resp.json().get("data", [])
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def create_subscription_group(headers: dict, app_id: str, reference_name: str) -> str:
|
|
81
|
+
"""Create a subscription group and return its ID."""
|
|
82
|
+
resp = requests.post(
|
|
83
|
+
f"{BASE_URL}/subscriptionGroups",
|
|
84
|
+
json={
|
|
85
|
+
"data": {
|
|
86
|
+
"type": "subscriptionGroups",
|
|
87
|
+
"attributes": {"referenceName": reference_name},
|
|
88
|
+
"relationships": {
|
|
89
|
+
"app": {"data": {"type": "apps", "id": app_id}}
|
|
90
|
+
},
|
|
91
|
+
}
|
|
92
|
+
},
|
|
93
|
+
headers=headers,
|
|
94
|
+
timeout=TIMEOUT,
|
|
95
|
+
)
|
|
96
|
+
if not resp.ok:
|
|
97
|
+
print_api_errors(resp, f"create subscription group '{reference_name}'")
|
|
98
|
+
sys.exit(1)
|
|
99
|
+
group_id = resp.json()["data"]["id"]
|
|
100
|
+
print(f" Created subscription group '{reference_name}' (ID: {group_id})")
|
|
101
|
+
return group_id
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def list_subscriptions_in_group(headers: dict, group_id: str) -> list:
|
|
105
|
+
"""List all subscriptions within a subscription group."""
|
|
106
|
+
resp = requests.get(
|
|
107
|
+
f"{BASE_URL}/subscriptionGroups/{group_id}/subscriptions",
|
|
108
|
+
headers=headers,
|
|
109
|
+
timeout=TIMEOUT,
|
|
110
|
+
)
|
|
111
|
+
resp.raise_for_status()
|
|
112
|
+
return resp.json().get("data", [])
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def create_subscription(headers: dict, group_id: str, sub_config: dict) -> str:
|
|
116
|
+
"""Create a subscription within a group and return its ID."""
|
|
117
|
+
duration = DURATION_MAP.get(sub_config["duration"], sub_config["duration"])
|
|
118
|
+
resp = requests.post(
|
|
119
|
+
f"{BASE_URL}/subscriptions",
|
|
120
|
+
json={
|
|
121
|
+
"data": {
|
|
122
|
+
"type": "subscriptions",
|
|
123
|
+
"attributes": {
|
|
124
|
+
"productId": sub_config["product_id"],
|
|
125
|
+
"name": sub_config["reference_name"],
|
|
126
|
+
"subscriptionPeriod": duration,
|
|
127
|
+
"groupLevel": sub_config.get("group_level", 1),
|
|
128
|
+
"familySharable": sub_config.get("family_sharable", False),
|
|
129
|
+
"reviewNote": sub_config.get("review_note", ""),
|
|
130
|
+
},
|
|
131
|
+
"relationships": {
|
|
132
|
+
"group": {"data": {"type": "subscriptionGroups", "id": group_id}}
|
|
133
|
+
},
|
|
134
|
+
}
|
|
135
|
+
},
|
|
136
|
+
headers=headers,
|
|
137
|
+
timeout=TIMEOUT,
|
|
138
|
+
)
|
|
139
|
+
if not resp.ok:
|
|
140
|
+
print_api_errors(resp, f"create subscription '{sub_config['product_id']}'")
|
|
141
|
+
sys.exit(1)
|
|
142
|
+
sub_id = resp.json()["data"]["id"]
|
|
143
|
+
print(f" Created subscription '{sub_config['product_id']}' (ID: {sub_id})")
|
|
144
|
+
return sub_id
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def get_subscription_localizations(headers: dict, sub_id: str) -> list:
|
|
148
|
+
"""Fetch existing localizations for a subscription."""
|
|
149
|
+
resp = requests.get(
|
|
150
|
+
f"{BASE_URL}/subscriptions/{sub_id}/subscriptionLocalizations",
|
|
151
|
+
headers=headers,
|
|
152
|
+
timeout=TIMEOUT,
|
|
153
|
+
)
|
|
154
|
+
resp.raise_for_status()
|
|
155
|
+
return resp.json().get("data", [])
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def create_localization(headers: dict, sub_id: str, locale: str, loc_data: dict) -> None:
|
|
159
|
+
"""Create a new localization for a subscription."""
|
|
160
|
+
resp = requests.post(
|
|
161
|
+
f"{BASE_URL}/subscriptionLocalizations",
|
|
162
|
+
json={
|
|
163
|
+
"data": {
|
|
164
|
+
"type": "subscriptionLocalizations",
|
|
165
|
+
"attributes": {
|
|
166
|
+
"locale": locale,
|
|
167
|
+
"name": loc_data.get("name", ""),
|
|
168
|
+
"description": loc_data.get("description", ""),
|
|
169
|
+
},
|
|
170
|
+
"relationships": {
|
|
171
|
+
"subscription": {"data": {"type": "subscriptions", "id": sub_id}}
|
|
172
|
+
},
|
|
173
|
+
}
|
|
174
|
+
},
|
|
175
|
+
headers=headers,
|
|
176
|
+
timeout=TIMEOUT,
|
|
177
|
+
)
|
|
178
|
+
if not resp.ok:
|
|
179
|
+
print_api_errors(resp, f"create localization '{locale}' for subscription {sub_id}")
|
|
180
|
+
return
|
|
181
|
+
print(f" Created localization '{locale}'")
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def update_localization(headers: dict, loc_id: str, loc_data: dict) -> None:
|
|
185
|
+
"""Update an existing subscription localization."""
|
|
186
|
+
resp = requests.patch(
|
|
187
|
+
f"{BASE_URL}/subscriptionLocalizations/{loc_id}",
|
|
188
|
+
json={
|
|
189
|
+
"data": {
|
|
190
|
+
"type": "subscriptionLocalizations",
|
|
191
|
+
"id": loc_id,
|
|
192
|
+
"attributes": {
|
|
193
|
+
"name": loc_data.get("name", ""),
|
|
194
|
+
"description": loc_data.get("description", ""),
|
|
195
|
+
},
|
|
196
|
+
}
|
|
197
|
+
},
|
|
198
|
+
headers=headers,
|
|
199
|
+
timeout=TIMEOUT,
|
|
200
|
+
)
|
|
201
|
+
if not resp.ok:
|
|
202
|
+
print_api_errors(resp, f"update localization {loc_id}")
|
|
203
|
+
return
|
|
204
|
+
print(f" Updated localization (ID: {loc_id})")
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def print_api_errors(resp, action: str) -> None:
|
|
208
|
+
"""Print human-readable API error messages."""
|
|
209
|
+
try:
|
|
210
|
+
errors = resp.json().get("errors", [])
|
|
211
|
+
for err in errors:
|
|
212
|
+
detail = err.get("detail", err.get("title", "Unknown error"))
|
|
213
|
+
print(f"ERROR ({action}): {detail}", file=sys.stderr)
|
|
214
|
+
except (ValueError, KeyError):
|
|
215
|
+
print(f"ERROR ({action}): HTTP {resp.status_code} - {resp.text[:200]}", file=sys.stderr)
|
|
@@ -51,7 +51,7 @@ def get_access_token(sa_path: str) -> str:
|
|
|
51
51
|
resp = requests.post(
|
|
52
52
|
"https://oauth2.googleapis.com/token",
|
|
53
53
|
data={
|
|
54
|
-
"grant_type": "urn:ietf:params:oauth:
|
|
54
|
+
"grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer",
|
|
55
55
|
"assertion": signed,
|
|
56
56
|
},
|
|
57
57
|
timeout=TIMEOUT,
|
|
@@ -1,11 +1,14 @@
|
|
|
1
1
|
#!/usr/bin/env bash
|
|
2
|
-
#
|
|
2
|
+
# Syncs Android IAPs to Google Play via direct API calls (Python script).
|
|
3
|
+
# Requires read-config.sh to have been sourced (provides PROJECT_ROOT, credentials, etc.).
|
|
3
4
|
set -euo pipefail
|
|
4
5
|
|
|
5
6
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
6
7
|
source "$SCRIPT_DIR/../common/read-config.sh"
|
|
7
8
|
source "$SCRIPT_DIR/../common/ci-notify.sh"
|
|
8
9
|
|
|
10
|
+
echo "=== Android IAP Sync ==="
|
|
11
|
+
|
|
9
12
|
# --- Check Google Play readiness ---
|
|
10
13
|
if [ "${GOOGLE_PLAY_READY:-false}" != "true" ]; then
|
|
11
14
|
echo "ERROR: Google Play not ready. Cannot sync IAPs." >&2
|
|
@@ -18,12 +21,6 @@ if [ ! -f "$IAP_CONFIG" ]; then
|
|
|
18
21
|
ci_skip "No Android IAP config file found"
|
|
19
22
|
fi
|
|
20
23
|
|
|
21
|
-
# --- Check if IAP plugin is available ---
|
|
22
|
-
cd "$APP_ROOT/android"
|
|
23
|
-
if ! bundle exec gem list fastlane-plugin-iap --installed >/dev/null 2>&1; then
|
|
24
|
-
ci_skip "fastlane-plugin-iap not installed"
|
|
25
|
-
fi
|
|
26
|
-
|
|
27
24
|
# --- Hash-based change detection ---
|
|
28
25
|
STATE_DIR="$PROJECT_ROOT/.ci-state"
|
|
29
26
|
mkdir -p "$STATE_DIR"
|
|
@@ -47,12 +44,19 @@ if [ ! -f "$SA_FULL_PATH" ]; then
|
|
|
47
44
|
exit 1
|
|
48
45
|
fi
|
|
49
46
|
|
|
50
|
-
# ---
|
|
47
|
+
# --- Run IAP sync via Python ---
|
|
48
|
+
SYNC_SCRIPT="$PROJECT_ROOT/scripts/sync_iap_android.py"
|
|
49
|
+
|
|
50
|
+
if [ ! -f "$SYNC_SCRIPT" ]; then
|
|
51
|
+
echo "ERROR: sync_iap_android.py not found at $SYNC_SCRIPT" >&2
|
|
52
|
+
exit 1
|
|
53
|
+
fi
|
|
54
|
+
|
|
51
55
|
echo "Syncing Android IAP configuration..."
|
|
52
56
|
|
|
57
|
+
SA_JSON="$SA_FULL_PATH" \
|
|
53
58
|
PACKAGE_NAME="$PACKAGE_NAME" \
|
|
54
|
-
|
|
55
|
-
bundle exec fastlane sync_google_iap
|
|
59
|
+
python3 "$SYNC_SCRIPT" "$IAP_CONFIG"
|
|
56
60
|
|
|
57
61
|
# --- Update hash on success ---
|
|
58
62
|
echo "$HASH" > "$STATE_FILE"
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env bash
|
|
2
|
-
#
|
|
2
|
+
# Syncs iOS IAPs to App Store Connect via direct API calls (Python script).
|
|
3
|
+
# Requires read-config.sh to have been sourced (provides PROJECT_ROOT, credentials, etc.).
|
|
3
4
|
set -euo pipefail
|
|
4
5
|
|
|
5
6
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
@@ -15,15 +16,6 @@ if [ ! -f "$IAP_CONFIG" ]; then
|
|
|
15
16
|
ci_skip "No iOS IAP config file found"
|
|
16
17
|
fi
|
|
17
18
|
|
|
18
|
-
# --- Check if IAP plugin is available ---
|
|
19
|
-
cd "$APP_ROOT/ios"
|
|
20
|
-
|
|
21
|
-
if ! bundle exec gem list fastlane-plugin-iap --installed >/dev/null 2>&1; then
|
|
22
|
-
ci_skip "fastlane-plugin-iap not installed"
|
|
23
|
-
fi
|
|
24
|
-
|
|
25
|
-
echo "fastlane-plugin-iap is installed. Proceeding with sync."
|
|
26
|
-
|
|
27
19
|
# --- Hash-based change detection ---
|
|
28
20
|
CURRENT_HASH=$(shasum -a 256 "$IAP_CONFIG" | cut -d' ' -f1)
|
|
29
21
|
|
|
@@ -41,26 +33,30 @@ else
|
|
|
41
33
|
echo "No cached hash found. First run — will sync IAPs."
|
|
42
34
|
fi
|
|
43
35
|
|
|
44
|
-
# --- Set up App Store Connect API
|
|
36
|
+
# --- Set up App Store Connect API credentials ---
|
|
45
37
|
P8_FULL_PATH="$PROJECT_ROOT/$P8_KEY_PATH"
|
|
46
38
|
if [ ! -f "$P8_FULL_PATH" ]; then
|
|
47
39
|
echo "ERROR: P8 key file not found at $P8_FULL_PATH" >&2
|
|
48
40
|
exit 1
|
|
49
41
|
fi
|
|
50
42
|
|
|
51
|
-
export
|
|
52
|
-
export
|
|
53
|
-
export
|
|
54
|
-
export
|
|
43
|
+
export APP_STORE_CONNECT_KEY_IDENTIFIER="$APPLE_KEY_ID"
|
|
44
|
+
export APP_STORE_CONNECT_ISSUER_ID="$APPLE_ISSUER_ID"
|
|
45
|
+
export APP_STORE_CONNECT_PRIVATE_KEY="$(cat "$P8_FULL_PATH")"
|
|
46
|
+
export BUNDLE_ID="$BUNDLE_ID"
|
|
55
47
|
|
|
56
48
|
echo "ASC API key configured (Key ID: $APPLE_KEY_ID)"
|
|
57
49
|
|
|
58
|
-
# --- Run IAP sync ---
|
|
59
|
-
|
|
50
|
+
# --- Run IAP sync via Python ---
|
|
51
|
+
SYNC_SCRIPT="$PROJECT_ROOT/scripts/sync_iap_ios.py"
|
|
60
52
|
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
53
|
+
if [ ! -f "$SYNC_SCRIPT" ]; then
|
|
54
|
+
echo "ERROR: sync_iap_ios.py not found at $SYNC_SCRIPT" >&2
|
|
55
|
+
exit 1
|
|
56
|
+
fi
|
|
57
|
+
|
|
58
|
+
echo "Syncing IAPs to App Store Connect..."
|
|
59
|
+
python3 "$SYNC_SCRIPT" "$IAP_CONFIG"
|
|
64
60
|
|
|
65
61
|
# --- Update hash on success ---
|
|
66
62
|
echo "$CURRENT_HASH" > "$STATE_FILE"
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Google Play IAP API layer.
|
|
3
|
+
|
|
4
|
+
Low-level functions for interacting with the Android Publisher API
|
|
5
|
+
for subscriptions, base plans, and offers.
|
|
6
|
+
"""
|
|
7
|
+
import json
|
|
8
|
+
import sys
|
|
9
|
+
import time
|
|
10
|
+
|
|
11
|
+
try:
|
|
12
|
+
import jwt
|
|
13
|
+
import requests
|
|
14
|
+
except ImportError:
|
|
15
|
+
import subprocess
|
|
16
|
+
subprocess.check_call(
|
|
17
|
+
[sys.executable, "-m", "pip", "install", "--break-system-packages", "PyJWT", "cryptography", "requests"],
|
|
18
|
+
stdout=subprocess.DEVNULL,
|
|
19
|
+
)
|
|
20
|
+
import jwt
|
|
21
|
+
import requests
|
|
22
|
+
|
|
23
|
+
API_BASE = "https://androidpublisher.googleapis.com/androidpublisher/v3/applications"
|
|
24
|
+
TIMEOUT = (10, 30)
|
|
25
|
+
|
|
26
|
+
# ISO 8601 duration mapping (normalize to ISO 8601 for Google Play)
|
|
27
|
+
DURATION_MAP = {
|
|
28
|
+
"ONE_WEEK": "P1W",
|
|
29
|
+
"ONE_MONTH": "P1M",
|
|
30
|
+
"TWO_MONTHS": "P2M",
|
|
31
|
+
"THREE_MONTHS": "P3M",
|
|
32
|
+
"SIX_MONTHS": "P6M",
|
|
33
|
+
"ONE_YEAR": "P1Y",
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
# Regions version required by the API
|
|
37
|
+
REGIONS_VERSION = {"version": "2022/02"}
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def get_access_token(sa_path: str) -> str:
|
|
41
|
+
"""Obtain an OAuth2 access token using the service account credentials."""
|
|
42
|
+
with open(sa_path, "r", encoding="utf-8") as fh:
|
|
43
|
+
sa = json.load(fh)
|
|
44
|
+
now = int(time.time())
|
|
45
|
+
payload = {
|
|
46
|
+
"iss": sa["client_email"],
|
|
47
|
+
"scope": "https://www.googleapis.com/auth/androidpublisher",
|
|
48
|
+
"aud": "https://oauth2.googleapis.com/token",
|
|
49
|
+
"iat": now,
|
|
50
|
+
"exp": now + 3600,
|
|
51
|
+
}
|
|
52
|
+
signed = jwt.encode(payload, sa["private_key"], algorithm="RS256")
|
|
53
|
+
resp = requests.post(
|
|
54
|
+
"https://oauth2.googleapis.com/token",
|
|
55
|
+
data={"grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", "assertion": signed},
|
|
56
|
+
timeout=TIMEOUT,
|
|
57
|
+
)
|
|
58
|
+
resp.raise_for_status()
|
|
59
|
+
return resp.json()["access_token"]
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def list_subscriptions(headers: dict, package_name: str) -> dict:
|
|
63
|
+
"""List all existing subscriptions. Returns a dict keyed by productId."""
|
|
64
|
+
resp = requests.get(
|
|
65
|
+
f"{API_BASE}/{package_name}/subscriptions",
|
|
66
|
+
headers=headers,
|
|
67
|
+
timeout=TIMEOUT,
|
|
68
|
+
)
|
|
69
|
+
if resp.status_code == 404:
|
|
70
|
+
return {}
|
|
71
|
+
resp.raise_for_status()
|
|
72
|
+
if not resp.text.strip():
|
|
73
|
+
return {}
|
|
74
|
+
subs = resp.json().get("subscriptions", [])
|
|
75
|
+
return {s["productId"]: s for s in subs}
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def normalize_duration(duration: str) -> str:
|
|
79
|
+
"""Normalize duration to ISO 8601 format accepted by Google Play."""
|
|
80
|
+
return DURATION_MAP.get(duration, duration)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def build_price(price_str: str, currency: str = "USD") -> dict:
|
|
84
|
+
"""Build a Money object from a decimal price string like '9.99'."""
|
|
85
|
+
parts = price_str.split(".")
|
|
86
|
+
units = parts[0]
|
|
87
|
+
nanos = 0
|
|
88
|
+
if len(parts) > 1:
|
|
89
|
+
frac = parts[1].ljust(9, "0")[:9]
|
|
90
|
+
nanos = int(frac)
|
|
91
|
+
return {"currencyCode": currency, "units": units, "nanos": nanos}
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def currency_to_region(currency: str) -> str:
|
|
95
|
+
"""Map common currency codes to region codes."""
|
|
96
|
+
mapping = {
|
|
97
|
+
"USD": "US",
|
|
98
|
+
"EUR": "DE",
|
|
99
|
+
"GBP": "GB",
|
|
100
|
+
"JPY": "JP",
|
|
101
|
+
"CAD": "CA",
|
|
102
|
+
"AUD": "AU",
|
|
103
|
+
}
|
|
104
|
+
return mapping.get(currency, "")
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def create_subscription(headers: dict, package_name: str, product_id: str, body: dict) -> dict:
|
|
108
|
+
"""Create a new subscription via the API."""
|
|
109
|
+
resp = requests.post(
|
|
110
|
+
f"{API_BASE}/{package_name}/subscriptions",
|
|
111
|
+
params={"productId": product_id, "regionsVersion.version": REGIONS_VERSION["version"]},
|
|
112
|
+
json=body,
|
|
113
|
+
headers=headers,
|
|
114
|
+
timeout=TIMEOUT,
|
|
115
|
+
)
|
|
116
|
+
if not resp.ok:
|
|
117
|
+
print_api_error(resp, f"create subscription '{product_id}'")
|
|
118
|
+
return {}
|
|
119
|
+
|
|
120
|
+
print(f" Created subscription '{product_id}'")
|
|
121
|
+
return resp.json()
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def update_subscription(headers: dict, package_name: str, product_id: str, body: dict) -> dict:
|
|
125
|
+
"""Update an existing subscription via the API."""
|
|
126
|
+
resp = requests.patch(
|
|
127
|
+
f"{API_BASE}/{package_name}/subscriptions/{product_id}",
|
|
128
|
+
params={
|
|
129
|
+
"updateMask": "listings",
|
|
130
|
+
"regionsVersion.version": REGIONS_VERSION["version"],
|
|
131
|
+
},
|
|
132
|
+
json=body,
|
|
133
|
+
headers=headers,
|
|
134
|
+
timeout=TIMEOUT,
|
|
135
|
+
)
|
|
136
|
+
if not resp.ok:
|
|
137
|
+
print_api_error(resp, f"update subscription '{product_id}'")
|
|
138
|
+
return {}
|
|
139
|
+
|
|
140
|
+
print(f" Updated subscription '{product_id}'")
|
|
141
|
+
return resp.json()
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def activate_base_plan(headers: dict, package_name: str, product_id: str, base_plan_id: str) -> bool:
|
|
145
|
+
"""Activate a base plan for a subscription."""
|
|
146
|
+
resp = requests.post(
|
|
147
|
+
f"{API_BASE}/{package_name}/subscriptions/{product_id}/basePlans/{base_plan_id}:activate",
|
|
148
|
+
headers=headers,
|
|
149
|
+
json={},
|
|
150
|
+
timeout=TIMEOUT,
|
|
151
|
+
)
|
|
152
|
+
if not resp.ok:
|
|
153
|
+
print_api_error(resp, f"activate base plan '{base_plan_id}'")
|
|
154
|
+
return False
|
|
155
|
+
print(f" Activated base plan '{base_plan_id}'")
|
|
156
|
+
return True
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def create_intro_offer(
|
|
160
|
+
headers: dict, package_name: str, product_id: str, base_plan_id: str, offer_id: str, body: dict
|
|
161
|
+
) -> bool:
|
|
162
|
+
"""Create an introductory offer (free trial) for a base plan."""
|
|
163
|
+
resp = requests.post(
|
|
164
|
+
f"{API_BASE}/{package_name}/subscriptions/{product_id}"
|
|
165
|
+
f"/basePlans/{base_plan_id}/offers",
|
|
166
|
+
params={"offerId": offer_id, "regionsVersion.version": REGIONS_VERSION["version"]},
|
|
167
|
+
json=body,
|
|
168
|
+
headers=headers,
|
|
169
|
+
timeout=TIMEOUT,
|
|
170
|
+
)
|
|
171
|
+
if resp.status_code == 409:
|
|
172
|
+
print(f" Intro offer '{offer_id}' already exists")
|
|
173
|
+
return True
|
|
174
|
+
if not resp.ok:
|
|
175
|
+
print_api_error(resp, f"create intro offer for '{product_id}'")
|
|
176
|
+
return False
|
|
177
|
+
|
|
178
|
+
print(f" Created intro offer '{offer_id}'")
|
|
179
|
+
return True
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def print_api_error(resp, action: str) -> None:
|
|
183
|
+
"""Print human-readable API error messages."""
|
|
184
|
+
try:
|
|
185
|
+
error_data = resp.json()
|
|
186
|
+
message = error_data.get("error", {}).get("message", resp.text[:200])
|
|
187
|
+
print(f"ERROR ({action}): {message}", file=sys.stderr)
|
|
188
|
+
except (ValueError, KeyError):
|
|
189
|
+
print(f"ERROR ({action}): HTTP {resp.status_code} - {resp.text[:200]}", file=sys.stderr)
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Sync Android In-App Purchases to Google Play via the Android Publisher API.
|
|
4
|
+
|
|
5
|
+
Reads iap_config.json and creates/updates subscriptions with base plans and offers.
|
|
6
|
+
Idempotent: safe to run repeatedly -- existing resources are skipped or updated.
|
|
7
|
+
|
|
8
|
+
Required env vars:
|
|
9
|
+
SA_JSON - Path to Google service account JSON file
|
|
10
|
+
PACKAGE_NAME - Android package name (e.g. com.example.app)
|
|
11
|
+
|
|
12
|
+
Usage:
|
|
13
|
+
python3 sync_iap_android.py <path/to/iap_config.json>
|
|
14
|
+
"""
|
|
15
|
+
import json
|
|
16
|
+
import os
|
|
17
|
+
import sys
|
|
18
|
+
|
|
19
|
+
from gplay_iap_api import (
|
|
20
|
+
activate_base_plan,
|
|
21
|
+
build_price,
|
|
22
|
+
create_intro_offer,
|
|
23
|
+
create_subscription,
|
|
24
|
+
currency_to_region,
|
|
25
|
+
get_access_token,
|
|
26
|
+
list_subscriptions,
|
|
27
|
+
normalize_duration,
|
|
28
|
+
update_subscription,
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _build_base_plan(sub_config: dict) -> dict:
|
|
33
|
+
"""Build a base plan object from subscription config."""
|
|
34
|
+
duration = normalize_duration(sub_config["duration"])
|
|
35
|
+
prices = sub_config.get("prices", {})
|
|
36
|
+
usd_price = prices.get("USD", sub_config.get("price_tier", "0"))
|
|
37
|
+
eur_price = prices.get("EUR", usd_price)
|
|
38
|
+
|
|
39
|
+
regional_configs = []
|
|
40
|
+
for currency, amount in prices.items():
|
|
41
|
+
region = currency_to_region(currency)
|
|
42
|
+
if region:
|
|
43
|
+
regional_configs.append({
|
|
44
|
+
"regionCode": region,
|
|
45
|
+
"newSubscriberAvailability": True,
|
|
46
|
+
"price": build_price(amount, currency),
|
|
47
|
+
})
|
|
48
|
+
|
|
49
|
+
return {
|
|
50
|
+
"basePlanId": sub_config["product_id"].replace(".", "-").replace("_", "-"),
|
|
51
|
+
"autoRenewingBasePlanType": {
|
|
52
|
+
"billingPeriodDuration": duration,
|
|
53
|
+
"gracePeriodDuration": "P3D",
|
|
54
|
+
"resubscribeState": "RESUBSCRIBE_STATE_ACTIVE",
|
|
55
|
+
"legacyCompatible": True,
|
|
56
|
+
},
|
|
57
|
+
"regionalConfigs": regional_configs,
|
|
58
|
+
"otherRegionsConfig": {
|
|
59
|
+
"usdPrice": build_price(usd_price, "USD"),
|
|
60
|
+
"eurPrice": build_price(eur_price, "EUR"),
|
|
61
|
+
"newSubscriberAvailability": True,
|
|
62
|
+
},
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _build_listings(sub_config: dict) -> list:
|
|
67
|
+
"""Build localized listings from subscription config."""
|
|
68
|
+
localizations = sub_config.get("localizations", {})
|
|
69
|
+
listings = []
|
|
70
|
+
|
|
71
|
+
if localizations:
|
|
72
|
+
for lang_code, loc_data in localizations.items():
|
|
73
|
+
listings.append({
|
|
74
|
+
"languageCode": lang_code,
|
|
75
|
+
"title": loc_data.get("name", sub_config["reference_name"]),
|
|
76
|
+
"description": loc_data.get("description", ""),
|
|
77
|
+
"benefits": loc_data.get("benefits", []),
|
|
78
|
+
})
|
|
79
|
+
else:
|
|
80
|
+
listings.append({
|
|
81
|
+
"languageCode": "en-US",
|
|
82
|
+
"title": sub_config["reference_name"],
|
|
83
|
+
"description": sub_config.get("description", ""),
|
|
84
|
+
"benefits": [],
|
|
85
|
+
})
|
|
86
|
+
|
|
87
|
+
return listings
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def _build_subscription_body(sub_config: dict, package_name: str) -> dict:
|
|
91
|
+
"""Build the full subscription request body."""
|
|
92
|
+
return {
|
|
93
|
+
"packageName": package_name,
|
|
94
|
+
"productId": sub_config["product_id"],
|
|
95
|
+
"basePlans": [_build_base_plan(sub_config)],
|
|
96
|
+
"listings": _build_listings(sub_config),
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def _build_intro_offer_body(sub_config: dict) -> dict:
|
|
101
|
+
"""Build the introductory offer request body from subscription config."""
|
|
102
|
+
intro = sub_config["introductory_offer"]
|
|
103
|
+
offer_type = intro.get("type", "FREE")
|
|
104
|
+
duration = normalize_duration(intro.get("duration", "P1W"))
|
|
105
|
+
|
|
106
|
+
phases = [{"recurrenceCount": intro.get("periods", 1), "duration": duration}]
|
|
107
|
+
|
|
108
|
+
if offer_type in ("FREE", "FREE_TRIAL"):
|
|
109
|
+
phases[0]["regionalConfigs"] = [
|
|
110
|
+
{"regionCode": "US", "price": build_price("0", "USD")}
|
|
111
|
+
]
|
|
112
|
+
else:
|
|
113
|
+
price = intro.get("price", "0")
|
|
114
|
+
phases[0]["regionalConfigs"] = [
|
|
115
|
+
{"regionCode": "US", "price": build_price(price, "USD")}
|
|
116
|
+
]
|
|
117
|
+
|
|
118
|
+
offer_id = f"{sub_config['product_id'].replace('.', '-').replace('_', '-')}-intro"
|
|
119
|
+
return {
|
|
120
|
+
"offerId": offer_id,
|
|
121
|
+
"phases": phases,
|
|
122
|
+
"targeting": {
|
|
123
|
+
"acquisitionRule": {
|
|
124
|
+
"scope": {"thisSubscription": {}}
|
|
125
|
+
}
|
|
126
|
+
},
|
|
127
|
+
"regionalConfigs": [{"regionCode": "US", "newSubscriberAvailability": True}],
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def sync_subscription(headers: dict, package_name: str, sub_config: dict, existing: dict) -> dict:
    """Create or update one subscription, returning a summary of the action.

    Existing products are updated in place; new ones are created, their base
    plan activated, and an introductory offer attached when configured.
    """
    product_id = sub_config["product_id"]
    print(f"\n  Processing subscription: {product_id}")
    body = _build_subscription_body(sub_config, package_name)

    # An already-known product only needs an update; the rest is create-only.
    if product_id in existing:
        update_subscription(headers, package_name, product_id, body)
        return {"product_id": product_id, "action": "updated"}

    if not create_subscription(headers, package_name, product_id, body):
        return {"product_id": product_id, "action": "failed"}

    # Base-plan IDs may not contain dots or underscores.
    base_plan_id = product_id.replace(".", "-").replace("_", "-")
    activate_base_plan(headers, package_name, product_id, base_plan_id)

    if sub_config.get("introductory_offer"):
        offer_body = _build_intro_offer_body(sub_config)
        offer_id = f"{base_plan_id}-intro"
        create_intro_offer(headers, package_name, product_id, base_plan_id, offer_id, offer_body)

    return {"product_id": product_id, "action": "created"}
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def validate_env() -> tuple:
    """Validate required environment variables.

    Returns:
        (sa_json, package_name): path to the service-account JSON file and
        the Google Play package name.

    Exits with status 1 when configuration is incomplete or the service
    account file does not exist. Missing variables are listed by name,
    matching the iOS sync script's error style, so CI users can fix all
    of them in one pass.
    """
    sa_json = os.environ.get("SA_JSON", "")
    package_name = os.environ.get("PACKAGE_NAME", "")

    missing = []
    if not sa_json:
        missing.append("SA_JSON")
    if not package_name:
        missing.append("PACKAGE_NAME")
    if missing:
        print(f"ERROR: Missing required environment variables: {', '.join(missing)}", file=sys.stderr)
        sys.exit(1)

    if not os.path.isfile(sa_json):
        print(f"ERROR: Service account file not found: {sa_json}", file=sys.stderr)
        sys.exit(1)

    return sa_json, package_name
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def load_iap_config(config_path: str) -> dict:
    """Read the IAP config JSON from disk.

    Exits with status 1 if the file does not exist; warns (but continues)
    when no subscription groups are configured.
    """
    if os.path.isfile(config_path):
        with open(config_path, "r", encoding="utf-8") as handle:
            config = json.load(handle)
    else:
        print(f"ERROR: IAP config file not found: {config_path}", file=sys.stderr)
        sys.exit(1)

    # An absent/empty group list is allowed but almost certainly a mistake.
    if not config.get("subscription_groups"):
        print("WARNING: No subscription_groups found in config", file=sys.stderr)

    return config
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def main() -> None:
    """Entry point: sync every configured subscription to Google Play."""
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <path/to/iap_config.json>", file=sys.stderr)
        sys.exit(1)

    config_path = sys.argv[1]
    sa_json, package_name = validate_env()

    # All API calls share one bearer token minted from the service account.
    headers = {
        "Authorization": f"Bearer {get_access_token(sa_json)}",
        "Content-Type": "application/json",
    }

    config = load_iap_config(config_path)
    print(f"Package: {package_name}")

    existing = list_subscriptions(headers, package_name)
    print(f"Found {len(existing)} existing subscription(s)")

    results = []
    for group in config.get("subscription_groups", []):
        group_name = group.get("reference_name", group.get("group_name", "Unknown"))
        print(f"\nProcessing group: {group_name}")
        results.extend(
            sync_subscription(headers, package_name, sub_config, existing)
            for sub_config in group.get("subscriptions", [])
        )

    print(f"\n{json.dumps({'synced_subscriptions': results}, indent=2)}")


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Sync iOS In-App Purchases to App Store Connect via the REST API.
|
|
4
|
+
|
|
5
|
+
Reads iap_config.json and creates/updates subscription groups and subscriptions.
|
|
6
|
+
Idempotent: safe to run repeatedly -- existing resources are skipped or updated.
|
|
7
|
+
|
|
8
|
+
Required env vars:
|
|
9
|
+
APP_STORE_CONNECT_KEY_IDENTIFIER - Key ID from App Store Connect
|
|
10
|
+
APP_STORE_CONNECT_ISSUER_ID - Issuer ID from App Store Connect
|
|
11
|
+
APP_STORE_CONNECT_PRIVATE_KEY - Contents of the P8 key file
|
|
12
|
+
BUNDLE_ID - App bundle identifier
|
|
13
|
+
|
|
14
|
+
Usage:
|
|
15
|
+
python3 sync_iap_ios.py <path/to/iap_config.json>
|
|
16
|
+
"""
|
|
17
|
+
import json
|
|
18
|
+
import os
|
|
19
|
+
import sys
|
|
20
|
+
|
|
21
|
+
from asc_iap_api import (
|
|
22
|
+
create_localization,
|
|
23
|
+
create_subscription,
|
|
24
|
+
create_subscription_group,
|
|
25
|
+
get_app_id,
|
|
26
|
+
get_jwt_token,
|
|
27
|
+
get_subscription_localizations,
|
|
28
|
+
list_subscription_groups,
|
|
29
|
+
list_subscriptions_in_group,
|
|
30
|
+
update_localization,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def find_or_create_group(headers: dict, app_id: str, reference_name: str, existing_groups: list) -> str:
    """Return the ID of the subscription group named *reference_name*.

    Reuses an existing group when one matches; otherwise creates it.
    """
    match = next(
        (g for g in existing_groups if g["attributes"]["referenceName"] == reference_name),
        None,
    )
    if match is None:
        return create_subscription_group(headers, app_id, reference_name)
    group_id = match["id"]
    print(f"  Subscription group '{reference_name}' already exists (ID: {group_id})")
    return group_id
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def find_or_create_subscription(headers: dict, group_id: str, sub_config: dict, existing_subs: list) -> str:
    """Return the ID of the subscription matching the config's product_id.

    Reuses an existing subscription when one matches; otherwise creates it.
    """
    wanted = sub_config["product_id"]
    hit = next((s for s in existing_subs if s["attributes"]["productId"] == wanted), None)
    if hit is not None:
        sub_id = hit["id"]
        print(f"  Subscription '{wanted}' already exists (ID: {sub_id})")
        return sub_id
    return create_subscription(headers, group_id, sub_config)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def set_subscription_localization(headers: dict, sub_id: str, locale: str, loc_data: dict) -> None:
    """Upsert one locale's listing text for a subscription.

    Updates the matching localization in place when the locale already
    exists on App Store Connect; otherwise creates a fresh one.
    """
    current = get_subscription_localizations(headers, sub_id)
    match = next((loc for loc in current if loc["attributes"]["locale"] == locale), None)
    if match is not None:
        update_localization(headers, match["id"], loc_data)
    else:
        create_localization(headers, sub_id, locale, loc_data)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def sync_subscription_group(headers: dict, app_id: str, group_config: dict, existing_groups: list) -> dict:
    """Sync one subscription group plus all of its subscriptions.

    Returns a summary dict. A group without a usable name is reported as
    skipped rather than aborting the whole run.
    """
    ref_name = group_config.get("reference_name", group_config.get("group_name", ""))
    if not ref_name:
        print("WARNING: Subscription group missing reference_name, skipping", file=sys.stderr)
        return {"group": ref_name, "status": "skipped", "subscriptions": []}

    print(f"\nProcessing subscription group: {ref_name}")
    group_id = find_or_create_group(headers, app_id, ref_name, existing_groups)
    existing_subs = list_subscriptions_in_group(headers, group_id)

    synced = []
    for sub_config in group_config.get("subscriptions", []):
        sub_id = find_or_create_subscription(headers, group_id, sub_config, existing_subs)
        _sync_subscription_localizations(headers, sub_id, sub_config)
        synced.append({"product_id": sub_config["product_id"], "id": sub_id})

    return {"group": ref_name, "group_id": group_id, "subscriptions": synced}
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def _sync_subscription_localizations(headers: dict, sub_id: str, sub_config: dict) -> None:
|
|
87
|
+
"""Sync localizations for a subscription from its config."""
|
|
88
|
+
localizations = sub_config.get("localizations", {})
|
|
89
|
+
if not localizations:
|
|
90
|
+
return
|
|
91
|
+
for locale, loc_data in localizations.items():
|
|
92
|
+
set_subscription_localization(headers, sub_id, locale, loc_data)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def validate_env() -> tuple:
    """Validate required environment variables.

    Returns (key_id, issuer_id, private_key, bundle_id). Exits with status 1
    after listing every missing variable by name so they can all be fixed
    in one pass.
    """
    # Insertion order determines both the returned tuple order and the
    # order variables appear in the error message.
    required = {
        "APP_STORE_CONNECT_KEY_IDENTIFIER": os.environ.get("APP_STORE_CONNECT_KEY_IDENTIFIER", ""),
        "APP_STORE_CONNECT_ISSUER_ID": os.environ.get("APP_STORE_CONNECT_ISSUER_ID", ""),
        "APP_STORE_CONNECT_PRIVATE_KEY": os.environ.get("APP_STORE_CONNECT_PRIVATE_KEY", ""),
        "BUNDLE_ID": os.environ.get("BUNDLE_ID", ""),
    }

    missing = [name for name, value in required.items() if not value]
    if missing:
        print(f"ERROR: Missing required environment variables: {', '.join(missing)}", file=sys.stderr)
        sys.exit(1)

    return tuple(required.values())
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def load_iap_config(config_path: str) -> dict:
    """Parse the IAP config JSON.

    Exits with status 1 if the file is missing. Emits a warning when no
    subscription groups are configured, since a sync run without groups
    accomplishes nothing.
    """
    if not os.path.isfile(config_path):
        print(f"ERROR: IAP config file not found: {config_path}", file=sys.stderr)
        sys.exit(1)

    with open(config_path, "r", encoding="utf-8") as handle:
        data = json.load(handle)

    if not data.get("subscription_groups"):
        print("WARNING: No subscription_groups found in config", file=sys.stderr)

    return data
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def main() -> None:
    """Entry point: sync all configured subscription groups to App Store Connect."""
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <path/to/iap_config.json>", file=sys.stderr)
        sys.exit(1)

    config_path = sys.argv[1]
    key_id, issuer_id, private_key, bundle_id = validate_env()

    # One short-lived JWT covers the whole run.
    headers = {
        "Authorization": f"Bearer {get_jwt_token(key_id, issuer_id, private_key)}",
        "Content-Type": "application/json",
    }

    config = load_iap_config(config_path)
    app_id = get_app_id(headers, bundle_id)
    print(f"App ID: {app_id} (Bundle: {bundle_id})")

    existing_groups = list_subscription_groups(headers, app_id)
    results = [
        sync_subscription_group(headers, app_id, group_config, existing_groups)
        for group_config in config.get("subscription_groups", [])
    ]

    print(f"\n{json.dumps({'synced_groups': results}, indent=2)}")


if __name__ == "__main__":
    main()
|