Mirror of https://github.com/bitwarden/android.git (synced 2025-12-10 00:06:22 -06:00)
[PM-19049] Add workflow to regularly fetch updates to fido2_privileged_google.json (#4858)
Co-authored-by: Patrick Honkonen <1883101+SaintPatrck@users.noreply.github.com>
This commit is contained in:
parent ad6bc883b8
commit 3eed1c1abe
1 .github/scripts/validate-json/.python-version vendored Normal file
@@ -0,0 +1 @@
3.13
39 .github/scripts/validate-json/README.md vendored Normal file
@@ -0,0 +1,39 @@
# JSON Validation Scripts

Utility scripts for validating JSON files and checking for duplicate package names between Google and Community privileged browser lists.

## Usage

### Validate a JSON file

```bash
python validate_json.py validate <json_file>
```

### Check for duplicates between two JSON files

```bash
python validate_json.py duplicates <json_file1> <json_file2> [output_file]
```

If `output_file` is not specified, duplicates will be saved to `duplicates.txt`.

## Running Tests

```bash
# Run all tests
python -m unittest test_validate_json.py

# Run the invalid JSON test individually
python -m unittest test_validate_json.TestValidateJson.test_validate_json_invalid
```

## Examples

```bash
# Validate Google privileged browsers list
python validate_json.py validate ../../app/src/main/assets/fido2_privileged_google.json

# Check for duplicates between Google and Community lists
python validate_json.py duplicates ../../app/src/main/assets/fido2_privileged_google.json ../../app/src/main/assets/fido2_privileged_community.json duplicates.txt
```
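For illustration only (not part of the committed README): a minimal Python sketch of what the default `duplicates.txt` ends up containing, assuming the working directory is `.github/scripts/validate-json` and using the bundled fixtures. Passing the same file twice guarantees duplicates, so every package name from the input is written out, one per line.

```python
# Sketch only: demonstrates the duplicates.txt format described above.
# Assumes the current directory is .github/scripts/validate-json.
from validate_json import find_duplicates, save_duplicates_to_file

dups = find_duplicates("fixtures/sample-valid1.json", "fixtures/sample-valid1.json")
save_duplicates_to_file(dups, "duplicates.txt")

with open("duplicates.txt") as f:
    print(f.read(), end="")
# Expected contents (order may vary, since duplicates come from a set intersection):
# com.android.chrome
# com.chrome.dev
# com.chrome.canary
```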
20 .github/scripts/validate-json/fixtures/sample-invalid.json vendored Normal file
@@ -0,0 +1,20 @@
{
  "apps": [

      "type": "android",
      "info": {
        "package_name": "com.android.chrome",
        "signatures": [
          {
            "build": "release",
            "cert_fingerprint_sha256": "F0:FD:6C:5B:41:0F:25:CB:25:C3:B5:33:46:C8:97:2F:AE:30:F8:EE:74:11:DF:91:04:80:AD:6B:2D:60:DB:83"
          },
          {
            "build": "userdebug",
            "cert_fingerprint_sha256": "19:75:B2:F1:71:77:BC:89:A5:DF:F3:1F:9E:64:A6:CA:E2:81:A5:3D:C1:D1:D5:9B:1D:14:7F:E1:C8:2A:FA:00"
          }
        ]
      }
    }
  ]
}
48 .github/scripts/validate-json/fixtures/sample-valid1.json vendored Normal file
@@ -0,0 +1,48 @@
{
  "apps": [
    {
      "type": "android",
      "info": {
        "package_name": "com.android.chrome",
        "signatures": [
          {
            "build": "release",
            "cert_fingerprint_sha256": "F0:FD:6C:5B:41:0F:25:CB:25:C3:B5:33:46:C8:97:2F:AE:30:F8:EE:74:11:DF:91:04:80:AD:6B:2D:60:DB:83"
          },
          {
            "build": "userdebug",
            "cert_fingerprint_sha256": "19:75:B2:F1:71:77:BC:89:A5:DF:F3:1F:9E:64:A6:CA:E2:81:A5:3D:C1:D1:D5:9B:1D:14:7F:E1:C8:2A:FA:00"
          }
        ]
      }
    },
    {
      "type": "android",
      "info": {
        "package_name": "com.chrome.dev",
        "signatures": [
          {
            "build": "release",
            "cert_fingerprint_sha256": "90:44:EE:5F:EE:4B:BC:5E:21:DD:44:66:54:31:C4:EB:1F:1F:71:A3:27:16:A0:BC:92:7B:CB:B3:92:33:CA:BF"
          },
          {
            "build": "release",
            "cert_fingerprint_sha256": "3D:7A:12:23:01:9A:A3:9D:9E:A0:E3:43:6A:B7:C0:89:6B:FB:4F:B6:79:F4:DE:5F:E7:C2:3F:32:6C:8F:99:4A"
          }
        ]
      }
    },
    {
      "type": "android",
      "info": {
        "package_name": "com.chrome.canary",
        "signatures": [
          {
            "build": "release",
            "cert_fingerprint_sha256": "DF:A1:FB:23:EF:BF:70:C5:BC:D1:44:3C:5B:EA:B0:4F:3F:2F:F4:36:6E:9A:C1:E3:45:76:39:A2:4C:FC"
          }
        ]
      }
    }
  ]
}
20 .github/scripts/validate-json/fixtures/sample-valid2.json vendored Normal file
@@ -0,0 +1,20 @@
{
  "apps": [
    {
      "type": "android",
      "info": {
        "package_name": "org.chromium.chrome",
        "signatures": [
          {
            "build": "release",
            "cert_fingerprint_sha256": "C6:AD:B8:B8:3C:6D:4C:17:D2:92:AF:DE:56:FD:48:8A:51:D3:16:FF:8F:2C:11:C5:41:02:23:BF:F8:A7:DB:B3"
          },
          {
            "build": "userdebug",
            "cert_fingerprint_sha256": "19:75:B2:F1:71:77:BC:89:A5:DF:F3:1F:9E:64:A6:CA:E2:81:A5:3D:C1:D1:D5:9B:1D:14:7F:E1:C8:2A:FA:00"
          }
        ]
      }
    }
  ]
}
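A quick illustrative sketch (not part of the commit) of what these three fixtures are set up to exercise, using the helper functions added later in this commit and assuming it is run from `.github/scripts/validate-json`; the package names and the missing brace are taken directly from the files above.

```python
# Sketch only: summarizes the fixtures that the unit tests below rely on.
from validate_json import get_package_names, validate_json

print(get_package_names("fixtures/sample-valid1.json"))
# {'com.android.chrome', 'com.chrome.dev', 'com.chrome.canary'}

print(get_package_names("fixtures/sample-valid2.json"))
# {'org.chromium.chrome'}  -> shares no package name with sample-valid1.json

print(validate_json("fixtures/sample-invalid.json"))
# False: the first app object never opens with "{", so json.load raises JSONDecodeError
```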
48 .github/scripts/validate-json/test_validate_json.py vendored Normal file
@@ -0,0 +1,48 @@
#!/usr/bin/env python3
import unittest
import os
import json
from validate_json import validate_json, find_duplicates, get_package_names
from unittest.mock import patch
import io


class TestValidateJson(unittest.TestCase):
    def setUp(self):
        self.valid_file = os.path.join(os.path.dirname(__file__), "fixtures/sample-valid1.json")
        self.valid_file2 = os.path.join(os.path.dirname(__file__), "fixtures/sample-valid2.json")
        self.invalid_file = os.path.join(os.path.dirname(__file__), "fixtures/sample-invalid.json")

        # Suppress stdout
        self.stdout_patcher = patch('sys.stdout', new=io.StringIO())
        self.stdout_patcher.start()

    def tearDown(self):
        self.stdout_patcher.stop()

    def test_validate_json_valid(self):
        """Test validation of valid JSON file"""
        self.assertTrue(validate_json(self.valid_file))

    def test_validate_json_invalid(self):
        """Test validation of invalid JSON file"""
        self.assertFalse(validate_json(self.invalid_file))

    def test_find_duplicates(self):
        """Test when using the same file (should find duplicates)"""
        expected_package_names = get_package_names(self.valid_file)

        duplicates = find_duplicates(self.valid_file, self.valid_file)

        self.assertEqual(len(duplicates), len(expected_package_names))
        for package_name in expected_package_names:
            self.assertIn(package_name, duplicates)

    def test_find_duplicates_returns_empty_list_when_no_duplicates(self):
        """Test when using different files (should not find duplicates)"""
        duplicates = find_duplicates(self.valid_file, self.valid_file2)
        self.assertEqual(len(duplicates), 0)


if __name__ == "__main__":
    unittest.main()
145 .github/scripts/validate-json/validate_json.py vendored Normal file
@@ -0,0 +1,145 @@
#!/usr/bin/env python3
import json
import sys
import os
from typing import List, Dict, Any, Set


def get_package_names(file_path: str) -> Set[str]:
    """
    Extracts package names from a JSON file.

    Args:
        file_path: Path to the JSON file

    Returns:
        Set of package names
    """
    with open(file_path, 'r') as f:
        data = json.load(f)

    package_names = set()
    for app in data["apps"]:
        package_names.add(app["info"]["package_name"])

    return package_names


def validate_json(file_path: str) -> bool:
    """
    Validates if a JSON file is correctly formatted by attempting to deserialize it.

    Args:
        file_path: Path to the JSON file to validate

    Returns:
        True if valid, False otherwise
    """
    try:
        if not os.path.exists(file_path):
            print(f"Error: File {file_path} does not exist")
            return False

        with open(file_path, 'r') as f:
            json.load(f)
        print(f"✅ JSON file {file_path} is valid")
        return True
    except json.JSONDecodeError as e:
        print(f"❌ Invalid JSON in {file_path}: {str(e)}")
        return False
    except Exception as e:
        print(f"❌ Error validating {file_path}: {str(e)}")
        return False


def find_duplicates(file1_path: str, file2_path: str) -> List[str]:
    """
    Checks for duplicate package_name entries between two JSON files.

    Args:
        file1_path: Path to the first JSON file
        file2_path: Path to the second JSON file

    Returns:
        List of duplicate package names, empty list if none found
    """
    try:
        # Get package names from both files
        packages1 = get_package_names(file1_path)
        packages2 = get_package_names(file2_path)

        # Find duplicates
        duplicates = list(packages1.intersection(packages2))

        if duplicates:
            print(f"❌ Found {len(duplicates)} duplicate package names between {file1_path} and {file2_path}:")
            for dup in duplicates:
                print(f"  - {dup}")
            return duplicates
        else:
            print(f"✅ No duplicate package names found between {file1_path} and {file2_path}")
            return []

    except Exception as e:
        print(f"❌ Error checking duplicates: {str(e)}")
        return []


def save_duplicates_to_file(duplicates: List[str], output_file: str) -> None:
    """
    Saves the list of duplicates to a file.

    Args:
        duplicates: List of duplicate package names
        output_file: Path to save the list of duplicates
    """
    try:
        with open(output_file, 'w') as f:
            for dup in duplicates:
                f.write(f"{dup}\n")
        print(f"Duplicates saved to {output_file}")
    except Exception as e:
        print(f"❌ Error saving duplicates to file: {str(e)}")


def main():
    if len(sys.argv) < 2:
        print("Usage:")
        print("  Validate JSON: python validate_json.py validate <json_file>")
        print("  Check duplicates: python validate_json.py duplicates <json_file1> <json_file2> [output_file]")
        sys.exit(1)

    command = sys.argv[1]

    match command:
        case "validate":
            if len(sys.argv) < 3:
                print("Error: Missing JSON file path")
                sys.exit(1)

            file_path = sys.argv[2]
            success = validate_json(file_path)
            sys.exit(0 if success else 1)

        case "duplicates":
            if len(sys.argv) < 4:
                print("Error: Missing JSON file paths")
                sys.exit(1)

            file1_path = sys.argv[2]
            file2_path = sys.argv[3]
            output_file = sys.argv[4] if len(sys.argv) > 4 else "duplicates.txt"

            duplicates = find_duplicates(file1_path, file2_path)
            if duplicates:
                save_duplicates_to_file(duplicates, output_file)

            sys.exit(0)

        case _:
            print(f"Unknown command: {command}")
            sys.exit(1)


if __name__ == "__main__":
    main()
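A short illustrative sketch (not part of the commit) of the CLI's exit-code behaviour, which the workflow below depends on: `validate` exits non-zero on malformed JSON, while `duplicates` always exits 0 and only reports its findings via stdout and the output file. Paths assume the fixtures directory shipped alongside the script.

```python
# Sketch only: exercises the two subcommands and inspects their exit codes.
# Assumes the current directory is .github/scripts/validate-json.
import subprocess

bad = subprocess.run(
    ["python", "validate_json.py", "validate", "fixtures/sample-invalid.json"]
)
print(bad.returncode)  # 1 -> a CI step running this command would fail

dup = subprocess.run(
    ["python", "validate_json.py", "duplicates",
     "fixtures/sample-valid1.json", "fixtures/sample-valid1.json"]
)
print(dup.returncode)  # 0 even though duplicates were found and written to duplicates.txt
```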
98 .github/workflows/cron-sync-google-priviledged-browsers.yml vendored Normal file
@@ -0,0 +1,98 @@
name: Cron / Sync Google Privileged Browsers List

on:
  schedule:
    # Run weekly on Monday at 00:00 UTC
    - cron: '0 0 * * 1'
  workflow_dispatch:

env:
  SOURCE_URL: https://www.gstatic.com/gpm-passkeys-privileged-apps/apps.json
  GOOGLE_FILE: app/src/main/assets/fido2_privileged_google.json
  COMMUNITY_FILE: app/src/main/assets/fido2_privileged_community.json

jobs:
  sync-privileged-browsers:
    name: Sync Google Privileged Browsers List
    runs-on: ubuntu-24.04
    permissions:
      contents: write
      pull-requests: write

    steps:
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # 4.2.2

      - name: Download Google Privileged Browsers List
        run: curl -s $SOURCE_URL -o $GOOGLE_FILE

      - name: Check for changes
        id: check-changes
        run: |
          if git diff --quiet -- $GOOGLE_FILE; then
            echo "👀 No changes detected, skipping..."
            echo "has_changes=false" >> $GITHUB_OUTPUT
            exit 0
          fi

          echo "has_changes=true" >> $GITHUB_OUTPUT
          echo "👀 Changes detected, validating fido2_privileged_google.json..."

          python .github/scripts/validate-json/validate_json.py validate $GOOGLE_FILE
          if [ $? -ne 0 ]; then
            echo "::error::JSON validation failed for $GOOGLE_FILE"
            exit 1
          fi

          echo "👀 fido2_privileged_google.json is valid, checking for duplicates..."

          # Check for duplicates between Google and Community files
          python .github/scripts/validate-json/validate_json.py duplicates $GOOGLE_FILE $COMMUNITY_FILE duplicates.txt

          if [ -f duplicates.txt ]; then
            echo "::warning::Duplicate package names found between Google and Community files."
            echo "duplicates_found=true" >> $GITHUB_OUTPUT
          else
            echo "✅ No duplicate package names found between Google and Community files"
            echo "duplicates_found=false" >> $GITHUB_OUTPUT
          fi

      - name: Create branch and commit
        if: steps.check-changes.outputs.has_changes == 'true'
        run: |
          echo "👀 Committing fido2_privileged_google.json..."

          BRANCH_NAME="cron-sync-privileged-browsers/$GITHUB_RUN_NUMBER-sync"
          git config user.name "GitHub Actions Bot"
          git config user.email "actions@github.com"
          git checkout -b $BRANCH_NAME
          git add $GOOGLE_FILE
          git commit -m "Update Google privileged browsers list"
          git push origin $BRANCH_NAME
          echo "BRANCH_NAME=$BRANCH_NAME" >> $GITHUB_ENV
          echo "🌱 Branch created: $BRANCH_NAME"

      - name: Create Pull Request
        if: steps.check-changes.outputs.has_changes == 'true'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          DUPLICATES_FOUND: ${{ steps.check-changes.outputs.duplicates_found }}
          BASE_PR_URL: ${{ github.server_url }}/${{ github.repository }}/pull/
        run: |
          PR_BODY="Updates the Google privileged browsers list with the latest data from $SOURCE_URL"

          if [ "$DUPLICATES_FOUND" = "true" ]; then
            PR_BODY="$PR_BODY\n\n> [!WARNING]\n> :suspect: The following package(s) appear in both Google and Community files:"
            while IFS= read -r line; do
              PR_BODY="$PR_BODY\n> - $line"
            done < duplicates.txt
          fi

          # Use echo -e to interpret escape sequences and pipe to gh pr create
          PR_URL=$(echo -e "$PR_BODY" | gh pr create \
            --title "Update Google privileged browsers list" \
            --body-file - \
            --base main \
            --head $BRANCH_NAME \
            --label "automated-pr" \
            --label "t:ci")
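For readers who want to reproduce the check locally, a rough Python approximation (not part of the workflow) of what the "Check for changes" step does after downloading the list. It skips the git-diff gating and the GITHUB_OUTPUT plumbing, swaps curl for urllib, and derives the duplicates flag from the function's return value rather than the presence of duplicates.txt; the URL and file paths are copied from the workflow's env block.

```python
# Rough local approximation of the workflow's validation and duplicate check.
# Not part of the commit; run from the repository root.
import sys
import urllib.request

sys.path.insert(0, ".github/scripts/validate-json")
from validate_json import validate_json, find_duplicates

SOURCE_URL = "https://www.gstatic.com/gpm-passkeys-privileged-apps/apps.json"
GOOGLE_FILE = "app/src/main/assets/fido2_privileged_google.json"
COMMUNITY_FILE = "app/src/main/assets/fido2_privileged_community.json"

urllib.request.urlretrieve(SOURCE_URL, GOOGLE_FILE)  # the workflow uses curl -s instead

if not validate_json(GOOGLE_FILE):
    raise SystemExit(1)  # mirrors the step's "::error" and exit 1 path

duplicates = find_duplicates(GOOGLE_FILE, COMMUNITY_FILE)
print(f"duplicates_found={'true' if duplicates else 'false'}")  # what the step records
```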
7 .gitignore vendored
@@ -28,3 +28,10 @@ user.properties
/app/src/standardBeta/google-services.json
/app/src/standardRelease/google-services.json
/authenticator/src/google-services.json

# Python
.python-version
__pycache__/

# Generated by .github/scripts/validate-json/validate_json.py
duplicates.txt