mirror of
https://github.com/microsoft/WSL.git
synced 2026-04-11 10:52:42 -05:00
* Fix localization text, Intune capitalization, ADMX indentation, unused import - distributions/validate.py: Remove unused 'import base64' - en-US/en-GB Resources.resw: Fix word order 'the list distribution' -> 'the distribution list' in MessageCouldFetchDistributionList - en-US/en-GB Resources.resw: Remove trailing space from MessageCorruptedDistroRegistration value - intune/en-US/WSL.adml: Fix 'For Linux' -> 'for Linux' capitalization (5 occurrences) to match official product name - intune/WSL.admx: Fix inconsistent 3-space indent to 4-space on AllowWSL1 and CustomKernelUserSettingConfigurable policies * Fix grammar, typos, and formatting in docs and source code - WmiService.h: Fix duplicated word 'of of' -> 'is of' in WQL comment - drvfs.md: Fix 'which tell' -> 'which tells' (subject-verb agreement), double space, second 'tell' -> 'tells' - systemd.md: Fix 'tries synchronizes' -> 'tries to synchronize' - wslhost.exe.md: Fix 'processes terminates' -> 'process terminates' - boot-process.md: Add missing 'in' preposition, fix C:/ -> C:\, fix lowercase 'linux' to 'Linux' (4 occurrences), fix misplaced parenthesis - init.md: Fix double space before hvsocket backtick - session-leader.md: Fix heading level ## -> ### to match sibling section - CONTRIBUTING.md: Fix double period after bold security notice - debugging.md: Fix missing word 'be' in 'can enabled', fix 'process' -> 'processes' - index.md: Add missing period at end of sentence - technical-documentation/index.md: Fix 'API's' -> 'APIs' (remove incorrect apostrophe) - relay.md: Fix lowercase 'linux' to 'Linux' (3 occurrences) - localhost.md: Fix lowercase 'linux' to 'Linux' - plan9.md: Fix lowercase 'linux' to 'Linux' * update errormessages testcase --------- Co-authored-by: Ben Hillis <benhill@ntdev.microsoft.com>
94 lines
3.7 KiB
Python
94 lines
3.7 KiB
Python
import requests
|
|
import json
|
|
import sys
|
|
import hashlib
|
|
import difflib
|
|
from urllib.request import urlretrieve
|
|
from xml.etree import ElementTree
|
|
import tempfile
|
|
import zipfile
|
|
|
|
def download_and_get_manifest(url: str):
    """Download an appx/msix package and return its parsed XML manifest root.

    Handles both plain packages (AppxManifest.xml at the archive root) and
    bundles (AppxMetadata/AppxBundleManifest.xml).

    Args:
        url: Location of the package; anything urlretrieve() accepts.

    Returns:
        The manifest root as an xml.etree.ElementTree.Element.
    """
    # Local import: urlcleanup is only needed here and the module-level
    # import line only pulls in urlretrieve.
    from urllib.request import urlcleanup

    print(f'Downloading {url}')

    filename, _ = urlretrieve(url)
    try:
        with zipfile.ZipFile(filename) as archive:
            try:
                with archive.open('AppxManifest.xml') as manifest:
                    return ElementTree.fromstring(manifest.read())
            except KeyError:
                # In the case of a bundle
                with archive.open('AppxMetadata/AppxBundleManifest.xml') as manifest:
                    return ElementTree.fromstring(manifest.read())
    finally:
        # urlretrieve() with no target path downloads to a temporary file;
        # clean it up so validating many (large) packages doesn't fill the
        # temp directory. urlcleanup() only removes urlretrieve's own temp
        # copies, so it is safe even for file:// URLs.
        urlcleanup()
def _encode_publisher_id(publisher: str) -> str:
    """Return the 13-character publisher-id suffix of a package family name.

    Windows derives it from the first 8 bytes of the SHA-256 digest of the
    UTF-16LE-encoded publisher string, rendered in Crockford base32
    (lowercase, alphabet without i/l/o/u): 64 digest bits plus one trailing
    '0' padding bit give 65 bits, i.e. 13 five-bit groups.
    """
    digest = hashlib.sha256(publisher.encode('utf-16le')).digest()[:8]
    bits = ''.join(format(byte, '08b') for byte in digest) + '0'
    charset = "0123456789abcdefghjkmnpqrstvwxyz"
    return ''.join(charset[int(bits[i:i + 5], 2)] for i in range(0, len(bits), 5))


def validate_package_url(url: str, family_name: str, platform: str):
    """Validate that a distribution package URL matches its metadata.

    Downloads the package at `url` and asserts that:
      - for non-bundled packages, the processor architecture matches `platform`
      - the package declares no dependencies
      - `family_name` is consistent with the manifest's Name and Publisher

    Raises:
        AssertionError: when any of the checks above fails.
    """
    manifest = download_and_get_manifest(url)

    foundation_ns = '{http://schemas.microsoft.com/appx/manifest/foundation/windows10}'
    bundle_ns = '{http://schemas.microsoft.com/appx/2013/bundle}'

    identity = manifest.find(f'.//{foundation_ns}Identity')
    dependencies = manifest.find(f'.//{foundation_ns}PackageDependency')

    if identity is not None:
        # Check the architecture if the package isn't bundled
        assert platform == identity.attrib['ProcessorArchitecture']
    else:
        # Only check the package name for bundles
        identity = manifest.find(f'.//{bundle_ns}Identity')
        dependencies = manifest.find(f'.//{bundle_ns}PackageDependency')

    # Packages uploaded to the CDN shouldn't have dependencies since they
    # can't be installed automatically on Server SKU's.
    assert dependencies is None

    # Validate the package family_name (the last part is based on a custom
    # hash of the publisher)
    assert family_name.startswith(identity.attrib["Name"])
    assert family_name.endswith('_' + _encode_publisher_id(identity.attrib['Publisher']))
def validate_distro(distro: dict):
    """Run package validation for every architecture a distro publishes.

    A null package URL means the distro isn't published for that
    architecture and is skipped.
    """
    checks = (
        ('Amd64PackageUrl', 'x64'),
        ('Arm64PackageUrl', 'arm64'),
    )
    for url_key, architecture in checks:
        if distro[url_key] is not None:
            validate_package_url(distro[url_key], distro['PackageFamilyName'], architecture)
def is_unique(collection: list):
    """Return True when `collection` contains no duplicate entries."""
    # A set drops duplicates, so equal sizes imply every element is distinct.
    return len(set(collection)) == len(collection)
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print(f'Usage: {sys.argv[0]} /path/to/file [distroName]', file=sys.stderr)
        # sys.exit() rather than the site-module exit() builtin, which isn't
        # guaranteed to exist in every interpreter configuration.
        sys.exit(1)

    # The manifest is JSON; read it with an explicit encoding instead of the
    # platform default.
    with open(sys.argv[1], encoding='utf-8') as fd:
        data = fd.read()

    content = json.loads(data)

    # The file must already be formatted exactly as `json.dumps(indent=4)`
    # emits it; surface the normalizing diff when it isn't.
    diff = difflib.unified_diff(
        data.splitlines(keepends=True),
        (json.dumps(content, indent=4) + "\n").splitlines(keepends=True),
        fromfile="a" + sys.argv[1],
        tofile="b" + sys.argv[1],
    )
    diff = "".join(diff)
    assert diff == "", diff

    distros = content['Distributions']

    # Store app ids and distribution names must be unique across the manifest.
    assert is_unique([e.get('StoreAppId') for e in distros if e])
    assert is_unique([e.get('Name') for e in distros if e])

    if len(sys.argv) > 2:
        # Filter the distros to only the one we want to validate
        content = {"Distributions": [e for e in distros if e['Name'] == sys.argv[2]]}
        if not content['Distributions']:
            raise RuntimeError(f'No distro found for name {sys.argv[2]}')

    for e in content['Distributions']:
        validate_distro(e)

    print("All checks completed successfully")