Mirror of https://github.com/Swiftgram/Telegram-iOS.git
Commit 7a443df199 — "Temp"
Parent: 99c8a72cbd
build-system/Make/BuildConfiguration.py (new file, 226 lines)
@@ -0,0 +1,226 @@
import json
import os
import sys
import shutil
import tempfile
import plistlib

from BuildEnvironment import run_executable_with_output


class BuildConfiguration:
    def __init__(self,
            bundle_id,
            api_id,
            api_hash,
            team_id,
            app_center_id,
            is_internal_build,
            is_appstore_build,
            appstore_id,
            app_specific_url_scheme,
            premium_iap_product_id,
            enable_siri,
            enable_icloud
        ):
        self.bundle_id = bundle_id
        self.api_id = api_id
        self.api_hash = api_hash
        self.team_id = team_id
        self.app_center_id = app_center_id
        self.is_internal_build = is_internal_build
        self.is_appstore_build = is_appstore_build
        self.appstore_id = appstore_id
        self.app_specific_url_scheme = app_specific_url_scheme
        self.premium_iap_product_id = premium_iap_product_id
        self.enable_siri = enable_siri
        self.enable_icloud = enable_icloud

    def write_to_variables_file(self, aps_environment, path):
        string = ''
        string += 'telegram_bundle_id = "{}"\n'.format(self.bundle_id)
        string += 'telegram_api_id = "{}"\n'.format(self.api_id)
        string += 'telegram_api_hash = "{}"\n'.format(self.api_hash)
        string += 'telegram_team_id = "{}"\n'.format(self.team_id)
        string += 'telegram_app_center_id = "{}"\n'.format(self.app_center_id)
        string += 'telegram_is_internal_build = "{}"\n'.format(self.is_internal_build)
        string += 'telegram_is_appstore_build = "{}"\n'.format(self.is_appstore_build)
        string += 'telegram_appstore_id = "{}"\n'.format(self.appstore_id)
        string += 'telegram_app_specific_url_scheme = "{}"\n'.format(self.app_specific_url_scheme)
        string += 'telegram_premium_iap_product_id = "{}"\n'.format(self.premium_iap_product_id)
        string += 'telegram_aps_environment = "{}"\n'.format(aps_environment)
        string += 'telegram_enable_siri = {}\n'.format(self.enable_siri)
        string += 'telegram_enable_icloud = {}\n'.format(self.enable_icloud)
        string += 'telegram_enable_watch = True\n'

        if os.path.exists(path):
            os.remove(path)
        with open(path, 'w+') as file:
            file.write(string)


def build_configuration_from_json(path):
    if not os.path.exists(path):
        print('Could not load build configuration from {}'.format(path))
        sys.exit(1)
    with open(path) as file:
        configuration_dict = json.load(file)
        required_keys = [
            'bundle_id',
            'api_id',
            'api_hash',
            'team_id',
            'app_center_id',
            'is_internal_build',
            'is_appstore_build',
            'appstore_id',
            'app_specific_url_scheme',
            'premium_iap_product_id',
            'enable_siri',
            'enable_icloud'
        ]
        for key in required_keys:
            if key not in configuration_dict:
                print('Configuration at {} does not contain {}'.format(path, key))
        return BuildConfiguration(
            bundle_id=configuration_dict['bundle_id'],
            api_id=configuration_dict['api_id'],
            api_hash=configuration_dict['api_hash'],
            team_id=configuration_dict['team_id'],
            app_center_id=configuration_dict['app_center_id'],
            is_internal_build=configuration_dict['is_internal_build'],
            is_appstore_build=configuration_dict['is_appstore_build'],
            appstore_id=configuration_dict['appstore_id'],
            app_specific_url_scheme=configuration_dict['app_specific_url_scheme'],
            premium_iap_product_id=configuration_dict['premium_iap_product_id'],
            enable_siri=configuration_dict['enable_siri'],
            enable_icloud=configuration_dict['enable_icloud']
        )


def decrypt_codesigning_directory_recursively(source_base_path, destination_base_path, password):
    for file_name in os.listdir(source_base_path):
        source_path = source_base_path + '/' + file_name
        destination_path = destination_base_path + '/' + file_name
        if os.path.isfile(source_path):
            os.system('openssl aes-256-cbc -md md5 -k "{password}" -in "{source_path}" -out "{destination_path}" -a -d'.format(
                password=password,
                source_path=source_path,
                destination_path=destination_path
            ))
        elif os.path.isdir(source_path):
            os.makedirs(destination_path, exist_ok=True)
            decrypt_codesigning_directory_recursively(source_path, destination_path, password)


def load_provisioning_profiles_from_git(working_dir, repo_url, branch, password, always_fetch):
    if not os.path.exists(working_dir):
        os.makedirs(working_dir, exist_ok=True)

    encrypted_working_dir = working_dir + '/encrypted'
    if os.path.exists(encrypted_working_dir):
        if always_fetch:
            original_working_dir = os.getcwd()
            os.chdir(encrypted_working_dir)
            os.system('git fetch')
            os.system('git checkout "{branch}"'.format(branch=branch))
            os.system('git pull')
            os.chdir(original_working_dir)
    else:
        os.makedirs(encrypted_working_dir, exist_ok=True)
        original_working_dir = os.getcwd()
        os.chdir(working_dir)
        os.system('git clone {repo_url} -b "{branch}" "{target_path}"'.format(
            repo_url=repo_url,
            branch=branch,
            target_path=encrypted_working_dir
        ))
        os.chdir(original_working_dir)

    decrypted_working_dir = working_dir + '/decrypted'
    if os.path.exists(decrypted_working_dir):
        shutil.rmtree(decrypted_working_dir)
    os.makedirs(decrypted_working_dir, exist_ok=True)

    decrypt_codesigning_directory_recursively(encrypted_working_dir + '/profiles', decrypted_working_dir + '/profiles', password)
    decrypt_codesigning_directory_recursively(encrypted_working_dir + '/certs', decrypted_working_dir + '/certs', password)


def copy_profiles_from_directory(source_path, destination_path, team_id, bundle_id):
    profile_name_mapping = {
        '.SiriIntents': 'Intents',
        '.NotificationContent': 'NotificationContent',
        '.NotificationService': 'NotificationService',
        '.Share': 'Share',
        '': 'Telegram',
        '.watchkitapp': 'WatchApp',
        '.watchkitapp.watchkitextension': 'WatchExtension',
        '.Widget': 'Widget',
        '.BroadcastUpload': 'BroadcastUpload'
    }

    for file_name in os.listdir(source_path):
        file_path = source_path + '/' + file_name
        if os.path.isfile(file_path):
            if not file_path.endswith('.mobileprovision'):
                continue

            profile_data = run_executable_with_output('openssl', arguments=[
                'smime',
                '-inform',
                'der',
                '-verify',
                '-noverify',
                '-in',
                file_path
            ], decode=False, stderr_to_stdout=False, check_result=True)

            profile_dict = plistlib.loads(profile_data)
            profile_name = profile_dict['Entitlements']['application-identifier']

            if profile_name.startswith(team_id + '.' + bundle_id):
                profile_base_name = profile_name[len(team_id + '.' + bundle_id):]
                if profile_base_name in profile_name_mapping:
                    shutil.copyfile(file_path, destination_path + '/' + profile_name_mapping[profile_base_name] + '.mobileprovision')
                else:
                    print('Warning: skipping provisioning profile at {} with bundle_id {} (base_name {})'.format(file_path, profile_name, profile_base_name))


class ProvisioningProfileSource:
    def __init__(self):
        pass

    def copy_profiles_to_destination(self, destination_path):
        raise Exception('Not implemented')


class GitProvisioningProfileSource(ProvisioningProfileSource):
    def __init__(self, working_dir, repo_url, team_id, bundle_id, profile_type, password, always_fetch):
        self.working_dir = working_dir
        self.repo_url = repo_url
        self.team_id = team_id
        self.bundle_id = bundle_id
        self.profile_type = profile_type
        self.password = password
        self.always_fetch = always_fetch

    def copy_profiles_to_destination(self, destination_path):
        load_provisioning_profiles_from_git(working_dir=self.working_dir, repo_url=self.repo_url, branch=self.team_id, password=self.password, always_fetch=self.always_fetch)
        copy_profiles_from_directory(source_path=self.working_dir + '/decrypted/profiles/{}'.format(self.profile_type), destination_path=destination_path, team_id=self.team_id, bundle_id=self.bundle_id)


class DirectoryProvisioningProfileSource(ProvisioningProfileSource):
    def __init__(self, directory_path, team_id, bundle_id):
        self.directory_path = directory_path
        self.team_id = team_id
        self.bundle_id = bundle_id

    def copy_profiles_to_destination(self, destination_path):
        profiles_path = self.directory_path
        if not os.path.exists(profiles_path):
            print('{} does not exist'.format(profiles_path))
            sys.exit(1)
        copy_profiles_from_directory(source_path=profiles_path, destination_path=destination_path, team_id=self.team_id, bundle_id=self.bundle_id)


def generate_configuration_repository(path, profile_source):
    pass
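The classes above are consumed by Make.py further down in this commit. As a rough usage sketch (the paths here are hypothetical placeholders, not taken from the commit), the configuration loader and a directory-based profile source could be driven like this:

# Illustrative sketch only, not part of the commit.
from BuildConfiguration import build_configuration_from_json, DirectoryProvisioningProfileSource

# Load the JSON configuration added in this commit and emit the Bazel variables file.
configuration = build_configuration_from_json('build-system/appstore-configuration.json')
configuration.write_to_variables_file(aps_environment='production', path='/tmp/variables.bzl')  # hypothetical output path

# Copy locally stored .mobileprovision files (hypothetical input directory) into a
# destination folder, renamed according to profile_name_mapping.
source = DirectoryProvisioningProfileSource(
    directory_path='/path/to/profiles',  # hypothetical
    team_id=configuration.team_id,
    bundle_id=configuration.bundle_id
)
source.copy_profiles_to_destination(destination_path='/tmp/provisioning')  # hypothetical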
@@ -2,7 +2,7 @@ import json
 import os
 import platform
 import subprocess
+import sys

 def is_apple_silicon():
     if platform.processor() == 'arm':
@@ -28,20 +28,38 @@ def resolve_executable(program):
     return None


-def run_executable_with_output(path, arguments):
+def run_executable_with_output(path, arguments, decode=True, input=None, stderr_to_stdout=True, check_result=False):
     executable_path = resolve_executable(path)
     if executable_path is None:
         raise Exception('Could not resolve {} to a valid executable file'.format(path))

+    stderr_assignment = subprocess.DEVNULL
+    if stderr_to_stdout:
+        stderr_assignment = subprocess.STDOUT
+
     process = subprocess.Popen(
         [executable_path] + arguments,
         stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
+        stderr=stderr_assignment,
+        stdin=subprocess.PIPE,
         env=get_clean_env()
     )
-    output_data, _ = process.communicate()
+    if input is not None:
+        output_data, _ = process.communicate(input=input)
+    else:
+        output_data, _ = process.communicate()
+
     output_string = output_data.decode('utf-8')
-    return output_string
+
+    if check_result:
+        if process.returncode != 0:
+            print('Command {} {} finished with non-zero return code and output:\n{}'.format(executable_path, arguments, output_string))
+            sys.exit(1)
+
+    if decode:
+        return output_string
+    else:
+        return output_data


 def call_executable(arguments, use_clean_environment=True, check_result=True):
build-system/Make/ImportCertificates.py (new file, 95 lines)
@@ -0,0 +1,95 @@
import os
import sys
import argparse

from BuildEnvironment import run_executable_with_output


def import_certificates(certificatesPath):
    if not os.path.exists(certificatesPath):
        print('{} does not exist'.format(certificatesPath))
        sys.exit(1)

    keychain_name = 'temp.keychain'
    keychain_password = 'secret'

    # Remove a stale temporary keychain from a previous run, then create a fresh one.
    existing_keychains = run_executable_with_output('security', arguments=['list-keychains'], check_result=True)
    if keychain_name in existing_keychains:
        run_executable_with_output('security', arguments=['delete-keychain'], check_result=True)

    run_executable_with_output('security', arguments=[
        'create-keychain',
        '-p',
        keychain_password,
        keychain_name
    ], check_result=True)

    existing_keychains = run_executable_with_output('security', arguments=['list-keychains', '-d', 'user'])
    existing_keychains = existing_keychains.replace('"', '')

    run_executable_with_output('security', arguments=[
        'list-keychains',
        '-d',
        'user',
        '-s',
        keychain_name,
        existing_keychains
    ], check_result=True)

    run_executable_with_output('security', arguments=['set-keychain-settings', keychain_name])
    run_executable_with_output('security', arguments=['unlock-keychain', '-p', keychain_password, keychain_name])

    # Import every certificate and private key found in the given directory.
    for file_name in os.listdir(certificatesPath):
        file_path = certificatesPath + '/' + file_name
        if file_path.endswith('.p12') or file_path.endswith('.cer'):
            run_executable_with_output('security', arguments=[
                'import',
                file_path,
                '-k',
                keychain_name,
                '-P',
                '',
                '-T',
                '/usr/bin/codesign',
                '-T',
                '/usr/bin/security'
            ], check_result=True)

    # Import the Apple WWDR intermediate certificate bundled with the repository.
    run_executable_with_output('security', arguments=[
        'import',
        'build-system/AppleWWDRCAG3.cer',
        '-k',
        keychain_name,
        '-P',
        '',
        '-T',
        '/usr/bin/codesign',
        '-T',
        '/usr/bin/security'
    ], check_result=True)

    run_executable_with_output('security', arguments=[
        'set-key-partition-list',
        '-S',
        'apple-tool:,apple:',
        '-k',
        keychain_password,
        keychain_name
    ], check_result=True)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='build')

    parser.add_argument(
        '--path',
        required=True,
        help='Path to certificates.'
    )

    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    import_certificates(args.path)
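A rough invocation sketch (the certificates directory is a hypothetical placeholder; the real caller is the remote build flow below):

# Illustrative only. Equivalent CLI: python3 build-system/Make/ImportCertificates.py --path=<certs dir>
from ImportCertificates import import_certificates

import_certificates('/path/to/certs')  # hypothetical directory of .p12/.cer files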
@@ -6,10 +6,13 @@ import shlex
 import sys
 import tempfile
 import subprocess
+import shutil

 from BuildEnvironment import resolve_executable, call_executable, BuildEnvironment
 from ProjectGeneration import generate
 from BazelLocation import locate_bazel
+from BuildConfiguration import ProvisioningProfileSource, GitProvisioningProfileSource, DirectoryProvisioningProfileSource, BuildConfiguration, build_configuration_from_json
+import RemoteBuild

 class BazelCommandLine:
     def __init__(self, bazel, override_bazel_version, override_xcode_version, bazel_user_root):
@@ -380,34 +383,75 @@ def clean(bazel, arguments):
     bazel_command_line.invoke_clean()


-def resolve_configuration(bazel_command_line: BazelCommandLine, arguments):
-    if arguments.configurationGenerator is not None:
-        configuration_generator_arguments = shlex.split(arguments.configurationGenerator)
-
-        configuration_generator_executable = resolve_executable(configuration_generator_arguments[0])
-
-        if configuration_generator_executable is None:
-            print('{} is not a valid executable'.format(configuration_generator_arguments[0]))
-            exit(1)
-
-        temp_configuration_path = tempfile.mkdtemp()
-
-        resolved_configuration_generator_arguments = [configuration_generator_executable]
-        resolved_configuration_generator_arguments += configuration_generator_arguments[1:]
-        resolved_configuration_generator_arguments += [temp_configuration_path]
-
-        call_executable(resolved_configuration_generator_arguments, use_clean_environment=False)
-
-        print('TelegramBuild: using generated configuration in {}'.format(temp_configuration_path))
-        bazel_command_line.set_configuration_path(temp_configuration_path)
-    elif arguments.configurationPath is not None:
-        absolute_configuration_path = os.path.abspath(arguments.configurationPath)
-        if not os.path.isdir(absolute_configuration_path):
-            print('Error: {} does not exist'.format(absolute_configuration_path))
-            exit(1)
-        bazel_command_line.set_configuration_path(absolute_configuration_path)
-    else:
-        raise Exception('Neither configurationPath nor configurationGenerator are set')
+def resolve_codesigning(arguments, base_path, build_configuration, certificates_path, provisioning_profiles_path):
+    profile_source = None
+    if arguments.gitCodesigningRepository is not None:
+        password = os.getenv('TELEGRAM_CODESIGNING_GIT_PASSWORD')
+        if password is None:
+            print('TELEGRAM_CODESIGNING_GIT_PASSWORD environment variable is not set')
+            sys.exit(1)
+
+        if arguments.gitCodesigningType is None:
+            print('--gitCodesigningType is required if --gitCodesigningRepository is set')
+            sys.exit(1)
+
+        workdir_path = '{}/build-input/configuration-repository-workdir'.format(base_path)
+        os.makedirs(workdir_path, exist_ok=True)
+
+        profile_source = GitProvisioningProfileSource(
+            working_dir=workdir_path,
+            repo_url=arguments.gitCodesigningRepository,
+            team_id=build_configuration.team_id,
+            bundle_id=build_configuration.bundle_id,
+            profile_type=arguments.gitCodesigningType,
+            password=password,
+            always_fetch=arguments.gitCodesigningAlwaysFetch
+        )
+    elif arguments.provisioningProfilesPath is not None:
+        profile_source = DirectoryProvisioningProfileSource(
+            directory_path=arguments.provisioningProfilesPath,
+            team_id=build_configuration.team_id,
+            bundle_id=build_configuration.bundle_id
+        )
+    else:
+        raise Exception('Neither gitCodesigningRepository nor provisioningProfilesPath are set')
+
+    profile_source.copy_profiles_to_destination(destination_path=provisioning_profiles_path)
+
+
+def resolve_configuration(base_path, bazel_command_line: BazelCommandLine, arguments, aps_environment):
+    configuration_repository_path = '{}/build-input/configuration-repository'.format(base_path)
+    os.makedirs(configuration_repository_path, exist_ok=True)
+
+    build_configuration = build_configuration_from_json(path=arguments.configurationPath)
+
+    with open(configuration_repository_path + '/WORKSPACE', 'w+') as file:
+        pass
+
+    with open(configuration_repository_path + '/BUILD', 'w+') as file:
+        pass
+
+    build_configuration.write_to_variables_file(aps_environment=aps_environment, path=configuration_repository_path + '/variables.bzl')
+
+    provisioning_path = '{}/provisioning'.format(configuration_repository_path)
+    if os.path.exists(provisioning_path):
+        shutil.rmtree(provisioning_path)
+    os.makedirs(provisioning_path, exist_ok=True)
+
+    resolve_codesigning(arguments=arguments, base_path=base_path, build_configuration=build_configuration, certificates_path=None, provisioning_profiles_path=provisioning_path)
+
+    provisioning_profile_files = []
+    for file_name in os.listdir(provisioning_path):
+        if file_name.endswith('.mobileprovision'):
+            provisioning_profile_files.append(file_name)
+
+    with open(provisioning_path + '/BUILD', 'w+') as file:
+        file.write('exports_files([\n')
+        for file_name in provisioning_profile_files:
+            file.write('    "{}",\n'.format(file_name))
+        file.write('])\n')
+
+    return configuration_repository_path


 def generate_project(bazel, arguments):
@@ -425,7 +469,10 @@ def generate_project(bazel, arguments):

     bazel_command_line.set_continue_on_error(arguments.continueOnError)

-    resolve_configuration(bazel_command_line, arguments)
+    configuration_repository_path = resolve_configuration(base_path=os.getcwd(), bazel_command_line=bazel_command_line, arguments=arguments, aps_environment=arguments.apsEnvironment)
+
+    if bazel_command_line is not None:
+        bazel_command_line.set_configuration_path(configuration_repository_path)

     bazel_command_line.set_build_number(arguments.buildNumber)

@@ -469,7 +516,7 @@ def build(bazel, arguments):
     elif arguments.cacheHost is not None:
         bazel_command_line.add_remote_cache(arguments.cacheHost)

-    resolve_configuration(bazel_command_line, arguments)
+    resolve_configuration(base_path=os.getcwd(), bazel_command_line=bazel_command_line, arguments=arguments, aps_environment=arguments.apsEnvironment)

     bazel_command_line.set_configuration(arguments.configuration)
     bazel_command_line.set_build_number(arguments.buildNumber)
@@ -495,7 +542,7 @@ def test(bazel, arguments):
     elif arguments.cacheHost is not None:
         bazel_command_line.add_remote_cache(arguments.cacheHost)

-    resolve_configuration(bazel_command_line, arguments)
+    resolve_configuration(base_path=os.getcwd(), bazel_command_line=bazel_command_line, arguments=arguments, aps_environment=arguments.apsEnvironment)

     bazel_command_line.set_configuration('debug_sim_arm64')
     bazel_command_line.set_build_number('10000')
@@ -503,29 +550,75 @@ def test(bazel, arguments):
     bazel_command_line.invoke_test()


-def add_project_and_build_common_arguments(current_parser: argparse.ArgumentParser):
-    group = current_parser.add_mutually_exclusive_group(required=True)
-    group.add_argument(
-        '--configurationPath',
-        help='''
-            Path to a folder containing build configuration and provisioning profiles.
-            See build-system/example-configuration for an example.
-            ''',
-        metavar='path'
-    )
-    group.add_argument(
-        '--configurationGenerator',
-        help='''
-            A command line invocation that will dynamically generate the configuration data
-            (project constants and provisioning profiles).
-            The expression will be parsed according to the shell parsing rules into program and arguments parts.
-            The program will be then invoked with the given arguments plus the path to the output directory.
-            See build-system/generate-configuration.sh for an example.
-            Example: --configurationGenerator="sh ~/my_script.sh argument1"
-            ''',
-        metavar='command'
-    )
+def add_codesigning_common_arguments(current_parser: argparse.ArgumentParser):
+    configuration_group = current_parser.add_mutually_exclusive_group(required=True)
+    configuration_group.add_argument(
+        '--configurationPath',
+        help='''
+            Path to a json containing build configuration.
+            See build-system/appstore-configuration.json for an example.
+            ''',
+        metavar='path'
+    )
+
+    codesigning_group = current_parser.add_mutually_exclusive_group(required=True)
+    codesigning_group.add_argument(
+        '--reproducibleCodesigning',
+        action='store_true',
+        help='''
+            Use locally generated provisioning profiles and certificates for a reproducible build.
+            '''
+    )
+    codesigning_group.add_argument(
+        '--gitCodesigningRepository',
+        help='''
+            If specified, certificates and provisioning profiles will be loaded from git.
+            TELEGRAM_CODESIGNING_GIT_PASSWORD environment variable must be set.
+            ''',
+        metavar='path'
+    )
+    codesigning_group.add_argument(
+        '--provisioningProfilesPath',
+        help='''
+            Use provisioning profiles from a local directory.
+            ''',
+        metavar='command'
+    )
+
+    current_parser.add_argument(
+        '--gitCodesigningType',
+        required=False,
+        help='''
+            The name of the folder to use inside "profiles" folder in the git repository.
+            Required if gitCodesigningRepository is specified.
+            ''',
+        metavar='type'
+    )
+
+    current_parser.add_argument(
+        '--gitCodesigningAlwaysFetch',
+        action='store_true',
+        required=False,
+        default=True,
+        help='''
+            Always refresh codesigning repository.
+            '''
+    )
+
+
+def add_project_and_build_common_arguments(current_parser: argparse.ArgumentParser):
+    current_parser.add_argument(
+        '--apsEnvironment',
+        choices=[
+            'development',
+            'production'
+        ],
+        required=True,
+        help='APNS environment',
+    )
+
+    add_codesigning_common_arguments(current_parser=current_parser)
+
+
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(prog='Make')
@@ -575,7 +668,7 @@ if __name__ == '__main__':
         '--cacheHost',
         required=False,
         help='Use remote build artifact cache to speed up rebuilds (See https://github.com/buchgr/bazel-remote).',
-        metavar='http://host:9092'
+        metavar='grpc://host:9092'
     )
     cacheTypeGroup.add_argument(
         '--cacheDir',
@@ -701,6 +794,34 @@ if __name__ == '__main__':
         help='Enable sandbox.',
     )

+    remote_build_parser = subparsers.add_parser('remote-build', help='Build the app using a remote environment.')
+    add_codesigning_common_arguments(remote_build_parser)
+    remote_build_parser.add_argument(
+        '--darwinContainersHost',
+        required=True,
+        type=str,
+        help='DarwinContainers host address.'
+    )
+    remote_build_parser.add_argument(
+        '--configuration',
+        choices=[
+            'debug_universal',
+            'debug_arm64',
+            'debug_armv7',
+            'release_arm64',
+            'release_armv7',
+            'release_universal'
+        ],
+        required=True,
+        help='Build configuration'
+    )
+    remote_build_parser.add_argument(
+        '--bazelCacheHost',
+        required=False,
+        type=str,
+        help='Bazel remote cache host address.'
+    )
+
     if len(sys.argv) < 2:
         parser.print_help()
         sys.exit(1)
@@ -726,6 +847,28 @@ if __name__ == '__main__':
         generate_project(bazel=bazel_path, arguments=args)
     elif args.commandName == 'build':
         build(bazel=bazel_path, arguments=args)
+    elif args.commandName == 'remote-build':
+        base_path = os.getcwd()
+        remote_input_path = '{}/build-input/remote-input'.format(base_path)
+        certificates_path = '{}/certs'.format(remote_input_path)
+        provisioning_profiles_path = '{}/profiles'.format(remote_input_path)
+
+        os.makedirs(certificates_path, exist_ok=True)
+        os.makedirs(provisioning_profiles_path, exist_ok=True)
+
+        configuration_repository_path = resolve_configuration(base_path=os.getcwd(), bazel_command_line=None, arguments=args, aps_environment='production')
+
+        certificates_path = '{}/certs'.format(configuration_repository_path)
+        provisioning_profiles_path = '{}/profiles'.format(configuration_repository_path)
+
+        RemoteBuild.remote_build(
+            darwin_containers_host=args.darwinContainersHost,
+            bazel_cache_host=args.bazelCacheHost,
+            configuration=args.configuration,
+            certificates_path=certificates_path,
+            provisioning_profiles_path=provisioning_profiles_path,
+            configurationPath=args.configurationPath
+        )
     elif args.commandName == 'test':
         test(bazel=bazel_path, arguments=args)
     else:
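With the values from build-system/appstore-configuration.json (added below) and --apsEnvironment=production, the variables.bzl that resolve_configuration emits into build-input/configuration-repository would look roughly like this sketch (derived from write_to_variables_file; not a file in the commit):

# Sketch of the generated variables.bzl under the above assumptions.
telegram_bundle_id = "ph.telegra.Telegraph"
telegram_api_id = "8"
telegram_api_hash = "7245de8e747a0d6fbe11f7cc14fcc0bb"
telegram_team_id = "C67CF9S4VU"
telegram_app_center_id = "0"
telegram_is_internal_build = "false"
telegram_is_appstore_build = "true"
telegram_appstore_id = "686449807"
telegram_app_specific_url_scheme = "tg"
telegram_premium_iap_product_id = "org.telegram.telegramPremium.monthly"
telegram_aps_environment = "production"
telegram_enable_siri = True
telegram_enable_icloud = True
telegram_enable_watch = True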
build-system/Make/RemoteBuild.py (new file, 129 lines)
@@ -0,0 +1,129 @@
import os
import sys
import json
import shutil
import shlex
import tempfile

from BuildEnvironment import run_executable_with_output


def session_scp_upload(session, source_path, destination_path):
    scp_command = 'scp -i {privateKeyPath} -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -pr {source_path} containerhost@"{ipAddress}":{destination_path}'.format(
        privateKeyPath=session.privateKeyPath,
        ipAddress=session.ipAddress,
        source_path=shlex.quote(source_path),
        destination_path=shlex.quote(destination_path)
    )
    if os.system(scp_command) != 0:
        print('Command {} finished with a non-zero status'.format(scp_command))


def session_scp_download(session, source_path, destination_path):
    scp_command = 'scp -i {privateKeyPath} -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -pr containerhost@"{ipAddress}":{source_path} {destination_path}'.format(
        privateKeyPath=session.privateKeyPath,
        ipAddress=session.ipAddress,
        source_path=shlex.quote(source_path),
        destination_path=shlex.quote(destination_path)
    )
    if os.system(scp_command) != 0:
        print('Command {} finished with a non-zero status'.format(scp_command))


def session_ssh(session, command):
    ssh_command = 'ssh -i {privateKeyPath} -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null containerhost@"{ipAddress}" -o ServerAliveInterval=60 -t "{command}"'.format(
        privateKeyPath=session.privateKeyPath,
        ipAddress=session.ipAddress,
        command=command
    )
    return os.system(ssh_command)


def remote_build(darwin_containers_host, bazel_cache_host, configuration, certificates_path, provisioning_profiles_path, configurationPath):
    from darwin_containers import DarwinContainers

    base_dir = os.getcwd()

    configuration_path = 'versions.json'
    xcode_version = ''
    with open(configuration_path) as file:
        configuration_dict = json.load(file)
        if configuration_dict['xcode'] is None:
            raise Exception('Missing xcode version in {}'.format(configuration_path))
        xcode_version = configuration_dict['xcode']

    print('Xcode version: {}'.format(xcode_version))

    commit_count = run_executable_with_output('git', [
        'rev-list',
        '--count',
        'HEAD'
    ])

    build_number_offset = 0
    with open('build_number_offset') as file:
        build_number_offset = int(file.read())

    build_number = build_number_offset + int(commit_count)
    print('Build number: {}'.format(build_number))

    macos_version = '12.5'
    image_name = 'macos-{macos_version}-xcode-{xcode_version}'.format(macos_version=macos_version, xcode_version=xcode_version)

    print('Image name: {}'.format(image_name))

    source_dir = os.path.basename(base_dir)
    buildbox_dir = 'buildbox'
    source_archive_path = '{buildbox_dir}/transient-data/source.tar'.format(buildbox_dir=buildbox_dir)

    if os.path.exists(source_archive_path):
        os.remove(source_archive_path)

    print('Compressing source code...')
    os.system('find . -type f -a -not -regex "\\." -a -not -regex ".*\\./git" -a -not -regex ".*\\./git/.*" -a -not -regex "\\./bazel-bin" -a -not -regex "\\./bazel-bin/.*" -a -not -regex "\\./bazel-out" -a -not -regex "\\./bazel-out/.*" -a -not -regex "\\./bazel-testlogs" -a -not -regex "\\./bazel-testlogs/.*" -a -not -regex "\\./bazel-telegram-ios" -a -not -regex "\\./bazel-telegram-ios/.*" -a -not -regex "\\./buildbox" -a -not -regex "\\./buildbox/.*" -a -not -regex "\\./buck-out" -a -not -regex "\\./buck-out/.*" -a -not -regex "\\./\\.buckd" -a -not -regex "\\./\\.buckd/.*" -a -not -regex "\\./build" -a -not -regex "\\./build/.*" -print0 | tar cf "{buildbox_dir}/transient-data/source.tar" --null -T -'.format(buildbox_dir=buildbox_dir))

    darwinContainers = DarwinContainers(serverAddress=darwin_containers_host, verbose=False)

    print('Opening container session...')
    with darwinContainers.workingImageSession(name=image_name) as session:
        print('Uploading data to container...')
        session_scp_upload(session=session, source_path=certificates_path, destination_path='certs')
        session_scp_upload(session=session, source_path=provisioning_profiles_path, destination_path='profiles')
        session_scp_upload(session=session, source_path=configurationPath, destination_path='configuration.json')
        session_scp_upload(session=session, source_path='{base_dir}/{buildbox_dir}/transient-data/source.tar'.format(base_dir=base_dir, buildbox_dir=buildbox_dir), destination_path='')

        guest_build_sh = '''
            mkdir telegram-ios
            cd telegram-ios
            tar -xf ../source.tar

            python3 build-system/Make/ImportCertificates.py --path $HOME/certs

            python3 build-system/Make/Make.py \\
                build \\
                --buildNumber={build_number} \\
                --configuration={configuration} \\
                --configurationPath=$HOME/configuration.json \\
                --apsEnvironment=production \\
                --provisioningProfilesPath=$HOME/profiles
            '''.format(
            build_number=build_number,
            configuration=configuration
        )
        guest_build_file_path = tempfile.mktemp()
        with open(guest_build_file_path, 'w+') as file:
            file.write(guest_build_sh)
        session_scp_upload(session=session, source_path=guest_build_file_path, destination_path='guest-build-telegram.sh')
        os.unlink(guest_build_file_path)

        print('Executing remote build...')

        if bazel_cache_host is None:
            bazel_cache_host = ''
        session_ssh(session=session, command='bash -l guest-build-telegram.sh')

        print('Retrieving build artifacts...')

        artifacts_path = '{base_dir}/build/artifacts'.format(base_dir=base_dir)
        if os.path.exists(artifacts_path):
            shutil.rmtree(artifacts_path)
        os.makedirs(artifacts_path, exist_ok=True)

        session_scp_download(session=session, source_path='telegram-ios/build/artifacts/*', destination_path='{artifacts_path}/'.format(artifacts_path=artifacts_path))
        print('Artifacts have been stored at {}'.format(artifacts_path))
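Make.py's new remote-build subcommand is the intended entry point for this module; a direct call would look roughly like the following sketch (the host address and paths are placeholders, mirroring the dispatch code in Make.py above):

# Illustrative only, not part of the commit.
import RemoteBuild

RemoteBuild.remote_build(
    darwin_containers_host='192.168.1.10',  # hypothetical DarwinContainers host
    bazel_cache_host=None,                  # optional remote cache
    configuration='release_arm64',
    certificates_path='build-input/configuration-repository/certs',      # as computed by Make.py
    provisioning_profiles_path='build-input/configuration-repository/profiles',
    configurationPath='build-system/appstore-configuration.json'
)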
build-system/appstore-configuration.json (new executable file, 14 lines)
@@ -0,0 +1,14 @@
{
    "bundle_id": "ph.telegra.Telegraph",
    "api_id": "8",
    "api_hash": "7245de8e747a0d6fbe11f7cc14fcc0bb",
    "team_id": "C67CF9S4VU",
    "app_center_id": "0",
    "is_internal_build": "false",
    "is_appstore_build": "true",
    "appstore_id": "686449807",
    "app_specific_url_scheme": "tg",
    "premium_iap_product_id": "org.telegram.telegramPremium.monthly",
    "enable_siri": true,
    "enable_icloud": true
}
@@ -8,8 +8,11 @@ import shlex
 import shutil
 import subprocess
 import time
+import pipes
+import tempfile

-from darwin_containers import DarwinContainers
+def quote_args(seq):
+    return ' '.join(pipes.quote(arg) for arg in seq)
+

 def get_clean_env():
     clean_env = os.environ.copy()
@@ -26,7 +29,7 @@ def resolve_executable(program):
     return None


-def run_executable_with_output(path, arguments):
+def run_executable_with_output(path, arguments, check_result=False):
     executable_path = resolve_executable(path)
     if executable_path is None:
         raise Exception('Could not resolve {} to a valid executable file'.format(path))
@@ -39,121 +42,110 @@ def run_executable_with_output(path, arguments):
     )
     output_data, _ = process.communicate()
     output_string = output_data.decode('utf-8')
-    return output_string
-
-
-def session_scp_upload(session, source_path, destination_path):
-    scp_command = 'scp -i {privateKeyPath} -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -pr {source_path} containerhost@"{ipAddress}":{destination_path}'.format(
-        privateKeyPath=session.privateKeyPath,
-        ipAddress=session.ipAddress,
-        source_path=shlex.quote(source_path),
-        destination_path=shlex.quote(destination_path)
-    )
-    if os.system(scp_command) != 0:
-        print('Command {} finished with a non-zero status'.format(scp_command))
-
-
-def session_ssh(session, command):
-    ssh_command = 'ssh -i {privateKeyPath} -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null containerhost@"{ipAddress}" -o ServerAliveInterval=60 -t "{command}"'.format(
-        privateKeyPath=session.privateKeyPath,
-        ipAddress=session.ipAddress,
-        command=command
-    )
-    return os.system(ssh_command)
-
-
-def remote_build(darwin_containers_host, configuration):
-    base_dir = os.getcwd()
-
-    configuration_path = 'versions.json'
-    xcode_version = ''
-    with open(configuration_path) as file:
-        configuration_dict = json.load(file)
-        if configuration_dict['xcode'] is None:
-            raise Exception('Missing xcode version in {}'.format(configuration_path))
-        xcode_version = configuration_dict['xcode']
-
-    print('Xcode version: {}'.format(xcode_version))
-
-    commit_count = run_executable_with_output('git', [
-        'rev-list',
-        '--count',
-        'HEAD'
-    ])
-
-    build_number_offset = 0
-    with open('build_number_offset') as file:
-        build_number_offset = int(file.read())
-
-    build_number = build_number_offset + int(commit_count)
-    print('Build number: {}'.format(build_number))
-
-    macos_version = '12.5'
-    image_name = 'macos-{macos_version}-xcode-{xcode_version}'.format(macos_version=macos_version, xcode_version=xcode_version)
-
-    print('Image name: {}'.format(image_name))
-
-    buildbox_dir = 'buildbox'
-    os.makedirs('{buildbox_dir}/transient-data'.format(buildbox_dir=buildbox_dir), exist_ok=True)
-
-    codesigning_subpath = ''
-    remote_configuration = ''
-    if configuration == 'appcenter':
-        remote_configuration = 'hockeyapp'
-    elif configuration == 'appstore':
-        remote_configuration = 'appstore'
-    elif configuration == 'reproducible':
-        codesigning_subpath = 'build-system/fake-codesigning'
-        remote_configuration = 'verify'
-
-        destination_codesigning_path = '{buildbox_dir}/transient-data/telegram-codesigning'.format(buildbox_dir=buildbox_dir)
-        destination_build_configuration_path = '{buildbox_dir}/transient-data/build-configuration'.format(buildbox_dir=buildbox_dir)
-
-        if os.path.exists(destination_codesigning_path):
-            shutil.rmtree(destination_codesigning_path)
-        if os.path.exists(destination_build_configuration_path):
-            shutil.rmtree(destination_build_configuration_path)
-
-        shutil.copytree('build-system/fake-codesigning', '{buildbox_dir}/transient-data/telegram-codesigning'.format(buildbox_dir=buildbox_dir))
-        shutil.copytree('build-system/example-configuration', '{buildbox_dir}/transient-data/build-configuration'.format(buildbox_dir=buildbox_dir))
-    else:
-        print('Unknown configuration {}'.format(configuration))
-        sys.exit(1)
-
-    source_dir = os.path.basename(base_dir)
-    source_archive_path = '{buildbox_dir}/transient-data/source.tar'.format(buildbox_dir=buildbox_dir)
-
-    if os.path.exists(source_archive_path):
-        os.remove(source_archive_path)
-
-    print('Compressing source code...')
-    os.system('find . -type f -a -not -regex "\\." -a -not -regex ".*\\./git" -a -not -regex ".*\\./git/.*" -a -not -regex "\\./bazel-bin" -a -not -regex "\\./bazel-bin/.*" -a -not -regex "\\./bazel-out" -a -not -regex "\\./bazel-out/.*" -a -not -regex "\\./bazel-testlogs" -a -not -regex "\\./bazel-testlogs/.*" -a -not -regex "\\./bazel-telegram-ios" -a -not -regex "\\./bazel-telegram-ios/.*" -a -not -regex "\\./buildbox" -a -not -regex "\\./buildbox/.*" -a -not -regex "\\./buck-out" -a -not -regex "\\./buck-out/.*" -a -not -regex "\\./\\.buckd" -a -not -regex "\\./\\.buckd/.*" -a -not -regex "\\./build" -a -not -regex "\\./build/.*" -print0 | tar cf "{buildbox_dir}/transient-data/source.tar" --null -T -'.format(buildbox_dir=buildbox_dir))
-
-    darwinContainers = DarwinContainers(serverAddress=darwin_containers_host, verbose=False)
-
-    print('Opening container session...')
-    with darwinContainers.workingImageSession(name=image_name) as session:
-        print('Uploading data to container...')
-        session_scp_upload(session=session, source_path=codesigning_subpath, destination_path='codesigning_data')
-        session_scp_upload(session=session, source_path='{base_dir}/{buildbox_dir}/transient-data/build-configuration'.format(base_dir=base_dir, buildbox_dir=buildbox_dir), destination_path='telegram-configuration')
-        session_scp_upload(session=session, source_path='{base_dir}/{buildbox_dir}/guest-build-telegram.sh'.format(base_dir=base_dir, buildbox_dir=buildbox_dir), destination_path='')
-        session_scp_upload(session=session, source_path='{base_dir}/{buildbox_dir}/transient-data/source.tar'.format(base_dir=base_dir, buildbox_dir=buildbox_dir), destination_path='')
-
-        print('Executing remote build...')
-
-        bazel_cache_host=''
-        session_ssh(session=session, command='BUILD_NUMBER="{build_number}" BAZEL_HTTP_CACHE_URL="{bazel_cache_host}" bash -l guest-build-telegram.sh {remote_configuration}'.format(
-            build_number=build_number,
-            bazel_cache_host=bazel_cache_host,
-            remote_configuration=remote_configuration
-        ))
-
-        print('Retrieving build artifacts...')
-
-        artifacts_path='{base_dir}/build/artifacts'.format(base_dir=base_dir)
-        if os.path.exists(artifacts_path):
-            shutil.rmtree(artifacts_path)
-        os.makedirs(artifacts_path, exist_ok=True)
-
-        session_scp_download(session=session, source_path='telegram-ios/build/artifacts/*', destination_path='{artifacts_path}/'.format(artifacts_path=artifacts_path))
-        print('Artifacts have been stored at {}'.format(artifacts_path))
+    if check_result:
+        if process.returncode != 0:
+            print('Command {} {} finished with non-zero return code and output:\n{}'.format(executable_path, arguments, output_string))
+            sys.exit(1)
+
+    return output_string
+
+
+def isolated_build(arguments):
+    if arguments.certificatesPath is not None:
+        if not os.path.exists(arguments.certificatesPath):
+            print('{} does not exist'.format(arguments.certificatesPath))
+            sys.exit(1)
+
+        keychain_name = 'temp.keychain'
+        keychain_password = 'secret'
+
+        existing_keychains = run_executable_with_output('security', arguments=['list-keychains'], check_result=True)
+        if keychain_name in existing_keychains:
+            run_executable_with_output('security', arguments=['delete-keychain'], check_result=True)
+
+        run_executable_with_output('security', arguments=[
+            'create-keychain',
+            '-p',
+            keychain_password,
+            keychain_name
+        ], check_result=True)
+
+        existing_keychains = run_executable_with_output('security', arguments=['list-keychains', '-d', 'user'])
+        existing_keychains = existing_keychains.replace('"', '')
+
+        run_executable_with_output('security', arguments=[
+            'list-keychains',
+            '-d',
+            'user',
+            '-s',
+            keychain_name,
+            existing_keychains
+        ], check_result=True)
+
+        run_executable_with_output('security', arguments=['set-keychain-settings', keychain_name])
+        run_executable_with_output('security', arguments=['unlock-keychain', '-p', keychain_password, keychain_name])
+
+        for file_name in os.listdir(arguments.certificatesPath):
+            file_path = arguments.certificatesPath + '/' + file_name
+            if file_path.endswith('.p12') or file_path.endswith('.cer'):
+                run_executable_with_output('security', arguments=[
+                    'import',
+                    file_path,
+                    '-k',
+                    keychain_name,
+                    '-P',
+                    '',
+                    '-T',
+                    '/usr/bin/codesign',
+                    '-T',
+                    '/usr/bin/security'
+                ], check_result=True)

+        run_executable_with_output('security', arguments=[
+            'import',
+            'build-system/AppleWWDRCAG3.cer',
+            '-k',
+            keychain_name,
+            '-P',
+            '',
+            '-T',
+            '/usr/bin/codesign',
+            '-T',
+            '/usr/bin/security'
+        ], check_result=True)
+
+        run_executable_with_output('security', arguments=[
+            'set-key-partition-list',
+            '-S',
+            'apple-tool:,apple:',
+            '-k',
+            keychain_password,
+            keychain_name
+        ], check_result=True)
+
+    build_arguments = ['build-system/Make/Make.py']
+
+    #build_arguments.append('--bazel="$(pwd)/tools/bazel"')
+
+    if arguments.cacheHost is not None:
+        build_arguments.append('--cacheHost={}'.format(arguments.cacheHost))
+
+    build_arguments.append('build')
+
+    build_arguments.append('--configurationPath={}'.format(arguments.configurationPath))
+    build_arguments.append('--buildNumber={}'.format(arguments.buildNumber))
+    build_arguments.append('--configuration={}'.format(arguments.configuration))
+    build_arguments.append('--apsEnvironment=production')
+    build_arguments.append('--disableParallelSwiftmoduleGeneration')
+    build_arguments.append('--provisioningProfilesPath={}'.format(arguments.provisioningProfilesPath))
+
+    build_command = 'python3 ' + quote_args(build_arguments)
+    print('Running {}'.format(build_command))
+    os.system(build_command)


 if __name__ == '__main__':
     parser = argparse.ArgumentParser(prog='build')
@@ -184,6 +176,56 @@ if __name__ == '__main__':
         required=True,
         help='Build configuration'
     )
+    remote_build_parser.add_argument(
+        '--bazelCacheHost',
+        required=False,
+        type=str,
+        help='Bazel remote cache host address.'
+    )
+
+    isolated_build_parser = subparsers.add_parser('isolated-build', help='Build the app inside an isolated environment.')
+    isolated_build_parser.add_argument(
+        '--certificatesPath',
+        required=False,
+        type=str,
+        help='Install codesigning certificates from the specified directory.'
+    )
+    isolated_build_parser.add_argument(
+        '--provisioningProfilesPath',
+        required=True,
+        help='''
+            Use codesigning provisioning profiles from a local directory.
+            ''',
+        metavar='command'
+    )
+    isolated_build_parser.add_argument(
+        '--cacheHost',
+        required=False,
+        type=str,
+        help='Bazel cache host url.'
+    )
+    isolated_build_parser.add_argument(
+        '--configurationPath',
+        help='''
+            Path to a json containing build configuration.
+            See build-system/appstore-configuration.json for an example.
+            ''',
+        required=True,
+        metavar='path'
+    )
+    isolated_build_parser.add_argument(
+        '--buildNumber',
+        required=True,
+        type=int,
+        help='Build number.',
+        metavar='number'
+    )
+    isolated_build_parser.add_argument(
+        '--configuration',
+        type=str,
+        required=True,
+        help='Build configuration'
+    )
+
     if len(sys.argv) < 2:
         parser.print_help()
@@ -195,164 +237,8 @@ if __name__ == '__main__':
         exit(0)

     if args.commandName == 'remote-build':
-        remote_build(darwin_containers_host=args.darwinContainersHost, configuration=args.configuration)
+        remote_build(darwin_containers_host=args.darwinContainersHost, bazel_cache_host=args.bazelCacheHost, configuration=args.configuration)
+    elif args.commandName == 'isolated-build':
+        isolated_build(arguments=args)
-
-
-'''set -e
-
-rm -f "tools/bazel"
-cp "$BAZEL" "tools/bazel"
-
-BUILD_CONFIGURATION="$1"
-
-if [ "$BUILD_CONFIGURATION" == "hockeyapp" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental-2" ]; then
-    CODESIGNING_SUBPATH="$BUILDBOX_DIR/transient-data/telegram-codesigning/codesigning"
-elif [ "$BUILD_CONFIGURATION" == "appstore" ] || [ "$BUILD_CONFIGURATION" == "appstore-development" ]; then
-    CODESIGNING_SUBPATH="$BUILDBOX_DIR/transient-data/telegram-codesigning/codesigning"
-elif [ "$BUILD_CONFIGURATION" == "verify" ]; then
-    CODESIGNING_SUBPATH="build-system/fake-codesigning"
-else
-    echo "Unknown configuration $1"
-    exit 1
-fi
-
-COMMIT_COMMENT="$(git log -1 --pretty=%B)"
-case "$COMMIT_COMMENT" in
-    *"[nocache]"*)
-        export BAZEL_HTTP_CACHE_URL=""
-        ;;
-esac
-
-COMMIT_ID="$(git rev-parse HEAD)"
-COMMIT_AUTHOR=$(git log -1 --pretty=format:'%an')
-if [ -z "$2" ]; then
-    COMMIT_COUNT=$(git rev-list --count HEAD)
-    BUILD_NUMBER_OFFSET="$(cat build_number_offset)"
-    COMMIT_COUNT="$(($COMMIT_COUNT+$BUILD_NUMBER_OFFSET))"
-    BUILD_NUMBER="$COMMIT_COUNT"
-else
-    BUILD_NUMBER="$2"
-fi
-
-BASE_DIR=$(pwd)
-
-if [ "$BUILD_CONFIGURATION" == "hockeyapp" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental-2" ] || [ "$BUILD_CONFIGURATION" == "appstore" ] || [ "$BUILD_CONFIGURATION" == "appstore-development" ]; then
-    if [ ! `which generate-configuration.sh` ]; then
-        echo "generate-configuration.sh not found in PATH $PATH"
-        exit 1
-    fi
-
-    mkdir -p "$BASE_DIR/$BUILDBOX_DIR/transient-data/telegram-codesigning"
-    mkdir -p "$BASE_DIR/$BUILDBOX_DIR/transient-data/build-configuration"
-
-    case "$BUILD_CONFIGURATION" in
-        "hockeyapp"|"appcenter-experimental"|"appcenter-experimental-2")
-            generate-configuration.sh internal release "$BASE_DIR/$BUILDBOX_DIR/transient-data/telegram-codesigning" "$BASE_DIR/$BUILDBOX_DIR/transient-data/build-configuration"
-            ;;
-
-        "appstore")
-            generate-configuration.sh appstore release "$BASE_DIR/$BUILDBOX_DIR/transient-data/telegram-codesigning" "$BASE_DIR/$BUILDBOX_DIR/transient-data/build-configuration"
-            ;;
-
-        "appstore-development")
-            generate-configuration.sh appstore development "$BASE_DIR/$BUILDBOX_DIR/transient-data/telegram-codesigning" "$BASE_DIR/$BUILDBOX_DIR/transient-data/build-configuration"
-            ;;
-
-        *)
-            echo "Unknown build configuration $BUILD_CONFIGURATION"
-            exit 1
-            ;;
-    esac
-elif [ "$BUILD_CONFIGURATION" == "verify" ]; then
-    mkdir -p "$BASE_DIR/$BUILDBOX_DIR/transient-data/telegram-codesigning"
-    mkdir -p "$BASE_DIR/$BUILDBOX_DIR/transient-data/build-configuration"
-
-    cp -R build-system/fake-codesigning/* "$BASE_DIR/$BUILDBOX_DIR/transient-data/telegram-codesigning/"
-    cp -R build-system/example-configuration/* "$BASE_DIR/$BUILDBOX_DIR/transient-data/build-configuration/"
-fi
-
-if [ ! -d "$CODESIGNING_SUBPATH" ]; then
-    echo "$CODESIGNING_SUBPATH does not exist"
-    exit 1
-fi
-
-SOURCE_DIR=$(basename "$BASE_DIR")
-rm -f "$BUILDBOX_DIR/transient-data/source.tar"
-set -x
-find . -type f -a -not -regex "\\." -a -not -regex ".*\\./git" -a -not -regex ".*\\./git/.*" -a -not -regex "\\./bazel-bin" -a -not -regex "\\./bazel-bin/.*" -a -not -regex "\\./bazel-out" -a -not -regex "\\./bazel-out/.*" -a -not -regex "\\./bazel-testlogs" -a -not -regex "\\./bazel-testlogs/.*" -a -not -regex "\\./bazel-telegram-ios" -a -not -regex "\\./bazel-telegram-ios/.*" -a -not -regex "\\./buildbox" -a -not -regex "\\./buildbox/.*" -a -not -regex "\\./buck-out" -a -not -regex "\\./buck-out/.*" -a -not -regex "\\./\\.buckd" -a -not -regex "\\./\\.buckd/.*" -a -not -regex "\\./build" -a -not -regex "\\./build/.*" -print0 | tar cf "$BUILDBOX_DIR/transient-data/source.tar" --null -T -
-
-PROCESS_ID="$$"
-
-if [ -z "$RUNNING_VM" ]; then
-    VM_NAME="$VM_BASE_NAME-$(openssl rand -hex 10)-build-telegram-$PROCESS_ID"
-else
-    VM_NAME="$RUNNING_VM"
-fi
-
-if [ "$BUILD_MACHINE" == "linux" ]; then
-    virt-clone --original "$VM_BASE_NAME" --name "$VM_NAME" --auto-clone
-    virsh start "$VM_NAME"
-
-    echo "Getting VM IP"
-
-    while [ 1 ]; do
-        TEST_IP=$(virsh domifaddr "$VM_NAME" 2>/dev/null | egrep -o 'ipv4.*' | sed -e 's/ipv4\s*//g' | sed -e 's|/.*||g')
-        if [ ! -z "$TEST_IP" ]; then
-            RESPONSE=$(ssh -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null telegram@"$TEST_IP" -o ServerAliveInterval=60 -t "echo -n 1")
-            if [ "$RESPONSE" == "1" ]; then
-                VM_IP="$TEST_IP"
-                break
-            fi
-        fi
-        sleep 1
-    done
-elif [ "$BUILD_MACHINE" == "macOS" ]; then
-    if [ -z "$RUNNING_VM" ]; then
-        prlctl clone "$VM_BASE_NAME" --linked --name "$VM_NAME"
-        prlctl start "$VM_NAME"
-
-        echo "Getting VM IP"
-
-        while [ 1 ]; do
-            TEST_IP=$(prlctl exec "$VM_NAME" "ifconfig | grep inet | grep broadcast | grep -Eo '([0-9]{1,3}\.){3}[0-9]{1,3}' | head -1 | tr '\n' '\0'" 2>/dev/null || echo "")
-            if [ ! -z "$TEST_IP" ]; then
-                RESPONSE=$(ssh -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null telegram@"$TEST_IP" -o ServerAliveInterval=60 -t "echo -n 1")
-                if [ "$RESPONSE" == "1" ]; then
-                    VM_IP="$TEST_IP"
-                    break
-                fi
-            fi
-            sleep 1
-        done
-    fi
-    echo "VM_IP=$VM_IP"
-fi
-
-scp -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -pr "$CODESIGNING_SUBPATH" telegram@"$VM_IP":codesigning_data
-scp -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -pr "$BASE_DIR/$BUILDBOX_DIR/transient-data/build-configuration" telegram@"$VM_IP":telegram-configuration
-
-scp -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -pr "$BUILDBOX_DIR/guest-build-telegram.sh" "$BUILDBOX_DIR/transient-data/source.tar" telegram@"$VM_IP":
-
-ssh -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null telegram@"$VM_IP" -o ServerAliveInterval=60 -t "export BUILD_NUMBER=\"$BUILD_NUMBER\"; export BAZEL_HTTP_CACHE_URL=\"$BAZEL_HTTP_CACHE_URL\"; $GUEST_SHELL -l guest-build-telegram.sh $BUILD_CONFIGURATION" || true
-
-OUTPUT_PATH="build/artifacts"
-rm -rf "$OUTPUT_PATH"
-mkdir -p "$OUTPUT_PATH"
-
-scp -o LogLevel=ERROR -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -pr telegram@"$VM_IP":"telegram-ios/build/artifacts/*" "$OUTPUT_PATH/"
-
-if [ -z "$RUNNING_VM" ]; then
-    if [ "$BUILD_MACHINE" == "linux" ]; then
-        virsh destroy "$VM_NAME"
-        virsh undefine "$VM_NAME" --remove-all-storage --nvram
-    elif [ "$BUILD_MACHINE" == "macOS" ]; then
-        echo "Deleting VM..."
-        #prlctl stop "$VM_NAME" --kill
-        #prlctl delete "$VM_NAME"
-    fi
-fi
-
-if [ ! -f "$OUTPUT_PATH/Telegram.ipa" ]; then
-    exit 1
-fi
-'''
@@ -101,7 +101,8 @@ python3 build-system/Make/Make.py \
     --configurationPath="$HOME/telegram-configuration" \
     --buildNumber="$BUILD_NUMBER" \
     --disableParallelSwiftmoduleGeneration \
-    --configuration="$APP_CONFIGURATION"
+    --configuration="$APP_CONFIGURATION" \
+    --apsEnvironment=production

 OUTPUT_PATH="build/artifacts"
 rm -rf "$OUTPUT_PATH"