aboutsummaryrefslogtreecommitdiff
path: root/ci/nightly.py
blob: 923c4e36df70074d7ca29042d7c250ae15bd177d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
import os
import sys
from zipfile  import ZipFile, ZIP_DEFLATED
from b2sdk.v2 import InMemoryAccountInfo, B2Api
from datetime import datetime, timezone
import json

# Bucket folder (key prefix) that all nightly artifacts live under.
UPLOAD_FOLDER = "nightly/"

# Module-level B2 client plus its configuration. Credentials and settings are
# read from the environment at import time, so APPID / APPKEY / BUCKET /
# DAYS_TO_KEEP must be set before this script runs (a missing variable raises
# KeyError immediately rather than failing mid-upload).
info   = InMemoryAccountInfo()
b2_api = B2Api(info)
application_key_id = os.environ['APPID']
application_key    = os.environ['APPKEY']
bucket_name        = os.environ['BUCKET']
days_to_keep       = os.environ['DAYS_TO_KEEP']  # retention window in days (string; int()-ed at use sites)

def auth() -> bool:
	"""Authorize the global B2 API client against the production realm.

	Returns True when the client is (or already was) authenticated,
	False otherwise. Credentials come from the module-level
	application_key_id / application_key read from the environment.
	"""
	try:
		# get_realm() only succeeds after authorize_account() has run, so a
		# clean return means we are already authenticated.
		b2_api.account_info.get_realm()
		return True  # Already authenticated
	except Exception:
		# Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
		# not swallowed; any library error just means "not yet authenticated".
		pass

	err = b2_api.authorize_account("production", application_key_id, application_key)
	return err is None

def get_bucket():
	"""Return the configured B2 bucket, exiting the process if auth fails."""
	if not auth():
		sys.exit(1)
	return b2_api.get_bucket_by_name(bucket_name)

def remove_prefix(text: str, prefix: str) -> str:
	"""Return *text* with *prefix* stripped from the front, if present.

	Explicit equivalent of str.removeprefix (Python 3.9+), kept as a helper
	for compatibility with older interpreters. Replaces the original
	`text[text.startswith(prefix) and len(prefix):]` boolean-slice trick,
	which behaves identically but is needlessly obscure.
	"""
	if text.startswith(prefix):
		return text[len(prefix):]
	return text

def create_and_upload_artifact_zip(platform: str, artifact: str) -> int:
	"""Package a build artifact for *platform* and upload it to the bucket.

	Linux/macOS artifacts are assumed to already be .tar.gz archives and are
	uploaded as-is; any other platform gets its artifact directory zipped
	with everything placed under a top-level "dist/" folder.

	Returns 0 on success, 1 if the archive to upload does not exist.
	"""
	# UTC date (truncated to midnight) stamped into the destination name.
	now = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)

	source_archive: str
	destination_name = f'odin-{platform}-nightly+{now.strftime("%Y-%m-%d")}'

	if platform.startswith(("linux", "macos")):
		destination_name += ".tar.gz"
		source_archive = artifact
	else:
		destination_name += ".zip"
		source_archive = destination_name

		print(f"Creating archive {destination_name} from {artifact} and uploading to {bucket_name}")
		with ZipFile(source_archive, mode='w', compression=ZIP_DEFLATED, compresslevel=9) as z:
			for root, _dirs, filenames in os.walk(artifact):
				for file in filenames:
					file_path = os.path.join(root, file)
					# Re-root every file under "dist/" inside the zip.
					zip_path  = os.path.join("dist", os.path.relpath(file_path, artifact))
					z.write(file_path, zip_path)

	if not os.path.exists(source_archive):
		print(f"Error: archive {source_archive} not found.")
		return 1

	print("Uploading {} to {}".format(source_archive, UPLOAD_FOLDER + destination_name))
	bucket = get_bucket()
	# Use UPLOAD_FOLDER rather than a second hard-coded "nightly/" literal so
	# the destination stays consistent with the path printed above.
	bucket.upload_local_file(
		source_archive,                   # Local file to upload
		UPLOAD_FOLDER + destination_name, # B2 destination path
	)
	return 0

def prune_artifacts():
	"""Delete nightly artifacts older than DAYS_TO_KEEP days.

	Iterates every file version under UPLOAD_FOLDER and deletes those whose
	upload date (UTC, truncated to midnight) is strictly older than the
	retention window. Returns 0.
	"""
	print(f"Looking for binaries to delete older than {days_to_keep} days")

	# Hoisted loop invariants: today's UTC midnight and the retention window
	# were previously recomputed/re-parsed on every iteration.
	now      = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
	max_days = int(days_to_keep)

	bucket = get_bucket()
	for file, _ in bucket.ls(UPLOAD_FOLDER, latest_only=False):
		# Timestamp is in milliseconds
		date  = datetime.fromtimestamp(file.upload_timestamp / 1_000.0, tz=timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
		delta = now - date

		if delta.days > max_days:
			print("Deleting {}".format(file.file_name))
			file.delete()

	return 0

def update_nightly_json():
	"""Regenerate and upload the "nightly.json" index of current artifacts.

	Lists the latest version of each file under UPLOAD_FOLDER, groups them by
	upload date, and uploads the resulting JSON document as "nightly.json".
	Returns 0.
	"""
	print(f"Updating nightly.json with files {days_to_keep} days or newer")

	files_by_date = {}

	bucket = get_bucket()

	for file, _ in bucket.ls(UPLOAD_FOLDER, latest_only=True):
		# Timestamp is in milliseconds. Use UTC explicitly so the date
		# grouping matches prune_artifacts() and does not shift with the CI
		# machine's local timezone (the original call was timezone-naive).
		date = datetime.fromtimestamp(file.upload_timestamp / 1_000.0, tz=timezone.utc).strftime('%Y-%m-%d')
		name = remove_prefix(file.file_name, UPLOAD_FOLDER)
		sha1 = file.content_sha1
		size = file.size
		url  = bucket.get_download_url(file.file_name)

		files_by_date.setdefault(date, []).append({
			'name':        name,
			'url':         url,
			'sha1':        sha1,
			'sizeInBytes': size,
		})

	now = datetime.now(timezone.utc).isoformat()

	nightly = json.dumps({
		'last_updated' : now,
		'files': files_by_date
	}, sort_keys=True, indent=4, ensure_ascii=False).encode('utf-8')

	bucket.upload_bytes(
		nightly,        # JSON bytes
		"nightly.json", # B2 destination path
	)
	return 0

if __name__ == "__main__":
	# Command-line dispatch: artifact | prune | json.
	if len(sys.argv) == 1:
		print("Usage: {} <verb> [arguments]".format(sys.argv[0]))
		print("\tartifact <platform prefix> <artifact path>\n\t\tCreates and uploads a platform artifact zip.")
		print("\tprune\n\t\tDeletes old artifacts from bucket")
		print("\tjson\n\t\tUpdate and upload nightly.json")
		sys.exit(1)
	else:
		command = sys.argv[1].lower()
		if command == "artifact":
			if len(sys.argv) != 4:
				print("Usage: {} artifact <platform prefix> <artifact path>".format(sys.argv[0]))
				print("Error: Expected artifact command to be given platform prefix and artifact path.\n")
				sys.exit(1)

			res = create_and_upload_artifact_zip(sys.argv[2], sys.argv[3])
			sys.exit(res)

		elif command == "prune":
			res = prune_artifacts()
			sys.exit(res)

		elif command == "json":
			res = update_nightly_json()
			sys.exit(res)

		else:
			# An unrecognized verb previously fell through all branches and
			# exited 0 silently; fail loudly so CI surfaces the typo.
			print("Error: Unknown command {!r}".format(command))
			sys.exit(1)