
Commit a0e4eb1

Move prebuild tools to mongoose-os repo
1 parent ee4320b

2 files changed: +507, -0 lines


tools/prebuild.py

Lines changed: 341 additions & 0 deletions
@@ -0,0 +1,341 @@
#!/usr/bin/env python3
#
# A utility for prebuilding binary libs and apps.
# Can publish outputs to GitHub.
#
# Main input is a YAML config file that specifies which libs/apps to build
# in which variants and what to do with them.
#
# The top-level object of the config file is an array; each entry is as follows:
#   location: path to a git repo
#   locations: [ multiple, paths, to, repos ]
#     location, locations or both can be used.
#   variants: array of variants; each variant must specify a name and a platform
#     and can additionally specify build vars and C/C++ flags.
#   out: output specification(s). Currently only the github output type is
#     supported (an illustrative out spec is sketched below the example).
#     If no output is specified and the input looks like a GitHub repo,
#     a github output is assumed.
#
# Example config file:
#
# - locations:
#     - https://github.com/mongoose-os-apps/demo-c
#     - https://github.com/mongoose-os-apps/demo-js
#   variants:
#     - name: esp8266
#       platform: esp8266
#     - name: esp8266-1M
#       platform: esp8266
#       build_vars:
#         FLASH_SIZE: 1048576
#
# This config will build two variants of each of the two apps and upload the
# artifacts to GitHub.

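# Note: the out/github spec below is an illustrative sketch, based on how
# ProcessLoc() interprets "out" entries (a "github" output with a "repo"
# template expanded with %(name)s / %(repo_subdir)s and an optional "update"
# flag); it is not copied from a real config.
#
# - location: https://github.com/mongoose-os-libs/some-lib
#   variants:
#     - name: esp32
#       platform: esp32
#   out:
#     - github:
#         repo: mongoose-os-libs/%(name)s
#         update: true  # update the existing release instead of re-creating it
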
import argparse
import copy
import glob
import logging
import os
import shutil
import subprocess
import time
import yaml

import github_api

# NB: only support building from master right now.


def RunCmd(cmd):
    logging.info(" %s", " ".join(cmd))
    subprocess.check_call(cmd)


def DeleteRelease(repo, token, rel_id):
    github_api.CallReleasesAPI(
        repo, token,
        method="DELETE",
        releases_url=("/%d" % rel_id),
        decode_json=False)


def CreateGitHubRelease(spec, tag, token, tmp_dir, re_create=False):
    logging.debug("GH release spec: %s", spec)
    repo = spec["repo"]

    logging.info(" Publishing release %s / %s", repo, tag)

    # If the tag already exists, make sure it points to master.
    tag_ref, ok1 = github_api.CallRefsAPI(repo, token, ("/tags/%s" % tag))
    master_ref, ok2 = github_api.CallRefsAPI(repo, token, "/heads/master")
    if ok1 and ok2 and tag_ref["object"]["sha"] != master_ref["object"]["sha"]:
        logging.info(" Updating tag %s (%s -> %s)",
                     tag, tag_ref["object"]["sha"], master_ref["object"]["sha"])
        r, ok = github_api.CallRefsAPI(
            repo, token, method="PATCH",
            uri=("/tags/%s" % tag),
            json_data={
                "sha": master_ref["object"]["sha"],
                "force": False,
            })
    # If the target release already exists, avoid re-creating it - simply delete previous assets.
    rel, ok = github_api.CallReleasesAPI(repo, token, releases_url=("/tags/%s" % tag))
    if ok:
        if re_create:
            logging.info(" Release already exists (id %d), deleting", rel["id"])
            DeleteRelease(repo, token, rel["id"])
            rel = None
        else:
            logging.info(" Release already exists (id %d), deleting assets", rel["id"])
            for a in rel.get("assets", []):
                logging.info(" Deleting asset %s (%d)", a["name"], a["id"])
                github_api.CallReleasesAPI(
                    repo, token,
                    method="DELETE",
                    releases_url=("/assets/%d" % a["id"]),
                    decode_json=False)
    else:
        rel = None

    if rel is None:
        logging.info(" Creating release")
        rel, ok = github_api.CallReleasesAPI(repo, token, "", method="POST", json_data={
            "name": tag,
            "draft": False,
            "tag_name": tag,
            "target_commitish": "master",
        })
        if not ok:
            logging.error("Failed to create a release draft: %s", rel)
            raise RuntimeError
    rel_id = rel["id"]
    logging.debug("Release id: %d", rel_id)
    logging.info(" Uploading assets")
    for asset_name, asset_file in spec["assets"]:
        ct = "application/zip" if asset_name.endswith(".zip") else "application/octet-stream"
        logging.info(" Uploading %s to %s", asset_file, asset_name)
        with open(asset_file, "rb") as f:
            r, ok = github_api.CallReleasesAPI(
                repo, token, method="POST", subdomain="uploads", data=f,
                releases_url=("/%d/assets" % rel_id),
                headers={"Content-Type": ct},
                params={"name": asset_name})
        if not ok:
            logging.error("Failed to upload %s: %s", asset_name, r)
            raise RuntimeError
    logging.info(" Published release %s / %s (%d)", repo, tag, rel["id"])


def UpdateGitHubRelease(spec, tag, token, tmp_dir):
    logging.debug("GH release spec: %s", spec)
    repo = spec["repo"]

    rel, ok = github_api.CallReleasesAPI(repo, token, releases_url=("/tags/%s" % tag))
    if not ok:
        logging.error("Failed to get release info for %s/%s: %s", repo, tag, rel)
        raise RuntimeError

    logging.info(" Updating release %s / %s (%d)", repo, tag, rel["id"])
    for asset_name, asset_file in spec["assets"]:
        ct = "application/zip" if asset_name.endswith(".zip") else "application/octet-stream"
        for a in rel.get("assets", []):
            if a["name"] == asset_name:
                logging.info(" Deleting asset %s (%d)", asset_name, a["id"])
                github_api.CallReleasesAPI(
                    repo, token,
                    method="DELETE",
                    releases_url=("/assets/%d" % a["id"]),
                    decode_json=False)
        logging.info(" Uploading %s to %s", asset_file, asset_name)
        with open(asset_file, "rb") as f:
            r, ok = github_api.CallReleasesAPI(
                repo, token, method="POST", subdomain="uploads", data=f,
                releases_url=("/%d/assets" % rel["id"]),
                headers={"Content-Type": ct},
                params={"name": asset_name})
        if not ok:
            logging.error("Failed to upload %s: %s", asset_name, r)
            if r and r.get("errors", [{}])[0].get("code", "") == "already_exists":
                # This is a bug in GitHub where sometimes a "phantom asset" will block an upload.
                # The asset is not listed (or it would've been deleted above), but the upload fails.
                # There is no way around it except re-creating the release.
                # Here we just delete it and the next run will re-create it properly. Ugh.
                logging.error("*BUG* Phantom asset, nuking release")
                DeleteRelease(repo, token, rel["id"])
            raise RuntimeError

    logging.info(" Updated release %s / %s (%d)", repo, tag, rel["id"])


def MakeAsset(an, asf, tmp_dir):
    af = os.path.join(tmp_dir, an)
    logging.info(" Copying %s -> %s", asf, af)
    shutil.copy(asf, af)
    return [an, af]


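# Process a single location from a config entry: sync the source (rsync for a
# local path, git clone/pull for a remote repo, with an optional subdirectory
# after a ".git" path component), build every variant with mos, collect the
# resulting lib.a / fw.zip / .elf files as assets, and publish them to the
# configured outputs (GitHub releases tagged with --gh-release-tag).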
def ProcessLoc(e, loc, args):
    parts = loc.split("/")
    pre, name, i, repo_loc, repo_subdir = "", "", 0, loc, ""
    for p in parts:
        pre = name.split(":")[-1]
        name = p
        i += 1
        if p.endswith(".git"):
            repo_loc = "/".join(parts[:i])
            repo_subdir = "/".join(parts[i:])
            name = p[:-4]
            break
    repo_dir = os.path.join(args.tmp_dir, pre, name)
    if os.path.exists(repo_loc):
        rl = repo_loc + ("/" if not repo_loc.endswith("/") else "")
        os.makedirs(repo_dir, exist_ok=True)
        cmd = ["rsync", "-a", "--delete", rl, repo_dir + "/"]
        name = os.path.basename(repo_loc)
    else:
        if not os.path.exists(repo_dir):
            logging.info("Cloning into %s", repo_dir)
            cmd = ["git", "clone", repo_loc, repo_dir]
        else:
            logging.info("Pulling %s", repo_dir)
            cmd = ["git", "-C", repo_dir, "pull"]
    if repo_subdir:
        name = os.path.basename(repo_subdir)
        logging.info("== %s: %s / %s", name, repo_loc, repo_subdir)
    else:
        name = os.path.basename(repo_loc)
        logging.info("== %s: %s", name, repo_loc)
    RunCmd(cmd)
    if repo_subdir:
        tgt_dir = os.path.join(repo_dir, repo_subdir)
    else:
        tgt_dir = repo_dir
    tgt_name = os.path.split(tgt_dir)[-1]
    assets = []
    common = e.get("common", {})
    # Build all the variants, collect assets.
    for v in e["variants"]:
        logging.info(" %s %s", tgt_name, v["name"])
        mos_cmd = [args.mos, "build", "-C", tgt_dir, "--local", "--clean"]
        if args.repo_dir:
            mos_cmd.append("--repo=%s" % args.repo_dir)
        if args.deps_dir:
            mos_cmd.append("--deps-dir=%s" % args.deps_dir)
        if args.binary_libs_dir:
            mos_cmd.append("--binary-libs-dir=%s" % args.binary_libs_dir)
        if args.lib:
            for lib in args.lib:
                mos_cmd.append("--lib=%s" % lib)
        if args.libs_dir:
            mos_cmd.append("--libs-dir=%s" % args.libs_dir)
        if args.no_libs_update:
            mos_cmd.append("--no-libs-update")
        mos_cmd.append("--platform=%s" % v["platform"])
        for bvk, bvv in sorted(list(common.get("build_vars", {}).items()) +
                               list(v.get("build_vars", {}).items())):
            mos_cmd.append("--build-var=%s=%s" % (bvk, bvv))
        cflags = (common.get("cflags", "") + " " + v.get("cflags", "")).strip()
        if cflags:
            mos_cmd.append("--cflags-extra=%s" % cflags)
        cxxflags = (common.get("cxxflags", "") + " " + v.get("cxxflags", "")).strip()
        if cxxflags:
            mos_cmd.append("--cxxflags-extra=%s" % cxxflags)
        mos_args = (common.get("mos_args", []) + v.get("mos_args", []))
        if mos_args:
            mos_cmd.extend(mos_args)
        RunCmd(mos_cmd)
        bl = os.path.join(args.tmp_dir, "%s-%s-build.log" % (tgt_name, v["name"]))
        logging.info(" Saving build log to %s", bl)
        shutil.copy(os.path.join(tgt_dir, "build", "build.log"), bl)
        # Ok, what did we just build?
        with open(os.path.join(tgt_dir, "mos.yml")) as f:
            m = yaml.safe_load(f)
        if m.get("type", "") == "lib":
            assets.append(MakeAsset("lib%s-%s.a" % (tgt_name, v["name"]),
                                    os.path.join(tgt_dir, "build", "lib.a"), args.tmp_dir))
        else:
            assets.append(MakeAsset("%s-%s.zip" % (tgt_name, v["name"]),
                                    os.path.join(tgt_dir, "build", "fw.zip"), args.tmp_dir))
        for fn in glob.glob(os.path.join(tgt_dir, "build", "objs", "*.elf")):
            an = os.path.basename(fn).replace(tgt_name, "%s-%s" % (tgt_name, v["name"]))
            assets.append(MakeAsset(an, fn, args.tmp_dir))
    outs = e.get("out", [])
    if not outs and loc.startswith("https://github.com/"):
        outs = [{"github": {"repo": "%s/%s" % (pre, tgt_name)}}]
    for out in outs:
        gh_out = copy.deepcopy(out.get("github", {}))
        # Push to GitHub.
        if gh_out:
            gh_out["assets"] = assets
            gh_out["repo"] = gh_out["repo"] % {
                "name": name,
                "repo_subdir": repo_subdir,
            }

            if not args.gh_token_file:
                logging.info("Token file not set, GH uploads disabled")
                return
            if not os.path.isfile(args.gh_token_file):
                logging.error("Token file %s does not exist", args.gh_token_file)
                exit(1)
            logging.debug("Using token file at %s", args.gh_token_file)
            with open(args.gh_token_file, "r") as f:
                token = f.read().strip()
            i = 1
            while True:
                try:
                    if not gh_out.get("update", False):
                        # Looks like after some number of asset deletions / uploads a GH release
                        # gets into a bad state where new asset uploads just fail.
                        # So we try once, try twice, and on the third attempt we re-create the
                        # release even if it exists.
                        re_create = (i > 2)
                        CreateGitHubRelease(gh_out, args.gh_release_tag, token, args.tmp_dir, re_create=re_create)
                    else:
                        UpdateGitHubRelease(gh_out, args.gh_release_tag, token, args.tmp_dir)
                    break
                except (Exception, KeyboardInterrupt) as ex:
                    logging.exception("Exception (attempt %d): %s", i, ex)
                    if not isinstance(ex, KeyboardInterrupt) and i < 3:
                        time.sleep(1)
                        i += 1
                    else:
                        if not isinstance(ex, KeyboardInterrupt) and gh_out.get("update"):
                            logging.error("*BUG* Phantom asset (probably), nuking release")
                            rel, ok = github_api.CallReleasesAPI(gh_out["repo"], token,
                                                                 releases_url=("/tags/%s" % args.gh_release_tag))
                            if ok:
                                DeleteRelease(gh_out["repo"], token, rel["id"])
                        raise


def ProcessEntry(e, args):
    for loc in e.get("locations", []) + [e.get("location")]:
        if loc:
            ProcessLoc(e, loc, args)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Prebuild script for apps and libs")
    parser.add_argument("--v", type=int, default=logging.INFO)
    parser.add_argument("--config", type=str, required=True)
    parser.add_argument("--tmp-dir", type=str, default=os.path.join(os.getenv("TMPDIR", "/tmp"), "mos_prebuild"))
    parser.add_argument("--deps-dir", type=str)
    parser.add_argument("--binary-libs-dir", type=str)
    parser.add_argument("--lib", type=str, action="append")
    parser.add_argument("--libs-dir", type=str)
    parser.add_argument("--repo-dir", type=str)
    parser.add_argument("--no-libs-update", action="store_true")
    parser.add_argument("--mos", type=str, default="/usr/bin/mos")
    parser.add_argument("--gh-token-file", type=str)
    parser.add_argument("--gh-release-tag", type=str)
    args = parser.parse_args()

    logging.basicConfig(level=args.v, format="[%(asctime)s %(levelno)d] %(message)s", datefmt="%Y/%m/%d %H:%M:%S")
    logging.info("Reading %s", args.config)

    with open(args.config) as f:
        cfg = yaml.safe_load(f)

    for e in cfg:
        ProcessEntry(e, args)
    logging.info("All done")
