#!/usr/bin/env python
from __future__ import print_function
import os, time, re, yaml, hashlib, argparse
import sys, shutil, subprocess, socket
try:
from commands import getstatusoutput
from urllib2 import urlopen, URLError
except ImportError:
from subprocess import getstatusoutput
from urllib.request import urlopen
from urllib.error import URLError
from os.path import basename, dirname, abspath, exists, realpath, join, islink, expanduser
from os import makedirs, unlink, readlink, getenv, sysconf, rmdir
from glob import glob
from datetime import datetime
import ssl
import json
import logging
from alibuild_helpers.log import debug, error, warning, banner, info
from alibuild_helpers.log import logger_handler, logger, LogFormatter, ProgressPrint, riemannStream
from alibuild_helpers.utilities import format, getVersion, detectArch, dockerStatusOutput, parseDefaults, readDefaults
from alibuild_helpers.utilities import parseRecipe, getPackageList, getRecipeReader
from alibuild_helpers.utilities import Hasher
from alibuild_helpers.analytics import decideAnalytics, askForAnalytics, report_screenview, report_exception, report_event
from alibuild_helpers.analytics import enable_analytics, disable_analytics
import traceback
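# Write txt to the file fn, replacing any previous content.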
def writeAll(fn, txt):
f = open(fn, "w")
f.write(txt)
f.close()
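# Read a previously stored hash from fn; return "0" if the file cannot be read.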
def readHashFile(fn):
try:
return open(fn).read().strip("\n")
except IOError:
return "0"
def star():
return basename(sys.argv[0]).lower().replace("build", "")
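# Pick the compressor to use: pigz (parallel gzip) when available, plain gzip otherwise.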
def gzip():
return getstatusoutput("which pigz")[0] and "gzip" or "pigz"
def execute(command, printer=debug):
if not printer:
printer = debug
popen = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
lines_iterator = iter(popen.stdout.readline, "")
for line in lines_iterator:
if not line: break
printer(line.decode('utf-8', 'ignore').strip("\n")) # yield line
output = popen.communicate()[0]
printer(output)
exitCode = popen.returncode
return exitCode
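# Abort with an error message (and flag the Riemann stream as critical) whenever err is truthy.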
def dieOnError(err, msg):
if err:
riemannStream.setState('critical')
error(msg)
sys.exit(1)
def updateReferenceRepos(referenceSources, p, spec):
# Update source reference area, if possible.
# If the area is already there and cannot be written, assume it is
# maintained by someone else.
#
# If the area can be created, clone a bare repository with the sources.
debug("Updating references.")
referenceRepo = "%s/%s" % (abspath(referenceSources), p.lower())
if os.access(dirname(referenceSources), os.W_OK):
getstatusoutput("mkdir -p %s" % referenceSources)
writeableReference = os.access(referenceSources, os.W_OK)
if not writeableReference and exists(referenceRepo):
debug("Using %s as reference for %s." % (referenceRepo, p))
spec["reference"] = referenceRepo
return
if not writeableReference:
debug("Cannot create reference for %s in specified folder.", p)
return
err, out = getstatusoutput("mkdir -p %s" % abspath(referenceSources))
if not "source" in spec:
return
if not exists(referenceRepo):
cmd = ["git", "clone", "--bare", spec["source"], referenceRepo]
debug(" ".join(cmd))
err = execute(" ".join(cmd))
else:
err = execute(format("cd %(referenceRepo)s && "
"git fetch --tags %(source)s 2>&1 && "
"git fetch %(source)s 2>&1",
referenceRepo=referenceRepo,
source=spec["source"]))
dieOnError(err, "Error while updating reference repos %s." % spec["source"])
spec["reference"] = referenceRepo
def getDirectoryHash(d):
if exists(join(d, ".git")):
err, out = getstatusoutput("GIT_DIR=%s/.git git rev-parse HEAD" % d)
dieOnError(err, "Impossible to find reference for %s " % d)
else:
err, out = getstatusoutput("pip show alibuild | grep -e \"^Version:\" | sed -e 's/.* //'")
dieOnError(err, "Impossible to find reference for %s " % d)
return out
# Creates a directory in the store which contains symlinks to the package
# and its direct / indirect dependencies
def createDistLinks(spec, specs, args, repoType, requiresType):
target = format("TARS/%(a)s/%(rp)s/%(p)s/%(p)s-%(v)s-%(r)s",
a=args.architecture,
rp=repoType,
p=spec["package"],
v=spec["version"],
r=spec["revision"])
shutil.rmtree(target, True)
for x in [spec["package"]] + list(spec[requiresType]):
dep = specs[x]
source = format("../../../../../TARS/%(a)s/store/%(sh)s/%(h)s/%(p)s-%(v)s-%(r)s.%(a)s.tar.gz",
a=args.architecture,
sh=dep["hash"][0:2],
h=dep["hash"],
p=dep["package"],
v=dep["version"],
r=dep["revision"])
err = execute(format("cd %(workDir)s &&"
"mkdir -p %(target)s &&"
"ln -sfn %(source)s %(target)s",
workDir = args.workDir,
target=target,
source=source))
rsyncOptions = ""
if args.writeStore:
cmd = format("cd %(w)s && "
"rsync -avR %(o)s --ignore-existing %(t)s/ %(rs)s/",
w=args.workDir,
rs=args.writeStore,
o=rsyncOptions,
t=target)
execute(cmd)
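# Yield only the requirements whose architecture matcher (the part after ":",
# defaulting to ".*") matches the given architecture.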
def filterByArchitecture(arch, requires):
for r in requires:
require, matcher = ":" in r and r.split(":", 1) or (r, ".*")
if re.match(matcher, arch):
yield require
VALID_ARCHS_RE = ["slc[5-9]+_(x86-64|ppc64)",
"(ubuntu|ubt|osx)[0-9]*_x86-64",
]
# Detect the number of available CPUs. Fall back to 1.
def detectJobs():
# Python 2.6+
try:
import multiprocessing
return multiprocessing.cpu_count()
except (ImportError, NotImplementedError):
pass
# POSIX
try:
res = int(os.sysconf("SC_NPROCESSORS_ONLN"))
if res > 0:
return res
except (AttributeError, ValueError):
pass
return 1
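# Return the VALID_ARCHS_RE patterns matching the given architecture
# (an empty list means the architecture is unsupported).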
def matchValidArch(architecture):
return [x for x in VALID_ARCHS_RE if re.match(x, architecture)]
ARCHITECTURE_TABLE = [
"On Linux, x86-64:\n"
" RHEL5 / SLC5 compatible: slc5_x86-64\n"
" RHEL6 / SLC6 compatible: slc6_x86-64\n"
" RHEL7 / CC7 compatible: slc7_x86-64\n"
" Ubuntu 14.04 compatible: ubuntu1404_x86-64\n"
" Ubuntu 15.04 compatible: ubuntu1504_x86-64\n"
" Ubuntu 15.10 compatible: ubuntu1510_x86-64\n"
" Ubuntu 16.04 compatible: ubuntu1604_x86-64\n\n"
"On Linux, POWER8 / PPC64 (little endian):\n"
" RHEL7 / CC7 compatible: slc7_ppc64\n\n"
"On Mac, x86-64:\n"
" Yosemite and El-Captain: osx_x86-64\n\n"
]
# Helper class which does not do any syncing (used when no remote store is configured).
class NoRemoteSync:
def syncToLocal(self, p, spec):
pass
def syncToRemote(self, p, spec):
pass
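# Helper class to fetch prebuilt tarballs from a remote HTTP(S) store
# (read-only: syncToRemote is a no-op).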
class HttpRemoteSync:
def __init__(self, remoteStore, architecture, workdir, insecure):
self.remoteStore = remoteStore
self.writeStore = ""
self.architecture = architecture
self.workdir = workdir
self.insecure = insecure
def syncToLocal(self, p, spec):
debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
hashListUrl = format("%(rs)s/%(sp)s/",
rs=self.remoteStore,
sp=spec["storePath"])
pkgListUrl = format("%(rs)s/%(sp)s/",
rs=self.remoteStore,
sp=spec["linksPath"])
hashList = []
pkgList = []
try:
if self.insecure:
context = ssl._create_unverified_context()
hashList = json.loads(urlopen(hashListUrl, context=context).read())
pkgList = json.loads(urlopen(pkgListUrl, context=context).read())
else:
hashList = json.loads(urlopen(hashListUrl).read())
pkgList = json.loads(urlopen(pkgListUrl).read())
except URLError as e:
debug("Cannot find precompiled package for %s@%s" % (p, spec["hash"]))
pass
except Exception as e:
info(e)
error("Unknown response from server")
cmd = format("mkdir -p %(hd)s && "
"mkdir -p %(ld)s",
hd=spec["tarballHashDir"],
ld=spec["tarballLinkDir"])
execute(cmd)
hashList = [x["name"] for x in hashList]
for pkg in hashList:
cmd = format("curl %(i)s -o %(hd)s/%(n)s -L %(rs)s/%(sp)s/%(n)s\n",
i="-k" if self.insecure else "",
n=pkg,
sp=spec["storePath"],
rs=self.remoteStore,
hd=spec["tarballHashDir"])
debug(cmd)
execute(cmd)
relativeHashDir = spec["tarballHashDir"].replace(self.workdir, "")
for pkg in pkgList:
if pkg["name"] in hashList:
cmd = format("ln -sf ../../%(a)s/store/%(sh)s/%(h)s/%(n)s %(ld)s/%(n)s\n",
a = self.architecture,
h = spec["hash"],
sh = spec["hash"][0:2],
n = pkg["name"],
ld = spec["tarballLinkDir"])
execute(cmd)
else:
cmd = format("ln -s unknown %(ld)s/%(n)s 2>/dev/null || true\n",
ld = spec["tarballLinkDir"],
n = pkg["name"])
execute(cmd)
def syncToRemote(self, p, spec):
return
# Helper class to sync package build directory using RSync.
class RsyncRemoteSync:
def __init__(self, remoteStore, writeStore, architecture, workdir, rsyncOptions):
self.remoteStore = re.sub("^ssh://", "", remoteStore)
self.writeStore = re.sub("^ssh://", "", writeStore)
self.architecture = architecture
self.rsyncOptions = rsyncOptions
self.workdir = workdir
def syncToLocal(self, p, spec):
debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
cmd = format("mkdir -p %(tarballHashDir)s\n"
"rsync -av %(ro)s %(remoteStore)s/%(storePath)s/ %(tarballHashDir)s/ || true\n"
"rsync -av --delete %(ro)s %(remoteStore)s/%(linksPath)s/ %(tarballLinkDir)s/ || true\n",
ro=self.rsyncOptions,
remoteStore=self.remoteStore,
storePath=spec["storePath"],
linksPath=spec["linksPath"],
tarballHashDir=spec["tarballHashDir"],
tarballLinkDir=spec["tarballLinkDir"])
err = execute(cmd)
dieOnError(err, "Unable to update from specified store.")
def syncToRemote(self, p, spec):
if not self.writeStore:
return
tarballNameWithRev = format("%(package)s-%(version)s-%(revision)s.%(architecture)s.tar.gz",
architecture=self.architecture,
**spec)
cmd = format("cd %(workdir)s && "
"rsync -avR %(rsyncOptions)s --ignore-existing %(storePath)s/%(tarballNameWithRev)s %(remoteStore)s/ &&"
"rsync -avR %(rsyncOptions)s --ignore-existing %(linksPath)s/%(tarballNameWithRev)s %(remoteStore)s/",
workdir=self.workdir,
remoteStore=self.remoteStore,
rsyncOptions=self.rsyncOptions,
storePath=spec["storePath"],
linksPath=spec["linksPath"],
tarballNameWithRev=tarballNameWithRev)
err = execute(cmd)
dieOnError(err, "Unable to upload tarball.")
def prunePaths(workDir):
for x in ["PATH", "LD_LIBRARY_PATH", "DYLD_LIBRARY_PATH"]:
if not x in os.environ:
continue
workDirEscaped = re.escape("%s" % workDir) + "[^:]*:?"
os.environ[x] = re.sub(workDirEscaped, "", os.environ[x])
for x in list(os.environ.keys()):
if x.endswith("_VERSION") and x != "ALIBUILD_VERSION":
os.environ.pop(x)
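# Main entry point: parse the command line and dispatch the requested action
# (init, build, clean, version, analytics).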
def doMain():
for x in ["LANG", "LANGUAGE", "LC_ALL", "LC_COLLATE",
"LC_CTYPE", "LC_MESSAGES", "LC_MONETARY",
"LC_NUMERIC", "LC_TIME", "LC_ALL"]:
os.environ[x] = "C"
# We need to unset BASH_ENV because in certain environments (e.g.
# NERSC) this is used to source a (non -e safe) bashrc, effectively
# breaking aliBuild.
os.environ["BASH_ENV"] = ""
parser = argparse.ArgumentParser(epilog="For complete documentation please refer to https://alisw.github.io/alibuild")
parser.add_argument("action", choices=["init", "build", "clean", "version", "analytics"], help="what alibuild should do")
parser.add_argument("pkgname", nargs="?", help="One (or more) of the packages in `alidist'")
parser.add_argument("--config-dir", "-c", dest="configDir", default="%%(prefix)s%sdist" % star())
parser.add_argument("--no-local", dest="noDevel", default=[],
help="Do not pick up the following packages from a local checkout.")
parser.add_argument("--docker", dest="docker", action="store_true", default=False)
parser.add_argument("--docker-image", dest="dockerImage",
help="Image to use in case you build with docker (implies --docker-image)")
parser.add_argument("--work-dir", "-w", dest="workDir", default="sw")
parser.add_argument("--architecture", "-a", dest="architecture",
default=detectArch())
parser.add_argument("-e", dest="environment", action='append', default=[])
parser.add_argument("-v", dest="volumes", action='append', default=[],
help="Specify volumes to be used in Docker")
parser.add_argument("--jobs", "-j", dest="jobs", type=int, default=detectJobs())
parser.add_argument("--reference-sources", dest="referenceSources", default="%(workDir)s/MIRROR")
parser.add_argument("--remote-store", dest="remoteStore", default="",
help="Where to find packages already built for reuse."
"Use ssh:// in front for remote store. End with ::rw if you want to upload.")
parser.add_argument("--write-store", dest="writeStore", default="",
help="Where to upload the built packages for reuse."
"Use ssh:// in front for remote store.")
parser.add_argument("--disable", dest="disable", default=[],
metavar="PACKAGE", action="append",
help="Do not build PACKAGE and all its (unique) dependencies.")
parser.add_argument("--defaults", dest="defaults", default="release", nargs="?",
metavar="FILE", help="Specify which defaults to use")
parser.add_argument("--always-prefer-system", dest="preferSystem", default=False,
action="store_true", help="Always use system packages when compatible")
parser.add_argument("--no-system", dest="noSystem", default=False,
action="store_true", help="Never use system packages")
parser.add_argument("--force-unknown-architecture", dest="forceUnknownArch", default=False,
action="store_true", help="Do not check for valid architecture")
parser.add_argument("--insecure", dest="insecure", default=False,
action="store_true", help="Do not check for valid certificates")
parser.add_argument("--aggressive-cleanup", dest="aggressiveCleanup", default=False,
action="store_true", help="Perform additional cleanups")
parser.add_argument("--debug", "-d", dest="debug", action="store_true", default=False)
parser.add_argument("--no-auto-cleanup", help="Do not cleanup build by products automatically",
dest="autoCleanup", action="store_false", default=True)
parser.add_argument("--devel-prefix", "-z", nargs="?", help="Version name to use for development packages. Defaults to branch name.",
dest="develPrefix", default=argparse.SUPPRESS)
parser.add_argument("--dist", dest="dist", default="",
help="Prepare development mode by downloading the given recipes set ([user/repo@]branch)")
parser.add_argument("--dry-run", "-n", dest="dryRun", default=False,
action="store_true", help="Prints what would happen, without actually doing the build.")
args = parser.parse_args()
cmdDisable = args.disable
os.environ["ALIBUILD_ARCHITECTURE"] = args.architecture
report_screenview(args.action)
if args.action == "version":
print("aliBuild version: %s" % getVersion())
sys.exit(0)
args.referenceSources = format(args.referenceSources, workDir=args.workDir)
args.dist = args.dist if "@" in args.dist else "alisw/%sdist@%s" % (star(),args.dist)
args.dist = dict(zip(["repo","ver"],args.dist.split("@", 2)))
if args.remoteStore or args.writeStore:
args.noSystem = True
if not args.architecture:
print(format("Cannot determine architecture. "
"Please pass it explicitly.\n\n%s" % ARCHITECTURE_TABLE[0]))
exit(1)
if not args.forceUnknownArch and not matchValidArch(args.architecture):
print(format("Unknown / unsupported architecture: %(architecture)s.\n\n"
"%(table)s"
"Alternatively, you can use the `--force-unknown-architecture' option.",
table=ARCHITECTURE_TABLE[0],
architecture=args.architecture))
exit(1)
if args.preferSystem and args.noSystem:
parser.error("choose either --always-prefer-system or --no-system")
if args.dockerImage:
args.docker = True
if args.docker and args.architecture.startswith("osx"):
parser.error("cannot use `-a %s` and --docker" % args.architecture)
if args.docker and getstatusoutput("which docker")[0]:
parser.error("cannot use --docker as docker executable is not found")
args.disable = [x for x in ",".join(args.disable).split(",") if x]
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
# If specified, use the requested docker image; otherwise, when running
# with docker, the image is derived from the first part of the
# architecture we want to build for.
dockerImage = args.dockerImage if "dockerImage" in args else ""
if args.docker and not dockerImage:
dockerImage = "alisw/%s-builder" % args.architecture.split("_")[0]
dieOnError(args.remoteStore.endswith("::rw") and args.writeStore,
"You cannot specify ::rw and --write-store at the same time")
if args.remoteStore.endswith("::rw"):
args.remoteStore = args.remoteStore[0:-4]
args.writeStore = args.remoteStore
if args.remoteStore.startswith("http"):
syncHelper = HttpRemoteSync(args.remoteStore, args.architecture, args.workDir, args.insecure)
elif args.remoteStore:
syncHelper = RsyncRemoteSync(args.remoteStore, args.writeStore, args.architecture, args.workDir, "")
else:
syncHelper = NoRemoteSync()
if args.noDevel:
args.noDevel = args.noDevel.split(",")
if args.action == "build" and not args.pkgname:
parser.error("Please provide at least one package to build.")
# Setup build environment.
if args.action == "init":
setdir = args.develPrefix if "develPrefix" in args else "."
args.configDir = format(args.configDir, prefix=setdir+"/")
pkgs = args.pkgname if args.pkgname else ""
pkgs = [ dict(zip(["name","ver"], y.split("@")[0:2]))
for y in [ x+"@" for x in list(filter(lambda y: y, pkgs.split(","))) ] ]
if args.dryRun:
info("--dry-run / -n specified. Doing nothing.")
exit(0)
try:
os.path.exists(setdir) or os.mkdir(setdir)
os.path.exists(args.referenceSources) or makedirs(args.referenceSources)
except OSError as e:
dieOnError(True, str(e))
for p in [{"name":"stardist","ver":args.dist["ver"]}] + pkgs:
if p["name"] == "stardist":
spec = { "source": "https://github.com/"+args.dist["repo"],
"package": basename(args.configDir), "version": None }
dest = args.configDir
else:
filename = "%s/%s.sh" % (args.configDir, p["name"].lower())
err, spec, _ = parseRecipe(getRecipeReader(filename))
dieOnError(err, err)
dest = join(setdir, spec["package"])
writeRepo = spec.get("write_repo", spec.get("source"))
dieOnError(not writeRepo, "Package %s has no source field and cannot be developed" % spec["package"])
if os.path.exists(dest):
warning("not cloning %s since it already exists" % spec["package"])
continue
p["ver"] = p["ver"] if p["ver"] else spec.get("tag", spec["version"])
debug("cloning %s%s for development" % (spec["package"], " version "+p["ver"] if p["ver"] else ""))
updateReferenceRepos(args.referenceSources, spec["package"], spec)
err = execute(format("git clone %(readRepo)s%(branch)s --reference %(refSource)s %(cd)s && " +
"cd %(cd)s && git remote set-url --push origin %(writeRepo)s",
readRepo=spec["source"],
writeRepo=writeRepo,
branch=" -b "+p["ver"] if p["ver"] else "",
refSource=join(args.referenceSources, spec["package"].lower()),
cd=dest))
dieOnError(err!=0, "cannot clone %s%s" %
(spec["package"], " version "+p["ver"] if p["ver"] else ""))
print(format("Development directory %(d)s created%(pkgs)s",
pkgs=" for "+", ".join([ x["name"].lower() for x in pkgs ]) if pkgs else "",
d=setdir))
exit(0)
elif args.action == "clean":
# Find all the symlinks in "BUILD"
# Find all the directories in "BUILD"
# Schedule a directory for deletion if it does not have a symlink
# Delete scheduled directories
symlinksBuild = [readlink(x) for x in glob("%s/BUILD/*-latest*" % args.workDir)]
# $WORK_DIR/TMP should always be cleaned up; it is left behind only when
# we run out of space while unpacking.
# $WORK_DIR/<architecture>/store can be cleaned up as well, because
# we do not need the actual tarballs after they have been built.
toDelete = ["%s/TMP" % args.workDir]
if args.aggressiveCleanup:
toDelete += ["%s/TARS/%s/store" % (args.workDir, args.architecture),
"%s/SOURCES" % (args.workDir)]
toDelete += [x for x in glob("%s/BUILD/*" % args.workDir)
if not islink(x) and not basename(x) in symlinksBuild]
installGlob ="%s/%s/*/" % (args.workDir, args.architecture)
installedPackages = set([dirname(x) for x in glob(installGlob)])
symlinksInstall = []
for x in installedPackages:
symlinksInstall += [realpath(y) for y in glob(x + "/latest*")]
toDelete += [x for x in glob(installGlob + "*")
if not islink(x) and not realpath(x) in symlinksInstall]
toDelete = [x for x in toDelete if exists(x)]
if not toDelete:
print("Nothing to delete.")
exit(0)
print("This will delete the following directories")
print("\n".join(toDelete))
if args.dryRun:
print("--dry-run / -n specified. Doing nothing.")
exit(0)
for x in toDelete:
shutil.rmtree(x)
exit(0)
elif args.action == "analytics":
if not args.pkgname in ["on", "off"]:
parser.error("Please specify [on/off].")
exit(0)
elif not args.action == "build":
parser.error("Action %s unsupported" % args.action)
args.configDir = format(args.configDir, prefix="")
packages = [args.pkgname]
specs = {}
buildOrder = []
workDir = abspath(args.workDir)
prunePaths(workDir)
if not exists(args.configDir):
err = execute(format(
"git clone https://github.com/%(repo)s%(branch)s %(cd)s",
repo=args.dist["repo"],
branch=" -b "+args.dist["ver"] if args.dist["ver"] else "",
cd=abspath(args.configDir))
)
if err:
error("Unable to download default %sdist" % star())
exit(1)
if "develPrefix" in args and args.develPrefix == None:
args.develPrefix = basename(dirname(abspath(args.configDir)))
if dockerImage:
args.develPrefix = "%s-%s" % (args.develPrefix, args.architecture) if "develPrefix" in args else args.architecture
defaultsReader = lambda : readDefaults(args.configDir, args.defaults, parser.error)
(err, overrides, taps) = parseDefaults(args.disable,
defaultsReader, debug)
dieOnError(err, err)
specDir = "%s/SPECS" % workDir
if not exists(specDir):
makedirs(specDir)
os.environ["ALIBUILD_ALIDIST_HASH"] = getDirectoryHash(args.configDir)
debug("Building for architecture %s" % args.architecture)
debug("Number of parallel builds: %d" % args.jobs)
debug(format("Using %(star)sBuild from "
"%(star)sbuild@%(toolHash)s recipes "
"in %(star)sdist@%(distHash)s",
star=star(),
toolHash=getDirectoryHash(dirname(__file__)),
distHash=os.environ["ALIBUILD_ALIDIST_HASH"]))
(systemPackages, ownPackages, failed) = getPackageList(packages=packages,
specs=specs,
configDir=args.configDir,
preferSystem=args.preferSystem,
noSystem=args.noSystem,
architecture=args.architecture,
disable=args.disable,
defaults=args.defaults,
dieOnError=dieOnError,
performPreferCheck=lambda pkg, cmd : dockerStatusOutput(cmd, dockerImage),
performRequirementCheck=lambda pkg, cmd : dockerStatusOutput(cmd, dockerImage),
overrides=overrides,
taps=taps,
log=debug)
if failed:
error("The following packages are system requirements and could not be found:\n\n- " + "\n- ".join(sorted(list(failed))))
error("\nPlease run:\n\n\taliDoctor %s\n\nto get a full diagnosis." % args.pkgname)
sys.exit(1)
for x in specs.values():
x["requires"] = [r for r in x["requires"] if not r in args.disable]
x["build_requires"] = [r for r in x["build_requires"] if not r in args.disable]
x["runtime_requires"] = [r for r in x["runtime_requires"] if not r in args.disable]
if systemPackages:
banner("%sBuild can take the following packages from the system and will not build them:\n %s" %
(star(), ", ".join(systemPackages)))
if ownPackages:
banner("The following packages cannot be taken from the system and will be built:\n %s" %
", ".join(ownPackages))
# Do a topological sort to get the correct build order even in the
# case of non-tree-like dependencies.
# The actual algorithm used can be found at:
#
# http://www.stoimen.com/blog/2012/10/01/computer-algorithms-topological-sort-of-a-graph/
#
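# Kahn-style sort: repeatedly take a package whose dependencies are all
# resolved and release the packages that depend on it.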
edges = [(p["package"], d) for p in specs.values() for d in p["requires"] ]
L = [l for l in specs.values() if not l["requires"]]
S = []
while L:
spec = L.pop(0)
S.append(spec)
nextVertex = [e[0] for e in edges if e[1] == spec["package"]]
edges = [e for e in edges if e[1] != spec["package"]]
hasPredecessors = set([m for e in edges for m in nextVertex if e[0] == m])
noPredecessors = set(nextVertex) - hasPredecessors
L += [specs[m] for m in noPredecessors]
buildOrder = [s["package"] for s in S]
# Date fields to substitute: they are zero-padded
now = datetime.now()
nowKwds = { "year": str(now.year),
"month": str(now.month).zfill(2),
"day": str(now.day).zfill(2),
"hour": str(now.hour).zfill(2) }
# Check if any of the packages can be picked up from a local checkout
develCandidates = [basename(d) for d in glob("*") if os.path.isdir(d)]
develCandidatesUpper = [basename(d).upper() for d in glob("*") if os.path.isdir(d)]
develPkgs = [p for p in buildOrder
if p in develCandidates and p not in args.noDevel]
develPkgsUpper = [(p, p.upper()) for p in buildOrder
if p.upper() in develCandidatesUpper and p not in args.noDevel]
if set(develPkgs) != set(x for (x, y) in develPkgsUpper):
error(format("The following development packages have wrong spelling: %(pkgs)s.\n"
"Please check your local checkout and adapt to the correct one indicated.",
pkgs=", ".join(set(x.strip() for (x,y) in develPkgsUpper) - set(develPkgs))))
exit(1)
if buildOrder:
banner("Packages will be built in the following order:\n - %s" %
"\n - ".join([ x+" (development package)" if x in develPkgs else "%s@%s" % (x, specs[x]["tag"]) for x in buildOrder ]))
if develPkgs:
banner(format("You have packages in development mode.\n"
"This means their source code can be freely modified under:\n\n"
" %(pwd)s/<package_name>\n\n"
"%(star)sBuild does not automatically update such packages to avoid work loss.\n"
"In most cases this is achieved by doing in the package source directory:\n\n"
" git pull --rebase\n",
pwd=os.getcwd(), star=star()))
# Resolve the tag to the actual commit ref, so that the build hash reflects
# the exact commit being built.
for p in buildOrder:
spec = specs[p]
spec["commit_hash"] = "0"
develPackageBranch = ""
if "source" in spec:
# Replace source with local one if we are in development mode.
if spec["package"] in develPkgs:
spec["source"] = join(os.getcwd(), spec["package"])
cmd = format("git ls-remote --heads %(source)s",
source = spec["source"])
err, out = getstatusoutput(cmd)
dieOnError(err, "Unable to fetch from %s" % spec["source"])
# Tag may contain date params like %(year)s, %(month)s, %(day)s, %(hour)s.
spec["tag"] = format(spec["tag"], **nowKwds)
# By default we assume tag is a commit hash. We then try to find
# out if the tag is actually a branch and we use the tip of the branch
# as commit_hash. Finally if the package is a development one, we use the
# name of the branch as commit_hash.
spec["commit_hash"] = spec["tag"]
for l in out.split("\n"):
if l.endswith("refs/heads/{0}".format(spec["tag"])) or spec["package"] in develPkgs:
spec["commit_hash"] = l.split("\t", 1)[0]
# We are in development mode: we need to rebuild if the commit hash
# is different and if there are extra changes on top of it.
if spec["package"] in develPkgs:
# Devel package: we get the commit hash from the checked source, not from remote.
cmd = "cd %s && git rev-parse HEAD" % spec["source"]
err, out = getstatusoutput(cmd)
dieOnError(err, "Unable to detect current commit hash.")
spec["commit_hash"] = out.strip()
cmd = "cd %s && git diff -r HEAD && git status --porcelain" % spec["source"]
h = Hasher()
err = execute(cmd, h)
dieOnError(err, "Unable to detect source code changes.")
spec["devel_hash"] = spec["commit_hash"] + h.hexdigest()
cmd = "cd %s && git rev-parse --abbrev-ref HEAD" % spec["source"]
err, out = getstatusoutput(cmd)
if out == "HEAD":
err, out = getstatusoutput("cd %s && git rev-parse HEAD" % spec["source"])
out = out[0:10]
if err:
error("Error, unable to lookup changes in development package %s. Is it a git clone?" % spec["source"])
exit(1)
develPackageBranch = out.replace("/", "-")
spec["tag"] = args.develPrefix if "develPrefix" in args else develPackageBranch
spec["commit_hash"] = "0"
break
# Version may contain date params like tag, plus %(commit_hash)s,
# %(short_hash)s and %(tag)s.
defaults_upper = args.defaults != "release" and "_" + args.defaults.upper().replace("-", "_") or ""
spec["version"] = format(spec["version"],
commit_hash=spec["commit_hash"],
short_hash=spec["commit_hash"][0:10],
tag=spec["tag"],
tag_basename=basename(spec["tag"]),
defaults_upper=defaults_upper,
**nowKwds)
if spec["package"] in develPkgs and "develPrefix" in args and args.develPrefix != "ali-master":
spec["version"] = args.develPrefix
# Decide what is the main package we are building and at what commit.
#
# We emit an event for the main package, when encountered, so that we can use
# it to index builds of the same hash on different architectures. We also
# make sure to add the main package and its hash to the debug log, so that we
# can always extract it from there.
# If one of the special packages is in the list of packages to be built,
# we use it as main package, rather than the last one.
if not buildOrder:
banner("Nothing to be done.")
exit(0)
mainPackage = buildOrder[-1]
mainPackage = "AliRoot" if "AliRoot" in buildOrder else mainPackage
mainPackage = "AliPhysics" if "AliPhysics" in buildOrder else mainPackage
mainPackage = "O2" if "O2" in buildOrder else mainPackage
mainHash = specs[mainPackage]["commit_hash"]
debug("Main package is %s@%s" % (mainPackage, mainHash))
if args.debug:
logger_handler.setFormatter(
LogFormatter("%%(levelname)s:%s:%s: %%(message)s" %
(mainPackage, args.develPrefix if "develPrefix" in args else mainHash[0:8])))
# Now that we have the main package set, we can print out useful information
# which we will be able to associate with this build.
for p in buildOrder:
spec = specs[p]
if "source" in spec:
debug("Commit hash for %s@%s is %s" % (spec["source"], spec["tag"], spec["commit_hash"]))
# Calculate the hashes. We do this in build order so that we can guarantee
# that the hashes of the dependencies are calculated first. Also notice that
# if the commit hash is a real hash, and not a tag, we can safely assume
# that's unique, and therefore we can avoid putting the repository or the
# name of the branch in the hash.
debug("Calculating hashes.")
for p in buildOrder:
spec = specs[p]
h = Hasher()
dh = Hasher()
for x in ["recipe", "version", "package", "commit_hash",
"env", "append_path", "prepend_path"]:
h(str(spec.get(x, "none")))
if spec["commit_hash"] == spec.get("tag", "0"):
h(spec.get("source", "none"))
if "source" in spec:
h(spec["tag"])
for dep in spec.get("requires", []):
h(specs[dep]["hash"])
dh(specs[dep]["hash"] + specs[dep].get("devel_hash", ""))
if bool(spec.get("force_rebuild", False)):
h(str(time.time()))
if spec["package"] in develPkgs and "incremental_recipe" in spec:
h(spec["incremental_recipe"])
ih = Hasher()
ih(spec["incremental_recipe"])
spec["incremental_hash"] = ih.hexdigest()
elif p in develPkgs:
h(spec.get("devel_hash"))
spec["hash"] = h.hexdigest()
spec["deps_hash"] = dh.hexdigest()
debug("Hash for recipe %s is %s" % (p, spec["hash"]))
# This adds to the spec where it should find, locally or remotely, the
# various tarballs and links.
for p in buildOrder:
spec = specs[p]
pkgSpec = {
"workDir": workDir,
"package": spec["package"],
"version": spec["version"],
"hash": spec["hash"],
"prefix": spec["hash"][0:2],
"architecture": args.architecture
}
varSpecs = [
("storePath", "TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
("linksPath", "TARS/%(architecture)s/%(package)s"),
("tarballHashDir", "%(workDir)s/TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
("tarballLinkDir", "%(workDir)s/TARS/%(architecture)s/%(package)s"),
("buildDir", "%(workDir)s/BUILD/%(hash)s/%(package)s")
]
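# e.g. storePath might expand to TARS/slc7_x86-64/store/ab/abcd.../
# (architecture and hash values here are purely illustrative).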
spec.update(dict([(x, format(y, **pkgSpec)) for (x, y) in varSpecs]))
spec["old_devel_hash"] = readHashFile(spec["buildDir"]+"/.build_succeeded")
# We recursively calculate the full set of requires "full_requires"
# including build_requires and the subset of them which are needed at
# runtime "full_runtime_requires".
for p in buildOrder:
spec = specs[p]
todo = [p]
spec["full_requires"] = []
spec["full_runtime_requires"] = []
while todo:
i = todo.pop(0)
requires = specs[i].get("requires", [])
runTimeRequires = specs[i].get("runtime_requires", [])
spec["full_requires"] += requires
spec["full_runtime_requires"] += runTimeRequires
todo += requires
spec["full_requires"] = set(spec["full_requires"])
spec["full_runtime_requires"] = set(spec["full_runtime_requires"])
debug("We will build packages in the following order: %s" % " ".join(buildOrder))
if args.dryRun:
info("--dry-run / -n specified. Not building.")
exit(0)
# We now iterate on all the packages, making sure we build correctly every
# single one of them. This is done this way so that the second time we run we
# can check if the build was consistent and if it is, we bail out.
packageIterations = 0
report_event("install",
format("%(p)s disabled=%(dis)s devel=%(dev)s system=%(sys)s own=%(own)s deps=%(deps)s",
p=args.pkgname,
dis=",".join(sorted(cmdDisable)),
dev=",".join(sorted(develPkgs)),
sys=",".join(sorted(systemPackages)),
own=",".join(sorted(ownPackages)),
deps=",".join(buildOrder[:-1])
),
args.architecture)
while buildOrder:
packageIterations += 1
if packageIterations > 20:
error("Too many attempts at building %s. Something wrong with the repository?")
exit(1)
p = buildOrder[0]
spec = specs[p]
if spec["package"] in develPkgs and getattr(syncHelper, "writeStore", None):
warning("Disabling remote write store from now since %s is a development package." % spec["package"])
syncHelper.writeStore = ""
# Since we can execute this multiple times for a given package, in order to
# ensure consistency, we need to reset things and make them pristine.
spec.pop("revision", None)
riemannStream.setAttributes(package = spec["package"],
package_hash = spec["version"],
architecture = args.architecture,
defaults = args.defaults)
riemannStream.setState("warning")
debug("Updating from tarballs")
# If we arrived here it really means we have a tarball which was created
# using the same recipe. We will use it as a cache for the build. This means
# that while we will still perform the build process, rather than
# executing the build itself we will:
#
# - Unpack it in a temporary place.
# - Invoke the relocation specifying the correct work_dir and the
# correct path which should have been used.
# - Move the version directory to its final destination, including the
# correct revision.
# - Repack it and put it in the store with the new revision.
#
# This will result in a new package which has the same binary contents as
# the old one, but where the relocation works for the new directory. Here
# we simply store the fact that we can reuse the contents of cachedTarball.
syncHelper.syncToLocal(p, spec)
# Decide which revision the package should get, based on its hash and on
# what is already available locally.
debug("Checking for packages already built.")
linksGlob = format("%(w)s/TARS/%(a)s/%(p)s/%(p)s-%(v)s-*.%(a)s.tar.gz",
w=workDir,
a=args.architecture,
p=spec["package"],
v=spec["version"])
debug("Glob pattern used: %s" % linksGlob)
packages = glob(linksGlob)
# In case there is no installed software, revision is 1
# If there is already an installed package:
# - Remove it if we do not know its hash
# - Use the latest number in the version, to decide its revision
debug("Packages already built using this version\n%s" % "\n".join(packages))
busyRevisions = []
# Calculate the build_family for the package
#
# If the package is a devel package, we need to associate it with a devel
# prefix, either via the -z option or using its checked out branch. This
# affects its build hash.
#
# Moreover we need to define a global "buildFamily" which is used
# to tag all the packages involved in the build; this way we can have
# a latest-<buildFamily> link for all of them and we will not run into the
# flip-flopping described in https://github.com/alisw/alibuild/issues/325.
develPrefix = ""
possibleDevelPrefix = getattr(args, "develPrefix", develPackageBranch)
if spec["package"] in develPkgs:
develPrefix = possibleDevelPrefix
if possibleDevelPrefix:
spec["build_family"] = "%s-%s" % (possibleDevelPrefix, args.defaults)
else:
spec["build_family"] = args.defaults
for d in packages:
realPath = readlink(d)
matcher = format("../../%(a)s/store/[0-9a-f]{2}/([0-9a-f]*)/%(p)s-%(v)s-([0-9]*).%(a)s.tar.gz$",
a=args.architecture,
p=spec["package"],
v=spec["version"])
m = re.match(matcher, realPath)
if not m:
continue
h, revision = m.groups()
revision = int(revision)
# If we have a hash match, we use the old revision for the package
# and we do not need to build it.
if h == spec["hash"]:
spec["revision"] = revision
if spec["package"] in develPkgs and "incremental_recipe" in spec:
spec["obsolete_tarball"] = d
else:
debug("Package %s with hash %s is already found in %s. Not building." % (p, h, d))
src = format("%(v)s-%(r)s",
w=workDir,
v=spec["version"],
r=spec["revision"])
dst1 = format("%(w)s/%(a)s/%(p)s/latest-%(bf)s",
w=workDir,
a=args.architecture,
p=spec["package"],
bf=spec["build_family"])
dst2 = format("%(w)s/%(a)s/%(p)s/latest",
w=workDir,
a=args.architecture,
p=spec["package"])
getstatusoutput("ln -snf %s %s" % (src, dst1))
getstatusoutput("ln -snf %s %s" % (src, dst2))
info("Using cached build for %s" % p)
break
else:
busyRevisions.append(revision)
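# No hash match found: pick the lowest revision number not already taken
# (or 1 if nothing is installed yet).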
if not "revision" in spec and busyRevisions:
spec["revision"] = min(set(range(1, max(busyRevisions)+2)) - set(busyRevisions))
elif not "revision" in spec:
spec["revision"] = "1"
# Check if this development package needs to be rebuilt.
if spec["package"] in develPkgs:
debug("Checking if devel package %s needs rebuild" % spec["package"])
if spec["devel_hash"]+spec["deps_hash"] == spec["old_devel_hash"]:
info("Development package %s does not need rebuild" % spec["package"])
buildOrder.pop(0)
continue
# Now that we have all the information about the package we want to build, let's
# check if it wasn't built / unpacked already.
hashFile = "%s/%s/%s/%s-%s/.build-hash" % (workDir,
args.architecture,
spec["package"],
spec["version"],
spec["revision"])
fileHash = readHashFile(hashFile)
if fileHash != spec["hash"]:
if fileHash != "0":
debug("Mismatch between local area and the one which I should build. Redoing.")
shutil.rmtree(dirname(hashFile), True)
else:
# If we get here, we know we are in sync with whatever remote store. We
# can therefore create a directory which contains all the packages which
# were used to compile this one.
riemannStream.setState('ok')
debug("Package %s was correctly compiled. Moving to next one." % spec["package"])
# If using incremental builds, next time we execute the script we need to remove
# the placeholders which avoid rebuilds.
if spec["package"] in develPkgs and "incremental_recipe" in spec:
unlink(hashFile)
if "obsolete_tarball" in spec:
unlink(realpath(spec["obsolete_tarball"]))