-
Notifications
You must be signed in to change notification settings - Fork 1
/
tc-builder.py
2200 lines (1995 loc) · 82.4 KB
/
tc-builder.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
"""
Python(Anaconda) script for building Mingw-w64 cross-toolchain
Copyrighted 2017 Maverick Tse YM, Hong Kong
Twitter: @MaverickTse
This script attempts to build Mingw-w64 toolchain with reference
to Zeranoe's build script. This script only works under bash-like
shell and requires Anaconda with Python 3.6 or newer. DO NOT USE Python
from Ubuntu/Debian official/personal repo!
Besides Anaconda, you will also need:
build-essential, automake, texinfo, yasm
for standard build facilities
Unlike Zeranoe's build script, wget, git, subversion, etc. are not needed
since this script only utilizes FTP and HTTP through Python's built-in
networking functions.
Download is parallelized with 2 threads by default.
The building is complicated with some steps of uncertain function.
Roughly speaking, the build order (corresponds to functions order):
binutils [i686 and x86_64, with host compiler]
mingw-w64 header [i686 and x86_64]
gmp [x86_64, host compiler]
mpfr/isl/cloog [x86_64, host compiler]
mpc [x86_64, host compiler]
GCC bootstrap compiler [i686 and x86_64, host compiler]
^ NEED to specify posix as threading lib, or will be win32 by default
^ This is where you set SJLJ or DW2 exceptions model (for win32)
Mingw-w64 CRT [i686 and x86_64, Mingw-w64 compiler]
winpthreads [i686 and x86_64, Mingw-w64 compiler]
^ MUST be built before libGCC or threading won't work properly
GCC [i686 and x86_64, host compiler]
The CRT build script is relatively dumb and needs the
CC variable set to the mingw compiler in order to work.
"""
import os
import stat
import re
import tarfile
import ftplib
import multiprocessing as mp
import subprocess
import argparse
import hashlib
import datetime
import time
import socket
from ftplib import FTP
from urllib import request, response
from urllib.parse import urljoin, urlparse
from operator import itemgetter
import sys
from bs4 import BeautifulSoup
from shutil import rmtree, move
from colorama import init, Fore, Back, Style, deinit
# Constants
# Sandbox root for all downloads and builds.
# NOTE(review): "~" is not expanded by os.chdir — presumably normalized at startup; verify.
WORK_FOLDER = "~/MWTC/"
# Upstream copy of GCC's config.guess, used to detect the build triplet (see guess_config()).
CONFIG_GUESS = "https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.guess"
GNU_SERVER = "ftp.yzu.edu.tw" # All GNU FTP servers do not support MLST/D!
GCC_SERVER = "gcc.gnu.org"
# Candidate GNU FTP mirrors, roughly one per region; select_mirror() picks by latency.
GNU_MIRRORS = [
    "mirror.jre655.com", # Japan
    "ftp.yzu.edu.tw", # Taiwan
    "reflection.oss.ou.edu", # US
    "mirrors.ocf.berkeley.edu", # US
    "mirrorservice.org", # UK
    "ftp.igh.cnrs.fr", # France
    "mirror.checkdomain.de", # Germany
    "ftp.unicamp.br", # Brazil
    "gnu.mirror.iweb.com", # Canada
    "mirror.tochlab.net", # Russia
    "ftp.gnu.org" # Official
]
# Candidate GCC FTP mirrors.
GCC_MIRRORS = [
    "ftp.irisa.fr", # France
    "ftp.fu-berlin.de", # Germany
    "ftp.ntua.gr", # Greece
    "ftp.nluug.nl", # Netherlands
    "gcc.gnu.org" # Official
]
# Components fetched over FTP (see ftp_get_by_component()).
FTP_DOWNLOADS = ["binutils", "gcc", "gmp", "mpfr", "mpc", "isl", "cloog"]
# Which FTP host serves each component.
FTP_SERVERS = {
    "gcc": GCC_SERVER,
    "binutils": GNU_SERVER,
    "gmp": GCC_SERVER,
    "mpfr": GCC_SERVER,
    "mpc": GCC_SERVER,
    "isl": GCC_SERVER,
    "cloog": GCC_SERVER
}
# First directory to cwd into after FTP login, per component.
PRIMARY_FTP_FOLDERS = {
    "gcc": "/pub/gcc/releases/",
    "binutils": "/pub/gnu/binutils/",
    "gmp": "/pub/gcc/infrastructure/",
    "mpfr": "/pub/gcc/infrastructure/",
    "mpc": "/pub/gcc/infrastructure/",
    "isl": "/pub/gcc/infrastructure/",
    "cloog": "/pub/gcc/infrastructure/"
}
# Components scraped from an HTML listing page instead of FTP (see html_get_by_component()).
HTML_DOWNLOADS = ["mingw64", "pkgconf"]
HTML_URLS = {
    "mingw64": "https://github.com/mirror/mingw-w64/releases",
    "pkgconf": "https://distfiles.dereferenced.org/pkgconf/"
}
# Regexes fullmatch()ed against remote file names; a capture group holds the version.
FILENAME_PATTERNS = {
    "gcc": r"^gcc-([0-9.]+).tar.gz$",
    "binutils": r"^binutils-([0-9.]+).tar.bz2$",
    "gmp": r"^gmp-([0-9.]+).tar.bz2$",
    "mpfr": r"^mpfr-([0-9.]+).tar.bz2$",
    "mpc": r"^mpc-([0-9.]+).tar.gz$",
    "isl": r"^isl-([0-9.]+).tar.bz2$",
    "cloog": r"^cloog-([0-9.]+).tar.gz$",
    "mingw64": r".+?([0-9.]+).tar.gz$",
    "pkgconf": r".+?([0-9.]+).tar.gz$"
}
# 1-based index of the capture group in FILENAME_PATTERNS that holds the version string.
FILENAME_VERSION_CAPTURE = {
    "gcc": 1,
    "binutils": 1,
    "gmp": 1,
    "mpfr": 1,
    "mpc": 1,
    "isl": 1,
    "cloog": 1,
    "mingw64": 1,
    "pkgconf": 1
}
# Optional regexes for descending into a versioned sub-folder on the FTP server
# (only GCC publishes releases inside per-version directories); None disables the step.
FOLDER_PATTERNS = {
    "gcc": r"^gcc-([0-9.]+)$",
    "binutils": None,
    "gmp": None,
    "mpfr": None,
    "mpc": None,
    "isl": None,
    "cloog": None
}
# Capture group in FOLDER_PATTERNS holding the folder's version string.
FOLDER_VERSION_CAPTURE = {
    "gcc": 1,
    "binutils": 1,
    "gmp": 1,
    "mpfr": 1,
    "mpc": 1,
    "isl": 1,
    "cloog": 1
}
# "99" matches no real release, so ftp_get() falls through to "pick the latest".
PREFERRED_FOLDER_VERSION = { # ignored if pattern is None
    "gcc": "99",
    "binutils": "99",
    "gmp": "99",
    "mpfr": "99",
    "mpc": "99",
    "isl": "99",
    "cloog": "99"
}
# Same sentinel trick as PREFERRED_FOLDER_VERSION, but for file names.
PREFERRED_FILE_VERSION = {
    "gcc": "99",
    "binutils": "99",
    "gmp": "99",
    "mpfr": "99",
    "mpc": "99",
    "isl": "99",
    "cloog": "99",
    "mingw64": "99",
    "pkgconf": "99"
}
# Where each downloaded tarball lands, relative to WORK_FOLDER.
SAVE_PATH = {
    "gcc": "./dl/gcc.tar.gz",
    "binutils": "./dl/binutils.tar.bz2",
    "gmp": "./dl/gmp.tar.bz2",
    "mpfr": "./dl/mpfr.tar.bz2",
    "mpc": "./dl/mpc.tar.gz",
    "isl": "./dl/isl.tar.bz2",
    "cloog": "./dl/cloog.tar.gz",
    "mingw64": "./dl/mingw64.tar.gz",
    "pkgconf": "./dl/pkgconf.tar.gz"
}
# Working-tree layout, relative to WORK_FOLDER.
LOCATIONS = {
    "pkg_dir": "./pkgs/",
    "mingw_w64_i686_prefix": "./mingw-w64-i686/",
    "mingw_w64_x86_64_prefix": "./mingw-w64-x86_64/",
    "mingw_w64_source_dir": "./source/",
    "mingw_w64_build_dir": "./build/"
}
# Target triplets for the two toolchains being built.
TARGET = {
    "i686": "i686-w64-mingw32",
    "x86_64": "x86_64-w64-mingw32"
}
# When True, build the win32 GCC with SJLJ exceptions instead of DW2 (see module docstring).
USE_SJLJ = False
# Timing data collected during the build; keys/values filled in elsewhere.
PERFORMANCE_COUNTER = {}
# CLI help text. NOTE(review): {hl}/{chl} look like highlight-on/off placeholders,
# presumably filled via str.format() with colorama codes at the call site — confirm.
HELP_TEXT = """\
{hl}Python(Anaconda) script for building Mingw-w64 cross-toolchain{chl}
Copyrighted 2017 Maverick Tse YM, Hong Kong
Twitter: @MaverickTse
This script attempts to build Mingw-w64 toolchain with reference
to Zeranoe's build script. This script only works under bash-like
shell and {hl}requires Anaconda with Python 3.6 or newer{chl}. DO NOT USE Python
from Ubuntu/Debian official/personal repo!
Besides Anaconda, you will also need:
{hl}build-essential, automake, texinfo, yasm{chl}
for standard build facilities.
Unlike Zeranoe's build script, wget, git, subversion, etc. are not needed
since this script only utilize FTP and HTTP through Python's built-in
networking functions. This script also cut down redundant build steps
in Zeranoe's script when looping.
Download is parallelized with 2 threads by default.
Currently, win32 and win64 toolchains will be built without multilib.
If unspecified, the sandbox will be ~/MWTC by default,
and latest release versions of each component will be used.
All errors will be logged into files in the component's build folder:
config_error.log
build_error.log
install_error.log
may be generated with all the details. Console output is kept minimal.
At the end of the build process, a readme file and 3 shell scripts
would be generated.
"""
# Common functions
def print_ok():
    """Print a bracketed, bright-green OK tag without a trailing newline."""
    tag = Fore.GREEN + Style.BRIGHT + "OK" + Fore.RESET + Style.RESET_ALL
    print("【" + tag + "】 ", end='')
    return None
def print_error():
    """Print a bracketed, highlighted ERROR tag without a trailing newline."""
    tag = (Fore.RED + Back.LIGHTYELLOW_EX + Style.BRIGHT + "ERROR"
           + Fore.RESET + Style.RESET_ALL)
    print("【" + tag + "】 ", end='')
    return None
def ftp_get(server, folder, filename_re, file_version_capture_group, save_path=None,
            preferred_version="99", folder_re=None, folder_version_capture_group=1,
            preferred_folder_version="99"):
    """
    Download a file from an FTP server, preferring a specific version or
    falling back to the newest version found.
    :param server: The ftp server name without sub-directory or protocol name
    :param folder: The first folder to move to after log in
    :param filename_re: A regex string that match the full intended filename, with version string in capture group
    :param file_version_capture_group: Specify which capture group holds the version string. Default=1
    :param save_path: Destination folder or filename for saving the downloaded file
    :param preferred_version: A file with this version string will be downloaded first. If no match, get the latest file
    :param folder_re: An optional regex for moving to a child folder a second time, basing on version string
    :param folder_version_capture_group: Specify which capture group in folder_re holds the version string
    :param preferred_folder_version: The preferred version string when moving to a child folder
    :return: None if failed. Return the saving path on success
    """
    if not server or not folder or not filename_re:
        return None
    if file_version_capture_group < 1:
        return None

    def _version_key(item):
        # Numeric sort key for (id, "x.y.z") pairs. A plain string sort would
        # rank "9.1" above "10.1"; non-numeric fragments are ignored.
        return tuple(int(p) for p in item[1].split(".") if p.isdigit())

    ftp = FTP()
    try:
        ftp.connect(server, timeout=30)
        ftp.login()
    except ftplib.all_errors as e:
        print('FTP Error: ', str(e))
        # close() is safe even when the connection never opened; the original
        # quit() would raise here. Also return None (not False) as documented.
        ftp.close()
        return None
    ftp.cwd(folder)
    if folder_re:  # runs only when there is a regex for a versioned sub-folder
        fre = re.compile(folder_re)
        available_version = {}  # id: version
        folder_data = {}        # id: folder name
        keyid = 0
        try:
            listing = ftp.mlsd(facts=["type", "modify"])
            for name, fact in listing:
                if fact["type"] != "dir":
                    continue
                m = fre.fullmatch(name)
                if not m:
                    continue
                available_version[keyid] = m.group(folder_version_capture_group)
                folder_data[keyid] = name
                keyid += 1
        except ftplib.all_errors:
            # GNU mirrors commonly reject MLSD; NLST returns bare names only.
            print("FTP Server does not support MLST/MLSD command! Falling back to NLST")
            print("Note: No file type or date info available")
            for name in ftp.nlst():
                m = fre.fullmatch(name)
                if not m:
                    continue
                available_version[keyid] = m.group(folder_version_capture_group)
                folder_data[keyid] = name
                keyid += 1
        if not available_version:  # nothing matched — bail instead of crashing
            print("No folder matching", folder_re, "found on", server)
            ftp.quit()
            return None
        # Check for preferred folder version
        foundkey = None
        for key, ver in available_version.items():
            if ver == str(preferred_folder_version):
                foundkey = key
                break
        final_folder = folder
        if foundkey is not None:  # "if foundkey" wrongly skipped a match at key 0
            final_folder = urljoin(final_folder, folder_data[foundkey])
        else:  # default action: pick the numerically highest version
            latestid, _ = max(available_version.items(), key=_version_key)
            final_folder = urljoin(final_folder, folder_data[latestid])
        ftp.cwd(final_folder)  # change to our final target folder
    # We should now be inside the final target folder.
    # Get file listing and match filename.
    file_version = {}  # id: version
    file_names = {}    # id: filename
    keyid = 0
    fnre = re.compile(filename_re)
    try:
        listing = ftp.mlsd(facts=["type", "modify"])
        for name, fact in listing:
            if fact["type"] != "file":
                continue
            fm = fnre.fullmatch(name)
            if not fm:
                continue
            file_version[keyid] = fm.group(file_version_capture_group)
            file_names[keyid] = name
            keyid += 1
    except ftplib.all_errors:
        print("No MLST/MLSD support again... falling back")
        for name in ftp.nlst():
            fm = fnre.fullmatch(name)
            if not fm:
                continue
            file_version[keyid] = fm.group(file_version_capture_group)
            file_names[keyid] = name
            keyid += 1
    if not file_version:  # nothing matched — bail instead of crashing
        print("No file matching", filename_re, "found on", server)
        ftp.quit()
        return None
    # Check for preferred file version
    foundkey = None
    for key, ver in file_version.items():
        if ver == str(preferred_version):
            foundkey = key
            break
    if foundkey is not None:  # same falsy-zero fix as above
        final_archive = file_names[foundkey]
    else:
        latestid, _ = max(file_version.items(), key=_version_key)
        final_archive = file_names[latestid]
    # Before downloading, resolve the save path
    if not save_path:  # when not specified, save in current folder, preserve name
        final_path = os.path.join(os.getcwd(), final_archive)
    else:
        dir_name = os.path.dirname(save_path)    # can be empty
        filename = os.path.basename(save_path)   # can be empty
        if dir_name and not os.path.exists(dir_name):
            os.makedirs(dir_name)
        final_path = save_path if filename else os.path.join(dir_name, final_archive)
    final_path = os.path.abspath(final_path)
    if os.path.exists(final_path):
        print(final_path, "already exists, skipping...")
        ftp.quit()
        return final_path
    # Download the file; the with-block closes the handle the original leaked.
    print("[FTP] Downloading ", final_archive, " to ", final_path, " ...")
    with open(final_path, "wb") as file_handle:
        ftp.retrbinary("RETR " + final_archive, file_handle.write)
    print("[FTP] Download Finished")
    ftp.quit()
    return final_path
def select_mirror(server_list=None, priority=1, protocol="FTP", timeout=5.0):
    """
    Select a server mirror based on connection time.
    :param server_list: A list of server names (default: empty list)
    :param priority: 1 for the fastest server, 2 for the 2nd fast, etc.
    :param protocol: Decide PORT to use. Accepts HTTP, HTTPS, FTP, SFTP, SSH
    :param timeout: time-out threshold in second
    :return: a tuple of (server_name, latency); (None, None) when no mirror responds
    """
    if server_list is None:  # avoid the mutable-default-argument pitfall
        server_list = []
    # Port lookup table; unknown protocols fall back to 1 as before.
    ports = {"HTTP": 80, "HTTPS": 443, "FTP": 21, "SFTP": 22, "SSH": 22}
    port = ports.get(protocol, 1)
    benchmark = {}  # server: connect time in seconds
    for server in server_list:
        sock = socket.socket()
        sock.settimeout(timeout)
        start = time.time()
        try:
            sock.connect((server, port))
            benchmark[server] = time.time() - start
        except socket.herror as e:
            print("Hostname error for ", server)
            print(e)
        except socket.gaierror as e:
            print("Address error for ", server)
            print(e)
        except socket.timeout:
            print("Server ", server, " timed out")
        finally:
            sock.close()  # close on every path, including unexpected errors
    if not benchmark:
        print("[WARNING] No mirror available!")
        return None, None
    # Clamp priority into [0, number of reachable servers - 1]
    priority = max(0, min(len(benchmark), priority) - 1)
    sorted_servers = sorted(benchmark.items(), key=itemgetter(1))
    return sorted_servers[priority]
def ftp_get_by_component(component):
    """Fetch one FTP-hosted component by its table key, then unpack it into the pkg dir."""
    os.chdir(WORK_FOLDER)
    ftp_get(FTP_SERVERS[component], PRIMARY_FTP_FOLDERS[component],
            FILENAME_PATTERNS[component], FILENAME_VERSION_CAPTURE[component],
            save_path=SAVE_PATH[component],
            preferred_version=PREFERRED_FILE_VERSION[component],
            folder_re=FOLDER_PATTERNS[component],
            folder_version_capture_group=FOLDER_VERSION_CAPTURE[component],
            preferred_folder_version=PREFERRED_FOLDER_VERSION[component])
    untar(SAVE_PATH[component], LOCATIONS["pkg_dir"])
def html_get_by_component(component):
    """Fetch one HTML-listed component by its table key, then unpack it into the pkg dir."""
    os.chdir(WORK_FOLDER)
    html_get(HTML_URLS[component], FILENAME_PATTERNS[component],
             file_version_capture_group=FILENAME_VERSION_CAPTURE[component],
             save_path=SAVE_PATH[component])
    untar(SAVE_PATH[component], LOCATIONS["pkg_dir"])
def html_get(url, filename_re, file_version_capture_group=1, save_path=None, preferred_version="99"):
    """
    Scrape a HTML page for links, then download the preferred version or the latest.
    :param url: The URL string for the HTML page
    :param filename_re: A regex string that will run against links to search for target files
    :param file_version_capture_group: Specify which group holds the version string. Default=1
    :param save_path: Folder, filename or full path for saving the downloaded file
    :param preferred_version: A string for your preferred version
    :return: None if failed. Saved path on success
    """
    if not url or not filename_re:
        return None
    html = request.urlopen(url)
    soup = BeautifulSoup(html, "html.parser", from_encoding=html.info().get_param("charset"))
    file_regex = re.compile(filename_re)
    archive_info = {}  # id: url
    version_info = {}  # id: version
    keyid = 0
    for link in soup.find_all("a", href=True):
        m = file_regex.fullmatch(link["href"])
        if m:
            archive_info[keyid] = link["href"]
            version_info[keyid] = m.group(file_version_capture_group)
            keyid += 1
    if not version_info:  # no link matched — bail instead of crashing below
        print("[HTML] No link matching ", filename_re, " found at ", url)
        return None
    # Search for preferred version
    file_id = None
    for key, version in version_info.items():
        if version == preferred_version:
            file_id = key
            break
    # If not found, get the latest. ("if not file_id" wrongly discarded a
    # preferred match stored at key 0, because 0 is falsy.)
    if file_id is None:
        # Numeric version compare; string sort would rank "9.1" above "10.1".
        file_id, _ = max(version_info.items(),
                         key=lambda kv: tuple(int(p) for p in kv[1].split(".") if p.isdigit()))
    final_url = archive_info[file_id]
    # make url absolute if not yet
    parsed_url = urlparse(final_url)
    if not parsed_url.netloc:
        final_url = urljoin(url, final_url)
    # Set save path
    final_archive = os.path.basename(parsed_url.path)
    if not save_path:  # when not specified, save in current folder, preserve name
        final_path = os.path.join(os.getcwd(), final_archive)
    else:
        dir_name = os.path.dirname(save_path)    # can be empty
        filename = os.path.basename(save_path)   # can be empty
        if dir_name and not os.path.exists(dir_name):
            os.makedirs(dir_name)
        final_path = save_path if filename else os.path.join(dir_name, final_archive)
    final_path = os.path.abspath(final_path)
    if os.path.exists(final_path):
        print(final_path, " already exists, skipping...")
        return final_path
    # Download
    print("[HTML] Downloading from ", final_url, " to ", final_path)
    saved_name, header = request.urlretrieve(final_url, final_path)
    if saved_name:
        print("[HTML] Download finished")
        return saved_name
    print("[HTML] Download Failed")
    return None
def hash_file_md5(filename):
    """
    Obtain a MD5 digest of a file.
    :param filename: The file to be hashed
    :return: MD5 digest on success. None if file is not found.
    """
    if not os.path.exists(filename):
        print("[MD5 HASH]:", filename, " not found")
        return None
    md5 = hashlib.md5()
    with open(filename, "rb") as f:
        # 64 KiB chunks: the original 128-byte reads made hashing the
        # multi-megabyte tarballs this script handles needlessly slow.
        for chunk in iter(lambda: f.read(65536), b""):
            md5.update(chunk)
    return md5.hexdigest()
def untar(source_archive, destination_folder):
    """
    Decompress the source archive tarball into the destination folder.
    Skips extraction when a sidecar .md5 file shows the same archive was
    already unpacked by a previous run.
    :param source_archive: the path to tarball
    :param destination_folder: where to put the extracted files
    :return: None if failed. Destination path if success.
    """
    if not os.path.exists(source_archive):
        print("The tarball ", source_archive, " cannot be found")
        return None
    if not os.path.exists(destination_folder):
        os.makedirs(destination_folder)
    source_md5_file = source_archive + ".md5"
    new_md5 = hash_file_md5(source_archive)
    if os.path.exists(source_md5_file):
        with open(source_md5_file, "r") as f:
            old_md5 = f.read()
        # Equality (stripped) instead of the original substring test —
        # equivalent for the digests this function writes itself, but stricter.
        if new_md5 == old_md5.strip():
            print("[TAR] Same archive found. Skipping")
            return destination_folder
    try:
        hFile = tarfile.open(source_archive)
    except tarfile.TarError:
        # tarfile.open raises rather than returning None, so the original
        # "if hFile:" else-branch was unreachable; this makes it real.
        print("Cannot open tarball ", source_archive, " for extraction")
        return None
    print("[TAR] Extracting ", source_archive, " to ", destination_folder)
    extract_ok = False
    try:
        hFile.extractall(destination_folder)
        extract_ok = True
    except tarfile.ExtractError as e:
        print("Error extracting tarfile: skipping")
        print(str(e))
    except IOError as e:
        print("IO error: skipping")
        print(str(e))
    finally:
        hFile.close()  # close even when an unexpected exception propagates
    print("[TAR] Extraction finished")
    if extract_ok:
        # Record the digest so the next run can skip re-extraction.
        with open(source_md5_file, "w") as f:
            f.write(new_md5)
    return destination_folder
def guess_config():
    """Download GCC's config.guess into WORK_FOLDER and run it to get the build triplet.

    Returns the triplet string from config.guess's stdout, or None when the
    download failed or the current shell is not bash-like.
    """
    global WORK_FOLDER
    os.chdir(WORK_FOLDER)
    destination = os.path.join(WORK_FOLDER, "config.guess")
    saved_name, header = request.urlretrieve(CONFIG_GUESS, destination)
    if not os.path.exists("./config.guess"):
        return None
    shell_type = os.getenv("SHELL")
    if not shell_type:
        print("Need to be running in a bash-like shell environment")
        return None
    if "sh" not in shell_type:
        print("guess.config need to be run in bash-like shell")
        return None
    proc = subprocess.run(["sh", "config.guess"], stdout=subprocess.PIPE)
    return proc.stdout.decode("utf-8")
def run_nproc():
    """
    Get cpu count via nproc; leaves one core free when more than two exist.
    :return: number of cpu cores to use for parallel builds
    """
    raw = subprocess.run(["nproc"], stdout=subprocess.PIPE).stdout.decode("utf-8")
    cores = int(raw)
    return cores - 1 if cores > 2 else cores
def set_env(x86_64=True):
    """
    Set environment variables for specific compiler usage: prepend the chosen
    mingw-w64 prefix's bin folder to PATH and force CC=gcc.
    :param x86_64: When True[default], set for 64bit usage, 32bit otherwise
    :return: a map with "old_path" and "old_cc" for restore_env()
    """
    origin = {
        # .get() avoids the KeyError the original raised whenever CC was not
        # exported (the common case); restore_env() treats "" as nothing-to-restore.
        "old_path": os.environ.get("PATH", ""),
        "old_cc": os.environ.get("CC", "")
    }
    current_dir = os.getcwd()
    global WORK_FOLDER, LOCATIONS, TARGET
    os.chdir(WORK_FOLDER)
    prefix_x86 = os.path.join(os.path.abspath(LOCATIONS["mingw_w64_i686_prefix"]), "bin")
    prefix_x86_64 = os.path.join(os.path.abspath(LOCATIONS["mingw_w64_x86_64_prefix"]), "bin")
    old_path_var = origin["old_path"]
    # Prepend the chosen toolchain's bin folder so its tools win PATH lookup.
    if x86_64:
        os.environ["PATH"] = prefix_x86_64 + ":" + old_path_var
    else:
        os.environ["PATH"] = prefix_x86 + ":" + old_path_var
    os.environ["CC"] = "gcc"
    os.chdir(current_dir)
    return origin
def restore_env(old_env):
    """
    Restore PATH and CC from the snapshot produced by set_env().
    Falsy (empty) saved values are left untouched.
    :param old_env: the map returned by set_env()
    :return: None
    """
    for env_name, saved in (("CC", old_env["old_cc"]), ("PATH", old_env["old_path"])):
        if saved:
            os.environ[env_name] = saved
def build_binutils(source_folder, build_folder, system_type):
    """
    Build both x86 and x86_64 versions of Binutils with the host compiler.
    :param source_folder: The folder containing the source code for Binutils
    :param build_folder: A folder outside of source_folder for building
    :param system_type: a string as returned by guess_config function
    :return: None if failed, True on success
    """
    def _run_logged(cmd, failure_message, log_name):
        # Run one build step in the current directory; on failure print the
        # message, dump stdout+stderr into log_name, and report False.
        run_result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if run_result.returncode:
            print(failure_message)
            with open(log_name, "w") as file_handle:
                file_handle.write(run_result.stdout.decode("utf-8"))
                file_handle.write(run_result.stderr.decode("utf-8"))
            return False
        return True

    global WORK_FOLDER, LOCATIONS, TARGET
    os.chdir(WORK_FOLDER)
    i686_prefix = os.path.abspath(LOCATIONS["mingw_w64_i686_prefix"])
    x86_64_prefix = os.path.abspath(LOCATIONS["mingw_w64_x86_64_prefix"])
    full_source_path = os.path.abspath(source_folder)
    full_build_path = os.path.join(os.path.abspath(build_folder), "binutils")
    build_path_x86 = os.path.join(full_build_path, "x86")
    build_path_x86_64 = os.path.join(full_build_path, "x86_64")
    # purge old build files
    if os.path.exists(full_build_path):
        print("Deleting old Binutils build folders")
        rmtree(full_build_path)
    # recreate folders
    os.makedirs(build_path_x86)
    os.makedirs(build_path_x86_64)
    # create prefix paths if absent
    if not os.path.exists(i686_prefix):
        os.makedirs(i686_prefix)
    if not os.path.exists(x86_64_prefix):
        os.makedirs(x86_64_prefix)
    os.environ["CC"] = "gcc"  # always use the host compiler for Binutils
    configure_script = os.path.join(full_source_path, "configure")
    arg_build = "--build=" + system_type
    cpu_count = str(run_nproc())
    # The x86 and x86_64 builds are identical except for target/prefix, so
    # drive both from one loop (the original triplicated code had a
    # copy-paste bug that reported "x86" while installing x86_64).
    build_plans = (
        ("x86", build_path_x86, TARGET["i686"], i686_prefix),
        ("x86_64", build_path_x86_64, TARGET["x86_64"], x86_64_prefix),
    )
    for label, build_dir, target, prefix in build_plans:
        os.chdir(build_dir)
        print("Configuring Binutils " + label + "...")
        configure_cmd = ["sh", configure_script, arg_build,
                         "--target=" + target, "--prefix=" + prefix, "--with-sysroot=" + prefix,
                         "--disable-multilib", "--disable-nls", "--disable-shared", "--enable-static"]
        if not _run_logged(configure_cmd,
                           "Error configuring Binutils " + label + "!",
                           "./configure_error.log"):
            return None
        print("Done configuring Binutils " + label)
        print("Building Binutils " + label)
        if not _run_logged(["make", "-j", cpu_count],
                           "Error building Binutils " + label + "!",
                           "./make_error.log"):
            return None
        print("Finished building Binutils " + label)
        print("Installing Binutils " + label)
        if not _run_logged(["make", "install"],
                           "Error installing Binutils " + label + "!",
                           "./make_error.log"):
            return None
    os.chdir(WORK_FOLDER)
    return True
def build_mingw_header(source_folder, build_folder, system_type):
    """
    Config and Install Mingw-w64 header files and make symlinks.
    :param source_folder: Path to mingw-w64 source folder
    :param build_folder: Build location outside source folder
    :param system_type: string as returned by guess_config()
    :return: None if failed. True if success.
    """
    def _dump_log(result, log_file):
        # Write a failed step's stdout then stderr into log_file.
        with open(log_file, "w") as f:
            f.write(result.stdout.decode("utf-8"))
            f.write(result.stderr.decode("utf-8"))

    def _make_symlinks(prefix, triplet):
        # Create <prefix>/mingw -> <triplet> and <triplet>/include -> ../include,
        # which the later CRT/GCC builds expect.
        os.chdir(prefix)
        target_folder = "./" + triplet
        include_folder = os.path.join(target_folder, "include")
        if not os.path.exists("./mingw"):
            os.symlink(target_folder, "./mingw")
        if not os.path.exists(include_folder):
            os.chdir(target_folder)
            os.symlink("../include", "./include")
            os.chdir(prefix)

    global WORK_FOLDER, LOCATIONS, TARGET
    os.chdir(WORK_FOLDER)
    config_source = os.path.join(os.path.abspath(source_folder), "mingw-w64-headers", "configure")
    prefix_x86 = os.path.abspath(LOCATIONS["mingw_w64_i686_prefix"])
    prefix_x86_64 = os.path.abspath(LOCATIONS["mingw_w64_x86_64_prefix"])
    build_common = os.path.join(os.path.abspath(build_folder), "header")
    build_x86 = os.path.join(build_common, "x86")
    build_x86_64 = os.path.join(build_common, "x86_64")
    arg_build = "--build=" + system_type
    # purge old build files and recreate working folders
    if os.path.exists(build_common):
        rmtree(build_common)
    os.makedirs(build_x86)
    os.makedirs(build_x86_64)
    # remember the original PATH so it can be restored at the end
    old_path_var = os.environ["PATH"]
    # Both architectures follow the same configure/install/symlink sequence
    # (the original duplicated it, and the x86_64 copy mis-reported "x86" in
    # its configure-failure message).
    build_plans = (
        ("x86", prefix_x86, build_x86, TARGET["i686"]),
        ("x86_64", prefix_x86_64, build_x86_64, TARGET["x86_64"]),
    )
    for label, prefix, build_dir, triplet in build_plans:
        os.environ["PATH"] = prefix + ":" + old_path_var
        os.chdir(build_dir)
        print("Configuring Mingw-w64 " + label + " headers...")
        result = subprocess.run(["sh", config_source, "--enable-sdk-all", arg_build,
                                 "--host=" + triplet, "--prefix=" + prefix],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if result.returncode:
            print("Failed to configure Mingw-w64 " + label + " headers!")
            _dump_log(result, os.path.join(build_dir, "config_error.log"))
            return None
        print("Configured Mingw-w64 " + label + " headers")
        print("Installing Mingw-w64 " + label + " headers...")
        result = subprocess.run(["make", "install"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if result.returncode:
            print("Failed to install Mingw-w64 " + label + " headers!")
            _dump_log(result, os.path.join(build_dir, "install_error.log"))
            return None
        print("[OK] Mingw-w64 " + label + " headers installed")
        print("Making symlinks...")
        _make_symlinks(prefix, triplet)
        print("[OK] symlinks done")
    os.chdir(WORK_FOLDER)
    os.environ["PATH"] = old_path_var
    return True
def build_gmp(source_folder, build_folder, system_type):
"""
Build GMP library
:param source_folder: Source folder of GMP
:param build_folder: Folder for holding build
:param system_type: string as returned by guess_config()
:return: gmp_prefix string on success, None on Fail.
"""
global WORK_FOLDER, LOCATIONS
os.chdir(WORK_FOLDER)
abs_source = os.path.abspath(source_folder)
abs_build = os.path.abspath(build_folder)
abs_pkg = os.path.abspath(LOCATIONS["pkg_dir"])
build_common = os.path.join(abs_build, "gmp")
config_path = os.path.join(abs_source, "configure")
uname_info = os.uname()
prefix = os.path.join(abs_pkg, "gmp", "gmp-"+uname_info.machine)
# purge build folder
if os.path.exists(build_common):
rmtree(build_common)
os.makedirs(build_common)
os.chdir(build_common)
arg_build = "--build=" + system_type
arg_prefix = "--prefix=" + prefix
old_env = None
if "64" in uname_info.machine:
old_env = set_env()
else:
old_env = set_env(False)
print("Configuring GMP...")
result = subprocess.run(["sh", config_path, arg_build, arg_prefix, "--enable-fat",
"--disable-shared", "--enable-static", "--enable-cxx",
"CPPFLAGS=-fexceptions"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if result.returncode:
print("Error configuring GMP!")
with open("config_error.log", "w") as f:
message = result.stdout.decode("utf-8")
f.write(message)
message = result.stderr.decode("utf-8")
f.write(message)
restore_env(old_env)
return None
cpu_cores = str(run_nproc())
print("Building GMP...")
result = subprocess.run(["make", "-j", cpu_cores], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if result.returncode: