run-dart-analysis.py · executable file · 112 lines (96 loc) · 3.57 KB
#!/usr/bin/env python
# Copyright 2016 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import multiprocessing
import os
import paths
import Queue
import subprocess
import sys
import threading
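# Note on imports: `paths` appears to be a repository-local helper module that
# defines FUCHSIA_ROOT (the checkout root used throughout this script), and
# `Queue` is the Python 2 name of the thread-safe queue module (renamed to
# `queue` in Python 3).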

def gn_describe(out, path):
    gn = os.path.join(paths.FUCHSIA_ROOT, 'packages', 'gn', 'gn.py')
    data = subprocess.check_output(
        [gn, 'desc', out, path, '--format=json'], cwd=paths.FUCHSIA_ROOT)
    return json.loads(data)
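# For orientation, a matching Dart analyzer target in the `gn desc` JSON is
# expected to look roughly like the sketch below. The field names and the
# 'action' / gen_analyzer_invocation.py values mirror the checks in main();
# the target name and output path are made-up examples.
#
#   "//apps/sysui:sysui_analysis": {
#       "type": "action",
#       "script": "//build/dart/gen_analyzer_invocation.py",
#       "outputs": ["//out/debug-x86-64/gen/apps/sysui/sysui_analysis.sh"]
#   }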

class WorkerThread(threading.Thread):
    '''
    A worker thread to run scripts from a queue and return exit codes and
    output on a queue.
    '''

    def __init__(self, script_queue, result_queue, args):
        threading.Thread.__init__(self)
        self.script_queue = script_queue
        self.result_queue = result_queue
        self.args = args

    def run(self):
        while True:
            try:
                script = self.script_queue.get(False)
            except Queue.Empty, e:
                # no more scripts to run
                return
            job = subprocess.Popen(
                [script] + self.args,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            stdout, stderr = job.communicate()
            self.result_queue.put((script, job.returncode, stdout + stderr))

def main():
    parser = argparse.ArgumentParser(
        description='''Run Dart analysis for Dart build targets
Extra flags will be passed to the analyzer.
''')
    parser.add_argument(
        '--out',
        help='Path to the base output directory, e.g. out/debug-x86-64',
        required=True)
    parser.add_argument(
        '--tree',
        help='Restrict analysis to a source subtree, e.g. //apps/sysui/*',
        default='*')
    args, extras = parser.parse_known_args()

    # Ask gn about all the dart analyzer scripts.
    scripts = []
    targets = gn_describe(args.out, args.tree)
    for target_name, properties in targets.items():
        if ('type' not in properties or
                properties['type'] != 'action' or
                'script' not in properties or
                properties['script'] != '//build/dart/gen_analyzer_invocation.py' or
                'outputs' not in properties or
                not len(properties['outputs'])):
            continue
        script_path = properties['outputs'][0]
        script_path = script_path[2:]  # Remove the leading //
        scripts.append(os.path.join(paths.FUCHSIA_ROOT, script_path))

    # Put all the analyzer scripts in a queue that workers will work from.
    script_queue = Queue.Queue()
    for script in scripts:
        script_queue.put(script)

    # Make a queue to receive results from workers.
    result_queue = Queue.Queue()

    # Track return codes from scripts.
    script_results = []
    failed_scripts = []

    # Create a worker thread for each CPU on the machine.
    for i in range(multiprocessing.cpu_count()):
        WorkerThread(script_queue, result_queue, extras).start()

    # Handle results from workers.
    while len(script_results) < len(scripts):
        script, returncode, output = result_queue.get(True)
        script_results.append(returncode)
        if returncode != 0:
            failed_scripts.append(script)
            print '----------------------------------------------------------'
            print output

    if len(failed_scripts):
        failed_scripts.sort()
        print 'Analysis failed in:'
        for script in failed_scripts:
            print ' %s' % script
        return 1


if __name__ == '__main__':
    sys.exit(main())
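As a usage sketch (the --out and --tree values below are just the examples
given in the argument help, and --fatal-warnings stands in for any extra flag
meant for the analyzer), the script can be invoked as:

    ./run-dart-analysis.py --out out/debug-x86-64 --tree '//apps/sysui/*' --fatal-warnings

Any flags the script does not recognize itself are passed through to the
generated analyzer invocation scripts it runs.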