Commit f88965f

Merge branch 'main' into make-hugify-available-aarhc64
2 parents: fd6c5fb + 0c34d7a

1,855 files changed: +73,603 / -40,981 lines


.ci/compute-projects.sh

Lines changed: 19 additions & 6 deletions
@@ -18,15 +18,14 @@ function compute-projects-to-test() {
   shift
   projects=${@}
   for project in ${projects}; do
-    echo "${project}"
     case ${project} in
     lld)
-      for p in bolt cross-project-tests; do
+      for p in lld bolt cross-project-tests; do
         echo $p
       done
     ;;
     llvm)
-      for p in bolt clang clang-tools-extra lld lldb mlir polly; do
+      for p in llvm bolt clang clang-tools-extra lld lldb mlir polly; do
         echo $p
       done
       # Flang is not stable in Windows CI at the moment
@@ -36,21 +35,30 @@ function compute-projects-to-test() {
     ;;
     clang)
       # lldb is temporarily removed to alleviate Linux pre-commit CI waiting times
-      for p in clang-tools-extra compiler-rt cross-project-tests; do
+      for p in clang clang-tools-extra compiler-rt cross-project-tests; do
         echo $p
       done
     ;;
     clang-tools-extra)
-      echo libc
+      for p in clang-tools-extra libc; do
+        echo $p
+      done
     ;;
     mlir)
+      echo mlir
+      # Flang is not stable in Windows CI at the moment
+      if [[ $isForWindows == 0 ]]; then
+        echo flang
+      fi
+    ;;
+    flang-rt)
       # Flang is not stable in Windows CI at the moment
       if [[ $isForWindows == 0 ]]; then
         echo flang
       fi
     ;;
     *)
-      # Nothing to do
+      echo "${project}"
     ;;
     esac
   done
@@ -65,6 +73,11 @@ function compute-runtimes-to-test() {
         echo $p
       done
     ;;
+    flang)
+      for p in flang-rt; do
+        echo $p
+      done
+    ;;
     *)
       # Nothing to do
     ;;
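
For context, the case statement above implements a project-to-dependents expansion: a modified project is tested together with everything that depends on it, and after this change each project also emits itself (previously that was done by the echo at the top of the loop). Below is a minimal Python sketch of the resulting mapping, hand-written for illustration rather than generated from the script; the llvm entry is trimmed to what the visible hunk shows, and is_for_windows stands in for the script's isForWindows flag:

# Hypothetical sketch of the expansion done by compute-projects-to-test.
DEPENDENTS = {
    "lld": ["lld", "bolt", "cross-project-tests"],
    "llvm": ["llvm", "bolt", "clang", "clang-tools-extra", "lld", "lldb", "mlir", "polly"],
    "clang": ["clang", "clang-tools-extra", "compiler-rt", "cross-project-tests"],
    "clang-tools-extra": ["clang-tools-extra", "libc"],
    "mlir": ["mlir", "flang"],
    "flang-rt": ["flang"],
}

def compute_projects_to_test(is_for_windows: bool, modified: list[str]) -> set[str]:
    """Expand each modified project into the set of projects to test."""
    to_test = set()
    for project in modified:
        for dep in DEPENDENTS.get(project, [project]):
            # Flang is not stable in Windows CI at the moment.
            if dep == "flang" and is_for_windows:
                continue
            to_test.add(dep)
    return to_test

print(sorted(compute_projects_to_test(False, ["clang-tools-extra"])))
# -> ['clang-tools-extra', 'libc']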

.ci/generate-buildkite-pipeline-premerge

Lines changed: 1 addition & 1 deletion
@@ -73,7 +73,7 @@ fi
 # needs while letting them run on the infrastructure provided by LLVM.
 
 # Figure out which projects need to be built on each platform
-all_projects="bolt clang clang-tools-extra compiler-rt cross-project-tests flang libc libclc lld lldb llvm mlir openmp polly pstl"
+all_projects="bolt clang clang-tools-extra compiler-rt cross-project-tests flang flang-rt libc libclc lld lldb llvm mlir openmp polly pstl"
 modified_projects="$(keep-modified-projects ${all_projects})"
 
 linux_projects_to_test=$(exclude-linux $(compute-projects-to-test 0 ${modified_projects}))
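
The one-line change above matters because of how this script composes its helpers: all_projects is filtered down to the projects a pull request actually touches (keep-modified-projects), expanded to dependents (compute-projects-to-test, whose first argument appears to be the isForWindows flag), then trimmed per platform (exclude-linux). A rough, self-contained Python sketch of the first step only, under the assumption that a project counts as modified when a changed file lives under its top-level directory; the real helper is defined elsewhere in this script and may differ:

# Hypothetical stand-in for keep-modified-projects; illustration only.
ALL_PROJECTS = (
    "bolt clang clang-tools-extra compiler-rt cross-project-tests flang "
    "flang-rt libc libclc lld lldb llvm mlir openmp polly pstl"
).split()

def keep_modified_projects(projects: list[str], changed_files: list[str]) -> list[str]:
    # Assumption: "modified" means a changed path starts with the project's
    # top-level directory in the monorepo.
    touched_dirs = {path.split("/", 1)[0] for path in changed_files}
    return [p for p in projects if p in touched_dirs]

changed = ["flang-rt/lib/runtime/stop.cpp", ".ci/metrics/metrics.py"]
print(keep_modified_projects(ALL_PROJECTS, changed))  # -> ['flang-rt']

Without flang-rt in all_projects, a change under flang-rt/ would be dropped before it ever reached compute-projects-to-test, which is presumably what this addition fixes.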

.ci/metrics/metrics.py

Lines changed: 211 additions & 2 deletions
@@ -1,6 +1,8 @@
 import collections
 import datetime
+import dateutil
 import github
+import json
 import logging
 import os
 import requests
@@ -53,6 +55,18 @@
 # by trial and error).
 GRAFANA_METRIC_MAX_AGE_MN = 120
 
+# Lists the BuildKite jobs we want to track. Maps the BuildKite job name to
+# the metric name in Grafana. This is important not to lose metrics history
+# if the workflow name changes.
+BUILDKITE_WORKFLOW_TO_TRACK = {
+    ":linux: Linux x64": "buildkite_linux",
+    ":windows: Windows x64": "buildkite_windows",
+}
+
+# Number of builds to fetch per page. Since we scrape regularly, this can
+# remain small.
+BUILDKITE_GRAPHQL_BUILDS_PER_PAGE = 50
+
 @dataclass
 class JobMetrics:
     job_name: str
@@ -70,6 +84,191 @@ class GaugeMetric:
     time_ns: int
 
 
+def buildkite_fetch_page_build_list(
+    buildkite_token: str, after_cursor: str = None
+) -> list[dict[str, str]]:
+    """Fetches a page of the build list using the GraphQL BuildKite API.
+
+    Returns the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE last running/queued builds,
+    or the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE running/queued builds
+    older than the one pointed to by |after_cursor| if provided.
+    The |after_cursor| value is taken from the previous page returned by the
+    API.
+
+    Args:
+      buildkite_token: the secret token to authenticate GraphQL requests.
+      after_cursor: cursor after which to start the page fetch.
+
+    Returns:
+      The most recent builds after cursor (if set) with the following format:
+        [
+          {
+            "cursor": <value>,
+            "number": <build-number>,
+          }
+        ]
+    """
+
+    BUILDKITE_GRAPHQL_QUERY = """
+    query OrganizationShowQuery {{
+      organization(slug: "llvm-project") {{
+        pipelines(search: "Github pull requests", first: 1) {{
+          edges {{
+            node {{
+              builds (state: [CANCELING, CREATING, FAILING, RUNNING], first: {PAGE_SIZE}, after: {AFTER}) {{
+                edges {{
+                  cursor
+                  node {{
+                    number
+                  }}
+                }}
+              }}
+            }}
+          }}
+        }}
+      }}
+    }}
+    """
+    query = BUILDKITE_GRAPHQL_QUERY.format(
+        PAGE_SIZE=BUILDKITE_GRAPHQL_BUILDS_PER_PAGE,
+        AFTER="null" if after_cursor is None else '"{}"'.format(after_cursor),
+    )
+    query = json.dumps({"query": query})
+    url = "https://graphql.buildkite.com/v1"
+    headers = {
+        "Authorization": "Bearer " + buildkite_token,
+        "Content-Type": "application/json",
+    }
+    data = requests.post(url, data=query, headers=headers).json()
+    # De-nest the build list.
+    if "errors" in data:
+        logging.info("Failed to fetch BuildKite jobs: {}".format(data["errors"]))
+        return []
+    builds = data["data"]["organization"]["pipelines"]["edges"][0]["node"]["builds"][
+        "edges"
+    ]
+    # Fold cursor info into the node dictionary.
+    return [{**x["node"], "cursor": x["cursor"]} for x in builds]
+
+
+def buildkite_get_build_info(build_number: str) -> dict:
+    """Returns all the info associated with the provided build number.
+
+    Note: for unknown reasons, graphql returns no jobs for a given build,
+    while this endpoint does, hence why this uses this API instead of graphql.
+
+    Args:
+      build_number: which build number to fetch info for.
+
+    Returns:
+      The info for the target build, a JSON dictionary.
+    """
+
+    URL = "https://buildkite.com/llvm-project/github-pull-requests/builds/{}.json"
+    return requests.get(URL.format(build_number)).json()
+
+
+def buildkite_get_incomplete_tasks(buildkite_token: str) -> list:
+    """Returns all the running/pending BuildKite builds.
+
+    Args:
+      buildkite_token: the secret token to authenticate GraphQL requests.
+      last_cursor: the cursor to stop at if set. If None, a full page is fetched.
+    """
+    output = []
+    cursor = None
+    while True:
+        page = buildkite_fetch_page_build_list(buildkite_token, cursor)
+        if len(page) == 0:
+            break
+        cursor = page[-1]["cursor"]
+        output += page
+    return output
+
+
+def buildkite_get_metrics(
+    buildkite_token: str, previously_incomplete: set[int]
+) -> (list[JobMetrics], set[int]):
+    """Returns a tuple with:
+
+    - the metrics recorded for newly completed workflow jobs.
+    - the set of workflows still running now.
+
+    Args:
+      buildkite_token: the secret token to authenticate GraphQL requests.
+      previously_incomplete: the set of running workflows the last time this
+        function was called.
+    """
+
+    running_builds = buildkite_get_incomplete_tasks(buildkite_token)
+    incomplete_now = set([x["number"] for x in running_builds])
+    output = []
+
+    for build_id in previously_incomplete:
+        if build_id in incomplete_now:
+            continue
+
+        info = buildkite_get_build_info(build_id)
+        metric_timestamp = dateutil.parser.isoparse(info["finished_at"])
+        for job in info["jobs"]:
+            # This workflow is not interesting to us.
+            if job["name"] not in BUILDKITE_WORKFLOW_TO_TRACK:
+                continue
+
+            created_at = dateutil.parser.isoparse(job["created_at"])
+            scheduled_at = (
+                created_at
+                if job["scheduled_at"] is None
+                else dateutil.parser.isoparse(job["scheduled_at"])
+            )
+            started_at = (
+                scheduled_at
+                if job["started_at"] is None
+                else dateutil.parser.isoparse(job["started_at"])
+            )
+            if job["canceled_at"] is None:
+                finished_at = (
+                    started_at
+                    if job["finished_at"] is None
+                    else dateutil.parser.isoparse(job["finished_at"])
+                )
+            else:
+                finished_at = dateutil.parser.isoparse(job["canceled_at"])
+
+            job_name = BUILDKITE_WORKFLOW_TO_TRACK[job["name"]]
+            queue_time = (started_at - scheduled_at).seconds
+            run_time = (finished_at - started_at).seconds
+            status = bool(job["passed"])
+
+            # Grafana will refuse to ingest metrics older than ~2 hours, so we
+            # should avoid sending historical data.
+            metric_age_mn = (
+                datetime.datetime.now(datetime.timezone.utc) - metric_timestamp
+            ).total_seconds() / 60
+            if metric_age_mn > GRAFANA_METRIC_MAX_AGE_MN:
+                logging.warning(
+                    f"Job {job['name']} from workflow {build_id} dropped due"
+                    + f" to staleness: {metric_age_mn}mn old."
+                )
+                continue
+
+            metric_timestamp_ns = int(metric_timestamp.timestamp()) * 10**9
+            workflow_id = build_id
+            workflow_name = "Github pull requests"
+            output.append(
+                JobMetrics(
+                    job_name,
+                    queue_time,
+                    run_time,
+                    status,
+                    metric_timestamp_ns,
+                    workflow_id,
+                    workflow_name,
+                )
+            )
+
+    return output, incomplete_now
+
 def github_get_metrics(
     github_repo: github.Repository, last_workflows_seen_as_completed: set[int]
 ) -> tuple[list[JobMetrics], int]:
@@ -195,7 +394,7 @@ def github_get_metrics(
             datetime.datetime.now(datetime.timezone.utc) - completed_at
         ).total_seconds() / 60
         if metric_age_mn > GRAFANA_METRIC_MAX_AGE_MN:
-            logging.info(
+            logging.warning(
                 f"Job {job.id} from workflow {task.id} dropped due"
                 + f" to staleness: {metric_age_mn}mn old."
             )
@@ -292,23 +491,33 @@ def upload_metrics(workflow_metrics, metrics_userid, api_key):
 def main():
     # Authenticate with Github
     github_auth = Auth.Token(os.environ["GITHUB_TOKEN"])
+    buildkite_token = os.environ["BUILDKITE_TOKEN"]
     grafana_api_key = os.environ["GRAFANA_API_KEY"]
     grafana_metrics_userid = os.environ["GRAFANA_METRICS_USERID"]
 
     # The last workflow this script processed.
     # Because the Github queries are broken, we'll simply log a 'processed'
     # bit for the last COUNT_TO_PROCESS workflows.
     gh_last_workflows_seen_as_completed = set()
+    # Stores the list of pending/running builds in BuildKite we need to check
+    # at the next iteration.
+    bk_incomplete = set()
 
     # Enter the main loop. Every five minutes we wake up and dump metrics for
     # the relevant jobs.
     while True:
         github_object = Github(auth=github_auth)
         github_repo = github_object.get_repo("llvm/llvm-project")
 
-        metrics, gh_last_workflows_seen_as_completed = github_get_metrics(
+        gh_metrics, gh_last_workflows_seen_as_completed = github_get_metrics(
             github_repo, gh_last_workflows_seen_as_completed
         )
+
+        bk_metrics, bk_incomplete = buildkite_get_metrics(
+            buildkite_token, bk_incomplete
+        )
+
+        metrics = gh_metrics + bk_metrics
         upload_metrics(metrics, grafana_metrics_userid, grafana_api_key)
         logging.info(f"Uploaded {len(metrics)} metrics")
 
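
To make the de-nesting in buildkite_fetch_page_build_list easier to follow, here is a hypothetical response shaped like the GraphQL query above (the values are invented, not captured from the BuildKite API) and the flattening that the final list comprehension performs:

# Hypothetical GraphQL response; structure follows the query, values are made up.
data = {
    "data": {
        "organization": {
            "pipelines": {
                "edges": [
                    {
                        "node": {
                            "builds": {
                                "edges": [
                                    {"cursor": "abc==", "node": {"number": 12345}},
                                    {"cursor": "def==", "node": {"number": 12344}},
                                ]
                            }
                        }
                    }
                ]
            }
        }
    }
}

builds = data["data"]["organization"]["pipelines"]["edges"][0]["node"]["builds"]["edges"]
# Fold the cursor into each build record, as the script does.
flattened = [{**x["node"], "cursor": x["cursor"]} for x in builds]
print(flattened)
# -> [{'number': 12345, 'cursor': 'abc=='}, {'number': 12344, 'cursor': 'def=='}]

buildkite_get_incomplete_tasks feeds the last cursor of each page back in as after_cursor and stops when a page comes back empty; buildkite_get_metrics then reports only the builds that were in previously_incomplete but have since dropped out of the running list, so a finished build is turned into metrics on the first scrape after it completes.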

.ci/metrics/requirements.lock.txt

Lines changed: 8 additions & 0 deletions
@@ -247,10 +247,18 @@ pynacl==1.5.0 \
     --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \
     --hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543
     # via pygithub
+python-dateutil==2.9.0.post0 \
+    --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
+    --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
+    # via -r ./requirements.txt
 requests==2.32.3 \
     --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
     --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
     # via pygithub
+six==1.17.0 \
+    --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
+    --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
+    # via python-dateutil
 typing-extensions==4.12.2 \
     --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
     --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8

.ci/metrics/requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -1 +1,2 @@
 pygithub==2.5.0
+python-dateutil==2.9.0.post0

.ci/monolithic-linux.sh

Lines changed: 10 additions & 0 deletions
@@ -65,6 +65,7 @@ cmake -S "${MONOREPO_ROOT}"/llvm -B "${BUILD_DIR}" \
       -D CMAKE_CXX_FLAGS=-gmlt \
       -D LLVM_CCACHE_BUILD=ON \
       -D MLIR_ENABLE_BINDINGS_PYTHON=ON \
+      -D FLANG_ENABLE_FLANG_RT=OFF \
       -D CMAKE_INSTALL_PREFIX="${INSTALL_DIR}"
 
 echo "--- ninja"
@@ -95,6 +96,9 @@ if [[ "${runtimes}" != "" ]]; then
   cmake -S "${MONOREPO_ROOT}/runtimes" -B "${RUNTIMES_BUILD_DIR}" -GNinja \
       -D CMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \
       -D CMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \
+      -D CMAKE_Fortran_COMPILER="${BUILD_DIR}/bin/flang" \
+      -D CMAKE_Fortran_COMPILER_WORKS=ON \
+      -D LLVM_BINARY_DIR="${BUILD_DIR}" \
      -D LLVM_ENABLE_RUNTIMES="${runtimes}" \
      -D LIBCXX_CXX_ABI=libcxxabi \
      -D CMAKE_BUILD_TYPE=RelWithDebInfo \
@@ -113,6 +117,9 @@ if [[ "${runtimes}" != "" ]]; then
   cmake -S "${MONOREPO_ROOT}/runtimes" -B "${RUNTIMES_BUILD_DIR}" -GNinja \
      -D CMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \
      -D CMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \
+      -D CMAKE_Fortran_COMPILER="${BUILD_DIR}/bin/flang" \
+      -D CMAKE_Fortran_COMPILER_WORKS=ON \
+      -D LLVM_BINARY_DIR="${BUILD_DIR}" \
      -D LLVM_ENABLE_RUNTIMES="${runtimes}" \
      -D LIBCXX_CXX_ABI=libcxxabi \
      -D CMAKE_BUILD_TYPE=RelWithDebInfo \
@@ -131,6 +138,9 @@ if [[ "${runtimes}" != "" ]]; then
   cmake -S "${MONOREPO_ROOT}/runtimes" -B "${RUNTIMES_BUILD_DIR}" -GNinja \
      -D CMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \
      -D CMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \
+      -D CMAKE_Fortran_COMPILER="${BUILD_DIR}/bin/flang" \
+      -D CMAKE_Fortran_COMPILER_WORKS=ON \
+      -D LLVM_BINARY_DIR="${BUILD_DIR}" \
      -D LLVM_ENABLE_RUNTIMES="${runtimes}" \
      -D LIBCXX_CXX_ABI=libcxxabi \
      -D CMAKE_BUILD_TYPE=RelWithDebInfo \
