sync code with last improvements from OpenBSD
commit 88965415ff
26235 changed files with 29195616 additions and 0 deletions
lib/mesa/bin/.editorconfig (new normal file, 2 lines)
@@ -0,0 +1,2 @@
[*.sh]
indent_style = tab
lib/mesa/bin/__init__.py (new normal file, 0 lines)
lib/mesa/bin/commit_in_branch.py (new executable file, 141 lines)
@@ -0,0 +1,141 @@
#!/usr/bin/env python3

import argparse
import subprocess
import sys


def print_(args: argparse.Namespace, success: bool, message: str) -> None:
    """
    Print function with extra coloring when supported and/or requested,
    and with a "quiet" switch
    """

    COLOR_SUCCESS = '\033[32m'
    COLOR_FAILURE = '\033[31m'
    COLOR_RESET = '\033[0m'

    if args.quiet:
        return

    if args.color == 'auto':
        use_colors = sys.stdout.isatty()
    else:
        use_colors = args.color == 'always'

    s = ''
    if use_colors:
        if success:
            s += COLOR_SUCCESS
        else:
            s += COLOR_FAILURE

    s += message

    if use_colors:
        s += COLOR_RESET

    print(s)


def is_commit_valid(commit: str) -> bool:
    ret = subprocess.call(['git', 'cat-file', '-e', commit],
                          stdout=subprocess.DEVNULL,
                          stderr=subprocess.DEVNULL)
    return ret == 0


def branch_has_commit(upstream: str, branch: str, commit: str) -> bool:
    """
    Returns True if the commit is actually present in the branch
    """
    ret = subprocess.call(['git', 'merge-base', '--is-ancestor',
                           commit, upstream + '/' + branch],
                          stdout=subprocess.DEVNULL,
                          stderr=subprocess.DEVNULL)
    return ret == 0


def branch_has_backport_of_commit(upstream: str, branch: str, commit: str) -> str:
    """
    Returns the commit hash if the commit has been backported to the branch,
    or an empty string if it hasn't
    """
    out = subprocess.check_output(['git', 'log', '--format=%H',
                                   upstream + '..' + upstream + '/' + branch,
                                   '--grep', 'cherry picked from commit ' + commit],
                                  stderr=subprocess.DEVNULL)
    return out.decode().strip()


def canonicalize_commit(commit: str) -> str:
    """
    Takes a commit-ish and returns a commit sha1 if the commit exists
    """

    # Make sure input is valid first
    if not is_commit_valid(commit):
        raise argparse.ArgumentTypeError('invalid commit identifier: ' + commit)

    out = subprocess.check_output(['git', 'rev-parse', commit],
                                  stderr=subprocess.DEVNULL)
    return out.decode().strip()


def validate_branch(branch: str) -> str:
    if '/' not in branch:
        raise argparse.ArgumentTypeError('must be in the form `remote/branch`')

    out = subprocess.check_output(['git', 'remote', '--verbose'],
                                  stderr=subprocess.DEVNULL)
    remotes = out.decode().splitlines()
    upstream, _ = branch.split('/', 1)
    valid_remote = False
    for line in remotes:
        if line.startswith(upstream + '\t'):
            valid_remote = True

    if not valid_remote:
        raise argparse.ArgumentTypeError('Invalid remote: ' + upstream)

    if not is_commit_valid(branch):
        raise argparse.ArgumentTypeError('Invalid branch: ' + branch)

    return branch


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="""
    Returns 0 if the commit is present in the branch,
    1 if it's not,
    and 2 if it couldn't be determined (eg. invalid commit)
    """)
    parser.add_argument('commit',
                        type=canonicalize_commit,
                        help='commit sha1')
    parser.add_argument('branch',
                        type=validate_branch,
                        help='branch to check, in the form `remote/branch`')
    parser.add_argument('--quiet',
                        action='store_true',
                        help='suppress all output; exit code can still be used')
    parser.add_argument('--color',
                        choices=['auto', 'always', 'never'],
                        default='auto',
                        help='colorize output (default: true if stdout is a terminal)')
    args = parser.parse_args()

    upstream, branch = args.branch.split('/', 1)

    if branch_has_commit(upstream, branch, args.commit):
        print_(args, True, 'Commit ' + args.commit + ' is in branch ' + branch)
        exit(0)

    backport = branch_has_backport_of_commit(upstream, branch, args.commit)
    if backport:
        print_(args, True,
               'Commit ' + args.commit + ' was backported to branch ' + branch + ' as commit ' + backport)
        exit(0)

    print_(args, False, 'Commit ' + args.commit + ' is NOT in branch ' + branch)
    exit(1)
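
The argparse description above defines a small exit-code contract: 0 when the commit is in the branch (directly or as a backport), 1 when it is not, and 2 when the answer could not be determined. A minimal sketch of driving the script from other tooling on that contract; the sha1 and `origin/20.1` are illustrative placeholders, and the call assumes it runs from a Mesa checkout:

import subprocess

# Hypothetical inputs; substitute a real commit and remote/branch.
ret = subprocess.call(['bin/commit_in_branch.py', '--quiet',
                       'e58a10af640ba58b6001f5c5ad750b782547da76', 'origin/20.1'])
if ret == 0:
    print('commit is in the branch (directly or as a backport)')
elif ret == 1:
    print('commit is NOT in the branch')
else:
    print('could not be determined (invalid commit or branch)')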
lib/mesa/bin/commit_in_branch_test.py (new normal file, 120 lines)
@@ -0,0 +1,120 @@
import argparse
import pytest  # type: ignore
import subprocess

from .commit_in_branch import (
    is_commit_valid,
    branch_has_commit,
    branch_has_backport_of_commit,
    canonicalize_commit,
    validate_branch,
)


def get_upstream() -> str:
    # Let's assume main is bound to the upstream remote and not a fork
    out = subprocess.check_output(['git', 'for-each-ref',
                                   '--format=%(upstream)',
                                   'refs/heads/main'],
                                  stderr=subprocess.DEVNULL)
    return out.decode().strip().split('/')[2]


@pytest.mark.parametrize(
    'commit, expected',
    [
        ('20.1-branchpoint', True),
        ('main', True),
        ('e58a10af640ba58b6001f5c5ad750b782547da76', True),
        ('d043d24654c851f0be57dbbf48274b5373dea42b', True),
        ('dd2bd68fa69124c86cd008b256d06f44fab8e6cd', True),
        ('0000000000000000000000000000000000000000', False),
        ('not-even-a-valid-commit-format', False),
    ])
def test_canonicalize_commit(commit: str, expected: bool) -> None:
    if expected:
        assert canonicalize_commit(commit)
    else:
        try:
            assert canonicalize_commit(commit)
        except argparse.ArgumentTypeError:
            return
        assert False


@pytest.mark.parametrize(
    'commit, expected',
    [
        (get_upstream() + '/20.1', True),
        (get_upstream() + '/staging/20.1', True),
        (get_upstream() + '/main', True),
        ('20.1', False),
        ('main', False),
        ('e58a10af640ba58b6001f5c5ad750b782547da76', False),
        ('d043d24654c851f0be57dbbf48274b5373dea42b', False),
        ('dd2bd68fa69124c86cd008b256d06f44fab8e6cd', False),
        ('0000000000000000000000000000000000000000', False),
        ('not-even-a-valid-commit-format', False),
    ])
def test_validate_branch(commit: str, expected: bool) -> None:
    if expected:
        assert validate_branch(commit)
    else:
        try:
            assert validate_branch(commit)
        except argparse.ArgumentTypeError:
            return
        assert False


@pytest.mark.parametrize(
    'commit, expected',
    [
        ('main', True),
        ('20.1-branchpoint', True),
        ('20.1', False),
        (get_upstream() + '/20.1', True),
        (get_upstream() + '/staging/20.1', True),
        ('e58a10af640ba58b6001f5c5ad750b782547da76', True),
        ('d043d24654c851f0be57dbbf48274b5373dea42b', True),
        ('dd2bd68fa69124c86cd008b256d06f44fab8e6cd', True),
        ('0000000000000000000000000000000000000000', False),
        ('not-even-a-valid-commit-format', False),
    ])
def test_is_commit_valid(commit: str, expected: bool) -> None:
    assert is_commit_valid(commit) == expected


@pytest.mark.parametrize(
    'branch, commit, expected',
    [
        ('20.1', '20.1-branchpoint', True),
        ('20.1', '20.0', False),
        ('20.1', 'main', False),
        ('20.1', 'e58a10af640ba58b6001f5c5ad750b782547da76', True),
        ('20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
        ('staging/20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', True),
        ('20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', False),
        ('main', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', True),
        ('20.0', 'd043d24654c851f0be57dbbf48274b5373dea42b', False),
    ])
def test_branch_has_commit(branch: str, commit: str, expected: bool) -> None:
    upstream = get_upstream()
    assert branch_has_commit(upstream, branch, commit) == expected


@pytest.mark.parametrize(
    'branch, commit, expected',
    [
        ('20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
        ('staging/20.1', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', 'd043d24654c851f0be57dbbf48274b5373dea42b'),
        ('20.1', '20.1-branchpoint', ''),
        ('20.1', '20.0', ''),
        ('20.1', '20.2', ''),
        ('20.1', 'main', ''),
        ('20.1', 'd043d24654c851f0be57dbbf48274b5373dea42b', ''),
        ('20.0', 'dd2bd68fa69124c86cd008b256d06f44fab8e6cd', ''),
    ])
def test_branch_has_backport_of_commit(branch: str, commit: str, expected: str) -> None:
    upstream = get_upstream()
    assert branch_has_backport_of_commit(upstream, branch, commit) == expected
lib/mesa/bin/gen_calendar_entries.py (new executable file, 253 lines)
@@ -0,0 +1,253 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: MIT

# Copyright © 2021 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""Helper script for manipulating the release calendar."""

from __future__ import annotations
import argparse
import csv
import contextlib
import datetime
import pathlib
import subprocess
import typing

if typing.TYPE_CHECKING:
    import _csv
    from typing_extensions import Protocol

    class RCArguments(Protocol):
        """Typing information for release-candidate command arguments."""

        manager: str

    class FinalArguments(Protocol):
        """Typing information for release command arguments."""

        series: str
        manager: str
        zero_released: bool

    class ExtendArguments(Protocol):
        """Typing information for extend command arguments."""

        series: str
        count: int


CalendarRowType = typing.Tuple[typing.Optional[str], str, str, str, typing.Optional[str]]


_ROOT = pathlib.Path(__file__).parent.parent
CALENDAR_CSV = _ROOT / 'docs' / 'release-calendar.csv'
VERSION = _ROOT / 'VERSION'
LAST_RELEASE = 'This is the last planned release of the {}.x series.'
OR_FINAL = 'Or {}.0 final.'


def read_calendar() -> typing.List[CalendarRowType]:
    """Read the calendar and return a list of its rows."""
    with CALENDAR_CSV.open('r') as f:
        return [typing.cast('CalendarRowType', tuple(r)) for r in csv.reader(f)]


def commit(message: str) -> None:
    """Commit the changes to the release-calendar.csv file."""
    subprocess.run(['git', 'commit', str(CALENDAR_CSV), '--message', message])


def _calculate_release_start(major: str, minor: str) -> datetime.date:
    """Calculate the start of the release for release candidates.

    This is quarterly, on the second Wednesday, in January, April, July, and October.
    """
    quarter = datetime.date.fromisoformat(f'20{major}-0{[1, 4, 7, 10][int(minor)]}-01')

    # Wednesday is 3
    day = quarter.isoweekday()
    if day > 3:
        # this will walk back into the previous month, it's much simpler to
        # duplicate the 14 than handle the calculations for the month and year
        # changing.
        return quarter.replace(day=quarter.day - day + 3 + 14)
    elif day < 3:
        quarter = quarter.replace(day=quarter.day + 3 - day)
    return quarter.replace(day=quarter.day + 14)


def release_candidate(args: RCArguments) -> None:
    """Add release candidate entries."""
    with VERSION.open('r') as f:
        version = f.read().rstrip('-devel')
    major, minor, _ = version.split('.')
    date = _calculate_release_start(major, minor)

    data = read_calendar()

    with CALENDAR_CSV.open('w', newline='') as f:
        writer = csv.writer(f)
        writer.writerows(data)

        writer.writerow([f'{major}.{minor}', date.isoformat(), f'{major}.{minor}.0-rc1', args.manager])
        for row in range(2, 4):
            date = date + datetime.timedelta(days=7)
            writer.writerow([None, date.isoformat(), f'{major}.{minor}.0-rc{row}', args.manager])
        date = date + datetime.timedelta(days=7)
        writer.writerow([None, date.isoformat(), f'{major}.{minor}.0-rc4', args.manager, OR_FINAL.format(f'{major}.{minor}')])

    commit(f'docs: Add calendar entries for {major}.{minor} release candidates.')


def _calculate_next_release_date(next_is_zero: bool) -> datetime.date:
    """Calculate the date of the next release.

    If the next is .0, we have the release in seven days; if the next is .1,
    then it's in 14.
    """
    date = datetime.date.today()
    day = date.isoweekday()
    if day < 3:
        delta = 3 - day
    elif day > 3:
        # this will walk back into the previous month, it's much simpler to
        # duplicate the 14 than handle the calculations for the month and year
        # changing.
        delta = (3 - day)
    else:
        delta = 0
    delta += 7
    if not next_is_zero:
        delta += 7
    return date + datetime.timedelta(days=delta)


def final_release(args: FinalArguments) -> None:
    """Add final release entries."""
    data = read_calendar()
    date = _calculate_next_release_date(not args.zero_released)

    with CALENDAR_CSV.open('w', newline='') as f:
        writer = csv.writer(f)
        writer.writerows(data)

        base = 1 if args.zero_released else 0

        writer.writerow([args.series, date.isoformat(), f'{args.series}.{base}', args.manager])
        for row in range(base + 1, 3):
            date = date + datetime.timedelta(days=14)
            writer.writerow([None, date.isoformat(), f'{args.series}.{row}', args.manager])
        date = date + datetime.timedelta(days=14)
        writer.writerow([None, date.isoformat(), f'{args.series}.3', args.manager, LAST_RELEASE.format(args.series)])

    commit(f'docs: Add calendar entries for {args.series} release.')


def extend(args: ExtendArguments) -> None:
    """Extend a release."""
    @contextlib.contextmanager
    def write_existing(writer: _csv._writer, current: typing.List[CalendarRowType]) -> typing.Iterator[CalendarRowType]:
        """Write the original file, yield to insert new entries.

        This is a bit clever; basically what happens is it writes out the
        original csv file until it reaches the start of the release after the
        one we're appending, then it yields the last row. When control is
        returned it writes out the rest of the original calendar data.
        """
        last_row: typing.Optional[CalendarRowType] = None
        in_wanted = False
        for row in current:
            if in_wanted and row[0]:
                in_wanted = False
                assert last_row is not None
                yield last_row
            if row[0] == args.series:
                in_wanted = True
            if in_wanted and len(row) >= 5 and row[4] in {LAST_RELEASE.format(args.series), OR_FINAL.format(args.series)}:
                # If this was the last planned release and we're adding more,
                # then we need to remove that message and add it elsewhere
                r = list(row)
                r[4] = None
                # Mypy can't figure this out…
                row = typing.cast('CalendarRowType', tuple(r))
            last_row = row
            writer.writerow(row)
        # If this is the only entry we can hit a case where the contextmanager
        # hasn't yielded
        if in_wanted:
            yield row

    current = read_calendar()

    with CALENDAR_CSV.open('w', newline='') as f:
        writer = csv.writer(f)
        with write_existing(writer, current) as row:
            # Get rid of -rcX as well
            if '-rc' in row[2]:
                first_point = int(row[2].split('rc')[-1]) + 1
                template = '{}.0-rc{}'
                days = 7
            else:
                first_point = int(row[2].split('-')[0].split('.')[-1]) + 1
                template = '{}.{}'
                days = 14

            date = datetime.date.fromisoformat(row[1])
            for i in range(first_point, first_point + args.count):
                date = date + datetime.timedelta(days=days)
                r = [None, date.isoformat(), template.format(args.series, i), row[3], None]
                if i == first_point + args.count - 1:
                    if days == 14:
                        r[4] = LAST_RELEASE.format(args.series)
                    else:
                        r[4] = OR_FINAL.format(args.series)
                writer.writerow(r)

    commit(f'docs: Extend calendar entries for {args.series} by {args.count} releases.')


def main() -> None:
    parser = argparse.ArgumentParser()
    sub = parser.add_subparsers()

    rc = sub.add_parser('release-candidate', aliases=['rc'], help='Generate calendar entries for a release candidate.')
    rc.add_argument('manager', help="the name of the person managing the release.")
    rc.set_defaults(func=release_candidate)

    fr = sub.add_parser('release', help='Generate calendar entries for a final release.')
    fr.add_argument('manager', help="the name of the person managing the release.")
    fr.add_argument('series', help='The series to extend, such as "29.3" or "30.0".')
    fr.add_argument('--zero-released', action='store_true', help='The .0 release was today, the next release is .1')
    fr.set_defaults(func=final_release)

    ex = sub.add_parser('extend', help='Generate additional entries for a release.')
    ex.add_argument('series', help='The series to extend, such as "29.3" or "30.0".')
    ex.add_argument('count', type=int, help='The number of new entries to add.')
    ex.set_defaults(func=extend)

    args = parser.parse_args()
    args.func(args)


if __name__ == "__main__":
    main()
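
To make the second-Wednesday arithmetic in _calculate_release_start concrete, here is a short worked example for the 21.0 series; the resulting date matches the ones used by the tests in gen_calendar_entries_test.py below.

import datetime

# Series 21.0: [1, 4, 7, 10][0] selects January, so the quarter starts on 2021-01-01.
quarter = datetime.date.fromisoformat('2021-01-01')
day = quarter.isoweekday()                              # Friday -> 5, which is > 3 (Wednesday)
print(quarter.replace(day=quarter.day - day + 3 + 14))  # 2021-01-13, the second Wednesday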
lib/mesa/bin/gen_calendar_entries_test.py (new normal file, 319 lines)
@@ -0,0 +1,319 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: MIT

# Copyright © 2021 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import annotations
from unittest import mock
import argparse
import csv
import contextlib
import datetime
import tempfile
import os
import pathlib
import typing

import pytest

from . import gen_calendar_entries


@contextlib.contextmanager
def mock_csv(data: typing.List[gen_calendar_entries.CalendarRowType]) -> typing.Iterator[None]:
    """Replace the actual CSV data with our test data."""
    with tempfile.TemporaryDirectory() as d:
        c = os.path.join(d, 'calendar.csv')
        with open(c, 'w') as f:
            writer = csv.writer(f)
            writer.writerows(data)

        with mock.patch('bin.gen_calendar_entries.CALENDAR_CSV', pathlib.Path(c)):
            yield


@pytest.fixture(autouse=True, scope='module')
def disable_git_commits() -> None:
    """Mock out the commit function so no git commits are made during testing."""
    with mock.patch('bin.gen_calendar_entries.commit', mock.Mock()):
        yield


class TestReleaseStart:

    def test_first_is_wednesday(self) -> None:
        d = gen_calendar_entries._calculate_release_start('20', '0')
        assert d.day == 15
        assert d.month == 1
        assert d.year == 2020

    def test_first_is_before_wednesday(self) -> None:
        d = gen_calendar_entries._calculate_release_start('19', '0')
        assert d.day == 16
        assert d.month == 1
        assert d.year == 2019

    def test_first_is_after_wednesday(self) -> None:
        d = gen_calendar_entries._calculate_release_start('21', '0')
        assert d.day == 13
        assert d.month == 1
        assert d.year == 2021


class TestNextReleaseDate:

    @contextlib.contextmanager
    def _patch_date(date: datetime.date) -> typing.Iterator[None]:
        mdate = mock.Mock()
        mdate.today = mock.Mock(return_value=date)
        with mock.patch('bin.gen_calendar_entries.datetime.date', mdate):
            yield

    class TestIsWeds:

        @pytest.fixture(scope='class', autouse=True)
        def data(self) -> None:
            date = datetime.date(2021, 1, 6)
            with TestNextReleaseDate._patch_date(date):
                yield

        @pytest.mark.parametrize(
            'is_zero, expected',
            [
                (True, 13),
                (False, 20),
            ],
        )
        def test(self, is_zero: bool, expected: int) -> None:
            date = gen_calendar_entries._calculate_next_release_date(is_zero)
            assert date.day == expected

    class TestBeforeWeds:

        @pytest.fixture(scope='class', autouse=True)
        def data(self) -> None:
            date = datetime.date(2021, 1, 5)
            with TestNextReleaseDate._patch_date(date):
                yield

        @pytest.mark.parametrize(
            'is_zero, expected',
            [
                (True, 13),
                (False, 20),
            ],
        )
        def test(self, is_zero: bool, expected: int) -> None:
            date = gen_calendar_entries._calculate_next_release_date(is_zero)
            assert date.day == expected

    class TestAfterWeds:

        @pytest.fixture(scope='class', autouse=True)
        def data(self) -> None:
            date = datetime.date(2021, 1, 8)
            with TestNextReleaseDate._patch_date(date):
                yield

        @pytest.mark.parametrize(
            'is_zero, expected',
            [
                (True, 13),
                (False, 20),
            ],
        )
        def test(self, is_zero: bool, expected: int) -> None:
            date = gen_calendar_entries._calculate_next_release_date(is_zero)
            assert date.day == expected


class TestRC:

    ORIGINAL_DATA = [
        ('20.3', '2021-01-13', '20.3.3', 'Dylan Baker', ''),
        ('', '2021-01-27', '20.3.4', 'Dylan Baker', 'Last planned release of the 20.3.x series'),
    ]

    @pytest.fixture(autouse=True, scope='class')
    def mock_version(self) -> None:
        """Keep the version set at a specific value."""
        with tempfile.TemporaryDirectory() as d:
            v = os.path.join(d, 'version')
            with open(v, 'w') as f:
                f.write('21.0.0-devel\n')

            with mock.patch('bin.gen_calendar_entries.VERSION', pathlib.Path(v)):
                yield

    @pytest.fixture(autouse=True)
    def csv(self) -> None:
        """Inject our test data."""
        with mock_csv(self.ORIGINAL_DATA):
            yield

    def test_basic(self) -> None:
        args: gen_calendar_entries.RCArguments = argparse.Namespace()
        args.manager = "Dylan Baker"
        gen_calendar_entries.release_candidate(args)

        expected = self.ORIGINAL_DATA.copy()
        expected.append(('21.0', '2021-01-13', f'21.0.0-rc1', 'Dylan Baker'))
        expected.append((    '', '2021-01-20', f'21.0.0-rc2', 'Dylan Baker'))
        expected.append((    '', '2021-01-27', f'21.0.0-rc3', 'Dylan Baker'))
        expected.append((    '', '2021-02-03', f'21.0.0-rc4', 'Dylan Baker', 'Or 21.0.0 final.'))

        actual = gen_calendar_entries.read_calendar()

        assert actual == expected


class TestExtend:

    def test_one_release(self) -> None:
        data = [
            ('20.3', '2021-01-13', '20.3.3', 'Dylan Baker', ''),
            ('', '2021-01-27', '20.3.4', 'Dylan Baker', 'This is the last planned release of the 20.3.x series.'),
        ]

        args: gen_calendar_entries.ExtendArguments = argparse.Namespace()
        args.series = '20.3'
        args.count = 2

        with mock_csv(data):
            gen_calendar_entries.extend(args)
            actual = gen_calendar_entries.read_calendar()

        expected = [
            data[0],
            ('', '2021-01-27', '20.3.4', 'Dylan Baker', ''),
            ('', '2021-02-10', '20.3.5', 'Dylan Baker', ''),
            ('', '2021-02-24', '20.3.6', 'Dylan Baker', 'This is the last planned release of the 20.3.x series.'),
        ]

        assert actual == expected

    def test_one_release_after_other_series(self) -> None:
        data = [
            ('20.3', '2021-01-13', '20.3.3', 'Dylan Baker', ''),
            ('', '2021-01-27', '20.3.4', 'Dylan Baker', 'This is the last planned release of the 20.3.x series.'),
            ('21.0', '2021-01-13', '21.0.1', 'Dylan Baker', ''),
            ('', '2021-01-27', '21.0.2', 'Dylan Baker', ''),
            ('', '2021-02-10', '21.0.3', 'Dylan Baker', ''),
            ('', '2021-02-24', '21.0.4', 'Dylan Baker', 'This is the last planned release of the 21.0.x series.'),
        ]

        args: gen_calendar_entries.ExtendArguments = argparse.Namespace()
        args.series = '21.0'
        args.count = 1

        with mock_csv(data):
            gen_calendar_entries.extend(args)
            actual = gen_calendar_entries.read_calendar()

        expected = data.copy()
        d = list(data[-1])
        d[-1] = ''
        expected[-1] = tuple(d)
        expected.extend([
            ('', '2021-03-10', '21.0.5', 'Dylan Baker', 'This is the last planned release of the 21.0.x series.'),
        ])

        assert actual == expected

    def test_rc(self) -> None:
        data = [
            ('20.3', '2021-01-13', '20.3.3', 'Dylan Baker', ''),
            ('', '2021-01-27', '20.3.4', 'Dylan Baker', 'This is the last planned release of the 20.3.x series.'),
            ('21.0', '2021-01-13', '21.0.0-rc1', 'Dylan Baker', ''),
            ('', '2021-01-20', '21.0.0-rc2', 'Dylan Baker', gen_calendar_entries.OR_FINAL.format('21.0')),
        ]

        args: gen_calendar_entries.ExtendArguments = argparse.Namespace()
        args.series = '21.0'
        args.count = 2

        with mock_csv(data):
            gen_calendar_entries.extend(args)
            actual = gen_calendar_entries.read_calendar()

        expected = data.copy()
        d = list(expected[-1])
        d[-1] = ''
        expected[-1] = tuple(d)
        expected.extend([
            ('', '2021-01-27', '21.0.0-rc3', 'Dylan Baker', ''),
            ('', '2021-02-03', '21.0.0-rc4', 'Dylan Baker', gen_calendar_entries.OR_FINAL.format('21.0')),
        ])

        assert actual == expected


class TestFinal:

    @pytest.fixture(autouse=True, scope='class')
    def _patch_date(self) -> typing.Iterator[None]:
        mdate = mock.Mock()
        mdate.today = mock.Mock(return_value=datetime.date(2021, 1, 6))
        with mock.patch('bin.gen_calendar_entries.datetime.date', mdate):
            yield

    ORIGINAL_DATA = [
        ('20.3', '2021-01-13', '20.3.3', 'Dylan Baker', ''),
        ('', '2021-01-27', '20.3.4', 'Dylan Baker', 'Last planned release of the 20.3.x series'),
    ]

    @pytest.fixture(autouse=True)
    def csv(self) -> None:
        """Inject our test data."""
        with mock_csv(self.ORIGINAL_DATA):
            yield

    def test_zero_released(self) -> None:
        args: gen_calendar_entries.FinalArguments = argparse.Namespace()
        args.manager = "Dylan Baker"
        args.zero_released = True
        args.series = '21.0'
        gen_calendar_entries.final_release(args)

        expected = self.ORIGINAL_DATA.copy()
        expected.append(('21.0', '2021-01-20', f'21.0.1', 'Dylan Baker'))
        expected.append((    '', '2021-02-03', f'21.0.2', 'Dylan Baker'))
        expected.append((    '', '2021-02-17', f'21.0.3', 'Dylan Baker', gen_calendar_entries.LAST_RELEASE.format(args.series)))

        actual = gen_calendar_entries.read_calendar()

        assert actual == expected

    def test_zero_not_released(self) -> None:
        args: gen_calendar_entries.FinalArguments = argparse.Namespace()
        args.manager = "Dylan Baker"
        args.zero_released = False
        args.series = '21.0'
        gen_calendar_entries.final_release(args)

        expected = self.ORIGINAL_DATA.copy()
        expected.append(('21.0', '2021-01-13', f'21.0.0', 'Dylan Baker'))
        expected.append((    '', '2021-01-27', f'21.0.1', 'Dylan Baker'))
        expected.append((    '', '2021-02-10', f'21.0.2', 'Dylan Baker'))
        expected.append((    '', '2021-02-24', f'21.0.3', 'Dylan Baker', gen_calendar_entries.LAST_RELEASE.format(args.series)))

        actual = gen_calendar_entries.read_calendar()

        assert actual == expected
lib/mesa/bin/gen_release_notes.py (new executable file, 382 lines)
@@ -0,0 +1,382 @@
#!/usr/bin/env python3
# Copyright © 2019-2020 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""Generates release notes for a given version of mesa."""

import asyncio
import datetime
import os
import pathlib
import re
import subprocess
import sys
import textwrap
import typing
import urllib.parse

import aiohttp
from mako.template import Template
from mako import exceptions

import docutils.utils
import docutils.parsers.rst.states as states

CURRENT_GL_VERSION = '4.6'
CURRENT_VK_VERSION = '1.3'

TEMPLATE = Template(textwrap.dedent("""\
    ${header}
    ${header_underline}

    %if not bugfix:
    Mesa ${this_version} is a new development release. People who are concerned
    with stability and reliability should stick with a previous release or
    wait for Mesa ${this_version[:-1]}1.
    %else:
    Mesa ${this_version} is a bug fix release which fixes bugs found since the ${previous_version} release.
    %endif

    Mesa ${this_version} implements the OpenGL ${gl_version} API, but the version reported by
    glGetString(GL_VERSION) or glGetIntegerv(GL_MAJOR_VERSION) /
    glGetIntegerv(GL_MINOR_VERSION) depends on the particular driver being used.
    Some drivers don't support all the features required in OpenGL ${gl_version}. OpenGL
    ${gl_version} is **only** available if requested at context creation.
    Compatibility contexts may report a lower version depending on each driver.

    Mesa ${this_version} implements the Vulkan ${vk_version} API, but the version reported by
    the apiVersion property of the VkPhysicalDeviceProperties struct
    depends on the particular driver being used.

    SHA256 checksum
    ---------------

    ::

        TBD.


    New features
    ------------

    %for f in features:
    - ${rst_escape(f)}
    %endfor


    Bug fixes
    ---------

    %for b in bugs:
    - ${rst_escape(b)}
    %endfor


    Changes
    -------
    %for c, author_line in changes:
    %if author_line:

    ${rst_escape(c)}

    %else:
    - ${rst_escape(c)}
    %endif
    %endfor
    """))


# copied from https://docutils.sourceforge.io/sandbox/xml2rst/xml2rstlib/markup.py
class Inliner(states.Inliner):
    """
    Recognizer for inline markup. Derive this from the original inline
    markup parser for best results.
    """

    # Copy static attributes from super class
    vars().update(vars(states.Inliner))

    def quoteInline(self, text):
        """
        `text`: ``str``
          Return `text` with inline markup quoted.
        """
        # Method inspired by `states.Inliner.parse`
        self.document = docutils.utils.new_document("<string>")
        self.document.settings.trim_footnote_reference_space = False
        self.document.settings.character_level_inline_markup = False
        self.document.settings.pep_references = False
        self.document.settings.rfc_references = False

        self.init_customizations(self.document.settings)

        self.reporter = self.document.reporter
        self.reporter.stream = None
        self.language = None
        self.parent = self.document
        remaining = docutils.utils.escape2null(text)
        checked = ""
        processed = []
        unprocessed = []
        messages = []
        while remaining:
            original = remaining
            match = self.patterns.initial.search(remaining)
            if match:
                groups = match.groupdict()
                method = self.dispatch[groups['start'] or groups['backquote']
                                       or groups['refend'] or groups['fnend']]
                before, inlines, remaining, sysmessages = method(self, match, 0)
                checked += before
                if inlines:
                    assert len(inlines) == 1, "More than one inline found"
                    inline = original[len(before)
                                      :len(original) - len(remaining)]
                    rolePfx = re.search("^:" + self.simplename + ":(?=`)",
                                        inline)
                    refSfx = re.search("_+$", inline)
                    if rolePfx:
                        # Prefixed roles need to be quoted in the middle
                        checked += (inline[:rolePfx.end()] + "\\"
                                    + inline[rolePfx.end():])
                    elif refSfx and not re.search("^`", inline):
                        # Pure reference markup needs to be quoted at the end
                        checked += (inline[:refSfx.start()] + "\\"
                                    + inline[refSfx.start():])
                    else:
                        # Quote other inlines by prefixing
                        checked += "\\" + inline
            else:
                checked += remaining
                break
        # Quote all original backslashes
        checked = re.sub('\x00', "\\\x00", checked)
        return docutils.utils.unescape(checked, 1)

inliner = Inliner()


async def gather_commits(version: str) -> str:
    p = await asyncio.create_subprocess_exec(
        'git', 'log', '--oneline', f'mesa-{version}..', '-i', '--grep', r'\(Closes\|Fixes\): \(https\|#\).*',
        stdout=asyncio.subprocess.PIPE)
    out, _ = await p.communicate()
    assert p.returncode == 0, f"git log didn't work: {version}"
    return out.decode().strip()


async def parse_issues(commits: str) -> typing.List[str]:
    issues: typing.List[str] = []
    for commit in commits.split('\n'):
        sha, message = commit.split(maxsplit=1)
        p = await asyncio.create_subprocess_exec(
            'git', 'log', '--max-count', '1', r'--format=%b', sha,
            stdout=asyncio.subprocess.PIPE)
        _out, _ = await p.communicate()
        out = _out.decode().split('\n')

        for line in reversed(out):
            if not line.lower().startswith(('closes:', 'fixes:')):
                continue
            bug = line.split(':', 1)[1].strip()
            if (bug.startswith('https://gitlab.freedesktop.org/mesa/mesa')
                    # Avoid parsing "merge_requests" URL. Note that a valid issue
                    # URL may or may not contain the "/-/" text, so we check if
                    # the word "issues" is contained in URL.
                    and '/issues' in bug):
                # This means we have a bug in the form "Closes: https://..."
                issues.append(os.path.basename(urllib.parse.urlparse(bug).path))
            elif ',' in bug:
                multiple_bugs = [b.strip().lstrip('#') for b in bug.split(',')]
                if not all(b.isdigit() for b in multiple_bugs):
                    # this is likely a "Fixes" tag that refers to a commit name
                    continue
                issues.extend(multiple_bugs)
            elif bug.startswith('#'):
                issues.append(bug.lstrip('#'))

    return issues


async def gather_bugs(version: str) -> typing.List[str]:
    commits = await gather_commits(version)
    issues = await parse_issues(commits)

    loop = asyncio.get_event_loop()
    async with aiohttp.ClientSession(loop=loop) as session:
        results = await asyncio.gather(*[get_bug(session, i) for i in issues])
    typing.cast(typing.Tuple[str, ...], results)
    bugs = list(results)
    if not bugs:
        bugs = ['None']
    return bugs


async def get_bug(session: aiohttp.ClientSession, bug_id: str) -> str:
    """Query gitlab to get the name of the issue that was closed."""
    # Mesa's gitlab id is 176,
    url = 'https://gitlab.freedesktop.org/api/v4/projects/176/issues'
    params = {'iids[]': bug_id}
    async with session.get(url, params=params) as response:
        content = await response.json()
    if not content:
        # issues marked as "confidential" look like "404" page for
        # unauthorized users
        return f'Confidential issue #{bug_id}'
    else:
        return content[0]['title']


async def get_shortlog(version: str) -> str:
    """Call git shortlog."""
    p = await asyncio.create_subprocess_exec('git', 'shortlog', f'mesa-{version}..',
                                             stdout=asyncio.subprocess.PIPE)
    out, _ = await p.communicate()
    assert p.returncode == 0, 'error getting shortlog'
    assert out is not None, 'just for mypy'
    return out.decode()


def walk_shortlog(log: str) -> typing.Generator[typing.Tuple[str, bool], None, None]:
    for l in log.split('\n'):
        if l.startswith(' '):  # this means we have a patch description
            yield l.lstrip(), False
        elif l.strip():
            yield l, True


def calculate_next_version(version: str, is_point: bool) -> str:
    """Calculate the version about to be released."""
    if '-' in version:
        version = version.split('-')[0]
    if is_point:
        base = version.split('.')
        base[2] = str(int(base[2]) + 1)
        return '.'.join(base)
    return version


def calculate_previous_version(version: str, is_point: bool) -> str:
    """Calculate the previous version to compare to.

    In the case of -rc to final that version is the previous .0 release
    (19.3.0 in the case of 20.0.0, for example). For point releases that is
    the last point release. This value will be the same as the input value
    for a point release, but different for a major release.
    """
    if '-' in version:
        version = version.split('-')[0]
    if is_point:
        return version
    base = version.split('.')
    if base[1] == '0':
        base[0] = str(int(base[0]) - 1)
        base[1] = '3'
    else:
        base[1] = str(int(base[1]) - 1)
    return '.'.join(base)


def get_features(is_point_release: bool) -> typing.Generator[str, None, None]:
    p = pathlib.Path(__file__).parent.parent / 'docs' / 'relnotes' / 'new_features.txt'
    if p.exists() and p.stat().st_size > 0:
        if is_point_release:
            print("WARNING: new features being introduced in a point release", file=sys.stderr)
        with p.open('rt') as f:
            for line in f:
                yield line.rstrip()
        p.unlink()
    else:
        yield "None"


def update_release_notes_index(version: str) -> None:
    relnotes_index_path = pathlib.Path('docs') / 'relnotes.rst'

    with relnotes_index_path.open('r') as f:
        relnotes = f.readlines()

    new_relnotes = []
    first_list = True
    second_list = True
    for line in relnotes:
        if first_list and line.startswith('-'):
            first_list = False
            new_relnotes.append(f'- :doc:`{version} release notes <relnotes/{version}>`\n')
        if not first_list and second_list and line.startswith(' relnotes/'):
            second_list = False
            new_relnotes.append(f' relnotes/{version}\n')
        new_relnotes.append(line)

    with relnotes_index_path.open('w') as f:
        for line in new_relnotes:
            f.write(line)

    subprocess.run(['git', 'add', relnotes_index_path])


async def main() -> None:
    v = pathlib.Path(__file__).parent.parent / 'VERSION'
    with v.open('rt') as f:
        raw_version = f.read().strip()
    is_point_release = '-rc' not in raw_version
    assert '-devel' not in raw_version, 'Do not run this script on -devel'
    version = raw_version.split('-')[0]
    previous_version = calculate_previous_version(version, is_point_release)
    this_version = calculate_next_version(version, is_point_release)
    today = datetime.date.today()
    header = f'Mesa {this_version} Release Notes / {today}'
    header_underline = '=' * len(header)

    shortlog, bugs = await asyncio.gather(
        get_shortlog(previous_version),
        gather_bugs(previous_version),
    )

    final = pathlib.Path(__file__).parent.parent / 'docs' / 'relnotes' / f'{this_version}.rst'
    with final.open('wt') as f:
        try:
            f.write(TEMPLATE.render(
                bugfix=is_point_release,
                bugs=bugs,
                changes=walk_shortlog(shortlog),
                features=get_features(is_point_release),
                gl_version=CURRENT_GL_VERSION,
                this_version=this_version,
                header=header,
                header_underline=header_underline,
                previous_version=previous_version,
                vk_version=CURRENT_VK_VERSION,
                rst_escape=inliner.quoteInline,
            ))
        except:
            print(exceptions.text_error_template().render())

    subprocess.run(['git', 'add', final])

    update_release_notes_index(this_version)

    subprocess.run(['git', 'commit', '-m',
                    f'docs: add release notes for {this_version}'])


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
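
Since main() above only wires the existing pieces together, the Mako TEMPLATE can also be exercised on its own, which is handy when tweaking the release-notes formatting. A rough sketch, assuming it is run from a Mesa checkout (so the bin package, mako, and docutils are importable); every value passed in is made up for illustration:

from bin.gen_release_notes import (
    TEMPLATE, inliner, CURRENT_GL_VERSION, CURRENT_VK_VERSION,
)

header = 'Mesa 99.0.1 Release Notes / 2099-01-01'  # fictitious version and date
print(TEMPLATE.render(
    header=header,
    header_underline='=' * len(header),
    bugfix=True,                                   # render the point-release wording
    this_version='99.0.1',
    previous_version='99.0.0',
    gl_version=CURRENT_GL_VERSION,
    vk_version=CURRENT_VK_VERSION,
    features=['None'],
    bugs=['An example bug title'],
    changes=[('Jane Doe (1):', True), ('driver: fix an example bug', False)],
    rst_escape=inliner.quoteInline,
))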
lib/mesa/bin/gen_release_notes_test.py (new normal file, 200 lines)
@@ -0,0 +1,200 @@
# Copyright © 2019,2021 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import sys
import textwrap
import typing

import pytest

# AsyncMock is new in 3.8, so if we're using an older version we need the
# backported version of mock
if sys.version_info >= (3, 8):
    from unittest import mock
else:
    import mock

from .gen_release_notes import *


@pytest.mark.parametrize(
    'current, is_point, expected',
    [
        ('19.2.0', True, '19.2.1'),
        ('19.3.6', True, '19.3.7'),
        ('20.0.0-rc4', False, '20.0.0'),
    ])
def test_next_version(current: str, is_point: bool, expected: str) -> None:
    assert calculate_next_version(current, is_point) == expected


@pytest.mark.parametrize(
    'current, is_point, expected',
    [
        ('19.3.6', True, '19.3.6'),
        ('20.0.0-rc4', False, '19.3.0'),
    ])
def test_previous_version(current: str, is_point: bool, expected: str) -> None:
    assert calculate_previous_version(current, is_point) == expected


@pytest.mark.asyncio
async def test_get_shortlog():
    # Certainly not perfect, but it's something
    version = '19.2.0'
    out = await get_shortlog(version)
    assert out


@pytest.mark.asyncio
async def test_gather_commits():
    # Certainly not perfect, but it's something
    version = '19.2.0'
    out = await gather_commits(version)
    assert out


@pytest.mark.asyncio
@pytest.mark.parametrize(
    'content, bugs',
    [
        # It is important to have the title on a new line, as
        # textwrap.dedent won't work otherwise.

        # Test the `Closes: #N` syntax
        (
            '''\
            A commit

            It has a message in it

            Closes: #1
            ''',
            ['1'],
        ),

        # Test the full URL
        (
            '''\
            A commit with no body

            Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3456
            ''',
            ['3456'],
        ),

        # Test projects that are not mesa
        (
            '''\
            A commit for libdrm

            Closes: https://gitlab.freedesktop.org/mesa/drm/-/3456
            ''',
            [],
        ),
        (
            '''\
            A commit for something else completely

            Closes: https://github.com/Organiztion/project/1234
            ''',
            [],
        ),

        # Test multiple issues on one line
        (
            '''\
            Fix many bugs

            Closes: #1, #2
            ''',
            ['1', '2'],
        ),

        # Test multiple closes
        (
            '''\
            Fix many bugs

            Closes: #1
            Closes: #2
            ''',
            ['1', '2'],
        ),
        (
            '''\
            With long form

            Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3456
            Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3457
            Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/3458
            ''',
            ['3456', '3457', '3458'],
        ),
        (
            '''\
            Without /-/

            Closes: https://gitlab.freedesktop.org/mesa/mesa/issues/36
            ''',
            ['36'],
        ),
        (
            '''\
            Ignore merge_requests

            Closes: https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/20241
            ''',
            [],
        ),
        (
            '''\
            Parse "Fixes:" tag too

            Fixes: https://gitlab.freedesktop.org/mesa/mesa/issues/36
            Fixes: 142565a3bc2
            Fixes: 142565a3bc2 ("docs: do something very useful")
            Fixes: 142565a3bc2 ("docs: fix #1234, have a comma")
            Fixes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/37
            ''',
            ['36', '37'],
        ),
        (
            '''\
            Parse Fixes/Closes in weird cases

            fixes: https://gitlab.freedesktop.org/mesa/mesa/issues/36
            fiXES: https://gitlab.freedesktop.org/mesa/mesa/issues/37
            closes: https://gitlab.freedesktop.org/mesa/mesa/issues/38
            cloSES: https://gitlab.freedesktop.org/mesa/mesa/issues/39
            ''',
            ['36', '37', '38', '39'],
        ),
    ])
async def test_parse_issues(content: str, bugs: typing.List[str]) -> None:
    mock_com = mock.AsyncMock(return_value=(textwrap.dedent(content).encode(), ''))
    mock_p = mock.Mock()
    mock_p.communicate = mock_com
    mock_exec = mock.AsyncMock(return_value=mock_p)

    with mock.patch('bin.gen_release_notes.asyncio.create_subprocess_exec', mock_exec), \
            mock.patch('bin.gen_release_notes.gather_commits', mock.AsyncMock(return_value='sha\n')):
        ids = await parse_issues('1234 not used')
        assert set(ids) == set(bugs)
99
lib/mesa/bin/gen_vs_module_defs.py
Normal file
99
lib/mesa/bin/gen_vs_module_defs.py
Normal file
|
@ -0,0 +1,99 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright © 2021-2021 Yonggang Luo
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
gen_help = """Generates visual studio module definition file."""
|
||||
|
||||
import argparse
|
||||
|
||||
"""
|
||||
For input template definition file
|
||||
For gcc/x64,gcc/arm64,visual studio
|
||||
`wglMakeCurrent@8 @357` => `wglMakeCurrent @357`
|
||||
`DrvCopyContext@12` => `DrvCopyContext`
|
||||
`stw_get_device` => `stw_get_device`
|
||||
For gcc/x86,gcc/arm
|
||||
`wglMakeCurrent@8 @357` => `wglMakeCurrent@8 @357 == wglMakeCurrent`
|
||||
`DrvCopyContext@12` => `DrvCopyContext@12 == DrvCopyContext`
|
||||
`stw_get_device` => `stw_get_device`
|
||||
|
||||
"""
|
||||
def gen_vs_module_def(in_file: str, out_file: str, compiler_abi: str, compiler_id: str, cpu_family: str) -> None:
|
||||
out_file_lines = ['EXPORTS']
|
||||
with open(in_file, 'r', encoding='utf-8') as f:
|
||||
lines = f.readlines()
|
||||
for line in lines:
|
||||
line = line.strip()
|
||||
tokens = line.split(';')
|
||||
if not tokens:
|
||||
continue
|
||||
def_infos = [x for x in tokens[0].split(' ') if len(x) > 0]
|
||||
if not def_infos:
|
||||
if line:
|
||||
out_file_lines.append('\t' + line)
|
||||
else:
|
||||
out_file_lines.append('')
|
||||
continue
|
||||
name_infos = def_infos[0].split('@')
|
||||
if not name_infos:
|
||||
out_file_lines.append('\t;' + line)
|
||||
continue
|
||||
order_info = '' if len(def_infos) <= 1 else def_infos[1]
|
||||
if def_infos[0] != name_infos[0] and \
|
||||
(compiler_abi == 'gcc' and compiler_id != 'clang') and (cpu_family not in {'x86_64', 'aarch64'}):
|
||||
if order_info:
|
||||
out_file_lines.append('\t' + def_infos[0] + ' ' + order_info + ' == ' + name_infos[0])
|
||||
else:
|
||||
out_file_lines.append('\t' + def_infos[0] + ' == ' + name_infos[0])
|
||||
else:
|
||||
if order_info:
|
||||
out_file_lines.append('\t' + name_infos[0] + ' ' + order_info)
|
||||
else:
|
||||
out_file_lines.append('\t' + name_infos[0])
|
||||
with open(out_file, 'wb') as f:
|
||||
out_file_content = '\n'.join(out_file_lines) + '\n'
|
||||
f.write(out_file_content.encode('utf-8'))
|
||||
'''
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/libgl-gdi/opengl32.def.in --out_file src/gallium/targets/libgl-gdi/opengl32.def --compiler_abi gcc --cpu_family x86_64
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/libgl-gdi/opengl32.def.in --out_file src/gallium/targets/libgl-gdi/opengl32.mingw.def --compiler_abi gcc --cpu_family x86
|
||||
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/osmesa/osmesa.def.in --out_file src/gallium/targets/osmesa/osmesa.def --compiler_abi gcc --cpu_family x86_64
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/osmesa/osmesa.def.in --out_file src/gallium/targets/osmesa/osmesa.mingw.def --compiler_abi gcc --cpu_family x86
|
||||
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/wgl/gallium_wgl.def.in --out_file src/gallium/targets/wgl/gallium_wgl.def --compiler_abi gcc --cpu_family x86_64
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/wgl/gallium_wgl.def.in --out_file src/gallium/targets/wgl/gallium_wgl.mingw.def --compiler_abi gcc --cpu_family x86
|
||||
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/egl/main/egl.def.in --out_file src/egl/main/egl.def --compiler_abi gcc --cpu_family x86_64
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/egl/main/egl.def.in --out_file src/egl/main/egl.mingw.def --compiler_abi gcc --cpu_family x86
|
||||
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/lavapipe/vulkan_lvp.def.in --out_file src/gallium/targets/lavapipe/vulkan_lvp.def --compiler_abi gcc --cpu_family x86_64
|
||||
python ./bin/gen_vs_module_defs.py --in_file src/gallium/targets/lavapipe/vulkan_lvp.def.in --out_file src/gallium/targets/lavapipe/vulkan_lvp.mingw.def --compiler_abi gcc --cpu_family x86
|
||||
|
||||
'''
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description=gen_help)
|
||||
parser.add_argument('--in_file', help='input template module definition file')
|
||||
parser.add_argument('--out_file', help='output module definition file')
|
||||
parser.add_argument('--compiler_abi', help='compiler abi')
|
||||
parser.add_argument('--compiler_id', help='compiler id')
|
||||
parser.add_argument('--cpu_family', help='cpu family')
|
||||
args = parser.parse_args()
|
||||
# print(args)
|
||||
gen_vs_module_def(args.in_file, args.out_file, args.compiler_abi, args.compiler_id, args.cpu_family)
|
50
lib/mesa/bin/git_sha1_gen.py
Executable file
|
@ -0,0 +1,50 @@
|
|||
"""
|
||||
Generate the contents of the git_sha1.h file.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import os.path
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def get_git_sha1():
|
||||
"""Try to get the git SHA1 with git rev-parse."""
|
||||
git_dir = os.path.join(os.path.dirname(sys.argv[0]), '..', '.git')
|
||||
try:
|
||||
git_sha1 = subprocess.check_output([
|
||||
'git',
|
||||
'--git-dir=' + git_dir,
|
||||
'rev-parse',
|
||||
'HEAD',
|
||||
], stderr=open(os.devnull, 'w')).decode("ascii")
|
||||
except Exception:
|
||||
# don't print anything if it fails
|
||||
git_sha1 = ''
|
||||
return git_sha1
|
||||
|
||||
|
||||
def write_if_different(contents):
|
||||
"""
|
||||
Avoid touching the output file if it doesn't need modifications
|
||||
Useful to avoid triggering rebuilds when nothing has changed.
|
||||
"""
|
||||
if os.path.isfile(args.output):
|
||||
with open(args.output, 'r') as file:
|
||||
if file.read() == contents:
|
||||
return
|
||||
with open(args.output, 'w') as file:
|
||||
file.write(contents)
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--output', help='File to write the #define in',
|
||||
required=True)
|
||||
args = parser.parse_args()
|
||||
|
||||
git_sha1 = os.environ.get('MESA_GIT_SHA1_OVERRIDE', get_git_sha1())[:10]
|
||||
if git_sha1:
|
||||
write_if_different('#define MESA_GIT_SHA1 " (git-' + git_sha1 + ')"')
|
||||
else:
|
||||
write_if_different('#define MESA_GIT_SHA1 ""')
|
83
lib/mesa/bin/install_megadrivers.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
#!/usr/bin/env python3
|
||||
# encoding=utf-8
|
||||
# Copyright 2017-2018 Intel Corporation
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Script to install megadriver symlinks for meson."""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('megadriver')
|
||||
parser.add_argument('libdir')
|
||||
parser.add_argument('drivers', nargs='+')
|
||||
args = parser.parse_args()
|
||||
|
||||
if os.path.isabs(args.libdir):
|
||||
destdir = os.environ.get('DESTDIR')
|
||||
if destdir:
|
||||
to = os.path.join(destdir, args.libdir[1:])
|
||||
else:
|
||||
to = args.libdir
|
||||
else:
|
||||
to = os.path.join(os.environ['MESON_INSTALL_DESTDIR_PREFIX'], args.libdir)
|
||||
|
||||
master = os.path.join(to, os.path.basename(args.megadriver))
|
||||
|
||||
if not os.path.exists(to):
|
||||
if os.path.lexists(to):
|
||||
os.unlink(to)
|
||||
os.makedirs(to)
|
||||
|
||||
for driver in args.drivers:
|
||||
abs_driver = os.path.join(to, driver)
|
||||
|
||||
if os.path.lexists(abs_driver):
|
||||
os.unlink(abs_driver)
|
||||
print('installing {} to {}'.format(args.megadriver, abs_driver))
|
||||
os.link(master, abs_driver)
|
||||
|
||||
try:
|
||||
ret = os.getcwd()
|
||||
os.chdir(to)
|
||||
|
||||
name, ext = os.path.splitext(driver)
|
||||
while ext != '.so':
|
||||
if os.path.lexists(name):
|
||||
os.unlink(name)
|
||||
os.symlink(driver, name)
|
||||
name, ext = os.path.splitext(name)
|
||||
finally:
|
||||
os.chdir(ret)
|
||||
|
||||
# Remove meson-created master .so and symlinks
|
||||
os.unlink(master)
|
||||
name, ext = os.path.splitext(master)
|
||||
while ext != '.so':
|
||||
if os.path.lexists(name):
|
||||
os.unlink(name)
|
||||
name, ext = os.path.splitext(name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
224
lib/mesa/bin/khronos-update.py
Executable file
|
@ -0,0 +1,224 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import pathlib
|
||||
import requests
|
||||
import subprocess
|
||||
import typing
|
||||
|
||||
|
||||
def error(msg: str) -> None:
|
||||
print('\033[31m' + msg + '\033[0m')
|
||||
|
||||
|
||||
class Source:
|
||||
def __init__(self, filename: str, url: typing.Optional[str]):
|
||||
self.file = pathlib.Path(filename)
|
||||
self.url = url
|
||||
|
||||
def sync(self) -> None:
|
||||
if self.url is None:
|
||||
return
|
||||
|
||||
print('Syncing {}...'.format(self.file), end=' ', flush=True)
|
||||
req = requests.get(self.url)
|
||||
|
||||
if not req.ok:
|
||||
error('Failed to retrieve file: {} {}'.format(req.status_code, req.reason))
|
||||
return
|
||||
|
||||
# Gitiles returns base64-encoded strings.
|
||||
# Google has been resisting the idea of allowing plain text for years: https://github.com/google/gitiles/issues/7
|
||||
if 'format=TEXT' in self.url:
|
||||
content = base64.b64decode(req.content)
|
||||
else:
|
||||
content = req.content
|
||||
|
||||
with open(self.file, 'wb') as f:
|
||||
f.write(content)
|
||||
|
||||
print('Done')
|
||||
|
||||
|
||||
# a URL of `None` means there is no upstream, because *we* are the upstream
|
||||
SOURCES = [
|
||||
{
|
||||
'api': 'khr',
|
||||
'inc_folder': 'KHR',
|
||||
'sources': [
|
||||
Source('include/KHR/khrplatform.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/KHR/khrplatform.h'),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'egl',
|
||||
'inc_folder': 'EGL',
|
||||
'sources': [
|
||||
Source('src/egl/generate/egl.xml', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/egl.xml'),
|
||||
Source('include/EGL/egl.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/egl.h'),
|
||||
Source('include/EGL/eglplatform.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/eglplatform.h'),
|
||||
Source('include/EGL/eglext.h', 'https://github.com/KhronosGroup/EGL-Registry/raw/main/api/EGL/eglext.h'),
|
||||
Source('include/EGL/eglext_angle.h', 'https://chromium.googlesource.com/angle/angle/+/refs/heads/main/include/EGL/eglext_angle.h?format=TEXT'),
|
||||
Source('include/EGL/eglmesaext.h', None),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'gl',
|
||||
'inc_folder': 'GL',
|
||||
'sources': [
|
||||
Source('src/mapi/glapi/registry/gl.xml', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/xml/gl.xml'),
|
||||
Source('include/GL/glcorearb.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/glcorearb.h'),
|
||||
Source('include/GL/glext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/glext.h'),
|
||||
Source('include/GL/glxext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/glxext.h'),
|
||||
Source('include/GL/wglext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GL/wglext.h'),
|
||||
Source('include/GL/gl.h', None), # FIXME: I don't know what the canonical source is
|
||||
Source('include/GL/glx.h', None), # FIXME: I don't know what the canonical source is
|
||||
Source('include/GL/internal/', None),
|
||||
Source('include/GL/mesa_glinterop.h', None),
|
||||
Source('include/GL/osmesa.h', None),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'gles1',
|
||||
'inc_folder': 'GLES',
|
||||
'sources': [
|
||||
Source('include/GLES/gl.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/gl.h'),
|
||||
Source('include/GLES/glplatform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/glplatform.h'),
|
||||
Source('include/GLES/glext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/glext.h'),
|
||||
Source('include/GLES/egl.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES/egl.h'),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'gles2',
|
||||
'inc_folder': 'GLES2',
|
||||
'sources': [
|
||||
Source('include/GLES2/gl2.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES2/gl2.h'),
|
||||
Source('include/GLES2/gl2platform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES2/gl2platform.h'),
|
||||
Source('include/GLES2/gl2ext.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES2/gl2ext.h'),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'gles3',
|
||||
'inc_folder': 'GLES3',
|
||||
'sources': [
|
||||
Source('include/GLES3/gl3.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl3.h'),
|
||||
Source('include/GLES3/gl31.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl31.h'),
|
||||
Source('include/GLES3/gl32.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl32.h'),
|
||||
Source('include/GLES3/gl3platform.h', 'https://github.com/KhronosGroup/OpenGL-Registry/raw/main/api/GLES3/gl3platform.h'),
|
||||
Source('include/GLES3/gl3ext.h', None), # FIXME: I don't know what the canonical source is
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'opencl',
|
||||
'inc_folder': 'CL',
|
||||
'sources': [
|
||||
Source('include/CL/opencl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/opencl.h'),
|
||||
Source('include/CL/cl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl.h'),
|
||||
Source('include/CL/cl_platform.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_platform.h'),
|
||||
Source('include/CL/cl_gl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_gl.h'),
|
||||
Source('include/CL/cl_gl_ext.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_gl_ext.h'),
|
||||
Source('include/CL/cl_ext.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_ext.h'),
|
||||
Source('include/CL/cl_version.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_version.h'),
|
||||
Source('include/CL/cl_icd.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_icd.h'),
|
||||
Source('include/CL/cl_egl.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_egl.h'),
|
||||
Source('include/CL/cl_d3d10.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_d3d10.h'),
|
||||
Source('include/CL/cl_d3d11.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_d3d11.h'),
|
||||
Source('include/CL/cl_dx9_media_sharing.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_dx9_media_sharing.h'),
|
||||
Source('include/CL/cl_dx9_media_sharing_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_dx9_media_sharing_intel.h'),
|
||||
Source('include/CL/cl_ext_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_ext_intel.h'),
|
||||
Source('include/CL/cl_va_api_media_sharing_intel.h', 'https://github.com/KhronosGroup/OpenCL-Headers/raw/master/CL/cl_va_api_media_sharing_intel.h'),
|
||||
|
||||
Source('include/CL/cl.hpp', 'https://github.com/KhronosGroup/OpenCL-CLHPP/raw/master/include/CL/cl.hpp'),
|
||||
Source('include/CL/cl2.hpp', 'https://github.com/KhronosGroup/OpenCL-CLHPP/raw/master/include/CL/cl2.hpp'),
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'spirv',
|
||||
'sources': [
|
||||
Source('src/compiler/spirv/spirv.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/spirv.h'),
|
||||
Source('src/compiler/spirv/spirv.core.grammar.json', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/spirv.core.grammar.json'),
|
||||
Source('src/compiler/spirv/OpenCL.std.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/OpenCL.std.h'),
|
||||
Source('src/compiler/spirv/GLSL.std.450.h', 'https://github.com/KhronosGroup/SPIRV-Headers/raw/master/include/spirv/unified1/GLSL.std.450.h'),
|
||||
Source('src/compiler/spirv/GLSL.ext.AMD.h', 'https://github.com/KhronosGroup/glslang/raw/master/SPIRV/GLSL.ext.AMD.h'), # FIXME: is this the canonical source?
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
'api': 'vulkan',
|
||||
'inc_folder': 'vulkan',
|
||||
'sources': [
|
||||
Source('src/vulkan/registry/vk.xml', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/registry/vk.xml'),
|
||||
Source('include/vulkan/vulkan.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan.h'),
|
||||
Source('include/vulkan/vulkan_core.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_core.h'),
|
||||
Source('include/vulkan/vulkan_beta.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_beta.h'),
|
||||
Source('include/vulkan/vk_icd.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vk_icd.h'),
|
||||
Source('include/vulkan/vk_layer.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vk_layer.h'),
|
||||
Source('include/vulkan/vk_platform.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vk_platform.h'),
|
||||
Source('include/vulkan/vulkan_android.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_android.h'),
|
||||
Source('include/vulkan/vulkan_directfb.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_directfb.h'),
|
||||
Source('include/vulkan/vulkan_fuchsia.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_fuchsia.h'),
|
||||
Source('include/vulkan/vulkan_ggp.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_ggp.h'),
|
||||
Source('include/vulkan/vulkan_ios.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_ios.h'),
|
||||
Source('include/vulkan/vulkan_macos.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_macos.h'),
|
||||
Source('include/vulkan/vulkan_metal.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_metal.h'),
|
||||
Source('include/vulkan/vulkan_screen.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_screen.h'),
|
||||
Source('include/vulkan/vulkan_vi.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_vi.h'),
|
||||
Source('include/vulkan/vulkan_wayland.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_wayland.h'),
|
||||
Source('include/vulkan/vulkan_win32.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_win32.h'),
|
||||
Source('include/vulkan/vulkan_xcb.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_xcb.h'),
|
||||
Source('include/vulkan/vulkan_xlib.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_xlib.h'),
|
||||
Source('include/vulkan/vulkan_xlib_xrandr.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vulkan/vulkan_xlib_xrandr.h'),
|
||||
Source('include/vulkan/vk_android_native_buffer.h', 'https://android.googlesource.com/platform/frameworks/native/+/master/vulkan/include/vulkan/vk_android_native_buffer.h?format=TEXT'),
|
||||
Source('include/vk_video/vulkan_video_codec_h264std.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h264std.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h264std_decode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h264std_decode.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h264std_encode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h264std_encode.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h265std.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h265std.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h265std_decode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h265std_decode.h'),
|
||||
Source('include/vk_video/vulkan_video_codec_h265std_encode.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codec_h265std_encode.h'),
|
||||
Source('include/vk_video/vulkan_video_codecs_common.h', 'https://github.com/KhronosGroup/Vulkan-Headers/raw/main/include/vk_video/vulkan_video_codecs_common.h'),
|
||||
Source('include/vulkan/.editorconfig', None),
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
git_toplevel = subprocess.check_output(['git', 'rev-parse', '--show-toplevel'],
|
||||
stderr=subprocess.DEVNULL).decode("ascii").strip()
|
||||
if not pathlib.Path(git_toplevel).resolve() == pathlib.Path('.').resolve():
|
||||
error('Please run this script from the root folder ({})'.format(git_toplevel))
|
||||
exit(1)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('apis', nargs='*',
|
||||
# the `[[]]` here is a workaround for python bug 9625
|
||||
# where having `choices` breaks `nargs='*'`:
|
||||
# https://bugs.python.org/issue9625
|
||||
choices=[group['api'] for group in SOURCES] + [[]],
|
||||
help='Only update the APIs specified.')
|
||||
args = parser.parse_args()
|
||||
|
||||
# These APIs all depend on the KHR header
|
||||
depend_on_khr = set(['egl', 'gl', 'gles1', 'gles2', 'gles3'])
|
||||
if args.apis and 'khr' not in args.apis and depend_on_khr.intersection(set(args.apis)):
|
||||
args.apis = ['khr'] + args.apis
|
||||
|
||||
for group in SOURCES:
|
||||
if args.apis and group['api'] not in args.apis:
|
||||
continue
|
||||
|
||||
for source in group['sources']:
|
||||
source.sync()
|
||||
|
||||
# Make sure all the API files are handled by this script
|
||||
if 'inc_folder' in group:
|
||||
for file in pathlib.Path('include/' + group['inc_folder']).iterdir():
|
||||
if file not in [source.file for source in group['sources']]:
|
||||
error('{} is unknown, please add it to SOURCES'.format(file))
|
88
lib/mesa/bin/meson-cmd-extract.py
Executable file
|
@ -0,0 +1,88 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright © 2019 Intel Corporation
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""This script reads a meson build directory and gives back the command line it
|
||||
was configured with.
|
||||
|
||||
This only works for meson 0.49.0 and newer.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import ast
|
||||
import configparser
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
"""Parse arguments."""
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'build_dir',
|
||||
help='Path to the meson build directory')
|
||||
args = parser.parse_args()
|
||||
return args
|
||||
|
||||
|
||||
def load_config(path: pathlib.Path) -> configparser.ConfigParser:
|
||||
"""Load config file."""
|
||||
conf = configparser.ConfigParser()
|
||||
with path.open() as f:
|
||||
conf.read_file(f)
|
||||
return conf
|
||||
|
||||
|
||||
def build_cmd(conf: configparser.ConfigParser) -> str:
|
||||
"""Rebuild the command line."""
|
||||
args = []
|
||||
for k, v in conf['options'].items():
|
||||
if ' ' in v:
|
||||
args.append(f'-D{k}="{v}"')
|
||||
else:
|
||||
args.append(f'-D{k}={v}')
|
||||
|
||||
cf = conf['properties'].get('cross_file')
|
||||
if cf:
|
||||
args.append('--cross-file={}'.format(cf))
|
||||
nf = conf['properties'].get('native_file')
|
||||
if nf:
|
||||
# this will be in the form "['str', 'str']", so use ast.literal_eval to
|
||||
# convert it to a list of strings.
|
||||
nf = ast.literal_eval(nf)
|
||||
args.extend(['--native-file={}'.format(f) for f in nf])
|
||||
return ' '.join(args)
|
||||
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
path = pathlib.Path(args.build_dir, 'meson-private', 'cmd_line.txt')
|
||||
if not path.exists():
|
||||
print('Cannot find the necessary file to rebuild command line. '
|
||||
'Is your meson version >= 0.49.0?', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
conf = load_config(path)
|
||||
cmd = build_cmd(conf)
|
||||
print(cmd)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
63
lib/mesa/bin/meson-options.py
Executable file
|
@ -0,0 +1,63 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
from os import get_terminal_size
|
||||
from textwrap import wrap
|
||||
from mesonbuild import coredata
|
||||
from mesonbuild import optinterpreter
|
||||
|
||||
(COLUMNS, _) = get_terminal_size()
|
||||
|
||||
def describe_option(option_name: str, option_default_value: str,
|
||||
option_type: str, option_message: str) -> None:
|
||||
print('name: ' + option_name)
|
||||
print('default: ' + option_default_value)
|
||||
print('type: ' + option_type)
|
||||
for line in wrap(option_message, width=COLUMNS - 9):
|
||||
print(' ' + line)
|
||||
print('---')
|
||||
|
||||
oi = optinterpreter.OptionInterpreter('')
|
||||
oi.process('meson_options.txt')
|
||||
|
||||
for (name, value) in oi.options.items():
|
||||
if isinstance(value, coredata.UserStringOption):
|
||||
describe_option(name,
|
||||
value.value,
|
||||
'string',
|
||||
"You can type what you want, but make sure it makes sense")
|
||||
elif isinstance(value, coredata.UserBooleanOption):
|
||||
describe_option(name,
|
||||
'true' if value.value else 'false',
|
||||
'boolean',
|
||||
"You can set it to 'true' or 'false'")
|
||||
elif isinstance(value, coredata.UserIntegerOption):
|
||||
describe_option(name,
|
||||
str(value.value),
|
||||
'integer',
|
||||
"You can set it to any integer value between '{}' and '{}'".format(value.min_value, value.max_value))
|
||||
elif isinstance(value, coredata.UserUmaskOption):
|
||||
describe_option(name,
|
||||
str(value.value),
|
||||
'umask',
|
||||
"You can set it to 'preserve' or a value between '0000' and '0777'")
|
||||
elif isinstance(value, coredata.UserComboOption):
|
||||
choices = '[' + ', '.join(["'" + v + "'" for v in value.choices]) + ']'
|
||||
describe_option(name,
|
||||
value.value,
|
||||
'combo',
|
||||
"You can set it to any one of those values: " + choices)
|
||||
elif isinstance(value, coredata.UserArrayOption):
|
||||
choices = '[' + ', '.join(["'" + v + "'" for v in value.choices]) + ']'
|
||||
value = '[' + ', '.join(["'" + v + "'" for v in value.value]) + ']'
|
||||
describe_option(name,
|
||||
value,
|
||||
'array',
|
||||
"You can set it to one or more of those values: " + choices)
|
||||
elif isinstance(value, coredata.UserFeatureOption):
|
||||
describe_option(name,
|
||||
value.value,
|
||||
'feature',
|
||||
"You can set it to 'auto', 'enabled', or 'disabled'")
|
||||
else:
|
||||
print(name + ' is an option of a type unknown to this script')
|
||||
print('---')
|
30
lib/mesa/bin/meson.build
Normal file
|
@ -0,0 +1,30 @@
|
|||
# Copyright © 2017 Eric Engestrom
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
git_sha1_gen_py = files('git_sha1_gen.py')
|
||||
gen_vs_module_defs_py = files('gen_vs_module_defs.py')
|
||||
gen_vs_module_defs_normal_command = [
|
||||
prog_python, gen_vs_module_defs_py,
|
||||
'--in_file', '@INPUT@', '--out_file', '@OUTPUT@',
|
||||
'--compiler_abi', cc.get_argument_syntax(),
|
||||
'--compiler_id', cc.get_id(), '--cpu_family', host_machine.cpu_family()
|
||||
]
|
||||
symbols_check = find_program('symbols-check.py')
|
||||
install_megadrivers_py = find_program('install_megadrivers.py')
|
34
lib/mesa/bin/meson_get_version.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
#!/usr/bin/env python3
|
||||
# encoding=utf-8
|
||||
# Copyright © 2017 Intel Corporation
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import os
|
||||
|
||||
|
||||
def main():
|
||||
filename = os.path.join(os.environ['MESON_SOURCE_ROOT'], 'VERSION')
|
||||
with open(filename) as f:
|
||||
version = f.read().strip()
|
||||
print(version, end='')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
251
lib/mesa/bin/perf-annotate-jit.py
Executable file
|
@ -0,0 +1,251 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright 2012 VMware Inc
|
||||
# Copyright 2008-2009 Jose Fonseca
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
|
||||
"""Perf annotate for JIT code.
|
||||
|
||||
Linux `perf annotate` does not work with JIT code. This script takes the data
|
||||
produced by the `perf script` command, plus the disassemblies output by gallivm
|
||||
into /tmp/perf-XXXXX.map.asm and produces output similar to `perf annotate`.
|
||||
|
||||
See docs/llvmpipe.rst for usage instructions.
|
||||
|
||||
The `perf script` output parser was derived from the gprof2dot.py script.
|
||||
"""
|
||||
|
||||
|
||||
import sys
|
||||
import os.path
|
||||
import re
|
||||
import optparse
|
||||
import subprocess
|
||||
|
||||
|
||||
class Parser:
|
||||
"""Parser interface."""
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def parse(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class LineParser(Parser):
|
||||
"""Base class for parsers that read line-based formats."""
|
||||
|
||||
def __init__(self, file):
|
||||
Parser.__init__(self)
|
||||
self._file = file
|
||||
self.__line = None
|
||||
self.__eof = False
|
||||
self.line_no = 0
|
||||
|
||||
def readline(self):
|
||||
line = self._file.readline()
|
||||
if not line:
|
||||
self.__line = ''
|
||||
self.__eof = True
|
||||
else:
|
||||
self.line_no += 1
|
||||
self.__line = line.rstrip('\r\n')
|
||||
|
||||
def lookahead(self):
|
||||
assert self.__line is not None
|
||||
return self.__line
|
||||
|
||||
def consume(self):
|
||||
assert self.__line is not None
|
||||
line = self.__line
|
||||
self.readline()
|
||||
return line
|
||||
|
||||
def eof(self):
|
||||
assert self.__line is not None
|
||||
return self.__eof
|
||||
|
||||
|
||||
mapFile = None
|
||||
|
||||
def lookupMap(filename, matchSymbol):
|
||||
global mapFile
|
||||
mapFile = filename
|
||||
stream = open(filename, 'rt')
|
||||
for line in stream:
|
||||
start, length, symbol = line.split()
|
||||
|
||||
start = int(start, 16)
|
||||
length = int(length,16)
|
||||
|
||||
if symbol == matchSymbol:
|
||||
return start
|
||||
|
||||
return None
|
||||
|
||||
def lookupAsm(filename, desiredFunction):
|
||||
stream = open(filename + '.asm', 'rt')
|
||||
while stream.readline() != desiredFunction + ':\n':
|
||||
pass
|
||||
|
||||
asm = []
|
||||
line = stream.readline().strip()
|
||||
while line:
|
||||
addr, instr = line.split(':', 1)
|
||||
addr = int(addr)
|
||||
asm.append((addr, instr))
|
||||
line = stream.readline().strip()
|
||||
|
||||
return asm
|
||||
|
||||
|
||||
|
||||
samples = {}
|
||||
|
||||
|
||||
class PerfParser(LineParser):
|
||||
"""Parser for linux perf callgraph output.
|
||||
|
||||
It expects output generated with
|
||||
|
||||
perf record -g
|
||||
perf script
|
||||
"""
|
||||
|
||||
def __init__(self, infile, symbol):
|
||||
LineParser.__init__(self, infile)
|
||||
self.symbol = symbol
|
||||
|
||||
def readline(self):
|
||||
# Override LineParser.readline to ignore comment lines
|
||||
while True:
|
||||
LineParser.readline(self)
|
||||
if self.eof() or not self.lookahead().startswith('#'):
|
||||
break
|
||||
|
||||
def parse(self):
|
||||
# read lookahead
|
||||
self.readline()
|
||||
|
||||
while not self.eof():
|
||||
self.parse_event()
|
||||
|
||||
asm = lookupAsm(mapFile, self.symbol)
|
||||
|
||||
addresses = sorted(samples.keys())
|
||||
total_samples = 0
|
||||
|
||||
sys.stdout.write('%s:\n' % self.symbol)
|
||||
for address, instr in asm:
|
||||
try:
|
||||
sample = samples.pop(address)
|
||||
except KeyError:
|
||||
sys.stdout.write(6*' ')
|
||||
else:
|
||||
sys.stdout.write('%6u' % (sample))
|
||||
total_samples += sample
|
||||
sys.stdout.write('%6u: %s\n' % (address, instr))
|
||||
print('total:', total_samples)
|
||||
assert len(samples) == 0
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
def parse_event(self):
|
||||
if self.eof():
|
||||
return
|
||||
|
||||
line = self.consume()
|
||||
assert line
|
||||
|
||||
callchain = self.parse_callchain()
|
||||
if not callchain:
|
||||
return
|
||||
|
||||
def parse_callchain(self):
|
||||
callchain = []
|
||||
while self.lookahead():
|
||||
function = self.parse_call(len(callchain) == 0)
|
||||
if function is None:
|
||||
break
|
||||
callchain.append(function)
|
||||
if self.lookahead() == '':
|
||||
self.consume()
|
||||
return callchain
|
||||
|
||||
call_re = re.compile(r'^\s+(?P<address>[0-9a-fA-F]+)\s+(?P<symbol>.*)\s+\((?P<module>[^)]*)\)$')
|
||||
|
||||
def parse_call(self, first):
|
||||
line = self.consume()
|
||||
mo = self.call_re.match(line)
|
||||
assert mo
|
||||
if not mo:
|
||||
return None
|
||||
|
||||
if not first:
|
||||
return None
|
||||
|
||||
function_name = mo.group('symbol')
|
||||
if not function_name:
|
||||
function_name = mo.group('address')
|
||||
|
||||
module = mo.group('module')
|
||||
|
||||
function_id = function_name + ':' + module
|
||||
|
||||
address = mo.group('address')
|
||||
address = int(address, 16)
|
||||
|
||||
if function_name != self.symbol:
|
||||
return None
|
||||
|
||||
start_address = lookupMap(module, function_name)
|
||||
address -= start_address
|
||||
|
||||
#print(function_name, module, address)
|
||||
|
||||
samples[address] = samples.get(address, 0) + 1
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def main():
|
||||
"""Main program."""
|
||||
|
||||
optparser = optparse.OptionParser(
|
||||
usage="\n\t%prog [options] symbol_name")
|
||||
(options, args) = optparser.parse_args(sys.argv[1:])
|
||||
if len(args) != 1:
|
||||
optparser.error('wrong number of arguments')
|
||||
|
||||
symbol = args[0]
|
||||
|
||||
p = subprocess.Popen(['perf', 'script'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
parser = PerfParser(p.stdout, symbol)
|
||||
parser.parse()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
|
||||
# vim: set sw=4 et:
|
33
lib/mesa/bin/pick-ui.py
Executable file
|
@ -0,0 +1,33 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright © 2019-2020 Intel Corporation
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import asyncio
|
||||
|
||||
import urwid
|
||||
|
||||
from pick.ui import UI, PALETTE
|
||||
|
||||
if __name__ == "__main__":
|
||||
u = UI()
|
||||
evl = urwid.AsyncioEventLoop(loop=asyncio.get_event_loop())
|
||||
loop = urwid.MainLoop(u.render(), PALETTE, event_loop=evl, handle_mouse=False)
|
||||
u.mainloop = loop
|
||||
loop.run()
|
0
lib/mesa/bin/pick/__init__.py
Normal file
387
lib/mesa/bin/pick/core.py
Normal file
|
@ -0,0 +1,387 @@
|
|||
# Copyright © 2019-2020 Intel Corporation
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Core data structures and routines for pick."""
|
||||
|
||||
import asyncio
|
||||
import enum
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
import subprocess
|
||||
import typing
|
||||
|
||||
import attr
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from .ui import UI
|
||||
|
||||
import typing_extensions
|
||||
|
||||
class CommitDict(typing_extensions.TypedDict):
|
||||
|
||||
sha: str
|
||||
description: str
|
||||
nominated: bool
|
||||
nomination_type: typing.Optional[int]
|
||||
resolution: typing.Optional[int]
|
||||
main_sha: typing.Optional[str]
|
||||
because_sha: typing.Optional[str]
|
||||
|
||||
IS_FIX = re.compile(r'^\s*fixes:\s*([a-f0-9]{6,40})', flags=re.MULTILINE | re.IGNORECASE)
|
||||
# FIXME: I dislike the duplication in this regex, but I couldn't get it to work otherwise
|
||||
IS_CC = re.compile(r'^\s*cc:\s*["\']?([0-9]{2}\.[0-9])?["\']?\s*["\']?([0-9]{2}\.[0-9])?["\']?\s*\<?mesa-stable',
|
||||
flags=re.MULTILINE | re.IGNORECASE)
|
||||
IS_REVERT = re.compile(r'This reverts commit ([0-9a-f]{40})')
|
||||
|
||||
# XXX: hack
|
||||
SEM = asyncio.Semaphore(50)
|
||||
|
||||
COMMIT_LOCK = asyncio.Lock()
|
||||
|
||||
git_toplevel = subprocess.check_output(['git', 'rev-parse', '--show-toplevel'],
|
||||
stderr=subprocess.DEVNULL).decode("ascii").strip()
|
||||
pick_status_json = pathlib.Path(git_toplevel) / '.pick_status.json'
|
||||
|
||||
|
||||
class PickUIException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@enum.unique
|
||||
class NominationType(enum.Enum):
|
||||
|
||||
CC = 0
|
||||
FIXES = 1
|
||||
REVERT = 2
|
||||
|
||||
|
||||
@enum.unique
|
||||
class Resolution(enum.Enum):
|
||||
|
||||
UNRESOLVED = 0
|
||||
MERGED = 1
|
||||
DENOMINATED = 2
|
||||
BACKPORTED = 3
|
||||
NOTNEEDED = 4
|
||||
|
||||
|
||||
async def commit_state(*, amend: bool = False, message: str = 'Update') -> bool:
|
||||
"""Commit the .pick_status.json file."""
|
||||
async with COMMIT_LOCK:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'add', pick_status_json.as_posix(),
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
v = await p.wait()
|
||||
if v != 0:
|
||||
return False
|
||||
|
||||
if amend:
|
||||
cmd = ['--amend', '--no-edit']
|
||||
else:
|
||||
cmd = ['--message', f'.pick_status.json: {message}']
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'commit', *cmd,
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
v = await p.wait()
|
||||
if v != 0:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
class Commit:
|
||||
|
||||
sha: str = attr.ib()
|
||||
description: str = attr.ib()
|
||||
nominated: bool = attr.ib(False)
|
||||
nomination_type: typing.Optional[NominationType] = attr.ib(None)
|
||||
resolution: Resolution = attr.ib(Resolution.UNRESOLVED)
|
||||
main_sha: typing.Optional[str] = attr.ib(None)
|
||||
because_sha: typing.Optional[str] = attr.ib(None)
|
||||
|
||||
def to_json(self) -> 'CommitDict':
|
||||
d: typing.Dict[str, typing.Any] = attr.asdict(self)
|
||||
if self.nomination_type is not None:
|
||||
d['nomination_type'] = self.nomination_type.value
|
||||
if self.resolution is not None:
|
||||
d['resolution'] = self.resolution.value
|
||||
return typing.cast('CommitDict', d)
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, data: 'CommitDict') -> 'Commit':
|
||||
c = cls(data['sha'], data['description'], data['nominated'], main_sha=data['main_sha'], because_sha=data['because_sha'])
|
||||
if data['nomination_type'] is not None:
|
||||
c.nomination_type = NominationType(data['nomination_type'])
|
||||
if data['resolution'] is not None:
|
||||
c.resolution = Resolution(data['resolution'])
|
||||
return c
|
||||
|
||||
def date(self) -> str:
|
||||
# Show commit date, ie. when the commit actually landed
|
||||
# (as opposed to when it was first written)
|
||||
return subprocess.check_output(
|
||||
['git', 'show', '--no-patch', '--format=%cs', self.sha],
|
||||
stderr=subprocess.DEVNULL
|
||||
).decode("ascii").strip()
|
||||
|
||||
async def apply(self, ui: 'UI') -> typing.Tuple[bool, str]:
|
||||
# FIXME: This isn't really enough if we fail to cherry-pick because the
|
||||
# git tree will still be dirty
|
||||
async with COMMIT_LOCK:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'cherry-pick', '-x', self.sha,
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.PIPE,
|
||||
)
|
||||
_, err = await p.communicate()
|
||||
|
||||
if p.returncode != 0:
|
||||
return (False, err.decode())
|
||||
|
||||
self.resolution = Resolution.MERGED
|
||||
await ui.feedback(f'{self.sha} ({self.description}) applied successfully')
|
||||
|
||||
# Write the changes to the .pick_status.json file
|
||||
ui.save()
|
||||
v = await commit_state(amend=True)
|
||||
return (v, '')
|
||||
|
||||
async def abort_cherry(self, ui: 'UI', err: str) -> None:
|
||||
await ui.feedback(f'{self.sha} ({self.description}) failed to apply\n{err}')
|
||||
async with COMMIT_LOCK:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'cherry-pick', '--abort',
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
r = await p.wait()
|
||||
await ui.feedback(f'{"Successfully" if r == 0 else "Failed to"} abort cherry-pick.')
|
||||
|
||||
async def denominate(self, ui: 'UI') -> bool:
|
||||
self.resolution = Resolution.DENOMINATED
|
||||
ui.save()
|
||||
v = await commit_state(message=f'Mark {self.sha} as denominated')
|
||||
assert v
|
||||
await ui.feedback(f'{self.sha} ({self.description}) denominated successfully')
|
||||
return True
|
||||
|
||||
async def backport(self, ui: 'UI') -> bool:
|
||||
self.resolution = Resolution.BACKPORTED
|
||||
ui.save()
|
||||
v = await commit_state(message=f'Mark {self.sha} as backported')
|
||||
assert v
|
||||
await ui.feedback(f'{self.sha} ({self.description}) backported successfully')
|
||||
return True
|
||||
|
||||
async def resolve(self, ui: 'UI') -> None:
|
||||
self.resolution = Resolution.MERGED
|
||||
ui.save()
|
||||
v = await commit_state(amend=True)
|
||||
assert v
|
||||
await ui.feedback(f'{self.sha} ({self.description}) committed successfully')
|
||||
|
||||
|
||||
async def get_new_commits(sha: str) -> typing.List[typing.Tuple[str, str]]:
|
||||
# Try to get the authoritative upstream main
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'for-each-ref', '--format=%(upstream)', 'refs/heads/main',
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.DEVNULL)
|
||||
out, _ = await p.communicate()
|
||||
upstream = out.decode().strip()
|
||||
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'log', '--pretty=oneline', f'{sha}..{upstream}',
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.DEVNULL)
|
||||
out, _ = await p.communicate()
|
||||
assert p.returncode == 0, f"git log didn't work: {sha}"
|
||||
return list(split_commit_list(out.decode().strip()))
|
||||
|
||||
|
||||
def split_commit_list(commits: str) -> typing.Generator[typing.Tuple[str, str], None, None]:
|
||||
if not commits:
|
||||
return
|
||||
for line in commits.split('\n'):
|
||||
v = tuple(line.split(' ', 1))
|
||||
assert len(v) == 2, 'this is really just for mypy'
|
||||
yield typing.cast(typing.Tuple[str, str], v)
|
||||
|
||||
|
||||
async def is_commit_in_branch(sha: str) -> bool:
|
||||
async with SEM:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'merge-base', '--is-ancestor', sha, 'HEAD',
|
||||
stdout=asyncio.subprocess.DEVNULL,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
await p.wait()
|
||||
return p.returncode == 0
|
||||
|
||||
|
||||
async def full_sha(sha: str) -> str:
|
||||
async with SEM:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'rev-parse', sha,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
out, _ = await p.communicate()
|
||||
if p.returncode:
|
||||
raise PickUIException(f'Invalid Sha {sha}')
|
||||
return out.decode().strip()
|
||||
|
||||
|
||||
async def resolve_nomination(commit: 'Commit', version: str) -> 'Commit':
|
||||
async with SEM:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
'git', 'log', '--format=%B', '-1', commit.sha,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
_out, _ = await p.communicate()
|
||||
assert p.returncode == 0, f'git log for {commit.sha} failed'
|
||||
out = _out.decode()
|
||||
|
||||
# We give precedence to fixes and cc tags over revert tags.
|
||||
# XXX: not having the walrus operator available makes me sad :=
|
||||
m = IS_FIX.search(out)
|
||||
if m:
|
||||
# We set the nomination_type and because_sha here so that we can later
|
||||
# check to see if this fixes another staged commit.
|
||||
try:
|
||||
commit.because_sha = fixed = await full_sha(m.group(1))
|
||||
except PickUIException:
|
||||
pass
|
||||
else:
|
||||
commit.nomination_type = NominationType.FIXES
|
||||
if await is_commit_in_branch(fixed):
|
||||
commit.nominated = True
|
||||
return commit
|
||||
|
||||
m = IS_CC.search(out)
|
||||
if m:
|
||||
if m.groups() == (None, None) or version in m.groups():
|
||||
commit.nominated = True
|
||||
commit.nomination_type = NominationType.CC
|
||||
return commit
|
||||
|
||||
m = IS_REVERT.search(out)
|
||||
if m:
|
||||
# See comment for IS_FIX path
|
||||
try:
|
||||
commit.because_sha = reverted = await full_sha(m.group(1))
|
||||
except PickUIException:
|
||||
pass
|
||||
else:
|
||||
commit.nomination_type = NominationType.REVERT
|
||||
if await is_commit_in_branch(reverted):
|
||||
commit.nominated = True
|
||||
return commit
|
||||
|
||||
return commit
|
||||
|
||||
|
||||
async def resolve_fixes(commits: typing.List['Commit'], previous: typing.List['Commit']) -> None:
|
||||
"""Determine if any of the undecided commits fix/revert a staged commit.
|
||||
|
||||
They are still needed if they apply to a commit that is staged for
|
||||
inclusion, but not yet included.
|
||||
|
||||
This must be done in order, because a commit 3 might fix commit 2 which
|
||||
fixes commit 1.
|
||||
"""
|
||||
shas: typing.Set[str] = set(c.sha for c in previous if c.nominated)
|
||||
assert None not in shas, 'None in shas'
|
||||
|
||||
for commit in reversed(commits):
|
||||
if not commit.nominated and commit.nomination_type is NominationType.FIXES:
|
||||
commit.nominated = commit.because_sha in shas
|
||||
|
||||
if commit.nominated:
|
||||
shas.add(commit.sha)
|
||||
|
||||
for commit in commits:
|
||||
if (commit.nomination_type is NominationType.REVERT and
|
||||
commit.because_sha in shas):
|
||||
for oldc in reversed(commits):
|
||||
if oldc.sha == commit.because_sha:
|
||||
# In this case a commit that hasn't yet been applied is
|
||||
# reverted, we don't want to apply that commit at all
|
||||
oldc.nominated = False
|
||||
oldc.resolution = Resolution.DENOMINATED
|
||||
commit.nominated = False
|
||||
commit.resolution = Resolution.DENOMINATED
|
||||
shas.remove(commit.because_sha)
|
||||
break
|
||||
|
||||
|
||||
async def gather_commits(version: str, previous: typing.List['Commit'],
|
||||
new: typing.List[typing.Tuple[str, str]], cb) -> typing.List['Commit']:
|
||||
# We create an array of the final size up front, then we pass that array
|
||||
# to the "inner" co-routine, which is turned into a list of tasks and
|
||||
# collected by asyncio.gather. We do this to allow the tasks to be
|
||||
# asynchronously gathered, but to also ensure that the commits list remains
|
||||
# in order.
|
||||
m_commits: typing.List[typing.Optional['Commit']] = [None] * len(new)
|
||||
tasks = []
|
||||
|
||||
async def inner(commit: 'Commit', version: str,
|
||||
commits: typing.List[typing.Optional['Commit']],
|
||||
index: int, cb) -> None:
|
||||
commits[index] = await resolve_nomination(commit, version)
|
||||
cb()
|
||||
|
||||
for i, (sha, desc) in enumerate(new):
|
||||
tasks.append(asyncio.ensure_future(
|
||||
inner(Commit(sha, desc), version, m_commits, i, cb)))
|
||||
|
||||
await asyncio.gather(*tasks)
|
||||
assert None not in m_commits
|
||||
commits = typing.cast(typing.List[Commit], m_commits)
|
||||
|
||||
await resolve_fixes(commits, previous)
|
||||
|
||||
for commit in commits:
|
||||
if commit.resolution is Resolution.UNRESOLVED and not commit.nominated:
|
||||
commit.resolution = Resolution.NOTNEEDED
|
||||
|
||||
return commits
|
||||
|
||||
|
||||
def load() -> typing.List['Commit']:
|
||||
if not pick_status_json.exists():
|
||||
return []
|
||||
with pick_status_json.open('r') as f:
|
||||
raw = json.load(f)
|
||||
return [Commit.from_json(c) for c in raw]
|
||||
|
||||
|
||||
def save(commits: typing.Iterable['Commit']) -> None:
|
||||
commits = list(commits)
|
||||
with pick_status_json.open('wt') as f:
|
||||
json.dump([c.to_json() for c in commits], f, indent=4)
|
||||
|
||||
asyncio.ensure_future(commit_state(message=f'Update to {commits[0].sha}'))
|
470
lib/mesa/bin/pick/core_test.py
Normal file
|
@ -0,0 +1,470 @@
|
|||
# Copyright © 2019-2020 Intel Corporation
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""Tests for pick's core data structures and routines."""

from unittest import mock
import textwrap
import typing

import attr
import pytest

from . import core


class TestCommit:

    @pytest.fixture
    def unnominated_commit(self) -> 'core.Commit':
        return core.Commit('abc123', 'sub: A commit', main_sha='45678')

    @pytest.fixture
    def nominated_commit(self) -> 'core.Commit':
        return core.Commit('abc123', 'sub: A commit', True,
                           core.NominationType.CC, core.Resolution.UNRESOLVED)

    class TestToJson:

        def test_not_nominated(self, unnominated_commit: 'core.Commit'):
            c = unnominated_commit
            v = c.to_json()
            assert v == {'sha': 'abc123', 'description': 'sub: A commit', 'nominated': False,
                         'nomination_type': None, 'resolution': core.Resolution.UNRESOLVED.value,
                         'main_sha': '45678', 'because_sha': None}

        def test_nominated(self, nominated_commit: 'core.Commit'):
            c = nominated_commit
            v = c.to_json()
            assert v == {'sha': 'abc123',
                         'description': 'sub: A commit',
                         'nominated': True,
                         'nomination_type': core.NominationType.CC.value,
                         'resolution': core.Resolution.UNRESOLVED.value,
                         'main_sha': None,
                         'because_sha': None}

    class TestFromJson:

        def test_not_nominated(self, unnominated_commit: 'core.Commit'):
            c = unnominated_commit
            v = c.to_json()
            c2 = core.Commit.from_json(v)
            assert c == c2

        def test_nominated(self, nominated_commit: 'core.Commit'):
            c = nominated_commit
            v = c.to_json()
            c2 = core.Commit.from_json(v)
            assert c == c2


class TestRE:

    """Tests for the regular expressions used to identify commits."""

    class TestFixes:

        def test_simple(self):
            message = textwrap.dedent("""\
                etnaviv: fix vertex buffer state emission for single stream GPUs

                GPUs with a single supported vertex stream must use the single state
                address to program the stream.

                Fixes: 3d09bb390a39 (etnaviv: GC7000: State changes for HALTI3..5)
                Signed-off-by: Lucas Stach <l.stach@pengutronix.de>
                Reviewed-by: Jonathan Marek <jonathan@marek.ca>
            """)

            m = core.IS_FIX.search(message)
            assert m is not None
            assert m.group(1) == '3d09bb390a39'

    class TestCC:

        def test_single_branch(self):
            """Tests commit meant for a single branch, i.e. 19.2"""
            message = textwrap.dedent("""\
                radv: fix DCC fast clear code for intensity formats

                This fixes a rendering issue with DiRT 4 on GFX10. Only GFX10 was
                affected because intensity formats are different.

                Cc: 19.2 <mesa-stable@lists.freedesktop.org>
                Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/1923
                Signed-off-by: Samuel Pitoiset <samuel.pitoiset@gmail.com>
                Reviewed-by: Bas Nieuwenhuizen <bas@basnieuwenhuizen.nl>
            """)

            m = core.IS_CC.search(message)
            assert m is not None
            assert m.group(1) == '19.2'

        def test_multiple_branches(self):
            """Tests commit with more than one branch specified"""
            message = textwrap.dedent("""\
                radeonsi: enable zerovram for Rocket League

                Fixes corruption on game startup.
                Closes: https://gitlab.freedesktop.org/mesa/mesa/-/issues/1888

                Cc: 19.1 19.2 <mesa-stable@lists.freedesktop.org>
                Reviewed-by: Pierre-Eric Pelloux-Prayer <pierre-eric.pelloux-prayer@amd.com>
            """)

            m = core.IS_CC.search(message)
            assert m is not None
            assert m.group(1) == '19.1'
            assert m.group(2) == '19.2'

        def test_no_branch(self):
            """Tests commit with no branch specification"""
            message = textwrap.dedent("""\
                anv/android: fix images created with external format support

                This fixes a case where user first creates image and then later binds it
                with memory created from AHW buffer.

                Cc: <mesa-stable@lists.freedesktop.org>
                Signed-off-by: Tapani Pälli <tapani.palli@intel.com>
                Reviewed-by: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
            """)

            m = core.IS_CC.search(message)
            assert m is not None

        def test_quotes(self):
            """Tests commit with quotes around the version"""
            message = textwrap.dedent("""\
                anv: Always fill out the AUX table even if CCS is disabled

                Cc: "20.0" mesa-stable@lists.freedesktop.org
                Reviewed-by: Kenneth Graunke <kenneth@whitecape.org>
                Tested-by: Marge Bot <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
                Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
            """)

            m = core.IS_CC.search(message)
            assert m is not None
            assert m.group(1) == '20.0'

        def test_multiple_quotes(self):
            """Tests commit with quotes around multiple versions"""
            message = textwrap.dedent("""\
                anv: Always fill out the AUX table even if CCS is disabled

                Cc: "20.0" "20.1" mesa-stable@lists.freedesktop.org
                Reviewed-by: Kenneth Graunke <kenneth@whitecape.org>
                Tested-by: Marge Bot <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
                Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
            """)

            m = core.IS_CC.search(message)
            assert m is not None
            assert m.group(1) == '20.0'
            assert m.group(2) == '20.1'

        def test_single_quotes(self):
            """Tests commit with single quotes around the version"""
            message = textwrap.dedent("""\
                anv: Always fill out the AUX table even if CCS is disabled

                Cc: '20.0' mesa-stable@lists.freedesktop.org
                Reviewed-by: Kenneth Graunke <kenneth@whitecape.org>
                Tested-by: Marge Bot <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
                Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
            """)

            m = core.IS_CC.search(message)
            assert m is not None
            assert m.group(1) == '20.0'

        def test_multiple_single_quotes(self):
            """Tests commit with single quotes around multiple versions"""
            message = textwrap.dedent("""\
                anv: Always fill out the AUX table even if CCS is disabled

                Cc: '20.0' '20.1' mesa-stable@lists.freedesktop.org
                Reviewed-by: Kenneth Graunke <kenneth@whitecape.org>
                Tested-by: Marge Bot <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
                Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/3454>
            """)

            m = core.IS_CC.search(message)
            assert m is not None
            assert m.group(1) == '20.0'
            assert m.group(2) == '20.1'

    class TestRevert:

        def test_simple(self):
            message = textwrap.dedent("""\
                Revert "radv: do not emit PKT3_CONTEXT_CONTROL with AMDGPU 3.6.0+"

                This reverts commit 2ca8629fa9b303e24783b76a7b3b0c2513e32fbd.

                This was initially ported from RadeonSI, but in the meantime it has
                been reverted because it might hang. Be conservative and re-introduce
                this packet emission.

                Unfortunately this doesn't fix anything known.

                Cc: 19.2 <mesa-stable@lists.freedesktop.org>
                Signed-off-by: Samuel Pitoiset <samuel.pitoiset@gmail.com>
                Reviewed-by: Bas Nieuwenhuizen <bas@basnieuwenhuizen.nl>
            """)

            m = core.IS_REVERT.search(message)
            assert m is not None
            assert m.group(1) == '2ca8629fa9b303e24783b76a7b3b0c2513e32fbd'


class TestResolveNomination:

    @attr.s(slots=True)
    class FakeSubprocess:

        """A fake asyncio.subprocess-like class for use with mock."""

        out: typing.Optional[bytes] = attr.ib(None)
        returncode: int = attr.ib(0)

        async def mock(self, *_, **__):
            """A dirty little helper for mocking."""
            return self

        async def communicate(self) -> typing.Tuple[bytes, bytes]:
            assert self.out is not None
            return self.out, b''

        async def wait(self) -> int:
            return self.returncode

    @staticmethod
    async def return_true(*_, **__) -> bool:
        return True

    @staticmethod
    async def return_false(*_, **__) -> bool:
        return False

    @pytest.mark.asyncio
    async def test_fix_is_nominated(self):
        s = self.FakeSubprocess(b'Fixes: 3d09bb390a39 (etnaviv: GC7000: State changes for HALTI3..5)')
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            with mock.patch('bin.pick.core.is_commit_in_branch', self.return_true):
                await core.resolve_nomination(c, '')

        assert c.nominated
        assert c.nomination_type is core.NominationType.FIXES

    @pytest.mark.asyncio
    async def test_fix_is_not_nominated(self):
        s = self.FakeSubprocess(b'Fixes: 3d09bb390a39 (etnaviv: GC7000: State changes for HALTI3..5)')
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            with mock.patch('bin.pick.core.is_commit_in_branch', self.return_false):
                await core.resolve_nomination(c, '')

        assert not c.nominated
        assert c.nomination_type is core.NominationType.FIXES

    @pytest.mark.asyncio
    async def test_cc_is_nominated(self):
        s = self.FakeSubprocess(b'Cc: 16.2 <mesa-stable@lists.freedesktop.org>')
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            await core.resolve_nomination(c, '16.2')

        assert c.nominated
        assert c.nomination_type is core.NominationType.CC

    @pytest.mark.asyncio
    async def test_cc_is_nominated2(self):
        s = self.FakeSubprocess(b'Cc: mesa-stable@lists.freedesktop.org')
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            await core.resolve_nomination(c, '16.2')

        assert c.nominated
        assert c.nomination_type is core.NominationType.CC

    @pytest.mark.asyncio
    async def test_cc_is_not_nominated(self):
        s = self.FakeSubprocess(b'Cc: 16.2 <mesa-stable@lists.freedesktop.org>')
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            await core.resolve_nomination(c, '16.1')

        assert not c.nominated
        assert c.nomination_type is None

    @pytest.mark.asyncio
    async def test_revert_is_nominated(self):
        s = self.FakeSubprocess(b'This reverts commit 1234567890123456789012345678901234567890.')
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            with mock.patch('bin.pick.core.is_commit_in_branch', self.return_true):
                await core.resolve_nomination(c, '')

        assert c.nominated
        assert c.nomination_type is core.NominationType.REVERT

    @pytest.mark.asyncio
    async def test_revert_is_not_nominated(self):
        s = self.FakeSubprocess(b'This reverts commit 1234567890123456789012345678901234567890.')
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            with mock.patch('bin.pick.core.is_commit_in_branch', self.return_false):
                await core.resolve_nomination(c, '')

        assert not c.nominated
        assert c.nomination_type is core.NominationType.REVERT

    @pytest.mark.asyncio
    async def test_is_fix_and_cc(self):
        s = self.FakeSubprocess(
            b'Fixes: 3d09bb390a39 (etnaviv: GC7000: State changes for HALTI3..5)\n'
            b'Cc: 16.1 <mesa-stable@lists.freedesktop.org>'
        )
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            with mock.patch('bin.pick.core.is_commit_in_branch', self.return_true):
                await core.resolve_nomination(c, '16.1')

        assert c.nominated
        assert c.nomination_type is core.NominationType.FIXES

    @pytest.mark.asyncio
    async def test_is_fix_and_revert(self):
        s = self.FakeSubprocess(
            b'Fixes: 3d09bb390a39 (etnaviv: GC7000: State changes for HALTI3..5)\n'
            b'This reverts commit 1234567890123456789012345678901234567890.'
        )
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            with mock.patch('bin.pick.core.is_commit_in_branch', self.return_true):
                await core.resolve_nomination(c, '16.1')

        assert c.nominated
        assert c.nomination_type is core.NominationType.FIXES

    @pytest.mark.asyncio
    async def test_is_cc_and_revert(self):
        s = self.FakeSubprocess(
            b'This reverts commit 1234567890123456789012345678901234567890.\n'
            b'Cc: 16.1 <mesa-stable@lists.freedesktop.org>'
        )
        c = core.Commit('abcdef1234567890', 'a commit')

        with mock.patch('bin.pick.core.asyncio.create_subprocess_exec', s.mock):
            with mock.patch('bin.pick.core.is_commit_in_branch', self.return_true):
                await core.resolve_nomination(c, '16.1')

        assert c.nominated
        assert c.nomination_type is core.NominationType.CC


class TestResolveFixes:

    @pytest.mark.asyncio
    async def test_in_new(self):
        """Because commit abcd is nominated, f123 should be as well."""
        c = [
            core.Commit('f123', 'desc', nomination_type=core.NominationType.FIXES, because_sha='abcd'),
            core.Commit('abcd', 'desc', True),
        ]
        await core.resolve_fixes(c, [])
        assert c[1].nominated

    @pytest.mark.asyncio
    async def test_not_in_new(self):
        """Because commit abcd is not nominated, commit f123 shouldn't be either."""
        c = [
            core.Commit('f123', 'desc', nomination_type=core.NominationType.FIXES, because_sha='abcd'),
            core.Commit('abcd', 'desc'),
        ]
        await core.resolve_fixes(c, [])
        assert not c[0].nominated

    @pytest.mark.asyncio
    async def test_in_previous(self):
        """Because commit abcd is nominated, f123 should be as well."""
        p = [
            core.Commit('abcd', 'desc', True),
        ]
        c = [
            core.Commit('f123', 'desc', nomination_type=core.NominationType.FIXES, because_sha='abcd'),
        ]
        await core.resolve_fixes(c, p)
        assert c[0].nominated

    @pytest.mark.asyncio
    async def test_not_in_previous(self):
        """Because commit abcd is not nominated, commit f123 shouldn't be either."""
        p = [
            core.Commit('abcd', 'desc'),
        ]
        c = [
            core.Commit('f123', 'desc', nomination_type=core.NominationType.FIXES, because_sha='abcd'),
        ]
        await core.resolve_fixes(c, p)
        assert not c[0].nominated


class TestIsCommitInBranch:

    @pytest.mark.asyncio
    async def test_no(self):
        # Hopefully this is never true?
        value = await core.is_commit_in_branch('ffffffffffffffffffffffffffffff')
        assert not value

    @pytest.mark.asyncio
    async def test_yes(self):
        # This commit is from 2000; it had better always be in the branch
        value = await core.is_commit_in_branch('88f3b89a2cb77766d2009b9868c44e03abe2dbb2')
        assert value


class TestFullSha:

    @pytest.mark.asyncio
    async def test_basic(self):
        # This commit is from 2000; it had better always be in the branch
        value = await core.full_sha('88f3b89a2cb777')
        assert value

    @pytest.mark.asyncio
    async def test_invalid(self):
        # This is not a valid commit sha, so full_sha should raise
        with pytest.raises(core.PickUIException):
            await core.full_sha('fffffffffffffffffffffffffffffffffff')
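The to_json()/from_json() round-trip exercised by TestToJson and TestFromJson can be sketched in isolation. The snippet below is a minimal illustration, not Mesa's bin/pick/core.py: the Commit fields and the Resolution enum are assumptions reconstructed from the assertions above.

# Standalone sketch only -- field names and enum values are assumptions taken
# from the test expectations, not the real core.Commit definition.
import enum
import typing

import attr


class Resolution(enum.Enum):
    UNRESOLVED = 0


@attr.s(slots=True)
class Commit:
    sha: str = attr.ib()
    description: str = attr.ib()
    nominated: bool = attr.ib(False)
    resolution: Resolution = attr.ib(Resolution.UNRESOLVED)

    def to_json(self) -> typing.Dict[str, typing.Any]:
        d = attr.asdict(self)
        d['resolution'] = self.resolution.value  # enums are serialized by value
        return d

    @classmethod
    def from_json(cls, data: typing.Dict[str, typing.Any]) -> 'Commit':
        data = dict(data)
        data['resolution'] = Resolution(data['resolution'])
        return cls(**data)


c = Commit('abc123', 'sub: A commit')
assert Commit.from_json(c.to_json()) == c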
264
lib/mesa/bin/pick/ui.py
Normal file
@@ -0,0 +1,264 @@
# Copyright © 2019-2020 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""Urwid UI for pick script."""

import asyncio
import itertools
import textwrap
import typing

import attr
import urwid

from . import core

if typing.TYPE_CHECKING:
    WidgetType = typing.TypeVar('WidgetType', bound=urwid.Widget)

PALETTE = [
    ('a', 'black', 'light gray'),
    ('b', 'black', 'dark red'),
    ('bg', 'black', 'dark blue'),
    ('reversed', 'standout', ''),
]


class RootWidget(urwid.Frame):

    def __init__(self, *args, ui: 'UI', **kwargs):
        super().__init__(*args, **kwargs)
        self.ui = ui

    def keypress(self, size: int, key: str) -> typing.Optional[str]:
        if key == 'q':
            raise urwid.ExitMainLoop()
        elif key == 'u':
            asyncio.ensure_future(self.ui.update())
        elif key == 'a':
            self.ui.add()
        else:
            return super().keypress(size, key)
        return None


class CommitWidget(urwid.Text):

    # urwid.Text is normally not interactable, this is required to tell urwid
    # to use our keypress method
    _selectable = True

    def __init__(self, ui: 'UI', commit: 'core.Commit'):
        reason = commit.nomination_type.name.ljust(6)
        super().__init__(f'{commit.date()} {reason} {commit.sha[:10]} {commit.description}')
        self.ui = ui
        self.commit = commit

    async def apply(self) -> None:
        async with self.ui.git_lock:
            result, err = await self.commit.apply(self.ui)
            if not result:
                self.ui.chp_failed(self, err)
            else:
                self.ui.remove_commit(self)

    async def denominate(self) -> None:
        async with self.ui.git_lock:
            await self.commit.denominate(self.ui)
            self.ui.remove_commit(self)

    async def backport(self) -> None:
        async with self.ui.git_lock:
            await self.commit.backport(self.ui)
            self.ui.remove_commit(self)

    def keypress(self, size: int, key: str) -> typing.Optional[str]:
        if key == 'c':
            asyncio.ensure_future(self.apply())
        elif key == 'd':
            asyncio.ensure_future(self.denominate())
        elif key == 'b':
            asyncio.ensure_future(self.backport())
        else:
            return key
        return None


@attr.s(slots=True)
class UI:

    """Main management object.

    :previous_commits: A list of commits to main since this branch was created
    :new_commits: Commits added to main since the last time this script was run
    """

    commit_list: typing.List['urwid.Button'] = attr.ib(factory=lambda: urwid.SimpleFocusListWalker([]), init=False)
    feedback_box: typing.List['urwid.Text'] = attr.ib(factory=lambda: urwid.SimpleFocusListWalker([]), init=False)
    header: 'urwid.Text' = attr.ib(factory=lambda: urwid.Text('Mesa Stable Picker', align='center'), init=False)
    body: 'urwid.Columns' = attr.ib(attr.Factory(lambda s: s._make_body(), True), init=False)
    footer: 'urwid.Columns' = attr.ib(attr.Factory(lambda s: s._make_footer(), True), init=False)
    root: RootWidget = attr.ib(attr.Factory(lambda s: s._make_root(), True), init=False)
    mainloop: urwid.MainLoop = attr.ib(None, init=False)

    previous_commits: typing.List['core.Commit'] = attr.ib(factory=list, init=False)
    new_commits: typing.List['core.Commit'] = attr.ib(factory=list, init=False)
    git_lock: asyncio.Lock = attr.ib(factory=asyncio.Lock, init=False)

    def _make_body(self) -> 'urwid.Columns':
        commits = urwid.ListBox(self.commit_list)
        feedback = urwid.ListBox(self.feedback_box)
        return urwid.Columns([commits, feedback])

    def _make_footer(self) -> 'urwid.Columns':
        body = [
            urwid.Text('[U]pdate'),
            urwid.Text('[Q]uit'),
            urwid.Text('[C]herry Pick'),
            urwid.Text('[D]enominate'),
            urwid.Text('[B]ackport'),
            urwid.Text('[A]pply additional patch')
        ]
        return urwid.Columns(body)

    def _make_root(self) -> 'RootWidget':
        return RootWidget(self.body, self.header, self.footer, 'body', ui=self)

    def render(self) -> 'WidgetType':
        asyncio.ensure_future(self.update())
        return self.root

    def load(self) -> None:
        self.previous_commits = core.load()

    async def update(self) -> None:
        self.load()
        with open('VERSION', 'r') as f:
            version = '.'.join(f.read().split('.')[:2])
        if self.previous_commits:
            sha = self.previous_commits[0].sha
        else:
            sha = f'{version}-branchpoint'

        new_commits = await core.get_new_commits(sha)

        if new_commits:
            pb = urwid.ProgressBar('a', 'b', done=len(new_commits))
            o = self.mainloop.widget
            self.mainloop.widget = urwid.Overlay(
                urwid.Filler(urwid.LineBox(pb)), o, 'center', ('relative', 50), 'middle', ('relative', 50))
            self.new_commits = await core.gather_commits(
                version, self.previous_commits, new_commits,
                lambda: pb.set_completion(pb.current + 1))
            self.mainloop.widget = o

        for commit in reversed(list(itertools.chain(self.new_commits, self.previous_commits))):
            if commit.nominated and commit.resolution is core.Resolution.UNRESOLVED:
                b = urwid.AttrMap(CommitWidget(self, commit), None, focus_map='reversed')
                self.commit_list.append(b)
        self.save()

    async def feedback(self, text: str) -> None:
        self.feedback_box.append(urwid.AttrMap(urwid.Text(text), None))
        latest_item_index = len(self.feedback_box) - 1
        self.feedback_box.set_focus(latest_item_index)

    def remove_commit(self, commit: CommitWidget) -> None:
        for i, c in enumerate(self.commit_list):
            if c.base_widget is commit:
                del self.commit_list[i]
                break

    def save(self):
        core.save(itertools.chain(self.new_commits, self.previous_commits))

    def add(self) -> None:
        """Add an additional commit which isn't nominated."""
        o = self.mainloop.widget

        def reset_cb(_) -> None:
            self.mainloop.widget = o

        async def apply_cb(edit: urwid.Edit) -> None:
            text: str = edit.get_edit_text()

            # In case the text is empty
            if not text:
                return

            sha = await core.full_sha(text)
            for c in reversed(list(itertools.chain(self.new_commits, self.previous_commits))):
                if c.sha == sha:
                    commit = c
                    break
            else:
                raise RuntimeError(f"Couldn't find {sha}")

            await commit.apply(self)

        q = urwid.Edit("Commit sha\n")
        ok_btn = urwid.Button('Ok')
        urwid.connect_signal(ok_btn, 'click', lambda _: asyncio.ensure_future(apply_cb(q)))
        urwid.connect_signal(ok_btn, 'click', reset_cb)

        can_btn = urwid.Button('Cancel')
        urwid.connect_signal(can_btn, 'click', reset_cb)

        cols = urwid.Columns([ok_btn, can_btn])
        pile = urwid.Pile([q, cols])
        box = urwid.LineBox(pile)

        self.mainloop.widget = urwid.Overlay(
            urwid.Filler(box), o, 'center', ('relative', 50), 'middle', ('relative', 50)
        )

    def chp_failed(self, commit: 'CommitWidget', err: str) -> None:
        o = self.mainloop.widget

        def reset_cb(_) -> None:
            self.mainloop.widget = o

        t = urwid.Text(textwrap.dedent(f"""
            Failed to apply {commit.commit.sha} {commit.commit.description} with the following error:

            {err}

            You can either cancel, or resolve the conflicts (`git mergetool`), finish the
            cherry-pick (`git cherry-pick --continue`) and select ok."""))

        can_btn = urwid.Button('Cancel')
        urwid.connect_signal(can_btn, 'click', reset_cb)
        urwid.connect_signal(
            can_btn, 'click', lambda _: asyncio.ensure_future(commit.commit.abort_cherry(self, err)))

        ok_btn = urwid.Button('Ok')
        urwid.connect_signal(ok_btn, 'click', reset_cb)
        urwid.connect_signal(
            ok_btn, 'click', lambda _: asyncio.ensure_future(commit.commit.resolve(self)))
        urwid.connect_signal(
            ok_btn, 'click', lambda _: self.remove_commit(commit))

        cols = urwid.Columns([ok_btn, can_btn])
        pile = urwid.Pile([t, cols])
        box = urwid.LineBox(pile)

        self.mainloop.widget = urwid.Overlay(
            urwid.Filler(box), o, 'center', ('relative', 50), 'middle', ('relative', 50)
        )
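The diff does not include the pick entry point, so the following is only a hedged sketch of how this UI class could be driven: hand the rendered root widget to an urwid.MainLoop running on top of asyncio, and store the loop back on the UI so update() and add() can swap overlay widgets. The module import path and the run_picker name are assumptions, not part of this commit.

# Hypothetical launcher (assumed, not part of this commit); run from a Mesa
# checkout where the `bin.pick` package is importable.
import asyncio

import urwid

from bin.pick import ui as pick_ui


def run_picker() -> None:
    u = pick_ui.UI()
    loop = asyncio.get_event_loop()
    # RootWidget handles 'q'/'u'/'a'; each CommitWidget handles 'c'/'d'/'b'.
    u.mainloop = urwid.MainLoop(
        u.render(),
        pick_ui.PALETTE,
        event_loop=urwid.AsyncioEventLoop(loop=loop),
    )
    u.mainloop.run()


if __name__ == '__main__':
    run_picker()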
65
lib/mesa/bin/post_version.py
Executable file
@@ -0,0 +1,65 @@
#!/usr/bin/env python3
# Copyright © 2019-2020 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""Update the main page, release notes, and calendar."""

import argparse
import csv
import pathlib
import subprocess


def update_calendar(version: str) -> None:
    p = pathlib.Path('docs') / 'release-calendar.csv'

    with p.open('r') as f:
        calendar = list(csv.reader(f))

    branch = None
    for i, line in enumerate(calendar):
        if line[2] == version:
            if line[0]:
                branch = line[0]
            break
    if branch is not None:
        calendar[i + 1][0] = branch
    del calendar[i]

    with p.open('w') as f:
        writer = csv.writer(f)
        writer.writerows(calendar)

    subprocess.run(['git', 'add', p])


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument('version', help="The released version.")
    args = parser.parse_args()

    update_calendar(args.version)

    subprocess.run(['git', 'commit', '-m',
                    f'docs: update calendar for {args.version}'])


if __name__ == "__main__":
    main()
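The row-promotion rule inside update_calendar() is easy to miss: when the released version's row is removed, its branch name (column 0) is handed down to the next row so the branch keeps an entry in the calendar. Below is a simplified standalone sketch of that rule with made-up rows; the real data lives in docs/release-calendar.csv and the columns follow the tests in the next file (branch, date, version, release manager).

# Simplified sketch of the row handling, not the script itself; data is made up.
import typing


def drop_release(calendar: typing.List[typing.List[str]], version: str) -> typing.List[typing.List[str]]:
    calendar = [row[:] for row in calendar]
    for i, row in enumerate(calendar):
        if row[2] == version:
            if row[0] and i + 1 < len(calendar):
                calendar[i + 1][0] = row[0]  # hand the branch down to the next row
            del calendar[i]
            break
    return calendar


before = [
    ['20.3', '2021-01-13', '20.3.3', 'Dylan Baker'],
    ['', '2021-01-27', '20.3.4', 'Dylan Baker'],
]
assert drop_release(before, '20.3.3') == [['20.3', '2021-01-27', '20.3.4', 'Dylan Baker']]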
67
lib/mesa/bin/post_version_test.py
Normal file
@@ -0,0 +1,67 @@
# Copyright © 2019 Intel Corporation

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from unittest import mock

import pytest

from . import post_version


@mock.patch('bin.post_version.subprocess.run', mock.Mock())
class TestUpdateCalendar:

    @pytest.fixture(autouse=True)
    def mock_sideffects(self) -> None:
        """Mock out side effects."""
        with mock.patch('bin.post_version.subprocess.run', mock.Mock()), \
                mock.patch('bin.post_version.pathlib', mock.MagicMock()):
            yield

    def test_basic(self):
        data = [
            ['20.3', '2021-01-13', '20.3.3', 'Dylan Baker', None],
            [None, '2021-01-27', '20.3.4', 'Dylan Baker', None],
        ]

        m = mock.Mock()
        with mock.patch('bin.post_version.csv.reader', mock.Mock(return_value=data.copy())), \
                mock.patch('bin.post_version.csv.writer', mock.Mock(return_value=m)):
            post_version.update_calendar('20.3.3')

        m.writerows.assert_called_with([data[1]])

    def test_two_releases(self):
        data = [
            ['20.3', '2021-01-13', '20.3.3', 'Dylan Baker', None],
            [None, '2021-01-27', '20.3.4', 'Dylan Baker', None],
            ['21.0', '2021-01-13', '21.0.0', 'Dylan Baker', None],
            [None, '2021-01-13', '21.0.1', 'Dylan Baker', None],
        ]

        m = mock.Mock()
        with mock.patch('bin.post_version.csv.reader', mock.Mock(return_value=data.copy())), \
                mock.patch('bin.post_version.csv.writer', mock.Mock(return_value=m)):
            post_version.update_calendar('20.3.3')

        d = data.copy()
        del d[0]
        d[0][0] = '20.3'
        m.writerows.assert_called_with(d)
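These tests patch targets under 'bin.post_version', so they are meant to be collected with the repository root on sys.path (the bin directory is a package; see bin/__init__.py earlier in this commit). A minimal programmatic invocation, with the working directory assumed to be a Mesa checkout, could look like this:

# Assumed layout: `bin` importable as a package from the repo root;
# `pytest bin/` from the top level does the same thing.
import sys

import pytest

if __name__ == '__main__':
    sys.exit(pytest.main(['bin/post_version_test.py', '-v']))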
78
lib/mesa/bin/refcnt-log-helper.py
Normal file
@@ -0,0 +1,78 @@
#!/usr/bin/env python3
# Copyright © Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import argparse

# Take a log file produced by GALLIUM_REFCNT_LOG, filter it to the objects that
# weren't destroyed by the end of the log, and write the results out sorted.
# Strips stacks by default to prevent OOM. Could probably be rewritten to walk
# the file twice to preserve stacks without OOM, but this was the easy way.
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--input',
                        action='store',
                        required=True,
                        help='path to file containing refcount log')
    parser.add_argument('--output',
                        action='store',
                        required=True,
                        help='path to trimmed log')
    parser.add_argument('--filter',
                        help='object type filter')
    parser.add_argument('--keep-stacks',
                        help='keep stacks, otherwise only headers')
    args = parser.parse_args()

    objects = {}

    with open(args.input) as in_file:
        stack = []
        cur_object = ''

        for line in in_file:
            if line[0] == '<':

                parts = line.split(' ')
                prev_object = cur_object
                cur_object = parts[1]
                if parts[3].strip() == 'Destroy':
                    if cur_object in objects:
                        del objects[cur_object]
                else:
                    if parts[3].strip() == 'Create':
                        if (not args.filter) or (args.filter in parts[0]):
                            objects[cur_object] = []
                    if prev_object in objects:
                        objects[prev_object] += stack

                stack = [line]
            elif args.keep_stacks:
                stack += line

    with open(args.output, 'wt') as out_file:
        for stack in objects.values():
            for stack_line in stack:
                out_file.write(stack_line)


if __name__ == '__main__':
    main()
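The script's core rule is: remember every Create header, forget it again on Destroy, and at the end write out only what is still remembered. The sketch below is a self-contained illustration of that rule; the log lines are synthetic and only mirror what the parser expects (a header starts with '<', the object handle is field 1, the event is field 3), since the actual GALLIUM_REFCNT_LOG format is not shown in this diff.

# Illustration only -- synthetic lines shaped like what the parser expects.
log_lines = [
    '<resource> 0x1 refcount Create',
    '<resource> 0x1 refcount Destroy',
    '<resource> 0x2 refcount Create',
]

live = {}
for line in log_lines:
    parts = line.split(' ')
    handle, event = parts[1], parts[3]
    if event == 'Create':
        live[handle] = line       # remember the header of a live object
    elif event == 'Destroy':
        live.pop(handle, None)    # a destroyed object is not a leak

# Only 0x2 was never destroyed, so only its header would be written out.
assert list(live) == ['0x2']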
186
lib/mesa/bin/symbols-check.py
Normal file
@@ -0,0 +1,186 @@
#!/usr/bin/env python3

import argparse
import os
import platform
import subprocess

# This list contains symbols that _might_ be exported for some platforms
PLATFORM_SYMBOLS = [
    '__bss_end__',
    '__bss_start__',
    '__bss_start',
    '__cxa_guard_abort',
    '__cxa_guard_acquire',
    '__cxa_guard_release',
    '__end__',
    '__odr_asan._glapi_Context',
    '__odr_asan._glapi_Dispatch',
    '_bss_end__',
    '_edata',
    '_end',
    '_fini',
    '_init',
    '_fbss',
    '_fdata',
    '_ftext',
]

def get_symbols_nm(nm, lib):
    '''
    List all the (non platform-specific) symbols exported by the library
    using `nm`
    '''
    symbols = []
    platform_name = platform.system()
    output = subprocess.check_output([nm, '-gP', lib],
                                     stderr=open(os.devnull, 'w')).decode("ascii")
    for line in output.splitlines():
        fields = line.split()
        if len(fields) == 2 or fields[1] == 'U':
            continue
        symbol_name = fields[0]
        if platform_name == 'Linux':
            if symbol_name in PLATFORM_SYMBOLS:
                continue
        elif platform_name == 'Darwin':
            assert symbol_name[0] == '_'
            symbol_name = symbol_name[1:]
        symbols.append(symbol_name)
    return symbols


def get_symbols_dumpbin(dumpbin, lib):
    '''
    List all the (non platform-specific) symbols exported by the library
    using `dumpbin`
    '''
    symbols = []
    output = subprocess.check_output([dumpbin, '/exports', lib],
                                     stderr=open(os.devnull, 'w')).decode("ascii")
    for line in output.splitlines():
        fields = line.split()
        # The lines with the symbols are made of at least 4 columns; see details below
        if len(fields) < 4:
            continue
        try:
            # Making sure the first 3 columns are a dec counter, a hex counter
            # and a hex address
            _ = int(fields[0], 10)
            _ = int(fields[1], 16)
            _ = int(fields[2], 16)
        except ValueError:
            continue
        symbol_name = fields[3]
        # De-mangle symbols
        if symbol_name[0] == '_' and '@' in symbol_name:
            symbol_name = symbol_name[1:].split('@')[0]
        symbols.append(symbol_name)
    return symbols


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--symbols-file',
                        action='store',
                        required=True,
                        help='path to file containing symbols')
    parser.add_argument('--lib',
                        action='store',
                        required=True,
                        help='path to library')
    parser.add_argument('--nm',
                        action='store',
                        help='path to binary (or name in $PATH)')
    parser.add_argument('--dumpbin',
                        action='store',
                        help='path to binary (or name in $PATH)')
    parser.add_argument('--ignore-symbol',
                        action='append',
                        help='do not process this symbol')
    args = parser.parse_args()

    try:
        if platform.system() == 'Windows':
            if not args.dumpbin:
                parser.error('--dumpbin is mandatory')
            lib_symbols = get_symbols_dumpbin(args.dumpbin, args.lib)
        else:
            if not args.nm:
                parser.error('--nm is mandatory')
            lib_symbols = get_symbols_nm(args.nm, args.lib)
    except:
        # We can't run this test, but we haven't technically failed it either
        # Return the GNU "skip" error code
        exit(77)
    mandatory_symbols = []
    optional_symbols = []
    with open(args.symbols_file) as symbols_file:
        qualifier_optional = '(optional)'
        for line in symbols_file.readlines():

            # Strip comments
            line = line.split('#')[0]
            line = line.strip()
            if not line:
                continue

            # Line format:
            # [qualifier] symbol
            qualifier = None
            symbol = None

            fields = line.split()
            if len(fields) == 1:
                symbol = fields[0]
            elif len(fields) == 2:
                qualifier = fields[0]
                symbol = fields[1]
            else:
                print(args.symbols_file + ': invalid format: ' + line)
                exit(1)

            # The only supported qualifier is 'optional', which means the
            # symbol doesn't have to be exported by the library
            if qualifier and not qualifier == qualifier_optional:
                print(args.symbols_file + ': invalid qualifier: ' + qualifier)
                exit(1)

            if qualifier == qualifier_optional:
                optional_symbols.append(symbol)
            else:
                mandatory_symbols.append(symbol)

    unknown_symbols = []
    for symbol in lib_symbols:
        if symbol in mandatory_symbols:
            continue
        if symbol in optional_symbols:
            continue
        if args.ignore_symbol and symbol in args.ignore_symbol:
            continue
        if symbol[:2] == '_Z':
            # As ajax found out, the compiler intentionally exports symbols
            # that we explicitly asked it not to export, and we can't do
            # anything about it:
            # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=36022#c4
            continue
        unknown_symbols.append(symbol)

    missing_symbols = [
        sym for sym in mandatory_symbols if sym not in lib_symbols
    ]

    for symbol in unknown_symbols:
        print(args.lib + ': unknown symbol exported: ' + symbol)

    for symbol in missing_symbols:
        print(args.lib + ': missing symbol: ' + symbol)

    if unknown_symbols or missing_symbols:
        exit(1)
    exit(0)


if __name__ == '__main__':
    main()
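main() accepts a small text format for --symbols-file: '#' starts a comment, a bare name is a mandatory symbol, and an '(optional)' qualifier marks a symbol the library is allowed to omit. The snippet below parses that same format in isolation; the symbol names are made up for illustration.

# The parsing mirrors the loop above; entries are invented examples.
sample = """
# exported API
glFoo
glBar
(optional) glDebugOnlyHelper
"""

mandatory, optional = [], []
for line in sample.splitlines():
    line = line.split('#')[0].strip()   # strip comments and surrounding whitespace
    if not line:
        continue
    fields = line.split()
    if len(fields) == 2 and fields[0] == '(optional)':
        optional.append(fields[1])
    else:
        mandatory.append(fields[0])

assert mandatory == ['glFoo', 'glBar']
assert optional == ['glDebugOnlyHelper']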
60
lib/mesa/bin/update-android-headers.sh
Executable file
@@ -0,0 +1,60 @@
#!/bin/sh

set -eu

if [ ! -e .git ]; then
    echo must run from top-level directory;
    exit 1
fi

if [ ! -d platform-hardware-libhardware ]; then
    git clone --depth 1 https://android.googlesource.com/platform/frameworks/native platform-frameworks-native
    git clone --depth 1 https://android.googlesource.com/platform/hardware/libhardware platform-hardware-libhardware
    git clone --depth 1 https://android.googlesource.com/platform/system/core platform-system-core
    git clone --depth 1 https://android.googlesource.com/platform/system/logging platform-system-logging
    git clone --depth 1 https://android.googlesource.com/platform/system/unwinding platform-system-unwinding
fi

dest=include/android_stub

# Persist the frozen Android N system/window.h for backward compatibility

cp -av ${dest}/system/window.h platform-system-core/libsystem/include/system

rm -rf ${dest}
mkdir ${dest}


# These directories contain mostly just the files we need, so copy wholesale

cp -av \
    platform-frameworks-native/libs/nativewindow/include/vndk \
    platform-frameworks-native/libs/nativebase/include/nativebase \
    platform-system-core/libsync/include/ndk \
    platform-system-core/libsync/include/sync \
    platform-system-core/libsystem/include/system \
    platform-system-logging/liblog/include/log \
    platform-system-unwinding/libbacktrace/include/backtrace \
    ${dest}


# We only need a few files from these big directories so just copy those

mkdir ${dest}/hardware
cp -av platform-hardware-libhardware/include/hardware/{hardware,gralloc,gralloc1,fb}.h ${dest}/hardware
cp -av platform-frameworks-native/vulkan/include/hardware/hwvulkan.h ${dest}/hardware

mkdir ${dest}/cutils
cp -av platform-system-core/libcutils/include/cutils/{compiler,log,native_handle,properties,trace}.h ${dest}/cutils


# include/android has files from a few different projects

mkdir ${dest}/android
cp -av \
    platform-frameworks-native/libs/nativewindow/include/android/* \
    platform-frameworks-native/libs/arect/include/android/* \
    platform-system-core/libsync/include/android/* \
    platform-system-logging/liblog/include/android/* \
    ${dest}/android