Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/dotnet/runtime.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorBruce Forstall <brucefo@microsoft.com>2022-10-01 17:31:50 +0300
committerGitHub <noreply@github.com>2022-10-01 17:31:50 +0300
commitdc3c200c8f588faf8739c8ccf9e68f5f96289fd9 (patch)
treee49f710c1e41c05a9711ebe9eb1d32d41e15196d /src
parentfd9fc1d195121f4dc13143f6232f6d516a567333 (diff)
Update jitrollingbuild.py (#76472)
Pick up changes made to superpmi.py for more general finding of `main` branch. Make "list" choose a default git hash if not specified. Change `list --all` to show all the JITs for a particular git hash. Add new `list --global_all` to show all JITs for all git hashes (this was previously `list --all`). Fix obsoleted `logging.warn`; use `logging.warning` instead.
Diffstat (limited to 'src')
-rw-r--r--src/coreclr/scripts/jitrollingbuild.py130
1 file changed, 85 insertions, 45 deletions
diff --git a/src/coreclr/scripts/jitrollingbuild.py b/src/coreclr/scripts/jitrollingbuild.py
index 8653c78edad..dc40ba0f96c 100644
--- a/src/coreclr/scripts/jitrollingbuild.py
+++ b/src/coreclr/scripts/jitrollingbuild.py
@@ -121,7 +121,8 @@ download_parser.add_argument("--skip_cleanup", action="store_true", help=skip_cl
list_parser = subparsers.add_parser("list", description=list_description, parents=[common_parser])
list_parser.add_argument("-git_hash", help=git_hash_help)
-list_parser.add_argument("--all", action="store_true", help="Show all JITs, not just those for the specified (or default) git hash, OS, architecture, and flavor")
+list_parser.add_argument("--all", action="store_true", help="Show all JITs, not just those for the specified (or default) OS, architecture, and flavor")
+list_parser.add_argument("--global_all", action="store_true", help="Show all JITs in Azure Storage")
################################################################################
# Helper classes
@@ -192,30 +193,35 @@ def determine_jit_name(coreclr_args):
raise RuntimeError("Unknown OS.")
-def process_git_hash_arg(coreclr_args):
+def process_git_hash_arg(coreclr_args, return_first_hash=False):
""" Process the -git_hash argument.
If the argument is present, use that to download a JIT.
- If not present, try to find and download a JIT based on the current environment:
+ If not present, try to find a JIT based on the current environment:
1. Determine the current directory git hash using:
git rev-parse HEAD
- Call the result `current_git_hash`.
- 2. Determine the baseline: where does this hash meet `main` using:
- git merge-base `current_git_hash` main
- Call the result `base_git_hash`.
- 3. Figure out the latest hash, starting with `base_git_hash`, that contains any changes to
+ Call the result `current_hash`.
+    2. Determine the newest commit hash of any remote's `main` branch using:
+ git branch -r --sort=-committerdate -v --list "*/main"
+ Call the result `main_hash`.
+ 3. Determine the baseline: where does this hash meet `main_hash` using:
+ git merge-base `current_hash` main
+ Call the result `baseline_hash`.
+ 4. Figure out the latest hash, starting with `baseline_hash`, that contains any changes to
the src/coreclr/jit directory. (We do this because the JIT rolling build only includes
builds for changes to this directory. So, this logic needs to stay in sync with the logic
that determines what causes the JIT rolling build to run. E.g., it should also get
rebuilt if the JIT-EE interface GUID changes. Alternatively, we can take the entire list
of changes, and probe the rolling build drop for all of them.)
- 4. Starting with `base_git_hash`, and possibly walking to older changes, look for matching builds
+ 5. Starting with `baseline_hash`, and possibly walking to older changes, look for matching builds
in the JIT rolling build drops.
- 5. If a JIT directory in Azure Storage is found, set coreclr_args.git_hash to that git hash to use
+ 6. If a JIT directory in Azure Storage is found, set coreclr_args.git_hash to that git hash to use
for downloading.
Args:
coreclr_args (CoreclrArguments) : parsed args
+ return_first_hash (bool) : if true, return the first git_hash in `main` that has any JIT changes.
+ This is used by `list` to pick a reasonable default if a git hash wasn't specified.
Returns:
Nothing
@@ -234,31 +240,43 @@ def process_git_hash_arg(coreclr_args):
with ChangeDir(coreclr_args.runtime_repo_location):
command = [ "git", "rev-parse", "HEAD" ]
- logging.info("Invoking: {}".format(" ".join(command)))
+ logging.debug("Invoking: {}".format(" ".join(command)))
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
stdout_git_rev_parse, _ = proc.communicate()
return_code = proc.returncode
if return_code == 0:
- current_git_hash = stdout_git_rev_parse.decode('utf-8').strip()
- logging.info("Current hash: {}".format(current_git_hash))
+ current_hash = stdout_git_rev_parse.decode('utf-8').strip()
+ logging.info("Current hash: {}".format(current_hash))
else:
raise RuntimeError("Couldn't determine current git hash")
# We've got the current hash; figure out the baseline hash.
- command = [ "git", "merge-base", current_git_hash, "origin/main" ]
- logging.info("Invoking: {}".format(" ".join(command)))
+ # First find the newest hash for any branch matching */main.
+ command = [ "git", "branch", "-r", "--sort=-committerdate", "-v", "--list", "*/main" ]
+ logging.debug("Invoking: %s", " ".join(command))
+ proc = subprocess.Popen(command, stdout=subprocess.PIPE)
+ stdout_git_main_branch, _ = proc.communicate()
+ return_code = proc.returncode
+ if return_code != 0:
+ raise RuntimeError("Couldn't determine newest 'main' git hash")
+
+ main_hash = stdout_git_main_branch.decode('utf-8').strip().split()[1]
+
+ # Get the merge-base between the newest main and our current rev
+ command = [ "git", "merge-base", current_hash, main_hash ]
+ logging.debug("Invoking: %s", " ".join(command))
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
stdout_git_merge_base, _ = proc.communicate()
return_code = proc.returncode
- if return_code == 0:
- base_git_hash = stdout_git_merge_base.decode('utf-8').strip()
- logging.info("Baseline hash: {}".format(base_git_hash))
- else:
+ if return_code != 0:
raise RuntimeError("Couldn't determine baseline git hash")
+ baseline_hash = stdout_git_merge_base.decode('utf-8').strip()
+ logging.info("Baseline hash: %s", baseline_hash)
+
# Enumerate the last 20 changes, starting with the baseline, that included JIT changes.
- command = [ "git", "log", "--pretty=format:%H", base_git_hash, "-20", "--", "src/coreclr/jit/*" ]
- logging.info("Invoking: {}".format(" ".join(command)))
+ command = [ "git", "log", "--pretty=format:%H", baseline_hash, "-20", "--", "src/coreclr/jit/*" ]
+ logging.debug("Invoking: {}".format(" ".join(command)))
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
stdout_change_list, _ = proc.communicate()
return_code = proc.returncode
@@ -282,10 +300,15 @@ def process_git_hash_arg(coreclr_args):
# then when we go to download, we do the same search again because we don't cache the result and pass it
# directly on to the downloader.
coreclr_args.git_hash = git_hash
- urls = get_jit_urls(coreclr_args, find_all=False)
+
+ if return_first_hash:
+ # Just use the first one
+ break
+
+ urls = get_jit_urls(coreclr_args)
if len(urls) > 1:
if hashnum > 1:
- logging.warn("Warning: the baseline found is not built with the first git hash with JIT code changes; there may be extraneous diffs")
+ logging.warning("Warning: the baseline found is not built with the first git hash with JIT code changes; there may be extraneous diffs")
return
# We didn't find a baseline; keep looking
@@ -399,7 +422,7 @@ def upload_command(coreclr_args):
try:
blob_client.get_blob_properties()
# If no exception, then the blob already exists. Delete it!
- logging.warn("Warning: replacing existing blob!")
+ logging.warning("Warning: replacing existing blob!")
blob_client.delete_blob()
except Exception:
# Blob doesn't exist already; that's good
@@ -489,7 +512,7 @@ def upload_command(coreclr_args):
change_list_hashes = stdout_change_list.decode('utf-8').strip().splitlines()
if len(change_list_hashes) == 0:
- logging.warn("Couldn't find any JIT changes! Just using the argument git_hash")
+ logging.warning("Couldn't find any JIT changes! Just using the argument git_hash")
else:
jit_git_hash = change_list_hashes[0]
logging.info("Using git_hash {}".format(jit_git_hash))
@@ -502,9 +525,9 @@ def upload_command(coreclr_args):
from azure.storage.blob import BlobServiceClient
except:
- logging.warn("Please install:")
- logging.warn(" pip install azure-storage-blob")
- logging.warn("See also https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python")
+ logging.warning("Please install:")
+ logging.warning(" pip install azure-storage-blob")
+ logging.warning("See also https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python")
raise RuntimeError("Missing azure storage package.")
blob_service_client = BlobServiceClient(account_url=az_blob_storage_account_uri, credential=coreclr_args.az_storage_key)
@@ -547,26 +570,30 @@ def upload_command(coreclr_args):
logging.info("Finished JIT upload")
-def get_jit_urls(coreclr_args, find_all=False):
+def get_jit_urls(coreclr_args, find_all=False, find_global_all=False):
""" Helper method: collect a list of URLs for all the JIT files to download or list.
Args:
coreclr_args (CoreclrArguments): parsed args
- find_all (bool): True to show all, or False to filter based on coreclr_args
+ find_all (bool): True to show all for a git hash, or False to filter based on coreclr_args
+ find_global_all (bool): True to show all on the server, or False to filter based on coreclr_args
"""
- blob_filter_string = "{}/{}/{}/{}".format(coreclr_args.git_hash, coreclr_args.host_os, coreclr_args.arch, coreclr_args.build_type)
+ if find_all:
+ blob_filter_string = "{}/".format(coreclr_args.git_hash)
+ else:
+ blob_filter_string = "{}/{}/{}/{}".format(coreclr_args.git_hash, coreclr_args.host_os, coreclr_args.arch, coreclr_args.build_type)
blob_prefix_filter = "{}/{}/{}".format(az_blob_storage_jitrollingbuild_container_uri, az_builds_root_folder, blob_filter_string).lower()
# Determine if a URL in Azure Storage should be allowed. The URL looks like:
# https://clrjit.blob.core.windows.net/jitrollingbuild/builds/git_hash/Linux/x64/Checked/clrjit.dll
# Filter to just the current git_hash, OS, architecture, and build_flavor.
- # If "find_all" is True, then no filtering happens: everything is returned.
+ # If "find_global_all" is True, then no filtering happens: everything is returned.
def filter_jits(url):
url = url.lower()
- return find_all or url.startswith(blob_prefix_filter)
+ return find_global_all or url.startswith(blob_prefix_filter)
- return list_az_jits(filter_jits, None if find_all else blob_filter_string)
+ return list_az_jits(filter_jits, None if find_global_all else blob_filter_string)
def download_command(coreclr_args):
@@ -576,9 +603,9 @@ def download_command(coreclr_args):
coreclr_args (CoreclrArguments): parsed args
"""
- urls = get_jit_urls(coreclr_args, find_all=False)
+ urls = get_jit_urls(coreclr_args)
if len(urls) == 0:
- logging.warn("Nothing to download")
+ logging.warning("Nothing to download")
return
if coreclr_args.target_dir is None:
@@ -600,25 +627,22 @@ def list_command(coreclr_args):
coreclr_args (CoreclrArguments) : parsed args
"""
- urls = get_jit_urls(coreclr_args, find_all=coreclr_args.all)
- if len(urls) == 0:
- logging.warn("No JITs found")
- return
-
+ urls = get_jit_urls(coreclr_args, find_all=coreclr_args.all, find_global_all=coreclr_args.global_all)
count = len(urls)
- if coreclr_args.all:
+ if coreclr_args.global_all:
logging.info("{} JIT files".format(count))
else:
- blob_filter_string = "{}/{}/{}/{}".format(coreclr_args.git_hash, coreclr_args.host_os, coreclr_args.arch, coreclr_args.build_type)
+ if coreclr_args.all:
+ blob_filter_string = "{}/".format(coreclr_args.git_hash)
+ else:
+ blob_filter_string = "{}/{}/{}/{}".format(coreclr_args.git_hash, coreclr_args.host_os, coreclr_args.arch, coreclr_args.build_type)
logging.info("{} JIT files for {}".format(count, blob_filter_string))
logging.info("")
for url in urls:
logging.info("{}".format(url))
- logging.info("")
-
def setup_args(args):
""" Setup the args for SuperPMI to use.
@@ -714,6 +738,22 @@ def setup_args(args):
lambda unused: True,
"Unable to set all")
+ coreclr_args.verify(args,
+ "global_all",
+ lambda unused: True,
+ "Unable to set global_all")
+
+ if coreclr_args.all and coreclr_args.global_all:
+        print("Only one of --all or --global_all can be specified")
+ sys.exit(1)
+
+ if coreclr_args.global_all and coreclr_args.git_hash is not None:
+ print("If --global_all is specified, do not specify -git_hash")
+ sys.exit(1)
+
+ if not coreclr_args.global_all and coreclr_args.git_hash is None:
+ process_git_hash_arg(coreclr_args)
+
return coreclr_args
################################################################################