Support local running of "conda-forge-tick --dry-run auto-tick" by chrisburr · Pull Request #3680 · regro/cf-scripts · GitHub
[go: up one dir, main page]
More Web Proxy on the site http://driver.im/
Skip to content

Support local running of "conda-forge-tick --dry-run auto-tick" #3680

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -281,9 +281,10 @@ pip install -e .
Then you can use the CLI like this:

```bash
conda-forge-tick --help
conda-forge-tick --dry-run ...
```

See `--help` for a list of the commands available.
For debugging, use the `--debug` flag. This enables debug logging and disables multiprocessing.

Note that the bot expects the [conda-forge dependency graph](https://github.com/regro/cf-graph-countyfair) to be
Expand All @@ -298,6 +299,7 @@ The local debugging functionality is still work in progress and might not work f
Currently, the following commands are supported and tested:

- `update-upstream-versions`
- `auto-tick`: To ease debugging, pass `--filter-pattern=` to give a regex of which migrators to enable.

### Structure of the Bot's Jobs

Expand Down
56 changes: 38 additions & 18 deletions conda_forge_tick/auto_tick.py
Original file line number Diff line number Diff line change
Expand Up @@ -436,6 +436,7 @@ def run_with_tmpdir(
git_backend: GitPlatformBackend,
rerender: bool = True,
base_branch: str = "main",
dry_run: bool = False,
**kwargs: typing.Any,
) -> tuple[MigrationUidTypedDict, dict] | tuple[Literal[False], Literal[False]]:
"""
Expand All @@ -454,19 +455,20 @@ def run_with_tmpdir(
git_backend=git_backend,
rerender=rerender,
base_branch=base_branch,
dry_run=dry_run,
**kwargs,
)


def _make_and_sync_pr_lazy_json(pr_data) -> LazyJson:
def _make_and_sync_pr_lazy_json(pr_data, dry_run) -> LazyJson:
if pr_data:
pr_lazy_json = LazyJson(
os.path.join("pr_json", f"{pr_data.id}.json"),
)
with pr_lazy_json as __edit_pr_lazy_json:
__edit_pr_lazy_json.update(**pr_data.model_dump(mode="json"))

if "id" in pr_lazy_json:
if "id" in pr_lazy_json and not dry_run:
sync_lazy_json_object(pr_lazy_json, "file", ["github_api"])

else:
Expand All @@ -481,6 +483,7 @@ def run(
git_backend: GitPlatformBackend,
rerender: bool = True,
base_branch: str = "main",
dry_run: bool = False,
**kwargs: typing.Any,
) -> tuple[MigrationUidTypedDict, dict] | tuple[Literal[False], Literal[False]]:
"""For a given feedstock and migration run the migration
Expand Down Expand Up @@ -557,7 +560,7 @@ def run(

# spoof this so it looks like the package is done
pr_data = get_spoofed_closed_pr_info()
pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data)
pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data, dry_run)
_reset_pre_pr_migrator_fields(
context.attrs, migrator_name, is_version=is_version_migration
)
Expand Down Expand Up @@ -652,7 +655,7 @@ def run(
comment=rerender_info.rerender_comment,
)

pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data)
pr_lazy_json = _make_and_sync_pr_lazy_json(pr_data, dry_run)

# If we've gotten this far then the node is good
with context.attrs["pr_info"] as pri:
Expand Down Expand Up @@ -731,6 +734,7 @@ def _run_migrator_on_feedstock_branch(
mctx,
migrator_name,
good_prs,
dry_run,
):
break_loop = False
sync_pr_info = False
Expand All @@ -748,6 +752,7 @@ def _run_migrator_on_feedstock_branch(
rerender=migrator.rerender,
base_branch=base_branch,
hash_type=attrs.get("hash_type", "sha256"),
dry_run=dry_run,
)
finally:
fctx.attrs.pop("new_version", None)
Expand Down Expand Up @@ -901,19 +906,22 @@ def _run_migrator_on_feedstock_branch(
if sync_pr_info:
with attrs["pr_info"] as pri:
pass
sync_lazy_json_object(pri, "file", ["github_api"])
if not dry_run:
sync_lazy_json_object(pri, "file", ["github_api"])

if sync_version_pr_info:
with attrs["version_pr_info"] as vpri:
pass
sync_lazy_json_object(vpri, "file", ["github_api"])
if not dry_run:
sync_lazy_json_object(vpri, "file", ["github_api"])

return good_prs, break_loop


def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit, tried_prs):
def _is_migrator_done(
_mg_start, good_prs, time_per, pr_limit, tried_prs, backend: GitPlatformBackend
):
curr_time = time.time()
backend = github_backend()
api_req = backend.get_api_requests_left()

if curr_time - START_TIME > TIMEOUT:
Expand Down Expand Up @@ -957,7 +965,9 @@ def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit, tried_prs):
return False


def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBackend):
def _run_migrator(
migrator, mctx, temp, time_per, git_backend: GitPlatformBackend, dry_run
):
_mg_start = time.time()

migrator_name = get_migrator_name(migrator)
Expand Down Expand Up @@ -1013,7 +1023,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken
)

if _is_migrator_done(
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs, git_backend
):
return 0

Expand All @@ -1032,7 +1042,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken
# Don't let CI timeout, break ahead of the timeout so we make certain
# to write to the repo
if _is_migrator_done(
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs
_mg_start, good_prs, time_per, migrator.pr_limit, tried_prs, git_backend
):
break

Expand Down Expand Up @@ -1089,6 +1099,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken
mctx=mctx,
migrator_name=migrator_name,
good_prs=good_prs,
dry_run=dry_run,
)
if break_loop:
break
Expand Down Expand Up @@ -1278,15 +1289,16 @@ def _update_graph_with_pr_info():
dump_graph(gx)


def main(ctx: CliContext) -> None:
def main(ctx: CliContext, no_update_graph: bool, filter_pattern: str | None) -> None:
global START_TIME
START_TIME = time.time()

_setup_limits()

with fold_log_lines("updating graph with PR info"):
_update_graph_with_pr_info()
deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"])
if not no_update_graph:
with fold_log_lines("updating graph with PR info"):
_update_graph_with_pr_info()
deploy(ctx, dirs_to_deploy=["version_pr_info", "pr_json", "pr_info"])

# record tmp dir so we can be sure to clean it later
temp = glob.glob("/tmp/*")
Expand All @@ -1305,7 +1317,7 @@ def main(ctx: CliContext) -> None:
smithy_version=smithy_version,
pinning_version=pinning_version,
)
migrators = load_migrators()
migrators = load_migrators(pattern=filter_pattern)

# compute the time per migrator
with fold_log_lines("computing migrator run times"):
Expand Down Expand Up @@ -1339,7 +1351,15 @@ def main(ctx: CliContext) -> None:
git_backend = github_backend() if not ctx.dry_run else DryRunBackend()

for mg_ind, migrator in enumerate(migrators):
_run_migrator(migrator, mctx, temp, time_per_migrator[mg_ind], git_backend)
_run_migrator(
migrator,
mctx,
temp,
time_per_migrator[mg_ind],
git_backend,
dry_run=ctx.dry_run,
)

logger.info("API Calls Remaining: %d", github_backend().get_api_requests_left())
if not ctx.dry_run:
logger.info("API Calls Remaining: %d", git_backend.get_api_requests_left())
logger.info("Done")
12 changes: 10 additions & 2 deletions conda_forge_tick/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,11 +160,19 @@ def update_upstream_versions(


@main.command(name="auto-tick")
@click.option(
    "--no-update-graph", is_flag=True, help="Don't update the graph with PR info"
)
@click.option(
    "--filter-pattern", default=None, help="Only run migrators matching this pattern"
)
@pass_context
def auto_tick(
    ctx: CliContext, no_update_graph: bool, filter_pattern: Optional[str]
) -> None:
    """Run the auto-tick bot: load migrators and open migration PRs.

    Parameters
    ----------
    ctx
        Shared CLI context (carries e.g. the ``--dry-run`` flag).
    no_update_graph
        When set, skip updating the dependency graph with PR info (and the
        associated deploy step) before running migrators.
    filter_pattern
        Optional regex; only migrators whose names match are loaded and run.
    """
    # BUG FIX: the original signature read ``filter_pattern=Optional[str]``,
    # which makes the *default value* the typing construct ``Optional[str]``
    # instead of annotating the parameter. It only worked because click
    # always supplies the option as a keyword argument. Corrected to a
    # proper annotation (click's ``default=None`` provides the default).

    # Imported lazily so the heavy auto_tick module is only loaded when
    # this subcommand is actually invoked.
    from . import auto_tick

    auto_tick.main(ctx, no_update_graph, filter_pattern)


@main.command(name="make-status-report")
Expand Down
22 changes: 19 additions & 3 deletions conda_forge_tick/make_migrators.py
Original file line number Diff line number Diff line change
Expand Up @@ -872,13 +872,17 @@ def _load(name):
return make_from_lazy_json_data(lzj.data)


def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]:
def load_migrators(
skip_paused: bool = True, pattern: str | None = None
) -> MutableSequence[Migrator]:
"""Loads all current migrators.

Parameters
----------
skip_paused : bool, optional
Whether to skip paused migrators, defaults to True.
pattern : str, optional
A regular expression pattern to filter migrators, defaults to None.

Returns
-------
Expand All @@ -890,6 +894,15 @@ def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]:
pinning_migrators = []
longterm_migrators = []
all_names = get_all_keys_for_hashmap("migrators")
if pattern is not None:
original_all_names = all_names
all_names = [n for n in all_names if re.fullmatch(pattern, n)]
if not all_names:
raise ValueError(
f"No migrators found matching pattern {pattern}. "
f"Available migrators: {original_all_names}"
)
print(f"Reduced migrators from {len(original_all_names)} to {len(all_names)}")
with executor("process", 4) as pool:
futs = [pool.submit(_load, name) for name in all_names]

Expand All @@ -914,11 +927,14 @@ def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]:
migrators.append(migrator)

if version_migrator is None:
raise RuntimeError("No version migrator found in the migrators directory!")
if pattern is None:
raise RuntimeError("No version migrator found in the migrators directory!")
else:
migrators.insert(0, version_migrator)

RNG.shuffle(pinning_migrators)
RNG.shuffle(longterm_migrators)
migrators = [version_migrator] + migrators + pinning_migrators + longterm_migrators
migrators += pinning_migrators + longterm_migrators

return migrators

Expand Down
Loading
0