Mirror of https://github.com/ohmyzsh/ohmyzsh.git (synced 2026-02-13 05:59:46 +08:00)

Compare commits: 10 commits, 0fabd5f22f ... b1c5315a5f
| SHA1 |
|---|
| b1c5315a5f |
| 0493eab8ce |
| 1d31ff6037 |
| eff648aab0 |
| eb2ff84a2c |
| 423b9a8ded |
| a258eb4547 |
| 13c8a10e39 |
| 83110e8ce1 |
| d91f4e83ef |
@@ -6,3 +6,6 @@ insert_final_newline = true
 charset = utf-8
 indent_size = 2
 indent_style = space
+
+[*.py]
+indent_size = 4
.github/dependencies.yml (vendored, 2 changes)

@@ -39,7 +39,7 @@ dependencies:
   plugins/wd:
     repo: mfaerevaag/wd
     branch: master
-    version: tag:v0.6.0
+    version: tag:v0.6.1
     precopy: |
       set -e
       rm -r test
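The updater script further down in this changeset consumes this file with PyYAML. A minimal sketch of what that lookup yields for the entry above (not part of the changeset itself; it only assumes PyYAML's `safe_load` and the layout shown in the hunk):

```python
# Hypothetical illustration of how .github/dependencies.yml is read by the updater.
import yaml

snippet = """
dependencies:
  plugins/wd:
    repo: mfaerevaag/wd
    branch: master
    version: tag:v0.6.1
"""

data = yaml.safe_load(snippet)
# The "tag:" prefix marks a tag-pinned dependency, as handled in updater.py below.
print(data["dependencies"]["plugins/wd"]["version"])  # -> tag:v0.6.1
```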
.github/workflows/dependencies.yml (vendored, 11 changes)

@@ -1,8 +1,8 @@
 name: Update dependencies
 on:
   workflow_dispatch: {}
-  # schedule:
-  #   - cron: '34 3 * * */8'
+  schedule:
+    - cron: "34 3 * * */8"

 jobs:
   check:
@@ -12,12 +12,19 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
       - name: Authenticate as @ohmyzsh
         id: generate_token
         uses: ohmyzsh/github-app-token@v2
         with:
           app_id: ${{ secrets.OHMYZSH_APP_ID }}
           private_key: ${{ secrets.OHMYZSH_APP_PRIVATE_KEY }}
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+          cache: "pip"
       - name: Process dependencies
         env:
           GH_TOKEN: ${{ steps.generate_token.outputs.token }}
@@ -1,2 +1,7 @@
-PyYAML~=6.0.1
-requests~=2.31.0
+certifi==2024.2.2
+charset-normalizer==3.3.2
+idna==3.7
+PyYAML==6.0.1
+requests==2.31.0
+semver==3.0.2
+urllib3==2.2.1
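The requirements move from compatible-release specifiers (`~=`) to exact pins (`==`), and transitive dependencies are now pinned as well. A quick illustration of the difference using the `packaging` library (not used by this changeset, shown only to spell out the semantics):

```python
# Hypothetical illustration (third-party "packaging" library, not part of this PR)
# of why the pins were tightened: "~=" allows newer patch releases to slip in at
# install time, "==" does not.
from packaging.specifiers import SpecifierSet

print(SpecifierSet("~=6.0.1").contains("6.0.2"))  # True: compatible-release range
print(SpecifierSet("==6.0.1").contains("6.0.2"))  # False: exact pin
```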
.github/workflows/dependencies/updater.py (vendored, 800 changes)

@@ -1,11 +1,16 @@
+import json
 import os
+import re
+import shutil
 import subprocess
 import sys
-import requests
-import shutil
-import yaml
+import timeit
 from copy import deepcopy
-from typing import Optional, TypedDict
+from typing import Literal, NotRequired, Optional, TypedDict
+
+import requests
+import yaml
+from semver import Version

 # Get TMP_DIR variable from environment
 TMP_DIR = os.path.join(os.environ.get("TMP_DIR", "/tmp"), "ohmyzsh")
@@ -14,28 +19,58 @@ DEPS_YAML_FILE = ".github/dependencies.yml"
 # Dry run flag
 DRY_RUN = os.environ.get("DRY_RUN", "0") == "1"

-import timeit
+# utils for tag comparison
+BASEVERSION = re.compile(
+    r"""[vV]?
+        (?P<major>(0|[1-9])\d*)
+        (\.
+        (?P<minor>(0|[1-9])\d*)
+        (\.
+            (?P<patch>(0|[1-9])\d*)
+        )?
+        )?
+    """,
+    re.VERBOSE,
+)
+
+
+def coerce(version: str) -> Optional[Version]:
+    match = BASEVERSION.search(version)
+    if not match:
+        return None
+
+    # BASEVERSION looks for `MAJOR.minor.patch` in the string given
+    # it fills with None if any of them is missing (for example `2.1`)
+    ver = {
+        key: 0 if value is None else value for key, value in match.groupdict().items()
+    }
+    # Version takes `major`, `minor`, `patch` arguments
+    ver = Version(**ver)  # pyright: ignore[reportArgumentType]
+    return ver
+
+
 class CodeTimer:
     def __init__(self, name=None):
-        self.name = " '" + name + "'" if name else ''
+        self.name = " '" + name + "'" if name else ""

     def __enter__(self):
         self.start = timeit.default_timer()

     def __exit__(self, exc_type, exc_value, traceback):
         self.took = (timeit.default_timer() - self.start) * 1000.0
-        print('Code block' + self.name + ' took: ' + str(self.took) + ' ms')
+        print("Code block" + self.name + " took: " + str(self.took) + " ms")


 ### YAML representation
 def str_presenter(dumper, data):
     """
     Configures yaml for dumping multiline strings
     Ref: https://stackoverflow.com/a/33300001
     """
     if len(data.splitlines()) > 1:  # check for multiline string
-        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
-    return dumper.represent_scalar('tag:yaml.org,2002:str', data)
+        return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
+    return dumper.represent_scalar("tag:yaml.org,2002:str", data)


 yaml.add_representer(str, str_presenter)
 yaml.representer.SafeRepresenter.add_representer(str, str_presenter)
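The `coerce` helper added above, and the tag handling later in this file, lean on the `semver` package's `Version` type. A small sketch of the comparison primitive being relied on (assumes the python-semver 3.x API that the new requirements pin; `coerce` itself is defined above):

```python
# Hypothetical illustration of the semver comparison the updater now uses for
# tag-pinned dependencies (python-semver 3.x).
from semver import Version

new = Version.parse("0.6.1")
old = Version.parse("0.6.0")

print(new.compare(old))              # 1  -> a newer tag is available
print(old.compare(old))              # 0  -> identical, no update
print(Version.parse("0.5.2") > old)  # False: rich comparisons also work
```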
@@ -43,408 +78,511 @@ yaml.representer.SafeRepresenter.add_representer(str, str_presenter)

 # Types
 class DependencyDict(TypedDict):
     repo: str
     branch: str
     version: str
-    precopy: Optional[str]
-    postcopy: Optional[str]
+    precopy: NotRequired[str]
+    postcopy: NotRequired[str]


 class DependencyYAML(TypedDict):
     dependencies: dict[str, DependencyDict]


-class UpdateStatus(TypedDict):
-    has_updates: bool
-    version: Optional[str]
-    compare_url: Optional[str]
-    head_ref: Optional[str]
-    head_url: Optional[str]
+class UpdateStatusFalse(TypedDict):
+    has_updates: Literal[False]
+
+
+class UpdateStatusTrue(TypedDict):
+    has_updates: Literal[True]
+    version: str
+    compare_url: str
+    head_ref: str
+    head_url: str


 class CommandRunner:
     class Exception(Exception):
         def __init__(self, message, returncode, stage, stdout, stderr):
             super().__init__(message)
             self.returncode = returncode
             self.stage = stage
             self.stdout = stdout
             self.stderr = stderr

     @staticmethod
     def run_or_fail(command: list[str], stage: str, *args, **kwargs):
         if DRY_RUN and command[0] == "gh":
             command.insert(0, "echo")

         result = subprocess.run(command, *args, capture_output=True, **kwargs)

         if result.returncode != 0:
             raise CommandRunner.Exception(
-                f"{stage} command failed with exit code {result.returncode}", returncode=result.returncode,
+                f"{stage} command failed with exit code {result.returncode}",
+                returncode=result.returncode,
                 stage=stage,
                 stdout=result.stdout.decode("utf-8"),
-                stderr=result.stderr.decode("utf-8")
+                stderr=result.stderr.decode("utf-8"),
             )

         return result


 class DependencyStore:
-    store: DependencyYAML = {
-        "dependencies": {}
-    }
+    store: DependencyYAML = {"dependencies": {}}

     @staticmethod
     def set(data: DependencyYAML):
         DependencyStore.store = data

     @staticmethod
     def update_dependency_version(path: str, version: str) -> DependencyYAML:
         with CodeTimer(f"store deepcopy: {path}"):
             store_copy = deepcopy(DependencyStore.store)

-        dependency = store_copy["dependencies"].get(path, {})
-        dependency["version"] = version
-        store_copy["dependencies"][path] = dependency
+        dependency = store_copy["dependencies"].get(path)
+        if dependency is None:
+            raise ValueError(f"Dependency {path} {version} not found")
+        dependency["version"] = version
+        store_copy["dependencies"][path] = dependency

         return store_copy

     @staticmethod
     def write_store(file: str, data: DependencyYAML):
         with open(file, "w") as yaml_file:
             yaml.safe_dump(data, yaml_file, sort_keys=False)


 class Dependency:
     def __init__(self, path: str, values: DependencyDict):
         self.path = path
         self.values = values

         self.name: str = ""
         self.desc: str = ""
         self.kind: str = ""

         match path.split("/"):
             case ["plugins", name]:
                 self.name = name
                 self.kind = "plugin"
                 self.desc = f"{name} plugin"
             case ["themes", name]:
                 self.name = name.replace(".zsh-theme", "")
                 self.kind = "theme"
                 self.desc = f"{self.name} theme"
             case _:
                 self.name = self.desc = path

     def __str__(self):
         output: str = ""
-        for key in DependencyDict.__dict__['__annotations__'].keys():
+        for key in DependencyDict.__dict__["__annotations__"].keys():
             if key not in self.values:
                 output += f"{key}: None\n"
                 continue

             value = self.values[key]
             if "\n" not in value:
                 output += f"{key}: {value}\n"
             else:
                 output += f"{key}:\n  "
                 output += value.replace("\n", "\n  ", value.count("\n") - 1)
         return output

     def update_or_notify(self):
         # Print dependency settings
         print(f"Processing {self.desc}...", file=sys.stderr)
         print(self, file=sys.stderr)

         # Check for updates
         repo = self.values["repo"]
         remote_branch = self.values["branch"]
         version = self.values["version"]
         is_tag = version.startswith("tag:")

         try:
             with CodeTimer(f"update check: {repo}"):
                 if is_tag:
                     status = GitHub.check_newer_tag(repo, version.replace("tag:", ""))
                 else:
                     status = GitHub.check_updates(repo, remote_branch, version)

-            if status["has_updates"]:
+            if status["has_updates"] is True:
                 short_sha = status["head_ref"][:8]
                 new_version = status["version"] if is_tag else short_sha

                 try:
-                    # Create new branch
-                    branch = Git.create_branch(self.path, new_version)
+                    branch_name = f"update/{self.path}/{new_version}"
+
+                    # Create new branch
+                    branch = Git.checkout_or_create_branch(branch_name)

                     # Update dependencies.yml file
-                    self.__update_yaml(f"tag:{new_version}" if is_tag else status["version"])
+                    self.__update_yaml(
+                        f"tag:{new_version}" if is_tag else status["version"]
+                    )

                     # Update dependency files
                     self.__apply_upstream_changes()

                     # Add all changes and commit
                     Git.add_and_commit(self.name, short_sha)

                     # Push changes to remote
                     Git.push(branch)

                     # Create GitHub PR
                     GitHub.create_pr(
                         branch,
                         f"feat({self.name}): update to version {new_version}",
                         f"""## Description

 Update for **{self.desc}**: update to version [{new_version}]({status['head_url']}).
 Check out the [list of changes]({status['compare_url']}).
-"""
+""",
                     )

                     # Clean up repository
                     Git.clean_repo()
                 except (CommandRunner.Exception, shutil.Error) as e:
                     # Handle exception on automatic update
                     match type(e):
                         case CommandRunner.Exception:
                             # Print error message
-                            print(f"Error running {e.stage} command: {e.returncode}", file=sys.stderr)
-                            print(e.stderr, file=sys.stderr)
+                            print(
+                                f"Error running {e.stage} command: {e.returncode}",  # pyright: ignore[reportAttributeAccessIssue]
+                                file=sys.stderr,
+                            )
+                            print(e.stderr, file=sys.stderr)  # pyright: ignore[reportAttributeAccessIssue]
                         case shutil.Error:
                             print(f"Error copying files: {e}", file=sys.stderr)

                     try:
                         Git.clean_repo()
                     except CommandRunner.Exception as e:
-                        print(f"Error reverting repository to clean state: {e}", file=sys.stderr)
+                        print(
+                            f"Error reverting repository to clean state: {e}",
+                            file=sys.stderr,
+                        )
                         sys.exit(1)

                     # Create a GitHub issue to notify maintainer
                     title = f"{self.path}: update to {new_version}"
-                    body = (
-                        f"""## Description
+                    body = f"""## Description

 There is a new version of `{self.name}` {self.kind} available.

 New version: [{new_version}]({status['head_url']})
 Check out the [list of changes]({status['compare_url']}).
 """
-                    )

-                    print(f"Creating GitHub issue", file=sys.stderr)
+                    print("Creating GitHub issue", file=sys.stderr)
                     print(f"{title}\n\n{body}", file=sys.stderr)
                     GitHub.create_issue(title, body)
         except Exception as e:
             print(e, file=sys.stderr)

     def __update_yaml(self, new_version: str) -> None:
         dep_yaml = DependencyStore.update_dependency_version(self.path, new_version)
         DependencyStore.write_store(DEPS_YAML_FILE, dep_yaml)

     def __apply_upstream_changes(self) -> None:
         # Patterns to ignore in copying files from upstream repo
-        GLOBAL_IGNORE = [
-            ".git",
-            ".github",
-            ".gitignore"
-        ]
+        GLOBAL_IGNORE = [".git", ".github", ".gitignore"]

         path = os.path.abspath(self.path)
         precopy = self.values.get("precopy")
         postcopy = self.values.get("postcopy")

         repo = self.values["repo"]
         branch = self.values["branch"]
         remote_url = f"https://github.com/{repo}.git"
         repo_dir = os.path.join(TMP_DIR, repo)

         # Clone repository
         Git.clone(remote_url, branch, repo_dir, reclone=True)

         # Run precopy on tmp repo
         if precopy is not None:
             print("Running precopy script:", end="\n  ", file=sys.stderr)
-            print(precopy.replace("\n", "\n  ", precopy.count("\n") - 1), file=sys.stderr)
-            CommandRunner.run_or_fail(["bash", "-c", precopy], cwd=repo_dir, stage="Precopy")
+            print(
+                precopy.replace("\n", "\n  ", precopy.count("\n") - 1), file=sys.stderr
+            )
+            CommandRunner.run_or_fail(
+                ["bash", "-c", precopy], cwd=repo_dir, stage="Precopy"
+            )

         # Copy files from upstream repo
         print(f"Copying files from {repo_dir} to {path}", file=sys.stderr)
-        shutil.copytree(repo_dir, path, dirs_exist_ok=True, ignore=shutil.ignore_patterns(*GLOBAL_IGNORE))
+        shutil.copytree(
+            repo_dir,
+            path,
+            dirs_exist_ok=True,
+            ignore=shutil.ignore_patterns(*GLOBAL_IGNORE),
+        )

         # Run postcopy on our repository
         if postcopy is not None:
             print("Running postcopy script:", end="\n  ", file=sys.stderr)
-            print(postcopy.replace("\n", "\n  ", postcopy.count("\n") - 1), file=sys.stderr)
-            CommandRunner.run_or_fail(["bash", "-c", postcopy], cwd=path, stage="Postcopy")
+            print(
+                postcopy.replace("\n", "\n  ", postcopy.count("\n") - 1),
+                file=sys.stderr,
+            )
+            CommandRunner.run_or_fail(
+                ["bash", "-c", postcopy], cwd=path, stage="Postcopy"
+            )


 class Git:
     default_branch = "master"

     @staticmethod
     def clone(remote_url: str, branch: str, repo_dir: str, reclone=False):
         # If repo needs to be fresh
         if reclone and os.path.exists(repo_dir):
             shutil.rmtree(repo_dir)

         # Clone repo in tmp directory and checkout branch
         if not os.path.exists(repo_dir):
-            print(f"Cloning {remote_url} to {repo_dir} and checking out {branch}", file=sys.stderr)
-            CommandRunner.run_or_fail(["git", "clone", "--depth=1", "-b", branch, remote_url, repo_dir], stage="Clone")
+            print(
+                f"Cloning {remote_url} to {repo_dir} and checking out {branch}",
+                file=sys.stderr,
+            )
+            CommandRunner.run_or_fail(
+                ["git", "clone", "--depth=1", "-b", branch, remote_url, repo_dir],
+                stage="Clone",
+            )

     @staticmethod
-    def create_branch(path: str, version: str):
+    def checkout_or_create_branch(branch_name: str):
         # Get current branch name
-        result = CommandRunner.run_or_fail(["git", "rev-parse", "--abbrev-ref", "HEAD"], stage="GetDefaultBranch")
+        result = CommandRunner.run_or_fail(
+            ["git", "rev-parse", "--abbrev-ref", "HEAD"], stage="GetDefaultBranch"
+        )
         Git.default_branch = result.stdout.decode("utf-8").strip()

         # Create new branch and return created branch name
-        branch_name = f"update/{path}/{version}"
-        CommandRunner.run_or_fail(["git", "checkout", "-b", branch_name], stage="CreateBranch")
+        try:
+            # try to checkout already existing branch
+            CommandRunner.run_or_fail(
+                ["git", "checkout", branch_name], stage="CreateBranch"
+            )
+        except CommandRunner.Exception:
+            # otherwise create new branch
+            CommandRunner.run_or_fail(
+                ["git", "checkout", "-b", branch_name], stage="CreateBranch"
+            )
         return branch_name

     @staticmethod
     def add_and_commit(scope: str, version: str):
         user_name = os.environ.get("GIT_APP_NAME")
         user_email = os.environ.get("GIT_APP_EMAIL")

         # Add all files to git staging
         CommandRunner.run_or_fail(["git", "add", "-A", "-v"], stage="AddFiles")

         # Reset environment and git config
         clean_env = os.environ.copy()
-        clean_env["LANG"]="C.UTF-8"
-        clean_env["GIT_CONFIG_GLOBAL"]="/dev/null"
-        clean_env["GIT_CONFIG_NOSYSTEM"]="1"
+        clean_env["LANG"] = "C.UTF-8"
+        clean_env["GIT_CONFIG_GLOBAL"] = "/dev/null"
+        clean_env["GIT_CONFIG_NOSYSTEM"] = "1"

-        # Commit with settings above
-        CommandRunner.run_or_fail([
-            "git",
-            "-c", f"user.name={user_name}",
-            "-c", f"user.email={user_email}",
-            "commit",
-            "-m", f"feat({scope}): update to {version}"
-        ], stage="CreateCommit", env=clean_env)
+        # check if repo is clean (clean => no error, no commit)
+        try:
+            CommandRunner.run_or_fail(
+                ["git", "diff", "--exit-code"], stage="CheckRepoClean", env=clean_env
+            )
+        except CommandRunner.Exception:
+            # Commit with settings above
+            CommandRunner.run_or_fail(
+                [
+                    "git",
+                    "-c",
+                    f"user.name={user_name}",
+                    "-c",
+                    f"user.email={user_email}",
+                    "commit",
+                    "-m",
+                    f"feat({scope}): update to {version}",
+                ],
+                stage="CreateCommit",
+                env=clean_env,
+            )

     @staticmethod
     def push(branch: str):
-        CommandRunner.run_or_fail(["git", "push", "-u", "origin", branch], stage="PushBranch")
+        CommandRunner.run_or_fail(
+            ["git", "push", "-u", "origin", branch], stage="PushBranch"
+        )

     @staticmethod
     def clean_repo():
-        CommandRunner.run_or_fail(["git", "reset", "--hard", "HEAD"], stage="ResetRepository")
-        CommandRunner.run_or_fail(["git", "checkout", Git.default_branch], stage="CheckoutDefaultBranch")
+        CommandRunner.run_or_fail(
+            ["git", "reset", "--hard", "HEAD"], stage="ResetRepository"
+        )
+        CommandRunner.run_or_fail(
+            ["git", "checkout", Git.default_branch], stage="CheckoutDefaultBranch"
+        )


 class GitHub:
     @staticmethod
-    def check_newer_tag(repo, current_tag) -> UpdateStatus:
+    def check_newer_tag(repo, current_tag) -> UpdateStatusFalse | UpdateStatusTrue:
         # GET /repos/:owner/:repo/git/refs/tags
         url = f"https://api.github.com/repos/{repo}/git/refs/tags"

         # Send a GET request to the GitHub API
         response = requests.get(url)
+        current_version = coerce(current_tag)
+        if current_version is None:
+            raise ValueError(
+                f"Stored {current_version} from {repo} does not follow semver"
+            )

         # If the request was successful
         if response.status_code == 200:
             # Parse the JSON response
             data = response.json()

             if len(data) == 0:
                 return {
                     "has_updates": False,
                 }

-            latest_ref = data[-1]
-            latest_tag = latest_ref["ref"].replace("refs/tags/", "")
-
-            if latest_tag == current_tag:
-                return {
-                    "has_updates": False,
-                }
+            latest_ref = None
+            latest_version: Optional[Version] = None
+            for ref in data:
+                # we find the tag since GitHub returns it as plain git ref
+                tag_version = coerce(ref["ref"].replace("refs/tags/", ""))
+                if tag_version is None:
+                    # we skip every tag that is not semver-complaint
+                    continue
+                if latest_version is None or tag_version.compare(latest_version) > 0:
+                    # if we have a "greater" semver version, set it as latest
+                    latest_version = tag_version
+                    latest_ref = ref
+
+            # raise if no valid semver tag is found
+            if latest_ref is None or latest_version is None:
+                raise ValueError(f"No tags following semver found in {repo}")
+
+            # we get the tag since GitHub returns it as plain git ref
+            latest_tag = latest_ref["ref"].replace("refs/tags/", "")
+
+            if latest_version.compare(current_version) <= 0:
+                return {
+                    "has_updates": False,
+                }

             return {
                 "has_updates": True,
                 "version": latest_tag,
                 "compare_url": f"https://github.com/{repo}/compare/{current_tag}...{latest_tag}",
                 "head_ref": latest_ref["object"]["sha"],
                 "head_url": f"https://github.com/{repo}/releases/tag/{latest_tag}",
             }
         else:
             # If the request was not successful, raise an exception
-            raise Exception(f"GitHub API request failed with status code {response.status_code}: {response.json()}")
+            raise Exception(
+                f"GitHub API request failed with status code {response.status_code}: {response.json()}"
+            )

     @staticmethod
-    def check_updates(repo, branch, version) -> UpdateStatus:
-        # TODO: add support for semver updating (based on tags)
-        # Check if upstream github repo has a new version
-        # GitHub API URL for comparing two commits
+    def check_updates(repo, branch, version) -> UpdateStatusFalse | UpdateStatusTrue:
         url = f"https://api.github.com/repos/{repo}/compare/{version}...{branch}"

         # Send a GET request to the GitHub API
         response = requests.get(url)

         # If the request was successful
         if response.status_code == 200:
             # Parse the JSON response
             data = response.json()

             # If the base is behind the head, there is a newer version
             has_updates = data["status"] != "identical"

             if not has_updates:
                 return {
                     "has_updates": False,
                 }

             return {
                 "has_updates": data["status"] != "identical",
                 "version": data["commits"][-1]["sha"],
                 "compare_url": data["permalink_url"],
                 "head_ref": data["commits"][-1]["sha"],
-                "head_url": data["commits"][-1]["html_url"]
+                "head_url": data["commits"][-1]["html_url"],
             }
         else:
             # If the request was not successful, raise an exception
-            raise Exception(f"GitHub API request failed with status code {response.status_code}: {response.json()}")
+            raise Exception(
+                f"GitHub API request failed with status code {response.status_code}: {response.json()}"
+            )

     @staticmethod
     def create_issue(title: str, body: str) -> None:
-        cmd = [
-            "gh",
-            "issue",
-            "create",
-            "-t", title,
-            "-b", body
-        ]
+        cmd = ["gh", "issue", "create", "-t", title, "-b", body]
         CommandRunner.run_or_fail(cmd, stage="CreateIssue")

     @staticmethod
     def create_pr(branch: str, title: str, body: str) -> None:
+        # first of all let's check if PR is already open
+        check_cmd = [
+            "gh",
+            "pr",
+            "list",
+            "--state",
+            "open",
+            "--head",
+            branch,
+            "--json",
+            "title",
+        ]
+        # returncode is 0 also if no PRs are found
+        output = json.loads(
+            CommandRunner.run_or_fail(check_cmd, stage="CheckPullRequestOpen")
+            .stdout.decode("utf-8")
+            .strip()
+        )
+        # we have PR in this case!
+        if len(output) > 0:
+            return
         cmd = [
             "gh",
             "pr",
             "create",
-            "-B", Git.default_branch,
-            "-H", branch,
-            "-t", title,
-            "-b", body
+            "-B",
+            Git.default_branch,
+            "-H",
+            branch,
+            "-t",
+            title,
+            "-b",
+            body,
         ]
         CommandRunner.run_or_fail(cmd, stage="CreatePullRequest")


 def main():
     # Load the YAML file
     with open(DEPS_YAML_FILE, "r") as yaml_file:
         data: DependencyYAML = yaml.safe_load(yaml_file)

     if "dependencies" not in data:
-        raise Exception(f"dependencies.yml not properly formatted")
+        raise Exception("dependencies.yml not properly formatted")

     # Cache YAML version
     DependencyStore.set(data)

-
     dependencies = data["dependencies"]
     for path in dependencies:
         dependency = Dependency(path, dependencies[path])
         dependency.update_or_notify()


 if __name__ == "__main__":
     main()
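For non-tag dependencies, `check_updates` above reads only a handful of fields from GitHub's compare endpoint (`status`, `permalink_url`, `commits[-1]`). A hypothetical standalone sketch of that call, with repository and refs chosen only for illustration:

```python
# Hypothetical sketch of the compare request check_updates() performs; only the
# response fields actually read by the updater are shown.
import requests

resp = requests.get(
    "https://api.github.com/repos/mfaerevaag/wd/compare/v0.6.0...master"
)
resp.raise_for_status()
data = resp.json()

print(data["status"])          # e.g. "ahead" or "identical"
print(data["permalink_url"])   # stable compare URL used in the PR/issue body
if data["status"] != "identical":
    print(data["commits"][-1]["sha"])  # newest commit on the head ref
```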
@@ -4,6 +4,8 @@ function fzf_setup_using_fzf() {
   # we remove "fzf " prefix, this fixes really old fzf versions behaviour
   # see https://github.com/ohmyzsh/ohmyzsh/issues/12387
   local fzf_ver=${"$(fzf --version)"#fzf }

+  autoload -Uz is-at-least
   is-at-least 0.48.0 ${${(s: :)fzf_ver}[1]} || return 1
+
   eval "$(fzf --zsh)"
@@ -57,6 +57,24 @@ wd() {
 }
 ```

+### [Home Manager](https://github.com/nix-community/home-manager)
+
+Add the following to your `home.nix` then run `home-manager switch`:
+
+```nix
+programs.zsh.plugins = [
+  {
+    name = "wd";
+    src = pkgs.fetchFromGitHub {
+      owner = "mfaerevaag";
+      repo = "wd";
+      rev = "v0.5.2";
+      sha256 = "sha256-4yJ1qhqhNULbQmt6Z9G22gURfDLe30uV1ascbzqgdhg=";
+    };
+  }
+];
+```
+
 ### [zplug](https://github.com/zplug/zplug)

 ```zsh
@@ -119,6 +137,14 @@ Also, you may have to force a rebuild of `zcompdump` by running:
 rm -f ~/.zcompdump; compinit
 ```

+## Browse
+
+If you want to make use of the `fzf`-powered browse feature to fuzzy search through all your warp points, set up a keybind in your `.zshrc`:
+
+```zsh
+bindkey '^G' wd_browse
+```
+
 ## Usage

 * Add warp point to current working directory:
@@ -8,8 +8,13 @@
 # @github.com/mfaerevaag/wd

 # Handle $0 according to the standard:
-# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
+# # https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
 0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
 0="${${(M)0:#/*}:-$PWD/$0}"

 eval "wd() { source '${0:A:h}/wd.sh' }"
+wd > /dev/null
+# Register the function as a Zsh widget
+zle -N wd_browse
+# Bind the widget to a key combination
+bindkey '^G' wd_browse
plugins/wd/wd.sh (37 changes, Normal file → Executable file)

@@ -8,7 +8,7 @@
 # @github.com/mfaerevaag/wd

 # version
-readonly WD_VERSION=0.5.0
+readonly WD_VERSION=0.6.1

 # colors
 readonly WD_BLUE="\033[96m"
@@ -57,12 +57,11 @@ wd_print_msg()
 {
     if [[ -z $wd_quiet_mode ]]
     then
-        local color=$1
-        local msg=$2
+        local color="${1:-$WD_BLUE}" # Default to blue if no color is provided
+        local msg="$2"

-        if [[ $color == "" || $msg == "" ]]
-        then
-            print " ${WD_RED}*${WD_NOC} Could not print message. Sorry!"
+        if [[ -z "$msg" ]]; then
+            print "${WD_RED}*${WD_NOC} Could not print message. Sorry!"
         else
             print " ${color}*${WD_NOC} ${msg}"
         fi
@@ -230,6 +229,20 @@ wd_remove()
     done
 }

+wd_browse() {
+    if ! command -v fzf >/dev/null; then
+        echo "This functionality requires fzf. Please install fzf first."
+        return 1
+    fi
+    local entries=("${(@f)$(sed "s:${HOME}:~:g" "$WD_CONFIG" | awk -F ':' '{print $1 " -> " $2}')}")
+    local selected_entry=$(printf '%s\n' "${entries[@]}" | fzf --height 40% --reverse)
+    if [[ -n $selected_entry ]]; then
+        local selected_point="${selected_entry%% ->*}"
+        selected_point=$(echo "$selected_point" | xargs)
+        wd $selected_point
+    fi
+}
+
 wd_list_all()
 {
     wd_print_msg "$WD_BLUE" "All warp points:"
@@ -396,7 +409,9 @@ fi
 # disable extendedglob for the complete wd execution time
 setopt | grep -q extendedglob
 wd_extglob_is_set=$?
-(( ! $wd_extglob_is_set )) && setopt noextendedglob
+if (( wd_extglob_is_set == 0 )); then
+    setopt noextendedglob
+fi

 # load warp points
 typeset -A points
@@ -436,6 +451,10 @@ else
             wd_add "$2" "$wd_force_mode"
             break
             ;;
+        "-b"|"browse")
+            wd_browse
+            break
+            ;;
         "-e"|"export")
             wd_export_static_named_directories
             break
@@ -484,7 +503,9 @@ fi
 # if not, next time warp will pick up variables from this run
 # remember, there's no sub shell

-(( ! $wd_extglob_is_set )) && setopt extendedglob
+if (( wd_extglob_is_set == 0 )); then
+    setopt extendedglob
+fi

 unset wd_extglob_is_set
 unset wd_warp