Compare commits

..

1 Commit

Author SHA1 Message Date
renovate[bot]
ec17b125da chore(deps): update image python to v3.14.3
Signed-off-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-02-06 00:52:46 +00:00
90 changed files with 19287 additions and 1922 deletions

View File

@@ -8,10 +8,6 @@ indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.py]
indent_size = 4
max_line_length = 120
[*.fish]
max_line_length = 120

6
.github/README.md vendored
View File

@@ -37,7 +37,7 @@ see what interesting stuff you've done with it. Sharing is caring.
### Interesting folders
| Path | Description |
|---------------------|----------------------------------------------|
| ------------------- | -------------------------------------------- |
| `.github` | GitHub Repository configuration files, meta. |
| `hosts/{hostname}/` | Configs that should apply to that host only. |
| `local/bin` | Helper scripts that I've collected or wrote. |
@@ -52,7 +52,7 @@ is processed by Dotbot during installation.
### dotfile folders
| Repo | Destination | Description |
|-----------|-------------|---------------------------------------------|
| --------- | ----------- | ------------------------------------------- |
| `base/` | `.*` | `$HOME` level files. |
| `config/` | `.config/` | Configurations for applications. |
| `local/` | `.local/` | XDG Base folder: `bin`, `share` and `state` |
@@ -86,7 +86,7 @@ The folder structure follows [XDG Base Directory Specification][xdg] where possi
### XDG Variables
| Env | Default | Short description |
|--------------------|----------------------|------------------------------------------------|
| ------------------ | -------------------- | ---------------------------------------------- |
| `$XDG_BIN_HOME` | `$HOME/.local/bin` | Local binaries |
| `$XDG_CONFIG_HOME` | `$HOME/.config` | User-specific configs |
| `$XDG_DATA_HOME` | `$HOME/.local/share` | User-specific data files |

View File

@@ -9,15 +9,13 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
permissions: read-all
jobs:
debug-changelog:
runs-on: ubuntu-latest
permissions:
contents: read
permissions: write-all
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -29,7 +27,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
config_file: .github/tag-changelog-config.js
- name: "Echo results"
- name: 'Echo results'
id: output-changelog
run: |
echo "${{ steps.changelog.outputs.changes }}"

View File

@@ -11,8 +11,7 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
permissions: read-all
jobs:
Linter:

View File

@@ -5,21 +5,19 @@ name: Release Daily State
on:
workflow_dispatch:
schedule:
- cron: "0 21 * * *" # 00:00 at Europe/Helsinki
- cron: '0 21 * * *' # 00:00 at Europe/Helsinki
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
permissions: read-all
jobs:
new-daily-release:
runs-on: ubuntu-latest
permissions:
contents: write
permissions: write-all
outputs:
created: ${{ steps.daily-version.outputs.created }}

View File

@@ -5,15 +5,14 @@ name: Pre-commit autoupdate
on:
schedule:
# At 04:00 on Monday and Thursday.
- cron: "0 4 * * 1,4"
- cron: '0 4 * * 1,4'
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
permissions: read-all
jobs:
auto-update:
@@ -34,6 +33,6 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: update/pre-commit-hooks
title: "chore: update pre-commit hooks"
commit-message: "chore: update pre-commit hooks"
title: 'chore: update pre-commit hooks'
commit-message: 'chore: update pre-commit hooks'
body: Update versions of pre-commit hooks to latest version.

View File

@@ -14,8 +14,7 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
pull-requests: read
permissions: read-all
jobs:
semantic-pr:

View File

@@ -11,7 +11,7 @@ on:
- .github/workflows/sync-labels.yml
- .github/labels.yml
schedule:
- cron: "34 5 * * *"
- cron: '34 5 * * *'
workflow_call:
workflow_dispatch:
@@ -19,8 +19,7 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
permissions: read-all
jobs:
SyncLabels:

View File

@@ -5,22 +5,20 @@ name: Update submodules
on:
schedule:
# At 04:00 on Monday and Thursday.
- cron: "0 4 * * 1"
- cron: '0 4 * * 1'
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
permissions: read-all
jobs:
update-submodules:
runs-on: ubuntu-latest
permissions:
contents: write
permissions: write-all
steps:
- name: Checkout repository

1
.gitignore vendored
View File

@@ -56,6 +56,5 @@ local/man/yabai.1
local/share/fonts/*
lock
node_modules
__pycache__
ssh/local.d/*
config/fish/fish_variables*

View File

@@ -9,21 +9,16 @@ VALIDATE_ALL_CODEBASE: true
FILEIO_REPORTER: false # Generate file.io report
GITHUB_STATUS_REPORTER: true # Generate GitHub status report
IGNORE_GENERATED_FILES: true # Ignore generated files
JAVASCRIPT_DEFAULT_STYLE: prettier # Default style for JavaScript
PRINT_ALPACA: false # Print Alpaca logo in console
SARIF_REPORTER: true # Generate SARIF report
SHOW_SKIPPED_LINTERS: false # Show skipped linters in MegaLinter log
TYPESCRIPT_DEFAULT_STYLE: prettier # Default style for TypeScript
DISABLE_LINTERS:
- REPOSITORY_DEVSKIM
- JAVASCRIPT_ES # using biome
- JAVASCRIPT_PRETTIER # using biome
- TYPESCRIPT_PRETTIER # using biome
- JSON_PRETTIER # using biome
- PYTHON_BLACK # using ruff
- PYTHON_FLAKE8 # using ruff
- PYTHON_PYLINT # using ruff
- PYTHON_ISORT # using ruff (I rules)
YAML_YAMLLINT_CONFIG_FILE: .yamllint.yml
REPOSITORY_GIT_DIFF_DISABLE_ERRORS: true
BASH_SHFMT_ARGUMENTS: -i 2 -bn -ci -sr -fn
FILTER_REGEX_EXCLUDE: >
(node_modules|tools|config/cheat/cheatsheets/community|config/cheat/cheatsheets/tldr|config/fzf|config/zsh|config/tmux/plugins)

View File

@@ -28,25 +28,12 @@ repos:
entry: yarn biome check --write --files-ignore-unknown=true --no-errors-on-unmatched
language: system
files: \.(js|ts|jsx|tsx|json|md)$
- id: markdown-table-formatter
name: Markdown Table Formatter
entry: yarn markdown-table-formatter
language: system
types: [markdown]
- repo: https://github.com/adrienverge/yamllint
rev: v1.38.0
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v4.0.0-alpha.8
hooks:
- id: prettier
types_or: [yaml]
additional_dependencies:
- prettier@3.8.1
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.11.0.1
hooks:
@@ -56,7 +43,6 @@ repos:
rev: v3.12.0-2
hooks:
- id: shfmt
args: [-i, "2", -bn, -ci, -sr, -fn, -w]
- repo: https://github.com/rhysd/actionlint
rev: v1.7.10
@@ -74,10 +60,3 @@ repos:
hooks:
- id: fish_syntax
- id: fish_indent
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.15.0
hooks:
- id: ruff-check
args: [--fix]
- id: ruff-format

View File

@@ -1,18 +0,0 @@
node_modules
.yarn
.pnp.*
.mypy_cache
Brewfile.lock.json
lazy-lock.json
config/cheat/cheatsheets/community
config/cheat/cheatsheets/tldr
config/fzf
config/nvim
config/op/plugins/used_plugins
config/tmux/plugins
config/vim/plugged
config/zsh
local/bin/antigen.zsh
local/bin/asdf
tools
docs/plans

View File

@@ -1,9 +0,0 @@
{
"$schema": "https://json.schemastore.org/prettierrc",
"printWidth": 200,
"tabWidth": 2,
"useTabs": false,
"endOfLine": "lf",
"singleQuote": false,
"proseWrap": "preserve"
}

View File

@@ -1 +1 @@
3.14.2
3.14.3

View File

@@ -13,11 +13,11 @@ ignore_all_files_in_gitignore: true
# Was previously called `ignored_dirs`, please update your config if you are using that.
# Added (renamed) on 2025-04-07
ignored_paths:
- "*.swp"
- "*.tmp"
- "*.tmp.*"
- ".DS_Store"
- ".git/**"
- '*.swp'
- '*.tmp'
- '*.tmp.*'
- '.DS_Store'
- '.git/**'
- /config/cheat/cheatsheets/community/**
- /config/cheat/cheatsheets/pure-bash-bible/**
- /config/cheat/cheatsheets/tldr/**
@@ -85,6 +85,6 @@ excluded_tools: []
# initial prompt for the project. It will always be given to the LLM upon activating the project
# (contrary to the memories, which are loaded on demand).
initial_prompt: ""
initial_prompt: ''
project_name: ".dotfiles"
project_name: '.dotfiles'

123
CLAUDE.md
View File

@@ -1,123 +0,0 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code)
when working with code in this repository.
## Repository Overview
Personal dotfiles repository for Ismo Vuorinen.
Uses **Dotbot** (not GNU Stow) to symlink configuration files into place.
The directory layout follows the XDG Base Directory Specification.
## Directory Layout and Linking
| Source | Destination | Notes |
|---------------------|-------------------|-------------------------------------------|
| `base/*` | `~/.*` | Home-level dotfiles (`.` added by Dotbot) |
| `config/*` | `~/.config/` | Application configurations |
| `local/bin/*` | `~/.local/bin/` | Helper scripts and utilities |
| `local/share/*` | `~/.local/share/` | Data files |
| `local/man/**` | `~/.local/man/` | Manual pages |
| `ssh/*` | `~/.ssh/` | SSH configuration (mode 0600) |
| `hosts/<hostname>/` | Overlays | Host-specific overrides |
Installation: `./install` runs Dotbot with `install.conf.yaml`,
then applies `hosts/<hostname>/install.conf.yaml` if it exists.
## Commands
```bash
# Install dependencies (required before lint/test)
yarn install
# Linting
yarn lint # Run biome + prettier + editorconfig-checker
yarn lint:biome # Biome only
yarn lint:ec # EditorConfig checker only
# Formatting
yarn fix:biome # Autofix with biome (JS/TS/JSON/MD)
yarn fix:prettier # Autofix with prettier (YAML)
yarn format # Format with biome
yarn format:yaml # Format YAML files with prettier
# Testing (Bats - Bash Automated Testing System)
yarn test # Run all tests in tests/
# Run a single test file:
./node_modules/.bin/bats tests/dfm.bats
# Shell linting
shellcheck <script> # Lint shell scripts
```
## Pre-commit Hooks
Configured in `.pre-commit-config.yaml`: shellcheck, shfmt, biome,
yamllint, prettier, actionlint, stylua, fish_syntax/fish_indent.
Run `pre-commit run --all-files` to check everything.
## Commit Convention
Semantic Commit messages: `type(scope): summary`
(e.g., `fix(tmux): correct prefix binding`).
Enforced by commitlint extending `@ivuorinen/commitlint-config`.
## Architecture
### Shell Configuration Chain
Both `base/bashrc` and `base/zshrc` source `config/shared.sh`,
which loads:
- `config/exports` — environment variables, XDG dirs, PATH
- `config/alias` — shell aliases
Zsh additionally uses **antidote** (in `tools/antidote/`)
for plugin management and **oh-my-posh** for the prompt.
### dfm — Dotfiles Manager
`local/bin/dfm` is the main management script. Key commands:
- `dfm install all` — install everything (called during `./install`)
- `dfm brew install` / `dfm brew update` — Homebrew management
- `dfm docs all` — regenerate documentation under `docs/`
### Submodules
External dependencies are git submodules (Dotbot, plugins,
tmux plugins, cheatsheets, antidote).
Managed by `add-submodules.sh`. All set to `ignore = dirty`.
Updated automatically via GitHub Actions on a schedule.
### Host-specific Configs
Machine-specific overrides live in `hosts/<hostname>/`
with their own `base/`, `config/`, and `install.conf.yaml`.
These are layered on top of the global config during installation.
## Code Style
- **EditorConfig**: 2-space indent, UTF-8, LF line endings.
See `.editorconfig` for per-filetype overrides
(4-space for PHP/fish, tabs for git config).
- **Shell scripts**: Must have a shebang or
`# shellcheck shell=bash` directive.
Follow shfmt settings in `.editorconfig`
(2-space indent, `binary_next_line`,
`switch_case_indent`, `space_redirects`, `function_next_line`).
- **Lua** (neovim config): Formatted with stylua (`stylua.toml`),
90-char line length.
- **JSON/JS/TS/Markdown**: Formatted with Biome (`biome.json`),
80-char width.
- **YAML**: Formatted with Prettier (`.prettierrc.json`),
validated with yamllint (`.yamllint.yml`).
## ShellCheck Disabled Rules
Defined in `.shellcheckrc`:
SC2039 (POSIX `local`), SC2166 (`-o` in test),
SC2154 (unassigned variables), SC1091 (source following),
SC2174 (mkdir -p -m), SC2016 (single-quote expressions).
## Package Manager
Yarn (v4.12.0) is the package manager. Do not use npm.

View File

@@ -42,25 +42,19 @@ done
# Mark certain repositories shallow
git config -f .gitmodules submodule.antidote.shallow true
# Log a message using msgr if available, else echo
_log()
{
local msg="$1"
_log() {
if command -v msgr > /dev/null 2>&1; then
msgr run_done "$msg"
msgr run_done "$1"
else
echo " [ok] $msg"
echo " [ok] $1"
fi
return 0
}
# Remove a stale git submodule and clean up references
remove_old_submodule()
{
remove_old_submodule() {
local name="$1" path="$2"
# Remove working tree
if [[ -d "$path" ]]; then
if [ -d "$path" ]; then
rm -rf "$path"
_log "Removed $path"
fi
@@ -72,13 +66,13 @@ remove_old_submodule()
git config --remove-section "submodule.$path" 2> /dev/null || true
# Skip name-based cleanup if no submodule name provided
[[ -z "$name" ]] && return 0
[ -z "$name" ] && return 0
# Remove .git/config section keyed by name
git config --remove-section "submodule.$name" 2> /dev/null || true
# Remove .git/modules/<name>/ cached repository
if [[ -d ".git/modules/$name" ]]; then
if [ -d ".git/modules/$name" ]; then
rm -rf ".git/modules/$name"
_log "Removed .git/modules/$name"
fi

View File

@@ -22,3 +22,4 @@ x-have antidot && {
PROMPT_DIRTRIM=3
PROMPT_COMMAND='PS1_CMD1=$(git branch --show-current 2>/dev/null)'
PS1='\[\e[95m\]\u\[\e[0m\]@\[\e[38;5;22;2m\]\h\[\e[0m\] \[\e[38;5;33m\]\w\[\e[0m\] \[\e[92;2m\]${PS1_CMD1}\n\[\e[39m\]➜\[\e[0m\] '

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -38,3 +38,4 @@ bashcompinit
# To customize prompt, run `p10k configure` or edit ~/.p10k.zsh.
export P10K_CONFIG="$DOTFILES/config/zsh/p10k.zsh"
[[ ! -f "$P10K_CONFIG" ]] || source "$P10K_CONFIG"

View File

@@ -93,13 +93,13 @@ expand-main:
# Note that not all layouts respond to this command.
increase-main:
mod: mod1
key: ","
key: ','
# Decrease the number of windows in the main pane.
# Note that not all layouts respond to this command.
decrease-main:
mod: mod1
key: "."
key: '.'
# General purpose command for custom layouts.
# Functionality is layout-dependent.

View File

@@ -150,7 +150,6 @@ commit()
git commit -a -m "$commitMessage"
}
# Run Laravel scheduler in a loop
scheduler()
{
while :; do

View File

@@ -7,67 +7,65 @@ To be used with a companion fish function like this:
"""
from __future__ import print_function
import json
import os
import signal
import subprocess
import sys
import traceback
BASH = "bash"
BASH = 'bash'
FISH_READONLY = [
"PWD",
"SHLVL",
"history",
"pipestatus",
"status",
"version",
"FISH_VERSION",
"fish_pid",
"hostname",
"_",
"fish_private_mode",
'PWD', 'SHLVL', 'history', 'pipestatus', 'status', 'version',
'FISH_VERSION', 'fish_pid', 'hostname', '_', 'fish_private_mode'
]
IGNORED = ["PS1", "XPC_SERVICE_NAME"]
IGNORED = [
'PS1', 'XPC_SERVICE_NAME'
]
def ignored(name):
if name == "PWD": # this is read only, but has special handling
if name == 'PWD': # this is read only, but has special handling
return False
# ignore other read only variables
if name in FISH_READONLY:
return True
if name in IGNORED or name.startswith("BASH_FUNC"):
return True
return name.startswith("%")
if name.startswith('%'):
return True
return False
def escape(string):
# use json.dumps to reliably escape quotes and backslashes
return json.dumps(string).replace(r"$", r"\$")
return json.dumps(string).replace(r'$', r'\$')
def escape_identifier(word):
return escape(word.replace("?", "\\?"))
return escape(word.replace('?', '\\?'))
def comment(string):
return "\n".join(["# " + line for line in string.split("\n")])
return '\n'.join(['# ' + line for line in string.split('\n')])
def gen_script():
# Use the following instead of /usr/bin/env to read environment so we can
# deal with multi-line environment variables (and other odd cases).
env_reader = f"{sys.executable} -c 'import os,json; print(json.dumps({{k:v for k,v in os.environ.items()}}))'"
args = [BASH, "-c", env_reader]
env_reader = "%s -c 'import os,json; print(json.dumps({k:v for k,v in os.environ.items()}))'" % (sys.executable)
args = [BASH, '-c', env_reader]
output = subprocess.check_output(args, universal_newlines=True)
old_env = output.strip()
pipe_r, pipe_w = os.pipe()
os.set_inheritable(pipe_w, True)
command = f"eval $1 && ({env_reader}; alias) >&{pipe_w}"
args = [BASH, "-c", command, "bass", " ".join(sys.argv[1:])]
if sys.version_info >= (3, 4):
os.set_inheritable(pipe_w, True)
command = 'eval $1 && ({}; alias) >&{}'.format(
env_reader,
pipe_w
)
args = [BASH, '-c', command, 'bass', ' '.join(sys.argv[1:])]
p = subprocess.Popen(args, universal_newlines=True, close_fds=False)
os.close(pipe_w)
with os.fdopen(pipe_r) as f:
@@ -75,7 +73,9 @@ def gen_script():
alias_str = f.read()
if p.wait() != 0:
raise subprocess.CalledProcessError(
returncode=p.returncode, cmd=" ".join(sys.argv[1:]), output=new_env + alias_str
returncode=p.returncode,
cmd=' '.join(sys.argv[1:]),
output=new_env + alias_str
)
new_env = new_env.strip()
@@ -89,41 +89,41 @@ def gen_script():
continue
v1 = old_env.get(k)
if not v1:
script_lines.append(comment(f"adding {k}={v}"))
script_lines.append(comment('adding %s=%s' % (k, v)))
elif v1 != v:
script_lines.append(comment(f"updating {k}={v1} -> {v}"))
script_lines.append(comment('updating %s=%s -> %s' % (k, v1, v)))
# process special variables
if k == "PWD":
script_lines.append(f"cd {escape(v)}")
if k == 'PWD':
script_lines.append('cd %s' % escape(v))
continue
else:
continue
if k == "PATH": # noqa: SIM108
value = " ".join([escape(directory) for directory in v.split(":")])
if k == 'PATH':
value = ' '.join([escape(directory)
for directory in v.split(':')])
else:
value = escape(v)
script_lines.append(f"set -g -x {k} {value}")
script_lines.append('set -g -x %s %s' % (k, value))
for var in set(old_env.keys()) - set(new_env.keys()):
script_lines.append(comment(f"removing {var}"))
script_lines.append(f"set -e {var}")
script_lines.append(comment('removing %s' % var))
script_lines.append('set -e %s' % var)
script = "\n".join(script_lines)
script = '\n'.join(script_lines)
alias_lines = []
for line in alias_str.splitlines():
_, rest = line.split(None, 1)
k, v = rest.split("=", 1)
alias_lines.append("alias " + escape_identifier(k) + "=" + v)
alias = "\n".join(alias_lines)
alias = '\n'.join(alias_lines)
return script + "\n" + alias
return script + '\n' + alias
script_file = os.fdopen(3, "w")
script_file = os.fdopen(3, 'w')
if not sys.argv[1:]:
print("__bass_usage", file=script_file, end="")
print('__bass_usage', file=script_file, end='')
sys.exit(0)
try:
@@ -131,8 +131,8 @@ try:
except subprocess.CalledProcessError as e:
sys.exit(e.returncode)
except Exception:
print("Bass internal error!", file=sys.stderr)
raise # traceback will output to stderr
print('Bass internal error!', file=sys.stderr)
raise # traceback will output to stderr
except KeyboardInterrupt:
signal.signal(signal.SIGINT, signal.SIG_DFL)
os.kill(os.getpid(), signal.SIGINT)

View File

@@ -8,8 +8,8 @@ function fisher --argument-names cmd --description "A plugin manager for Fish"
echo "fisher, version $fisher_version"
case "" -h --help
echo "Usage: fisher install <plugins...> Install plugins"
echo " fisher remove <plugins...> Remove installed plugins"
echo " fisher uninstall <plugins...> Remove installed plugins (alias)"
echo " fisher remove <plugins...> Remove installed plugins"
echo " fisher uninstall <plugins...> Remove installed plugins (alias)"
echo " fisher update <plugins...> Update installed plugins"
echo " fisher update Update all installed plugins"
echo " fisher list [<regex>] List installed plugins matching regex"
@@ -41,7 +41,7 @@ function fisher --argument-names cmd --description "A plugin manager for Fish"
echo "fisher: \"$fish_plugins\" file not found: \"$cmd\"" >&2 && return 1
end
set arg_plugins $file_plugins
else if test "$cmd" = install && ! set --query old_plugins[1]
else if test "$cmd" = install && ! set --query old_plugins[1]
set --append arg_plugins $file_plugins
end

View File

@@ -58,3 +58,4 @@ fish_pager_color_progress 737994
fish_pager_color_prefix f4b8e4
fish_pager_color_completion c6d0f5
fish_pager_color_description 737994

View File

@@ -58,3 +58,4 @@ fish_pager_color_progress 6e738d
fish_pager_color_prefix f5bde6
fish_pager_color_completion cad3f5
fish_pager_color_description 6e738d

View File

@@ -58,3 +58,4 @@ fish_pager_color_progress 6c7086
fish_pager_color_prefix f5c2e7
fish_pager_color_completion cdd6f4
fish_pager_color_description 6c7086

View File

@@ -13,37 +13,32 @@
if [[ $- =~ i ]]; then
# To use custom commands instead of find, override _fzf_compgen_{path,dir}
if ! declare -f _fzf_compgen_path > /dev/null; then
_fzf_compgen_path()
{
if ! declare -f _fzf_compgen_path >/dev/null; then
_fzf_compgen_path() {
echo "$1"
command find -L "$1" \
-name .git -prune -o -name .hg -prune -o -name .svn -prune -o \( -type d -o -type f -o -type l \) \
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
-a -not -path "$1" -print 2>/dev/null | sed 's@^\./@@'
}
fi
if ! declare -f _fzf_compgen_dir > /dev/null; then
_fzf_compgen_dir()
{
if ! declare -f _fzf_compgen_dir >/dev/null; then
_fzf_compgen_dir() {
command find -L "$1" \
-name .git -prune -o -name .hg -prune -o -name .svn -prune -o -type d \
-a -not -path "$1" -print 2> /dev/null | sed 's@^\./@@'
-a -not -path "$1" -print 2>/dev/null | sed 's@^\./@@'
}
fi
###########################################################
# To redraw line after fzf closes (printf '\e[5n')
bind '"\e[0n": redraw-current-line' 2> /dev/null
bind '"\e[0n": redraw-current-line' 2>/dev/null
__fzf_comprun()
{
__fzf_comprun() {
if [[ "$(type -t _fzf_comprun 2>&1)" = function ]]; then
_fzf_comprun "$@"
elif [[ -n "${TMUX_PANE-}" ]] && {
[[ "${FZF_TMUX:-0}" != 0 ]] || [[ -n "${FZF_TMUX_OPTS-}" ]]
}; then
elif [[ -n "${TMUX_PANE-}" ]] && { [[ "${FZF_TMUX:-0}" != 0 ]] || [[ -n "${FZF_TMUX_OPTS-}" ]]; }; then
shift
fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- "$@"
else
@@ -52,8 +47,7 @@ if [[ $- =~ i ]]; then
fi
}
__fzf_orig_completion()
{
__fzf_orig_completion() {
local l comp f cmd
while read -r l; do
if [[ "$l" =~ ^(.*\ -F)\ *([^ ]*).*\ ([^ ]*)$ ]]; then
@@ -69,8 +63,7 @@ if [[ $- =~ i ]]; then
done
}
_fzf_opts_completion()
{
_fzf_opts_completion() {
local cur prev opts
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
@@ -119,18 +112,18 @@ if [[ $- =~ i ]]; then
--sync"
case "${prev}" in
--tiebreak)
COMPREPLY=($(compgen -W "length begin end index" -- "$cur"))
return 0
;;
--color)
COMPREPLY=($(compgen -W "dark light 16 bw" -- "$cur"))
return 0
;;
--history)
COMPREPLY=()
return 0
;;
--tiebreak)
COMPREPLY=($(compgen -W "length begin end index" -- "$cur"))
return 0
;;
--color)
COMPREPLY=($(compgen -W "dark light 16 bw" -- "$cur"))
return 0
;;
--history)
COMPREPLY=()
return 0
;;
esac
if [[ "$cur" =~ ^-|\+ ]]; then
@@ -141,8 +134,7 @@ if [[ $- =~ i ]]; then
return 0
}
_fzf_handle_dynamic_completion()
{
_fzf_handle_dynamic_completion() {
local cmd orig_var orig ret orig_cmd orig_complete
cmd="$1"
shift
@@ -150,15 +142,15 @@ if [[ $- =~ i ]]; then
orig_var="_fzf_orig_completion_$cmd"
orig="${!orig_var-}"
orig="${orig##*#}"
if [[ -n "$orig" ]] && type "$orig" > /dev/null 2>&1; then
if [[ -n "$orig" ]] && type "$orig" >/dev/null 2>&1; then
$orig "$@"
elif [[ -n "${_fzf_completion_loader-}" ]]; then
orig_complete=$(complete -p "$orig_cmd" 2> /dev/null)
orig_complete=$(complete -p "$orig_cmd" 2>/dev/null)
_completion_loader "$@"
ret=$?
# _completion_loader may not have updated completion for the command
if [[ "$(complete -p "$orig_cmd" 2> /dev/null)" != "$orig_complete" ]]; then
__fzf_orig_completion < <(complete -p "$orig_cmd" 2> /dev/null)
if [[ "$(complete -p "$orig_cmd" 2>/dev/null)" != "$orig_complete" ]]; then
__fzf_orig_completion < <(complete -p "$orig_cmd" 2>/dev/null)
if [[ "${__fzf_nospace_commands-}" = *" $orig_cmd "* ]]; then
eval "${orig_complete/ -F / -o nospace -F }"
else
@@ -169,8 +161,7 @@ if [[ $- =~ i ]]; then
fi
}
__fzf_generic_path_completion()
{
__fzf_generic_path_completion() {
local cur base dir leftover matches trigger cmd
cmd="${COMP_WORDS[0]}"
if [[ $cmd == \\* ]]; then
@@ -216,8 +207,7 @@ if [[ $- =~ i ]]; then
fi
}
_fzf_complete()
{
_fzf_complete() {
# Split arguments around --
local args rest str_arg i sep
args=("$@")
@@ -241,7 +231,7 @@ if [[ $- =~ i ]]; then
local cur selected trigger cmd post
post="$(caller 0 | awk '{print $2}')_post"
type -t "$post" > /dev/null 2>&1 || post=cat
type -t "$post" >/dev/null 2>&1 || post=cat
cmd="${COMP_WORDS[0]//[^A-Za-z0-9_=]/_}"
trigger=${FZF_COMPLETION_TRIGGER-'**'}
@@ -263,59 +253,50 @@ if [[ $- =~ i ]]; then
fi
}
_fzf_path_completion()
{
_fzf_path_completion() {
__fzf_generic_path_completion _fzf_compgen_path "-m" "" "$@"
}
# Deprecated. No file only completion.
_fzf_file_completion()
{
_fzf_file_completion() {
_fzf_path_completion "$@"
}
_fzf_dir_completion()
{
_fzf_dir_completion() {
__fzf_generic_path_completion _fzf_compgen_dir "" "/" "$@"
}
_fzf_complete_kill()
{
_fzf_complete_kill() {
_fzf_proc_completion "$@"
}
_fzf_proc_completion()
{
_fzf_proc_completion() {
_fzf_complete -m --header-lines=1 --preview 'echo {}' --preview-window down:3:wrap --min-height 15 -- "$@" < <(
command ps -eo user,pid,ppid,start,time,command 2> /dev/null \
|| command ps -eo user,pid,ppid,time,args # For BusyBox
command ps -eo user,pid,ppid,start,time,command 2>/dev/null ||
command ps -eo user,pid,ppid,time,args # For BusyBox
)
}
_fzf_proc_completion_post()
{
_fzf_proc_completion_post() {
awk '{print $2}'
}
_fzf_host_completion()
{
_fzf_host_completion() {
_fzf_complete +m -- "$@" < <(
command cat <(command tail -n +1 ~/.ssh/config ~/.ssh/config.d/* /etc/ssh/ssh_config 2> /dev/null | command grep -i '^\s*host\(name\)\? ' | awk '{for (i = 2; i <= NF; i++) print $1 " " $i}' | command grep -v '[*?%]') \
command cat <(command tail -n +1 ~/.ssh/config ~/.ssh/config.d/* /etc/ssh/ssh_config 2>/dev/null | command grep -i '^\s*host\(name\)\? ' | awk '{for (i = 2; i <= NF; i++) print $1 " " $i}' | command grep -v '[*?%]') \
<(command grep -oE '^[[a-z0-9.,:-]+' ~/.ssh/known_hosts | tr ',' '\n' | tr -d '[' | awk '{ print $1 " " $1 }') \
<(command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0') \
| awk '{if (length($2) > 0) {print $2}}' | sort -u
<(command grep -v '^\s*\(#\|$\)' /etc/hosts | command grep -Fv '0.0.0.0') |
awk '{if (length($2) > 0) {print $2}}' | sort -u
)
}
_fzf_var_completion()
{
_fzf_var_completion() {
_fzf_complete -m -- "$@" < <(
declare -xp | sed -En 's|^declare [^ ]+ ([^=]+).*|\1|p'
)
}
_fzf_alias_completion()
{
_fzf_alias_completion() {
_fzf_complete -m -- "$@" < <(
alias | sed -En 's|^alias ([^=]+).*|\1|p'
)
@@ -340,14 +321,13 @@ if [[ $- =~ i ]]; then
svn tar unzip zip"
# Preserve existing completion
__fzf_orig_completion < <(complete -p $d_cmds $a_cmds 2> /dev/null)
__fzf_orig_completion < <(complete -p $d_cmds $a_cmds 2>/dev/null)
if type _completion_loader > /dev/null 2>&1; then
if type _completion_loader >/dev/null 2>&1; then
_fzf_completion_loader=1
fi
__fzf_defc()
{
__fzf_defc() {
local cmd func opts orig_var orig def
cmd="$1"
func="$2"
@@ -374,23 +354,22 @@ if [[ $- =~ i ]]; then
unset cmd d_cmds a_cmds
_fzf_setup_completion()
{
_fzf_setup_completion() {
local kind fn cmd
kind=$1
fn=_fzf_${1}_completion
if [[ $# -lt 2 ]] || ! type -t "$fn" > /dev/null; then
if [[ $# -lt 2 ]] || ! type -t "$fn" >/dev/null; then
echo "usage: ${FUNCNAME[0]} path|dir|var|alias|host|proc COMMANDS..."
return 1
fi
shift
__fzf_orig_completion < <(complete -p "$@" 2> /dev/null)
__fzf_orig_completion < <(complete -p "$@" 2>/dev/null)
for cmd in "$@"; do
case "$kind" in
dir) __fzf_defc "$cmd" "$fn" "-o nospace -o dirnames" ;;
var) __fzf_defc "$cmd" "$fn" "-o default -o nospace -v" ;;
alias) __fzf_defc "$cmd" "$fn" "-a" ;;
*) __fzf_defc "$cmd" "$fn" "-o default -o bashdefault" ;;
dir) __fzf_defc "$cmd" "$fn" "-o nospace -o dirnames" ;;
var) __fzf_defc "$cmd" "$fn" "-o default -o nospace -v" ;;
alias) __fzf_defc "$cmd" "$fn" "-a" ;;
*) __fzf_defc "$cmd" "$fn" "-o default -o bashdefault" ;;
esac
done
}

View File

@@ -4,7 +4,7 @@
# Auto-completion
# ---------------
# shellcheck source=completion.bash
[[ $- == *i* ]] && source "$HOME/.dotfiles/config/fzf/completion.bash" 2> /dev/null
[[ $- == *i* ]] && source "$HOME/.dotfiles/config/fzf/completion.bash" 2>/dev/null
# Key bindings
# ------------

View File

@@ -13,8 +13,7 @@
# Key bindings
# ------------
__fzf_select__()
{
__fzf_select__() {
local cmd opts
cmd="${FZF_CTRL_T_COMMAND:-"command find -L . -mindepth 1 \\( -path '*/\\.*' -o -fstype 'sysfs' -o -fstype 'devfs' -o -fstype 'devtmpfs' -o -fstype 'proc' \\) -prune \
-o -type f -print \
@@ -22,32 +21,27 @@ __fzf_select__()
-o -type l -print 2> /dev/null | cut -b3-"}"
opts="--height ${FZF_TMUX_HEIGHT:-40%} --bind=ctrl-z:ignore --reverse ${FZF_DEFAULT_OPTS-} ${FZF_CTRL_T_OPTS-} -m"
# shellcheck disable=SC2091 # Intentionally execute output of __fzfcmd
eval "$cmd" | FZF_DEFAULT_OPTS="$opts" $(__fzfcmd) "$@" \
| while read -r item; do
eval "$cmd" | FZF_DEFAULT_OPTS="$opts" $(__fzfcmd) "$@" |
while read -r item; do
printf '%q ' "$item" # escape special chars
done
}
if [[ $- =~ i ]]; then
__fzfcmd()
{
[[ -n "${TMUX_PANE-}" ]] && {
[[ "${FZF_TMUX:-0}" != 0 ]] || [[ -n "${FZF_TMUX_OPTS-}" ]]
} \
&& echo "fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- " || echo "fzf"
__fzfcmd() {
[[ -n "${TMUX_PANE-}" ]] && { [[ "${FZF_TMUX:-0}" != 0 ]] || [[ -n "${FZF_TMUX_OPTS-}" ]]; } &&
echo "fzf-tmux ${FZF_TMUX_OPTS:--d${FZF_TMUX_HEIGHT:-40%}} -- " || echo "fzf"
}
fzf-file-widget()
{
fzf-file-widget() {
local selected
selected="$(__fzf_select__ "$@")"
READLINE_LINE="${READLINE_LINE:0:$READLINE_POINT}$selected${READLINE_LINE:$READLINE_POINT}"
READLINE_POINT=$((READLINE_POINT + ${#selected}))
}
__fzf_cd__()
{
__fzf_cd__() {
local cmd opts dir
cmd="${FZF_ALT_C_COMMAND:-"command find -L . -mindepth 1 \\( -path '*/\\.*' -o -fstype 'sysfs' -o -fstype 'devfs' -o -fstype 'devtmpfs' -o -fstype 'proc' \\) -prune \
-o -type d -print 2> /dev/null | cut -b3-"}"
@@ -59,17 +53,16 @@ if [[ $- =~ i ]]; then
) && printf 'builtin cd -- %q' "$dir"
}
__fzf_history__()
{
__fzf_history__() {
local output opts script
opts="--height ${FZF_TMUX_HEIGHT:-40%} --bind=ctrl-z:ignore ${FZF_DEFAULT_OPTS-} -n2..,.. --scheme=history --bind=ctrl-r:toggle-sort ${FZF_CTRL_R_OPTS-} +m --read0"
script='BEGIN { getc; $/ = "\n\t"; $HISTCOUNT = $ENV{last_hist} + 1 } s/^[ *]//; print $HISTCOUNT - $. . "\t$_" if !$seen{$_}++'
# shellcheck disable=SC2091 # Intentionally execute output of __fzfcmd
output=$(
set +o pipefail
builtin fc -lnr -2147483648 \
| last_hist=$(HISTTIMEFORMAT='' builtin history 1) perl -n -l0 -e "$script" \
| FZF_DEFAULT_OPTS="$opts" $(__fzfcmd) --query "$READLINE_LINE"
builtin fc -lnr -2147483648 |
last_hist=$(HISTTIMEFORMAT='' builtin history 1) perl -n -l0 -e "$script" |
FZF_DEFAULT_OPTS="$opts" $(__fzfcmd) --query "$READLINE_LINE"
) || return
READLINE_LINE=${output#*$'\t'}
if [[ -z "$READLINE_POINT" ]]; then

View File

@@ -52,4 +52,4 @@ keybindings:
prs: []
repoPaths: {}
pager:
diff: ""
diff: ''

View File

@@ -1,3 +1,3 @@
---
git_protocol: https
version: "1"
version: '1'

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/env bash
[[ -z "$NVM_DIR" ]] && export NVM_DIR="$HOME/.config/nvm"
[[ -s "$NVM_DIR/nvm.sh" ]] && \. "$NVM_DIR/nvm.sh" # This loads nvm
[ -z "$NVM_DIR" ] && export NVM_DIR="$HOME/.config/nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm

View File

@@ -5,7 +5,7 @@
# shellcheck shell=bash
# Defaults
[[ -z "$DOTFILES" ]] && export DOTFILES="$HOME/.dotfiles"
[ -z "$DOTFILES" ] && export DOTFILES="$HOME/.dotfiles"
DOTFILES_CURRENT_SHELL=$(basename "$SHELL")
export DOTFILES_CURRENT_SHELL
@@ -15,7 +15,7 @@ VERBOSE="${VERBOSE:-0}"
DEBUG="${DEBUG:-0}"
# Enable debugging with DEBUG=1
[[ "${DEBUG:-0}" -eq 1 ]] && set -x
[ "${DEBUG:-0}" -eq 1 ] && set -x
# Detect the current shell
CURRENT_SHELL=$(ps -p $$ -ocomm= | awk -F/ '{print $NF}')
@@ -33,10 +33,9 @@ x-path-prepend()
;;
*)
echo "Unsupported shell: $CURRENT_SHELL"
return 1
exit 1
;;
esac
return 0
}
# Function to set environment variables based on the shell
@@ -53,10 +52,9 @@ x-set-env()
;;
*)
echo "Unsupported shell: $CURRENT_SHELL"
return 1
exit 1
;;
esac
return 0
}
# Explicitly set XDG folders, if not already set
@@ -76,7 +74,7 @@ x-path-prepend "$DOTFILES/local/bin"
x-path-prepend "$XDG_BIN_HOME"
# Custom completion paths
[[ -z "$ZSH_CUSTOM_COMPLETION_PATH" ]] && export ZSH_CUSTOM_COMPLETION_PATH="$XDG_CONFIG_HOME/zsh/completion"
[ -z "$ZSH_CUSTOM_COMPLETION_PATH" ] && export ZSH_CUSTOM_COMPLETION_PATH="$XDG_CONFIG_HOME/zsh/completion"
x-dc "$ZSH_CUSTOM_COMPLETION_PATH"
export FPATH="$ZSH_CUSTOM_COMPLETION_PATH:$FPATH"
@@ -85,8 +83,7 @@ if ! declare -f msg > /dev/null; then
# $1 - message (string)
msg()
{
local message="$1"
[[ "$VERBOSE" -eq 1 ]] && msgr msg "$message"
[ "$VERBOSE" -eq 1 ] && msgr msg "$1"
return 0
}
msg "msg was not defined, defined it now"
@@ -98,8 +95,7 @@ if ! declare -f msg_err > /dev/null; then
# $1 - error message (string)
msg_err()
{
local message="$1"
msgr err "$message" >&2
msgr err "$1" >&2
exit 1
}
fi
@@ -110,8 +106,7 @@ if ! declare -f msg_done > /dev/null; then
# $1 - message (string)
msg_done()
{
local message="$1"
msgr "done" "$message"
msgr "done" "$1"
return 0
}
fi
@@ -122,8 +117,7 @@ if ! declare -f msg_run > /dev/null; then
# $1 - message (string)
msg_run()
{
local message="$1"
msgr run "$message"
msgr run "$1"
return 0
}
fi
@@ -134,8 +128,7 @@ if ! declare -f msg_ok > /dev/null; then
# $1 - message (string)
msg_ok()
{
local message="$1"
msgr ok "$message"
msgr ok "$1"
return 0
}
fi
@@ -150,16 +143,12 @@ if ! declare -f array_diff > /dev/null; then
# Source: https://stackoverflow.com/a/42399479/594940
array_diff()
{
local result_var="$1"
local arr1_name="$2"
local arr2_name="$3"
# shellcheck disable=SC1083,SC2086
eval local ARR1=\(\"\${${arr1_name}[@]}\"\)
eval local ARR1=\(\"\${$2[@]}\"\)
# shellcheck disable=SC1083,SC2086
eval local ARR2=\(\"\${${arr2_name}[@]}\"\)
eval local ARR2=\(\"\${$3[@]}\"\)
local IFS=$'\n'
mapfile -t "$result_var" < <(comm -23 <(echo "${ARR1[*]}" | sort) <(echo "${ARR2[*]}" | sort))
return 0
mapfile -t "$1" < <(comm -23 <(echo "${ARR1[*]}" | sort) <(echo "${ARR2[*]}" | sort))
}
fi

View File

@@ -7,13 +7,13 @@ DEFAULT_NAME="main"
CURRENT_SESSION=$(tmux display-message -p "#{session_name}")
# Check that the session has a name
if [[ "$CURRENT_SESSION" = "#{session_name}" ]] || [[ "$CURRENT_SESSION" = "0" ]]; then
if [ "$CURRENT_SESSION" = "#{session_name}" ] || [ "$CURRENT_SESSION" = "0" ]; then
# Check if the default name is already in use
if tmux has-session -t "$DEFAULT_NAME" 2> /dev/null; then
# Query the user for a new name
echo "Session name '$DEFAULT_NAME' is already in use. Enter a new name:"
read -r NEW_NAME
while tmux has-session -t "$NEW_NAME" 2> /dev/null || [[ -z "$NEW_NAME" ]]; do
while tmux has-session -t "$NEW_NAME" 2> /dev/null || [ -z "$NEW_NAME" ]; do
echo "Name '$NEW_NAME' is invalid or already in use. Enter a new name:"
read -r NEW_NAME
done

View File

@@ -8,14 +8,12 @@
set -euo pipefail
# Fall back to native tmux session picker if sesh is not installed
if ! command -v sesh &> /dev/null; then
if ! command -v sesh &>/dev/null; then
tmux choose-tree -Zs
exit 0
fi
# Pick a sesh session using gum filter
pick_with_gum()
{
pick_with_gum() {
sesh list -i \
| gum filter \
--limit 1 \
@@ -24,7 +22,6 @@ pick_with_gum()
--placeholder 'Pick a sesh' \
--height 50 \
--prompt='⚡'
return 0
}
FZF_COMMON_OPTS=(
@@ -43,23 +40,15 @@ FZF_COMMON_OPTS=(
--preview 'sesh preview {}'
)
# Pick a sesh session using fzf-tmux popup
pick_with_fzf_tmux()
{
pick_with_fzf_tmux() {
sesh list --icons | fzf-tmux -p 80%,70% "${FZF_COMMON_OPTS[@]}"
return 0
}
# Pick a sesh session using fzf inline
pick_with_fzf()
{
pick_with_fzf() {
sesh list --icons | fzf "${FZF_COMMON_OPTS[@]}"
return 0
}
# Pick a sesh session using bash select menu
pick_with_select()
{
pick_with_select() {
local sessions
mapfile -t sessions < <(sesh list)
if [[ ${#sessions[@]} -eq 0 ]]; then
@@ -75,11 +64,11 @@ pick_with_select()
}
# Cascading tool detection
if command -v gum &> /dev/null; then
if command -v gum &>/dev/null; then
selection=$(pick_with_gum)
elif command -v fzf-tmux &> /dev/null; then
elif command -v fzf-tmux &>/dev/null; then
selection=$(pick_with_fzf_tmux)
elif command -v fzf &> /dev/null; then
elif command -v fzf &>/dev/null; then
selection=$(pick_with_fzf)
else
selection=$(pick_with_select)

View File

@@ -1,40 +0,0 @@
# Skip Already-Installed Cargo Packages
## Problem
`install-cargo-packages.sh` runs `cargo install-update -a` to update installed
packages, then runs `cargo install` for every package in the list — including
ones that are already installed and up-to-date. This wastes time rebuilding
packages that don't need it.
## Solution
Capture the `cargo install-update -a` output, parse installed package names,
and skip `cargo install` for any package that appeared in the update output.
## Changes
**File:** `scripts/install-cargo-packages.sh`
1. Declare an associative array `installed_packages` at the top.
2. In the `cargo-install-update` section, capture output with `tee /dev/stderr`
so it displays in real-time while also being stored in a variable.
3. Parse the captured output with `awk` — extract the first column from lines
matching a version pattern (`v[0-9]+\.[0-9]+`), skipping the header.
4. Populate `installed_packages` associative array from parsed names.
5. In `install_packages()`, check each package against the array. If found, log
a skip message via `msgr` and continue. If not found, install as before.
6. If `cargo-install-update` is not available, the array stays empty and all
packages install normally (preserves existing behavior).
## Output Parsing
The `cargo install-update -a` output format:
```text
Package Installed Latest Needs update
zoxide v0.9.8 v0.9.9 Yes
bkt v0.8.2 v0.8.2 No
```
Extraction: `awk '/v[0-9]+\.[0-9]+/ { print $1 }'` gets package names.

View File

@@ -1,55 +0,0 @@
# dfm Cleanup Design
## Summary
Clean up `local/bin/dfm` to fix bugs, remove dead code, improve
cross-platform portability, and make error handling consistent.
## Changes
### 1. Bash Version Bootstrap
Add a check at the top of the script (after variable declarations)
that requires bash 4.0+. On macOS, if bash is too old, install
Homebrew (if missing) and bash, then print instructions and exit.
The check itself uses only bash 3.2-compatible syntax.
### 2. Remove Fish Dead Code
Remove `CURRENT_SHELL` detection, `source_file()` function, and all
fish branches. Replace `source_file` calls with direct `source`.
The script has a bash shebang — fish handling was unreachable.
### 3. Bug Fixes
- Remove `ntfy` from install menu (no install script exists)
- Fix `msg)``msgr)` case label in `section_tests`
- Guard all `shift` calls against empty argument lists
- Quote `$width` in `menu_builder` seq calls
- Fix `$"..."` locale string → `"..."` in `usage()`
- Fix `exit 0` on apt.txt error → `return 1`
### 4. Replace `declare -A` in `section_scripts`
Replace associative array with indexed `"name:desc"` array,
matching the pattern used everywhere else in the script.
Move `get_script_description()` to top-level (out of the function).
### 5. Early-Return Guards & exit → return
- `section_brew()`: Early return with `msgr warn` if brew unavailable.
Remove duplicate `! x-have brew` check.
- `section_apt()`: Same pattern for apt.
- `section_check()`: Replace `exit` with `return`.
- `section_apt() install`: Replace `exit` with `return`.
- `section_brew() untracked`: Replace `exit` with `return`.
## Files Changed
- `local/bin/dfm` (all changes)
## Verification
- `yarn test` (existing bats test)
- `shellcheck local/bin/dfm`
- `bash -n local/bin/dfm` (syntax check)

View File

@@ -1,46 +0,0 @@
# x-* Scripts Cleanup Design
## Summary
Comprehensive cleanup of all 34 x-* utility scripts in `local/bin/`.
Fix critical bugs, consolidate duplicates, standardize patterns.
## Changes
### Removals
- `x-mkd`, `x-mkd.md`, `tests/x-mkd.bats` — unused, cd-in-subshell broken
- `x-validate-sha256sum.sh`, `x-validate-sha256sum.sh.md` — duplicates x-sha256sum-matcher
### Thin Wrappers (delegate to x-path)
- `x-path-append` → calls `x-path append "$@"`
- `x-path-prepend` → calls `x-path prepend "$@"`
- `x-path-remove` → calls `x-path remove "$@"`
### Critical Fixes
- `x-clean-vendordirs`: call msgr as command (it's in PATH)
- `x-foreach`: replace eval with direct "$@" execution
- `x-ip`: add error handling, curl check
### Consistency Fixes
- Fix `#!/bin/bash``#!/usr/bin/env bash` (x-env-list, x-localip)
- POSIX scripts keep `#!/bin/sh`
- Add `set -euo pipefail` where missing in bash scripts
- Use XDG variables instead of hardcoded paths (x-change-alacritty-theme)
- Quote unquoted variables
### Minor Fixes
- `x-multi-ping`: remove unused VERBOSE variable
- `x-when-down`, `x-when-up`: add error handling
- `x-term-colors`: add usage message
- `x-record`: fix undefined notify-call reference
## Verification
- `yarn test` — ensure remaining tests pass
- `shellcheck` on modified scripts
- `bash -n` syntax check on all modified bash scripts

View File

@@ -1,5 +1,5 @@
---
- include: "tools/dotbot-defaults.yaml"
- include: 'tools/dotbot-defaults.yaml'
- shell:
- echo "Configuring air"
- link:
@@ -7,7 +7,7 @@
force: true
glob: true
path: hosts/air/base/**
prefix: "."
prefix: '.'
~/.config/:
glob: true
force: true

View File

@@ -1,5 +1,5 @@
---
- include: "tools/dotbot-defaults.yaml"
- include: 'tools/dotbot-defaults.yaml'
- shell:
- echo "Configuring lakka"
- link:
@@ -7,7 +7,7 @@
force: true
glob: true
path: hosts/lakka/base/**
prefix: "."
prefix: '.'
~/.config/:
glob: true
force: true

View File

@@ -1,5 +1,5 @@
---
- include: "tools/dotbot-defaults.yaml"
- include: 'tools/dotbot-defaults.yaml'
- shell:
- echo "Configuring s"
- link:
@@ -7,7 +7,7 @@
force: true
glob: true
path: hosts/s/base/**
prefix: "."
prefix: '.'
~/.config/:
glob: true
force: true

View File

@@ -1,5 +1,5 @@
---
- include: "tools/dotbot-defaults.yaml"
- include: 'tools/dotbot-defaults.yaml'
- shell:
- echo "Configuring tunkki"
- link:
@@ -7,7 +7,7 @@
force: true
glob: true
path: hosts/tunkki/base/**
prefix: "."
prefix: '.'
~/.config/:
glob: true
force: true

View File

@@ -22,9 +22,9 @@ git submodule update --init --recursive "${DOTBOT_DIR}"
if [ "${DOTBOT_HOST}" != "" ]; then
DOTBOT_HOST_CONFIG="${BASEDIR}/hosts/${DOTBOT_HOST}/${CONFIG}"
echo "-> Trying if host config can be found: ${DOTBOT_HOST_CONFIG}"
[ -r "$DOTBOT_HOST_CONFIG" ] && [ -f "$DOTBOT_HOST_CONFIG" ] \
&& echo "(!) Found $DOTBOT_HOST_CONFIG" \
&& "$DOTBOT_BIN_PATH" \
[ -r "$DOTBOT_HOST_CONFIG" ] && [ -f "$DOTBOT_HOST_CONFIG" ] &&
echo "(!) Found $DOTBOT_HOST_CONFIG" &&
"$DOTBOT_BIN_PATH" \
-d "$BASEDIR" \
--plugin-dir=tools/dotbot-include \
-c "$DOTBOT_HOST_CONFIG" \

View File

@@ -1,5 +1,5 @@
---
- include: "tools/dotbot-defaults.yaml"
- include: 'tools/dotbot-defaults.yaml'
- clean:
~/:
@@ -34,7 +34,7 @@
force: true
glob: true
path: base/*
prefix: "."
prefix: '.'
# Most of the configs
~/.config/:
glob: true

View File

@@ -20,7 +20,7 @@ Some problematic code has been fixed per `shellcheck` suggestions.
## Sourced
| Script | Source |
|-------------------------|-------------------|
| ----------------------- | ----------------- |
| `x-dupes` | skx/sysadmin-util |
| `x-foreach` | mvdan/dotfiles |
| `x-multi-ping` | skx/sysadmin-util |

View File

@@ -64,7 +64,6 @@ menu_builder()
done
}
# Handle install section commands
section_install()
{
USAGE_PREFIX="$SCRIPT install <command>"
@@ -194,7 +193,6 @@ section_install()
esac
}
# Handle Homebrew section commands
section_brew()
{
USAGE_PREFIX="$SCRIPT brew <command>"
@@ -293,7 +291,6 @@ section_brew()
esac
}
# Handle helper utility commands
section_helpers()
{
USAGE_PREFIX="$SCRIPT helpers <command>"
@@ -370,7 +367,6 @@ section_helpers()
esac
}
# Handle apt package manager commands
section_apt()
{
USAGE_PREFIX="$SCRIPT apt <command>"
@@ -439,7 +435,6 @@ section_apt()
esac
}
# Handle documentation generation commands
section_docs()
{
USAGE_PREFIX="$SCRIPT docs <command>"
@@ -464,7 +459,6 @@ section_docs()
esac
}
# Handle dotfiles formatting and reset commands
section_dotfiles()
{
USAGE_PREFIX="$SCRIPT dotfiles <command>"
@@ -532,7 +526,6 @@ section_dotfiles()
esac
}
# Handle system check commands (arch, hostname)
section_check()
{
USAGE_PREFIX="$SCRIPT check <command>"
@@ -559,7 +552,6 @@ section_check()
esac
}
# Handle install script execution
section_scripts()
{
USAGE_PREFIX="$SCRIPT scripts <command>"
@@ -627,7 +619,6 @@ section_tests()
esac
}
# Display main usage information for all sections
usage()
{
echo ""
@@ -651,7 +642,6 @@ usage()
section_helpers
}
# Parse section argument and dispatch to handler
main()
{
SECTION="${1:-}"

View File

@@ -22,37 +22,32 @@ if [ "$DEBUG" -eq 1 ]; then
set -x
fi
# Print an error message in red
# Output functions
msg_err()
{
echo -e "\e[31m$*\e[0m" >&2
}
# Print a success message in green
msg_success()
{
echo -e "\e[32m$*\e[0m"
}
# Print a warning message in yellow
msg_warn()
{
echo -e "\e[33m$*\e[0m" >&2
}
# Print an info message in blue
msg_info()
{
echo -e "\e[36m$*\e[0m"
}
# Print a debug message when verbose mode is on
msg_debug()
{
[[ $VERBOSE -eq 1 ]] && echo -e "\e[35m$*\e[0m"
}
# Display usage information and examples
show_help()
{
cat << EOF

View File

@@ -90,14 +90,13 @@ declare -A DIR_HAS_REPOS
# Record start time
START_TIME=$(date +%s)
# Log an error message
# Logging functions
log_error()
{
print_color "31" "ERROR:" >&2
echo " $*" >&2
}
# Log an informational message
log_info()
{
if [[ $VERBOSE -eq 1 ]]; then
@@ -106,7 +105,6 @@ log_info()
fi
}
# Log a warning message
log_warn()
{
print_color "33" "WARNING:" >&2
@@ -913,7 +911,6 @@ process_in_parallel()
echo -e "\nProcessed $total repositories in $dur (Total runtime: $runtime)"
}
# Check a directory for git status with progress tracking
check_directory_with_progress()
{
local dir

View File

@@ -23,25 +23,21 @@ CLR_RESET="\033[0m"
# │ Color functions │
# ╰──────────────────────────────────────────────────────────╯
# Wrap text in red color
function __color_red()
{
local MSG="$1"
echo -e "${CLR_RED}${MSG}${CLR_RESET}"
}
# Wrap text in yellow color
function __color_yellow()
{
local MSG="$1"
echo -e "${CLR_YELLOW}${MSG}${CLR_RESET}"
}
# Wrap text in green color
function __color_green()
{
local MSG="$1"
echo -e "${CLR_GREEN}${MSG}${CLR_RESET}"
}
# Wrap text in blue color
function __color_blue()
{
local MSG="$1"
@@ -52,43 +48,36 @@ function __color_blue()
# │ Helpers │
# ╰──────────────────────────────────────────────────────────╯
# Print blue arrow marker
function __log_marker()
{
echo -e "${CLR_BLUE}➜${CLR_RESET}"
}
# Print green checkmark marker
function __log_marker_ok()
{
echo -e "${CLR_GREEN}✔${CLR_RESET}"
}
# Print blue checkmark marker
function __log_marker_ok_blue()
{
echo -e "${CLR_BLUE}✔${CLR_RESET}"
}
# Print yellow warning marker
function __log_marker_warn()
{
echo -e "${CLR_YELLOW}⁕${CLR_RESET}"
}
# Print yellow question marker
function __log_marker_question()
{
echo -e "${CLR_YELLOW}?${CLR_RESET}"
}
# Print red error marker
function __log_marker_err()
{
echo -e "${CLR_RED}⛌${CLR_RESET}"
}
# Print indentation spacing
function __log_indent()
{
echo " "
@@ -98,85 +87,71 @@ function __log_indent()
# │ Log functions │
# ╰──────────────────────────────────────────────────────────╯
# Print a message with arrow marker
function msg()
{
echo -e "$(__log_marker) $1"
}
# Print a celebration message
function msg_yay()
{
echo -e "🎉 $1"
}
# Print a celebration message with checkmark
function msg_yay_done()
{
echo -e "🎉 $1 ...$(__log_marker_ok)"
}
# Print a message with completion checkmark
function msg_done()
{
echo -e "$(__log_marker) $1 ...$(__log_marker_ok)"
}
# Print a completion checkmark suffix
function msg_done_suffix()
{
echo -e "$(__log_marker) ...$(__log_marker_ok)"
}
# Print a prompt-style message
function msg_prompt()
{
echo -e "$(__log_marker_question) $1"
}
# Print a prompt message with checkmark
function msg_prompt_done()
{
echo -e "$(__log_marker_question) $1 ...$(__log_marker_ok)"
}
# Print an indented message
function msg_nested()
{
echo -e "$(__log_indent)$(__log_marker) $1"
}
# Print an indented message with checkmark
function msg_nested_done()
{
echo -e "$(__log_indent)$(__log_marker) $1 ...$(__log_marker_ok)"
}
# Print a running-task message in green
function msg_run()
{
echo -e "${CLR_GREEN}➜ $1${CLR_RESET} $2"
}
# Print a running-task message with checkmark
function msg_run_done()
{
echo -e "${CLR_GREEN}➜ $1${CLR_RESET} $2 ...$(__log_marker_ok)"
}
# Print an ok/success message
function msg_ok()
{
echo -e "$(__log_marker_ok) $1"
}
# Print a warning message
function msg_warn()
{
echo -e "$(__log_marker_warn) $1"
}
# Print an error message
function msg_err()
{
echo -e "$(__log_marker_err) $1"
@@ -199,7 +174,6 @@ ask()
# If this is being sourced, no need to run the next steps.
[ "$sourced" = 1 ] && return
# Run visual tests for all message types
function __tests()
{
msg "[ msg ]"
@@ -218,7 +192,6 @@ function __tests()
msg_yay_done "[ yay_done ]"
}
# Show usage information and examples
function usage()
{
echo "usage: msgr [type] [message] [optional second message]"

View File

@@ -19,7 +19,7 @@ set -euo pipefail # Add error handling
LATEST_VERSION_FORMULA="php" # The formula name for latest PHP version
PHP_VERSION_FILE=".php-version" # File name to look for when auto-switching
# Verify that Homebrew is installed
# Switch brew php version
function check_dependencies()
{
if ! command -v brew > /dev/null 2>&1; then
@@ -28,7 +28,6 @@ function check_dependencies()
fi
}
# Display help message and usage examples
function usage()
{
echo "Brew PHP Switcher - Switch between PHP versions installed via Homebrew"
@@ -54,7 +53,6 @@ function usage()
exit 0
}
# List all PHP versions installed via Homebrew
function list_php_versions()
{
# Check Homebrew's installation path for PHP versions
@@ -187,7 +185,6 @@ function list_php_versions()
done
}
# Convert a version number to a Homebrew formula name
function get_php_formula_for_version()
{
local version="$1"
@@ -202,7 +199,6 @@ function get_php_formula_for_version()
echo "php@$version"
}
# Check if a Homebrew formula is installed
function check_formula_installed()
{
local formula="$1"
@@ -220,7 +216,6 @@ function check_formula_installed()
return 1
}
# Unlink the currently active PHP version
function unlink_current_php()
{
local current_formula=""
@@ -246,7 +241,6 @@ function unlink_current_php()
fi
}
# Link a specific PHP formula as the active version
function link_php_version()
{
local formula="$1"
@@ -271,7 +265,6 @@ function link_php_version()
fi
}
# Display the currently active PHP version
function get_current_version()
{
if ! command -v php > /dev/null 2>&1; then
@@ -307,7 +300,6 @@ function get_current_version()
fi
}
# Validate PHP version format (x.y or latest)
function validate_version()
{
local version="$1"
@@ -320,7 +312,6 @@ function validate_version()
fi
}
# Search for .php-version file in directory hierarchy
function find_php_version_file()
{
local dir="$PWD"
@@ -343,7 +334,6 @@ function find_php_version_file()
return 1
}
# Auto-switch PHP based on .php-version file
function auto_switch_php_version()
{
local version_file
@@ -370,7 +360,6 @@ function auto_switch_php_version()
switch_php_version "$version"
}
# Switch to a specific PHP version
function switch_php_version()
{
local version="$1"
@@ -409,7 +398,6 @@ function switch_php_version()
echo "PHP executable: $(command -v php)"
}
# Parse arguments and dispatch to appropriate action
function main()
{
local version=""

View File

@@ -5,7 +5,6 @@
#
# Modified by Ismo Vuorinen <https://github.com/ivuorinen> 2023
# Display usage information for pushover
__pushover_usage()
{
printf "pushover <options> <message>\n"
@@ -24,7 +23,6 @@ __pushover_usage()
return 1
}
# Format an optional curl form field
__pushover_opt_field()
{
field=$1
@@ -35,7 +33,6 @@ __pushover_opt_field()
fi
}
# Send a pushover notification via curl
__pushover_send_message()
{
device="${1:-}"

View File

@@ -10,7 +10,6 @@ VERSION="1.0.0"
LANG_MAP="c:.c,.h|cpp:.cpp,.cc,.cxx,.hpp,.hxx|csharp:.cs|go:.go|java:.java|
javascript:.js,.jsx,.mjs,.ts,.tsx|python:.py|ruby:.rb|swift:.swift"
# Display usage information and options
usage()
{
cat << EOF
@@ -25,26 +24,22 @@ EOF
exit "${1:-0}"
}
# Log a timestamped message to stderr
log()
{
printf '[%s] %s\n' "$(date '+%H:%M:%S')" "$*" >&2
}
# Log an error message and exit
err()
{
log "ERROR: $*"
exit 1
}
# Verify codeql binary is available in PATH
check_codeql()
{
command -v codeql > /dev/null 2>&1 || err "codeql binary not found in PATH"
log "Found codeql: $(codeql version --format=terse)"
}
# Get or create the CodeQL cache directory
get_cache_dir()
{
cache="${XDG_CACHE_HOME:-$HOME/.cache}/codeql"
@@ -52,7 +47,6 @@ get_cache_dir()
printf '%s' "$cache"
}
# Detect supported programming languages in source path
detect_languages()
{
src_path="$1"
@@ -91,7 +85,6 @@ detect_languages()
printf '%s' "$detected" | tr ' ' '\n' | sort -u | tr '\n' ' ' | sed 's/ $//'
}
# Create a CodeQL database for a language
create_database()
{
lang="$1"
@@ -105,7 +98,6 @@ create_database()
--overwrite
}
# Display analysis result statistics from SARIF file
show_results_stats()
{
sarif_file="$1"
@@ -134,7 +126,6 @@ show_results_stats()
return 0
}
# Run CodeQL analysis for a single language
analyze_language()
{
lang="$1"
@@ -181,7 +172,6 @@ analyze_language()
rm -rf "$db_path"
}
# Parse arguments and run CodeQL analysis pipeline
main()
{
src_path="."

View File

@@ -24,7 +24,7 @@ str_to_operator = {
def vercmp(expr):
"""Version Comparison function."""
words = expr.split()
comparisons = [words[i : i + 3] for i in range(0, len(words) - 2, 2)]
comparisons = [words[i: i + 3] for i in range(0, len(words) - 2, 2)]
for left, op_str, right in comparisons:
compare_op = str_to_operator[op_str]
if not compare_op(version.parse(left), version.parse(right)):
@@ -63,7 +63,7 @@ def test():
except KeyError:
pass
else:
raise AssertionError("invalid operator did not raise")
assert False, "invalid operator did not raise"
if __name__ == "__main__":

View File

@@ -190,7 +190,6 @@ get_custom_group()
return 1
}
# Check if a key matches the skipped keys list
is_skipped()
{
local key=$1

View File

@@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python script to find the largest files in a git repository.
# The general method is based on the script in this blog post:
@@ -31,60 +32,60 @@
# vim:tw=120:ts=4:ft=python:norl:
from subprocess import check_output, Popen, PIPE
import argparse
import glob
import signal
import sys
from subprocess import PIPE, Popen, check_output # nosec B404
sortByOnDiskSize = False
class Blob(object):
sha1 = ''
size = 0
packed_size = 0
path = ''
class Blob:
sha1 = ""
size = 0
packed_size = 0
path = ""
def __init__(self, line):
cols = line.split()
self.sha1, self.size, self.packed_size = cols[0], int(cols[2]), int(cols[3])
def __init__(self, line):
cols = line.split()
self.sha1, self.size, self.packed_size = cols[0], int(cols[2]), int(cols[3])
def __repr__(self):
return '{} - {} - {} - {}'.format(
self.sha1, self.size, self.packed_size, self.path)
def __repr__(self):
return f"{self.sha1} - {self.size} - {self.packed_size} - {self.path}"
def __lt__(self, other):
if (sortByOnDiskSize):
return self.size < other.size
else:
return self.packed_size < other.packed_size
def __lt__(self, other):
if sortByOnDiskSize:
return self.size < other.size
else:
return self.packed_size < other.packed_size
def csv_line(self):
return f"{self.size / 1024},{self.packed_size / 1024},{self.sha1},{self.path}"
def csv_line(self):
return "{},{},{},{}".format(
self.size/1024, self.packed_size/1024, self.sha1, self.path)
def main():
global sortByOnDiskSize
global sortByOnDiskSize
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGINT, signal_handler)
args = parse_arguments()
sortByOnDiskSize = args.sortByOnDiskSize
size_limit = 1024 * args.filesExceeding
args = parse_arguments()
sortByOnDiskSize = args.sortByOnDiskSize
size_limit = 1024*args.filesExceeding
if args.filesExceeding > 0:
print(f"Finding objects larger than {args.filesExceeding}kB…")
else:
print(f"Finding the {args.matchCount} largest objects…")
if args.filesExceeding > 0:
print("Finding objects larger than {}kB…".format(args.filesExceeding))
else:
print("Finding the {} largest objects…".format(args.matchCount))
blobs = get_top_blobs(args.matchCount, size_limit)
blobs = get_top_blobs(args.matchCount, size_limit)
populate_blob_paths(blobs)
print_out_blobs(blobs)
populate_blob_paths(blobs)
print_out_blobs(blobs)
def get_top_blobs(count, size_limit):
"""Get top blobs from git repository
"""Get top blobs from git repository
Args:
count (int): How many items to return
@@ -92,141 +93,110 @@ def get_top_blobs(count, size_limit):
Returns:
dict: Dictionary of Blobs
"""
sort_column = 4
"""
sort_column = 4
if sortByOnDiskSize:
sort_column = 3
if sortByOnDiskSize:
sort_column = 3
git_dir = check_output(["git", "rev-parse", "--git-dir"]).decode("utf-8").strip() # nosec B603 # nosemgrep
idx_files = glob.glob(f"{git_dir}/objects/pack/pack-*.idx")
verify_pack = Popen( # nosec B603
["git", "verify-pack", "-v", *idx_files],
stdout=PIPE,
stderr=PIPE,
)
grep_blob = Popen(["grep", "blob"], stdin=verify_pack.stdout, stdout=PIPE, stderr=PIPE) # nosec B603
if verify_pack.stdout:
verify_pack.stdout.close()
sort_cmd = Popen( # nosec B603
["sort", f"-k{sort_column}nr"],
stdin=grep_blob.stdout,
stdout=PIPE,
stderr=PIPE,
)
if grep_blob.stdout:
grep_blob.stdout.close()
output = [line for line in sort_cmd.communicate()[0].decode("utf-8").strip().split("\n") if line]
verify_pack = "git verify-pack -v `git rev-parse --git-dir`/objects/pack/pack-*.idx | grep blob | sort -k{}nr".format(sort_column) # noqa: E501
output = check_output(verify_pack, shell=True).decode('utf-8').strip().split("\n")[:-1] # noqa: E501
blobs = {}
# use __lt__ to do the appropriate comparison
compare_blob = Blob(f"a b {size_limit} {size_limit} c")
for obj_line in output:
blob = Blob(obj_line)
blobs = dict()
# use __lt__ to do the appropriate comparison
compare_blob = Blob("a b {} {} c".format(size_limit, size_limit))
for obj_line in output:
blob = Blob(obj_line)
if size_limit > 0:
if compare_blob < blob:
blobs[blob.sha1] = blob
else:
break
else:
blobs[blob.sha1] = blob
if size_limit > 0:
if compare_blob < blob:
blobs[blob.sha1] = blob
else:
break
else:
blobs[blob.sha1] = blob
if len(blobs) == count:
break
if len(blobs) == count:
break
return blobs
return blobs
def populate_blob_paths(blobs):
"""Populate blob paths that only have a path
"""Populate blob paths that only have a path
Args:
blobs (Blob, dict): Dictionary of Blobs
"""
if len(blobs):
print("Finding object paths…")
Args:
blobs (Blob, dict): Dictionary of Blobs
"""
if len(blobs):
print("Finding object paths…")
# Only include revs which have a path. Other revs aren't blobs.
rev_list = Popen(["git", "rev-list", "--all", "--objects"], stdout=PIPE, stderr=PIPE) # nosec B603
awk_filter = Popen(["awk", "$2 {print}"], stdin=rev_list.stdout, stdout=PIPE, stderr=PIPE) # nosec B603
if rev_list.stdout:
rev_list.stdout.close()
all_object_lines = [line for line in awk_filter.communicate()[0].decode("utf-8").strip().split("\n") if line]
outstanding_keys = list(blobs.keys())
# Only include revs which have a path. Other revs aren't blobs.
rev_list = "git rev-list --all --objects | awk '$2 {print}'"
all_object_lines = check_output(rev_list, shell=True).decode('utf-8').strip().split("\n")[:-1] # noqa: E501
outstanding_keys = list(blobs.keys())
for line in all_object_lines:
cols = line.split()
sha1, path = cols[0], " ".join(cols[1:])
for line in all_object_lines:
cols = line.split()
sha1, path = cols[0], " ".join(cols[1:])
if sha1 in outstanding_keys:
outstanding_keys.remove(sha1)
blobs[sha1].path = path
if (sha1 in outstanding_keys):
outstanding_keys.remove(sha1)
blobs[sha1].path = path
# short-circuit the search if we're done
if not len(outstanding_keys):
break
# short-circuit the search if we're done
if not len(outstanding_keys):
break
def print_out_blobs(blobs):
if len(blobs):
csv_lines = ["size,pack,hash,path"]
if len(blobs):
csv_lines = ["size,pack,hash,path"]
for blob in sorted(blobs.values(), reverse=True):
csv_lines.append(blob.csv_line())
for blob in sorted(blobs.values(), reverse=True):
csv_lines.append(blob.csv_line())
command = ["column", "-t", "-s", ","]
p = Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE)
command = ["column", "-t", "-s", ","]
p = Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE)
# Encode the input as bytes
input_data = ("\n".join(csv_lines) + "\n").encode()
# Encode the input as bytes
input_data = ("\n".join(csv_lines) + "\n").encode()
stdout, _ = p.communicate(input_data)
stdout, _ = p.communicate(input_data)
print("\nAll sizes in kB. The pack column is the compressed size of the object inside the pack file.\n")
print("\nAll sizes in kB. The pack column is the compressed size of the object inside the pack file.\n") # noqa: E501
print(stdout.decode("utf-8").rstrip("\n"))
else:
print("No files found which match those criteria.")
print(stdout.decode("utf-8").rstrip('\n'))
else:
print("No files found which match those criteria.")
def parse_arguments():
parser = argparse.ArgumentParser(description="List the largest files in a git repository")
parser.add_argument(
"-c",
"--match-count",
dest="matchCount",
type=int,
default=10,
help="Files to return. Default is 10. Ignored if --files-exceeding is used.",
)
parser.add_argument(
"--files-exceeding",
dest="filesExceeding",
type=int,
default=0,
help=(
"The cutoff amount, in KB. Files with a pack size"
" (or physical size, with -p) larger than this will be printed."
),
)
parser.add_argument(
"-p",
"--physical-sort",
dest="sortByOnDiskSize",
action="store_true",
default=False,
help="Sort by the on-disk size. Default is to sort by the pack size.",
)
parser = argparse.ArgumentParser(
description='List the largest files in a git repository'
)
parser.add_argument(
'-c', '--match-count', dest='matchCount', type=int, default=10,
help='Files to return. Default is 10. Ignored if --files-exceeding is used.'
)
parser.add_argument(
'--files-exceeding', dest='filesExceeding', type=int, default=0,
help='The cutoff amount, in KB. Files with a pack size (or physical size, with -p) larger than this will be printed.' # noqa: E501
)
parser.add_argument(
'-p', '--physical-sort', dest='sortByOnDiskSize',
action='store_true', default=False,
help='Sort by the on-disk size. Default is to sort by the pack size.'
)
return parser.parse_args()
return parser.parse_args()
def signal_handler(_signal, _frame):
print("Caught Ctrl-C. Exiting.")
def signal_handler(signal, frame):
print('Caught Ctrl-C. Exiting.')
sys.exit(0)
# Default function is main()
if __name__ == "__main__":
main()
if __name__ == '__main__':
main()

View File

@@ -41,7 +41,6 @@ LOOP=0
SLEEP=1
TIMEOUT=5
# Display usage information and options
usage()
{
echo "Usage: $0 [--loop|--forever] [--sleep=N] hostname1 hostname2 ..."

View File

@@ -39,19 +39,16 @@ log_error()
{
echo -e "${RED}ERROR:${NC} $1" >&2
}
# Log a warning message
log_warn()
{
echo -e "${YELLOW}WARN:${NC} $1" >&2
}
# Log an informational message
log_info()
{
if [[ "${INFO:-0}" == "1" ]]; then
echo -e "${GREEN}INFO:${NC} $1" >&2
fi
}
# Log a debug message
log_debug()
{
if [[ "${DEBUG:-0}" == "1" ]]; then

View File

@@ -1,626 +0,0 @@
#!/usr/bin/env bash
# x-sonarcloud - Fetch SonarCloud issues for LLM analysis
# Copyright (c) 2025 - Licensed under MIT
#
# Usage:
# x-sonarcloud # Auto-detect, all open issues
# x-sonarcloud --pr <number> # PR-specific issues
# x-sonarcloud --branch <name> # Branch-specific issues
# x-sonarcloud --org <org> --project-key <key> # Explicit project
# x-sonarcloud --severities BLOCKER,CRITICAL # Filter by severity
# x-sonarcloud --types BUG,VULNERABILITY # Filter by type
# x-sonarcloud --statuses OPEN,CONFIRMED # Filter by status
# x-sonarcloud --resolved # Include resolved issues
# x-sonarcloud -h|--help # Show this help
#
# Examples:
# x-sonarcloud # All open issues in project
# x-sonarcloud --pr 42 # Issues on PR #42
# x-sonarcloud --branch main # Issues on main branch
# x-sonarcloud --severities BLOCKER --types BUG # Only blocker bugs
#
# Requirements:
# - curl and jq installed
# - SONAR_TOKEN environment variable set
# - sonar-project.properties or .sonarlint/connectedMode.json for auto-detection
# Abort on errors, unset variables, and failures anywhere in a pipeline.
set -euo pipefail
# Colors for output (stderr only)
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color
# API constants
readonly MAX_PAGE_SIZE=500      # page size (ps) requested per API call
readonly MAX_TOTAL_ISSUES=10000 # stop paging past this many issues — presumably the API's paging limit; confirm
# Print this script's embedded usage text (header comment lines 3-27),
# stripping the leading "# " / "#" comment markers.
show_usage()
{
    local self="$0"
    sed -n '3,27p' "$self" | sed -e 's/^# //' -e 's/^#//'
}
# Logging helpers — all write to stderr.
# Print an error message (always shown).
log_error()
{
    printf '%b\n' "${RED}ERROR:${NC} $1" >&2
}
# Print a warning message (always shown).
log_warn()
{
    printf '%b\n' "${YELLOW}WARN:${NC} $1" >&2
}
# Print an informational message; opt-in via INFO=1.
log_info()
{
    if [[ "${INFO:-0}" != "1" ]]; then
        return 0
    fi
    printf '%b\n' "${GREEN}INFO:${NC} $1" >&2
}
# Print a debug message; opt-in via DEBUG=1.
log_debug()
{
    if [[ "${DEBUG:-0}" != "1" ]]; then
        return 0
    fi
    printf '%b\n' "${BLUE}DEBUG:${NC} $1" >&2
}
# Verify that every external tool this script needs is on PATH.
# Reports all missing tools before exiting, rather than failing on the first.
check_dependencies()
{
    local ok=1
    if ! command -v curl > /dev/null 2>&1; then
        log_error "curl is not installed. Install it with your package manager."
        ok=0
    fi
    if ! command -v jq > /dev/null 2>&1; then
        log_error "jq is not installed. Install it with your package manager:"
        log_error " https://jqlang.github.io/jq/download/"
        ok=0
    fi
    [[ "$ok" -eq 1 ]] || exit 1
}
# Ensure a SonarCloud API token is available before making any requests.
check_auth()
{
    [[ -n "${SONAR_TOKEN:-}" ]] && return 0
    log_error "SONAR_TOKEN environment variable is not set."
    log_error "Generate a token at: https://sonarcloud.io/account/security"
    log_error "Then export it: export SONAR_TOKEN=your_token_here"
    exit 1
}
# Read org/key from sonar-project.properties, if present.
# Echoes "org key <empty-region>" and returns 0 when both were found.
detect_project_from_properties()
{
    local props="sonar-project.properties"
    [[ -f "$props" ]] || return 1
    local org key
    org=$(grep -E '^sonar\.organization=' "$props" 2> /dev/null | cut -d'=' -f2- || echo "")
    key=$(grep -E '^sonar\.projectKey=' "$props" 2> /dev/null | cut -d'=' -f2- || echo "")
    [[ -n "$org" && -n "$key" ]] || return 1
    log_debug "Detected from sonar-project.properties: org=$org key=$key"
    echo "$org" "$key" ""
}
# Read org/key/region from .sonarlint/connectedMode.json, if present.
# Echoes "org key region" and returns 0 when both org and key were found.
detect_project_from_sonarlint()
{
    local cfg=".sonarlint/connectedMode.json"
    [[ -f "$cfg" ]] || return 1
    local org key region
    org=$(jq -r '.sonarCloudOrganization // empty' "$cfg" 2> /dev/null || echo "")
    key=$(jq -r '.projectKey // empty' "$cfg" 2> /dev/null || echo "")
    region=$(jq -r '.region // empty' "$cfg" 2> /dev/null || echo "")
    [[ -n "$org" && -n "$key" ]] || return 1
    log_debug "Detected from .sonarlint/connectedMode.json: org=$org key=$key region=$region"
    echo "$org" "$key" "$region"
}
# Orchestrate project detection in priority order:
#   1. sonar-project.properties
#   2. .sonarlint/connectedMode.json
# Echoes the detected "org key region" triple on success; on failure logs
# guidance for the user and returns 1.
detect_project()
{
    local result
    # 1. sonar-project.properties
    if result=$(detect_project_from_properties); then
        echo "$result"
        return 0
    fi
    # 2. .sonarlint/connectedMode.json
    if result=$(detect_project_from_sonarlint); then
        echo "$result"
        return 0
    fi
    # No config found
    log_error "Could not auto-detect SonarCloud project configuration."
    log_error "Provide one of the following:"
    log_error " 1. sonar-project.properties with sonar.organization and sonar.projectKey"
    log_error " 2. .sonarlint/connectedMode.json with sonarCloudOrganization and projectKey"
    log_error " 3. CLI flags: --org <org> --project-key <key>"
    return 1
}
# Emit the SonarCloud API base URL (currently the same for all regions).
get_base_url()
{
    printf '%s\n' "https://sonarcloud.io"
}
# Make an authenticated SonarCloud API request.
# $1 - fully-built API URL.
# Prints the response body on HTTP 200; logs a specific error and returns 1
# for common failure codes (401/403/404/429) or any other non-200 status.
sonar_api_request()
{
    local url="$1"
    log_debug "API request: $url"
    local http_code body response
    # -w appends the status code on its own line so one request yields
    # both body and code.
    response=$(curl -s -w "\n%{http_code}" \
        -H "Authorization: Bearer $SONAR_TOKEN" \
        "$url" 2> /dev/null) || {
        log_error "curl request failed for: $url"
        return 1
    }
    # Use printf rather than echo: an arbitrary response body could begin
    # with "-n"/"-e" or contain backslashes, which echo may swallow or
    # interpret depending on shell options.
    http_code=$(printf '%s\n' "$response" | tail -n1)
    body=$(printf '%s\n' "$response" | sed '$d')
    case "$http_code" in
        200)
            printf '%s\n' "$body"
            return 0
            ;;
        401)
            log_error "Authentication failed (HTTP 401). Check your SONAR_TOKEN."
            return 1
            ;;
        403)
            log_error "Access forbidden (HTTP 403). Token may lack required permissions."
            return 1
            ;;
        404)
            log_error "Not found (HTTP 404). Check organization and project key."
            return 1
            ;;
        429)
            log_error "Rate limited (HTTP 429). Wait before retrying."
            return 1
            ;;
        *)
            log_error "API request failed with HTTP $http_code"
            log_debug "Response body: $body"
            return 1
            ;;
    esac
}
# Fetch a single page of issues from the SonarCloud issues/search API.
# $1 base_url, $2 project_key, $3 page number; $4 PR number, $5 branch,
# $6 severities, $7 types, $8 statuses, $9 resolved are optional filters,
# each appended to the query string only when non-empty.
# NOTE(review): values are interpolated into the URL without URL-encoding;
# a branch name or filter containing '&', '#' or spaces would corrupt the
# request — confirm callers only pass URL-safe values.
fetch_issues_page()
{
    local base_url="$1"
    local project_key="$2"
    local page="$3"
    local pr_number="${4:-}"
    local branch="${5:-}"
    local severities="${6:-}"
    local types="${7:-}"
    local statuses="${8:-}"
    local resolved="${9:-}"
    local url="${base_url}/api/issues/search?componentKeys=${project_key}"
    url="${url}&p=${page}&ps=${MAX_PAGE_SIZE}"
    if [[ -n "$pr_number" ]]; then
        url="${url}&pullRequest=${pr_number}"
    fi
    if [[ -n "$branch" ]]; then
        url="${url}&branch=${branch}"
    fi
    if [[ -n "$severities" ]]; then
        url="${url}&severities=${severities}"
    fi
    if [[ -n "$types" ]]; then
        url="${url}&types=${types}"
    fi
    if [[ -n "$statuses" ]]; then
        url="${url}&statuses=${statuses}"
    fi
    if [[ -n "$resolved" ]]; then
        url="${url}&resolved=${resolved}"
    fi
    sonar_api_request "$url"
}
# Fetch all issues with pagination.
# Args mirror fetch_issues_page minus the page number. Keeps requesting
# successive pages until a short page (< MAX_PAGE_SIZE) is returned or the
# MAX_TOTAL_ISSUES cap is hit, then echoes the accumulated JSON array.
fetch_all_issues()
{
    local base_url="$1"
    local project_key="$2"
    local pr_number="${3:-}"
    local branch="${4:-}"
    local severities="${5:-}"
    local types="${6:-}"
    local statuses="${7:-}"
    local resolved="${8:-}"
    local page=1
    local all_issues="[]"
    local total=0
    while true; do
        log_info "Fetching issues page $page..."
        local response
        response=$(fetch_issues_page "$base_url" "$project_key" "$page" \
            "$pr_number" "$branch" "$severities" "$types" "$statuses" "$resolved") || return 1
        local page_issues page_total
        # Fall back to empty/zero on malformed JSON so one bad page does not
        # abort the whole fetch.
        page_issues=$(echo "$response" | jq '.issues // []' 2> /dev/null || echo "[]")
        page_total=$(echo "$response" | jq '.total // 0' 2> /dev/null || echo "0")
        local page_count
        page_count=$(echo "$page_issues" | jq 'length' 2> /dev/null || echo "0")
        log_debug "Page $page: $page_count issues (total available: $page_total)"
        # Merge into accumulated results
        all_issues=$(echo "$all_issues" "$page_issues" | jq -s '.[0] + .[1]' 2> /dev/null || echo "$all_issues")
        total=$(echo "$all_issues" | jq 'length' 2> /dev/null || echo "0")
        # Check if we have all issues or hit the cap
        if [[ "$page_count" -lt "$MAX_PAGE_SIZE" ]]; then
            break
        fi
        if [[ "$total" -ge "$MAX_TOTAL_ISSUES" ]]; then
            log_warn "Reached maximum of $MAX_TOTAL_ISSUES issues. Results may be incomplete."
            break
        fi
        page=$((page + 1))
    done
    log_info "Fetched $total issues total"
    echo "$all_issues"
}
# Format issues grouped by severity then by file.
# $1 issues JSON array, $2 base URL, $3 organization, $4 project key.
# Emits markdown sections: "### Severity" > "#### File" > "##### Issue",
# with severities ordered BLOCKER..INFO via the numeric ranking below.
format_issues_by_severity()
{
    local issues="$1"
    local base_url="$2"
    local org="$3"
    local project_key="$4"
    echo "$issues" | jq -r --arg base_url "$base_url" --arg org "$org" --arg key "$project_key" '
    group_by(.severity) | sort_by(-(
        if .[0].severity == "BLOCKER" then 5
        elif .[0].severity == "CRITICAL" then 4
        elif .[0].severity == "MAJOR" then 3
        elif .[0].severity == "MINOR" then 2
        elif .[0].severity == "INFO" then 1
        else 0 end
    )) | .[] |
    "### Severity: \(.[0].severity)\n" +
    (
        group_by(.component) | .[] |
        "#### File: \(.[0].component | split(":") | if length > 1 then .[1:] | join(":") else .[0] end)\n" +
        (
            [.[] |
            "##### Issue: \(.message)\n" +
            "- **Rule:** \(.rule)\n" +
            "- **Type:** \(.type)\n" +
            "- **Severity:** \(.severity)\n" +
            "- **Status:** \(.status)\n" +
            "- **Line:** \(.line // "N/A")\n" +
            "- **Effort:** \(.effort // "N/A")\n" +
            "- **Created:** \(.creationDate // "N/A")\n" +
            "- **URL:** \($base_url)/project/issues?open=\(.key)&id=\($key)\n"
            ] | join("\n")
        )
    )
    ' 2> /dev/null || echo "Error formatting issues."
}
# Format summary counts for the fetched issues.
# $1 - issues JSON array. Emits markdown sections with per-severity,
# per-type, and total counts.
format_summary()
{
    local issues="$1"
    echo "### By Severity"
    echo ""
    echo "$issues" | jq -r '
    group_by(.severity) | .[] |
    "- **\(.[0].severity):** \(length)"
    ' 2> /dev/null || echo "- Error computing severity counts"
    echo ""
    echo "### By Type"
    echo ""
    echo "$issues" | jq -r '
    group_by(.type) | .[] |
    "- **\(.[0].type):** \(length)"
    ' 2> /dev/null || echo "- Error computing type counts"
    echo ""
    echo "### Total"
    echo ""
    local count
    count=$(echo "$issues" | jq 'length' 2> /dev/null || echo "0")
    echo "- **Total issues:** $count"
}
# Format the full markdown output: LLM instructions, project info, issues,
# summary, and next-steps footer.
# $1 org, $2 project key, $3 mode (project|pr|branch), $4 mode value,
# $5 base URL, $6 issues JSON array.
format_output()
{
    local org="$1"
    local project_key="$2"
    local mode="$3"
    local mode_value="$4"
    local base_url="$5"
    local issues="$6"
    local issue_count
    issue_count=$(echo "$issues" | jq 'length' 2> /dev/null || echo "0")
    # Header and LLM instructions (quoted heredoc: no expansion wanted here)
    cat << 'EOF'
# SonarCloud Issues Analysis Report
## LLM Processing Instructions
You are analyzing code quality issues from SonarCloud for this project.
**Your tasks:**
1. **Triage**: Review each issue and assess its real impact on the codebase
2. **Priority Assessment**: Rank issues by severity and likelihood of causing problems
3. **Code Verification**: Check the actual source code to confirm each issue is valid
4. **Root Cause Analysis**: Identify why the issue exists and what pattern caused it
5. **Implementation Plan**: Create actionable fix tasks grouped by file for efficiency
6. **False Positive Detection**: Flag issues that appear to be false positives with reasoning
**Tools to use:**
- `find`, `cat`, `rg` commands and available tools to examine current codebase
- `git log` and `git blame` to understand code history and authorship
- File system tools to verify mentioned files exist and check current state
EOF
    # Project information (unquoted heredoc: expands $org / $project_key)
    cat << EOF
## Project Information
- **Organization:** $org
- **Project Key:** $project_key
EOF
    case "$mode" in
        pr)
            echo "- **Mode:** Pull Request #$mode_value"
            echo "- **URL:** ${base_url}/project/issues?pullRequest=${mode_value}&id=${project_key}"
            ;;
        branch)
            echo "- **Mode:** Branch \`$mode_value\`"
            echo "- **URL:** ${base_url}/project/issues?branch=${mode_value}&id=${project_key}"
            ;;
        *)
            echo "- **Mode:** Project (all open issues)"
            echo "- **URL:** ${base_url}/project/issues?id=${project_key}"
            ;;
    esac
    echo "- **Dashboard:** ${base_url}/project/overview?id=${project_key}"
    # Issues section
    echo ""
    echo "## Issues ($issue_count total)"
    echo ""
    if [[ "$issue_count" -eq 0 ]]; then
        echo "No issues found matching the specified filters."
    else
        format_issues_by_severity "$issues" "$base_url" "$org" "$project_key"
        echo ""
        echo "## Summary"
        echo ""
        format_summary "$issues"
    fi
    # Footer
    cat << 'EOF'
## Next Steps for LLM Analysis
1. **Validate against current code:**
- Check if mentioned files and lines still match the reported issues
- Verify issues are not already fixed in the current branch
- Identify false positives and explain why they are false positives
2. **Prioritize fixes:**
- Address BLOCKER and CRITICAL severity issues first
- Group fixes by file to minimize context switching
- Consider effort estimates when planning the fix order
3. **Group by file for implementation:**
- Batch changes to the same file together
- Consider dependencies between fixes
- Plan atomic commits per logical change group
4. **Track progress:**
- Use todo lists and memory tools to track which issues are addressed
- Mark false positives with clear reasoning
- Verify fixes do not introduce new issues
EOF
}
# Main pipeline: fetch all matching issues and print the markdown report.
# $1 org, $2 project key, $3 mode (project|pr|branch), $4 mode value,
# $5 severities, $6 types, $7 statuses, $8 resolved (optional filters).
fetch_and_display_issues()
{
    local org="$1"
    local project_key="$2"
    local mode="$3"
    local mode_value="$4"
    local severities="${5:-}"
    local types="${6:-}"
    local statuses="${7:-}"
    local resolved="${8:-}"
    local base_url
    base_url=$(get_base_url)
    # Translate the mode into the matching API filter argument.
    local pr_number=""
    local branch=""
    case "$mode" in
        pr)
            pr_number="$mode_value"
            ;;
        branch)
            branch="$mode_value"
            ;;
    esac
    log_info "Fetching SonarCloud issues for $project_key (mode: $mode)..."
    local issues
    issues=$(fetch_all_issues "$base_url" "$project_key" \
        "$pr_number" "$branch" "$severities" "$types" "$statuses" "$resolved") || {
        log_error "Failed to fetch issues"
        return 1
    }
    format_output "$org" "$project_key" "$mode" "$mode_value" "$base_url" "$issues"
}
# Main entry point: parse CLI flags, resolve the project (flags or
# auto-detection), then fetch and display issues.
main()
{
    local org=""
    local project_key=""
    local mode="project" # project | pr | branch
    local mode_value=""
    local severities=""
    local types=""
    local statuses="OPEN,CONFIRMED,REOPENED"
    local resolved="false"
    # Parse arguments
    while [[ $# -gt 0 ]]; do
        case "$1" in
            -h | --help)
                show_usage
                exit 0
                ;;
            --pr)
                mode="pr"
                mode_value="${2:?Missing PR number after --pr}"
                shift 2
                ;;
            --branch)
                mode="branch"
                mode_value="${2:?Missing branch name after --branch}"
                shift 2
                ;;
            --org)
                org="${2:?Missing organization after --org}"
                shift 2
                ;;
            --project-key)
                project_key="${2:?Missing project key after --project-key}"
                shift 2
                ;;
            --severities)
                severities="${2:?Missing severities after --severities}"
                shift 2
                ;;
            --types)
                types="${2:?Missing types after --types}"
                shift 2
                ;;
            --statuses)
                statuses="${2:?Missing statuses after --statuses}"
                shift 2
                ;;
            --resolved)
                # Include resolved issues: flip the flag and clear the
                # default status filter so resolved statuses are not excluded.
                resolved="true"
                statuses=""
                shift
                ;;
            *)
                log_error "Unknown argument: $1"
                show_usage
                exit 1
                ;;
        esac
    done
    check_dependencies
    check_auth
    # Auto-detect project if not specified via CLI
    if [[ -z "$org" || -z "$project_key" ]]; then
        local detected
        detected=$(detect_project) || exit 1
        # shellcheck disable=SC2034 # region reserved for future per-region base URLs
        read -r detected_org detected_key detected_region <<< "$detected"
        if [[ -z "$org" ]]; then
            org="$detected_org"
        fi
        if [[ -z "$project_key" ]]; then
            project_key="$detected_key"
        fi
    fi
    log_debug "Organization: $org"
    log_debug "Project Key: $project_key"
    log_debug "Mode: $mode"
    log_debug "Severities: ${severities:-all}"
    log_debug "Types: ${types:-all}"
    log_debug "Statuses: ${statuses:-all}"
    log_debug "Resolved: $resolved"
    fetch_and_display_issues "$org" "$project_key" "$mode" "$mode_value" \
        "$severities" "$types" "$statuses" "$resolved"
}
# Run main function with all arguments
main "$@"

View File

@@ -1,46 +0,0 @@
# x-sonarcloud
---
## Usage
```bash
x-sonarcloud # Auto-detect, all open issues
x-sonarcloud --pr <number> # PR-specific issues
x-sonarcloud --branch <name> # Branch-specific issues
x-sonarcloud --org <org> --project-key <key> # Explicit project
x-sonarcloud --severities BLOCKER,CRITICAL # Filter by severity
x-sonarcloud --types BUG,VULNERABILITY # Filter by type
x-sonarcloud --statuses OPEN,CONFIRMED # Filter by status
x-sonarcloud --resolved # Include resolved issues
x-sonarcloud -h|--help # Show help
```
Fetches SonarCloud code quality issues via REST API and formats them as
structured markdown with LLM processing instructions for automated analysis
and triage.
## Examples
```bash
x-sonarcloud # All open issues in project
x-sonarcloud --pr 42 # Issues on PR #42
x-sonarcloud --branch main # Issues on main branch
x-sonarcloud --severities BLOCKER --types BUG # Only blocker bugs
```
## Requirements
- `curl` and `jq` installed
- `SONAR_TOKEN` environment variable set
(generate at <https://sonarcloud.io/account/security>)
- Project auto-detection via `sonar-project.properties` or
`.sonarlint/connectedMode.json`, or explicit `--org`/`--project-key` flags
## Environment Variables
- `SONAR_TOKEN` — Bearer token for SonarCloud API authentication (required)
- `INFO=1` — Enable informational log messages on stderr
- `DEBUG=1` — Enable debug log messages on stderr
<!-- vim: set ft=markdown spell spelllang=en_us cc=80 : -->

View File

@@ -154,7 +154,6 @@ get_state()
# ERROR HANDLING AND CLEANUP
# ============================================================================
# Clean up temporary files and handle exit
cleanup()
{
exit_code=$?
@@ -178,7 +177,6 @@ trap cleanup EXIT INT TERM
# LOGGING FUNCTIONS
# ============================================================================
# Create audit directories and initialize log file
setup_logging()
{
# Create all necessary directories
@@ -199,7 +197,6 @@ setup_logging()
} >> "$LOG_FILE"
}
# Log a message with timestamp and severity level
log_message()
{
level="$1"
@@ -228,7 +225,6 @@ log_message()
# INPUT VALIDATION
# ============================================================================
# Validate hostname format for SSH connection
validate_hostname()
{
hostname="$1"
@@ -248,7 +244,6 @@ validate_hostname()
return 0
}
# Validate username format for SSH connection
validate_username()
{
username="$1"
@@ -268,7 +263,6 @@ validate_username()
return 0
}
# Parse input file into validated host entries
parse_host_list()
{
input_file="$1"
@@ -315,7 +309,6 @@ parse_host_list()
# SSH CONNECTION FUNCTIONS
# ============================================================================
# Execute SSH command with retry logic and key fallback
ssh_with_retry()
{
host="$1"
@@ -380,7 +373,6 @@ ssh_with_retry()
return 1
}
# Verify SSH connectivity to a host
test_ssh_connectivity()
{
host="$1"
@@ -400,7 +392,6 @@ test_ssh_connectivity()
# SSH SECURITY AUDIT FUNCTIONS
# ============================================================================
# Audit SSH daemon configuration on a remote host
check_sshd_config()
{
host="$1"
@@ -460,7 +451,6 @@ check_sshd_config()
# AUTOMATED UPDATES DETECTION
# ============================================================================
# Check if automated security updates are enabled
check_automated_updates()
{
host="$1"
@@ -542,7 +532,6 @@ check_automated_updates()
# PENDING REBOOT DETECTION
# ============================================================================
# Detect if a remote host requires a reboot
check_pending_reboot()
{
host="$1"
@@ -613,7 +602,6 @@ check_pending_reboot()
# REMEDIATION FUNCTIONS
# ============================================================================
# Create a timestamped backup of sshd_config
backup_sshd_config()
{
host="$1"
@@ -628,7 +616,6 @@ backup_sshd_config()
" "$ssh_key"
}
# Disable password authentication on a remote host
disable_password_auth()
{
host="$1"
@@ -681,7 +668,6 @@ ClientAliveCountMax 2
# REPORTING FUNCTIONS
# ============================================================================
# Generate CSV report from audit results
generate_csv_report()
{
report_file="$1"
@@ -707,7 +693,6 @@ generate_csv_report()
done < "$HOSTS_LIST_FILE"
}
# Display formatted audit summary to terminal
display_summary()
{
printf '\n'
@@ -758,7 +743,6 @@ display_summary()
# MAIN AUDIT FUNCTION
# ============================================================================
# Run all audit checks on a single host
audit_host()
{
host_entry="$1"
@@ -804,7 +788,6 @@ audit_host()
# MAIN EXECUTION
# ============================================================================
# Main entry point: parse args, run audits, generate report
main()
{
input_file="${1:-}"

View File

@@ -9,13 +9,11 @@
# <r> <g> <b> range from 0 to 255 inclusive.
# The escape sequence ^[0m returns output to default
# Set terminal background to an RGB color
setBackgroundColor()
{
echo -en "\x1b[48;2;$1;$2;$3""m"
}
# Reset terminal output formatting
resetOutput()
{
echo -en "\x1b[0m\n"

View File

@@ -28,7 +28,6 @@
set -euo pipefail
# Display usage information and options
usage()
{
cat << EOF
@@ -53,7 +52,6 @@ THUMB_SUFFIX="${THUMB_SUFFIX:-_thumb}"
# List of MIME types supported by ImageMagick (adjust as needed)
ALLOWED_MIMETYPES=("image/jpeg" "image/png" "image/gif" "image/bmp" "image/tiff" "image/webp")
# Verify ImageMagick is available
check_magick_installed()
{
if ! command -v magick &> /dev/null; then
@@ -62,7 +60,6 @@ check_magick_installed()
fi
}
# Verify mimetype command is available
check_mimetype_installed()
{
if ! command -v mimetype &> /dev/null; then
@@ -168,7 +165,6 @@ generate_thumbnails()
done < <(find "$source_dir" -type f -print0)
}
# Parse options, validate inputs, and generate thumbnails
main()
{
parse_options "$@"

View File

@@ -26,7 +26,6 @@ if [ "$#" -lt 2 ]; then
exit 1
fi
# Wait until host stops responding to ping
wait_for_host_down()
{
local host=$1
@@ -38,7 +37,6 @@ wait_for_host_down()
done
}
# Wait for host to go down then execute command
main()
{
local host=$1

View File

@@ -30,7 +30,6 @@ if [ "$#" -lt 2 ]; then
exit 1
fi
# Extract hostname from arguments, handling ssh shortcut
get_host()
{
if [ "$1" = "ssh" ]; then
@@ -40,7 +39,6 @@ get_host()
fi
}
# Wait until host responds to ping
wait_for_host()
{
local host=$1
@@ -52,7 +50,6 @@ wait_for_host()
done
}
# Wait for host to come online then execute command
main()
{
local host

View File

@@ -9,15 +9,10 @@
"lint:biome": "biome check .",
"fix:biome": "biome check --write .",
"format": "biome format --write .",
"lint:prettier": "prettier --check '**/*.{yml,yaml}'",
"fix:prettier": "prettier --write '**/*.{yml,yaml}'",
"format:yaml": "prettier --write '**/*.{yml,yaml}'",
"test": "bash test-all.sh",
"lint:ec": "ec -f gcc",
"lint:md-table": "git ls-files '*.md' | xargs markdown-table-formatter --check",
"fix:md-table": "git ls-files '*.md' | xargs markdown-table-formatter",
"lint": "yarn lint:biome && yarn lint:prettier && yarn lint:ec && yarn lint:md-table",
"fix": "yarn fix:biome && yarn fix:prettier && yarn fix:md-table"
"lint": "yarn lint:biome && yarn lint:ec",
"fix": "yarn fix:biome"
},
"repository": {
"type": "git",
@@ -38,8 +33,6 @@
"@types/node": "^24.0.1",
"bats": "^1.12.0",
"editorconfig-checker": "^6.1.0",
"markdown-table-formatter": "^1.7.0",
"prettier": "^3.8.1",
"typescript": "^5.8.3"
},
"packageManager": "yarn@4.12.0"

View File

@@ -1,9 +0,0 @@
[tool.ruff]
target-version = "py39"
line-length = 120
[tool.ruff.lint]
select = ["E", "F", "W", "I", "UP", "B", "SIM", "C4"]
[tool.ruff.format]
quote-style = "double"

View File

@@ -7,7 +7,6 @@ set -euo pipefail
source "${DOTFILES}/config/shared.sh"
DEST="$HOME/.dotfiles/docs/nvim-keybindings.md"
# Generate Neovim keybindings documentation
main()
{
msg "Generating Neovim keybindings documentation"
@@ -29,7 +28,6 @@ main()
&& mv "${DEST}.tmp" "$DEST"
msg "Neovim keybindings documentation generated at $DEST"
return 0
}
main "$@"

View File

@@ -6,30 +6,20 @@
source "${DOTFILES}/config/shared.sh"
DEST="$HOME/.dotfiles/docs/wezterm-keybindings.md"
# Generate wezterm keybindings documentation
main()
{
msg "Generating wezterm keybindings documentation"
local tmp
tmp="$(mktemp)"
trap 'rm -f "$tmp"' RETURN
{
printf "# wezterm keybindings\n\n"
printf "\`\`\`txt\n"
} > "$tmp"
} > "$DEST"
if ! wezterm show-keys >> "$tmp"; then
msg "Failed to run 'wezterm show-keys'"
return 1
fi
wezterm show-keys >> "$DEST"
printf "\`\`\`\n\n- Generated on %s\n" "$(date)" >> "$tmp"
printf "\`\`\`\n\n- Generated on %s\n" "$(date)" >> "$DEST"
mv "$tmp" "$DEST"
msg "wezterm keybindings documentation generated at $DEST"
return 0
}
main "$@"

View File

@@ -14,34 +14,33 @@ fi
packages=(
# Build essentials
build-essential # gcc, g++, make
cmake # Cross-platform build system
pkg-config # Helper for compiling against libraries
autoconf # Automatic configure script builder
automake # Makefile generator
libtool # Generic library support script
build-essential # gcc, g++, make
cmake # Cross-platform build system
pkg-config # Helper for compiling against libraries
autoconf # Automatic configure script builder
automake # Makefile generator
libtool # Generic library support script
# Libraries for compiling languages
libssl-dev # SSL development headers
libffi-dev # Foreign function interface
zlib1g-dev # Compression library
libreadline-dev # Command-line editing
libbz2-dev # Bzip2 compression
libsqlite3-dev # SQLite database
libncurses-dev # Terminal UI library
libssl-dev # SSL development headers
libffi-dev # Foreign function interface
zlib1g-dev # Compression library
libreadline-dev # Command-line editing
libbz2-dev # Bzip2 compression
libsqlite3-dev # SQLite database
libncurses-dev # Terminal UI library
# CLI utilities (not in cargo/go/npm)
jq # JSON processor
tmux # Terminal multiplexer
tree # Directory listing
unzip # Archive extraction
shellcheck # Shell script linter
socat # Multipurpose network relay
gnupg # GPG encryption/signing
software-properties-common # add-apt-repository command
jq # JSON processor
tmux # Terminal multiplexer
tree # Directory listing
unzip # Archive extraction
shellcheck # Shell script linter
socat # Multipurpose network relay
gnupg # GPG encryption/signing
software-properties-common # add-apt-repository command
)
# Install apt packages that are not already present
install_packages()
{
local to_install=()
@@ -65,15 +64,12 @@ install_packages()
else
msgr ok "All packages already installed"
fi
return 0
}
# Install all apt packages and report completion
main()
{
install_packages
msgr yay "apt package installations complete"
return 0
}
main "$@"

View File

@@ -25,18 +25,18 @@ fi
# Cargo packages to install
packages=(
cargo-update # A cargo subcommand for checking and applying updates to installed executables
cargo-cache # Cargo cache management utility
tree-sitter-cli # An incremental parsing system for programming tools
bkt # A subprocess caching utility
difftastic # A structural diff that understands syntax
fd-find # A simple, fast and user-friendly alternative to 'find'
ripgrep # Recursively searches directories for a regex pattern while respecting your gitignore
bob-nvim # A version manager for neovim
bottom # A cross-platform graphical process/system monitor
eza # A modern alternative to ls
tmux-sessionizer # A tool for opening git repositories as tmux sessions
zoxide # A smarter cd command
cargo-update # A cargo subcommand for checking and applying updates to installed executables
cargo-cache # Cargo cache management utility
tree-sitter-cli # An incremental parsing system for programming tools
bkt # A subprocess caching utility
difftastic # A structural diff that understands syntax
fd-find # A simple, fast and user-friendly alternative to 'find'
ripgrep # Recursively searches directories for a regex pattern while respecting your gitignore
bob-nvim # A version manager for neovim
bottom # A cross-platform graphical process/system monitor
eza # A modern alternative to ls
tmux-sessionizer # A tool for opening git repositories as tmux sessions
zoxide # A smarter cd command
)
# Number of jobs to run in parallel, this helps to keep the system responsive
@@ -57,7 +57,6 @@ install_packages()
msgr run_done "Done installing $pkg"
echo ""
done
return 0
}
# Function to perform additional steps for installed cargo packages
@@ -73,16 +72,13 @@ post_install_steps()
msgr run "Removing cargo cache"
cargo cache --autoclean
msgr "done" "Done removing cargo cache"
return 0
}
# Install cargo packages and run post-install steps
main()
{
install_packages
msgr "done" "Installed cargo packages!"
post_install_steps
return 0
}
main "$@"

View File

@@ -12,7 +12,6 @@ PBB_SYNTAX="syntax: bash"
PBB_TAGS="tags: [bash]"
PBB_TEMP_DIR="${XDG_CACHE_HOME:-$HOME/.cache}/cheat/pbb"
# Verify required tools are installed
check_required_tools()
{
for t in "${PBB_REQUIRED_TOOLS[@]}"; do
@@ -21,37 +20,32 @@ check_required_tools()
exit 1
fi
done
return 0
}
# Clone or update the pure-bash-bible repository
clone_or_update_repo()
{
if [[ ! -d "$PBB_TEMP_DIR/.git" ]]; then
if [ ! -d "$PBB_TEMP_DIR/.git" ]; then
msg_run "Starting to clone $PBB_GIT"
git clone --depth 1 --single-branch -q "$PBB_GIT" "$PBB_TEMP_DIR"
msg_yay "Cloned $PBB_GIT"
git clone --depth 1 --single-branch -q "$PBB_GIT" "$PBB_TEMP_DIR" \
&& msg_yay "Cloned $PBB_GIT"
else
msg_run "Starting to update $PBB_GIT"
git -C "$PBB_TEMP_DIR" reset --hard origin/master
git -C "$PBB_TEMP_DIR" pull -q
msg_yay "Updated $PBB_GIT"
git -C "$PBB_TEMP_DIR" pull -q \
&& msgr yay "Updated $PBB_GIT"
fi
return 0
}
# Get the cheat destination directory for pure-bash-bible
prepare_cheat_dest()
{
local cheat_dest
cheat_dest="$(cheat -d | grep pure-bash-bible | head -1 | awk '{print $2}')"
if [[ ! -d "$cheat_dest" ]]; then
if [ ! -d "$cheat_dest" ]; then
mkdir -p "$cheat_dest"
fi
echo "$cheat_dest"
return 0
}
# Processes chapter files from the pure-bash-bible repository and generates or updates corresponding cheat sheets.
@@ -89,22 +83,19 @@ process_chapters()
LC_ALL=C perl -pi.bak -e 's/\<\!-- CHAPTER END --\>//' "$cheat_file"
rm "$cheat_file.bak"
if [[ '---' != "$(head -1 < "$cheat_file")" ]]; then
if [ '---' != "$(head -1 < "$cheat_file")" ]; then
local metadata
metadata="$PBB_SYNTAX\n$PBB_TAGS\n$PBB_SOURCE\n"
printf '%s\n%b%s\n%s' "---" "$metadata" "---" "$(cat "$cheat_file")" > "$cheat_file"
fi
done
return 0
}
# Install pure-bash-bible cheatsheets
main()
{
check_required_tools
clone_or_update_repo
process_chapters
return 0
}
main "$@"

View File

@@ -14,7 +14,7 @@ EXPECTED_CHECKSUM="$(php -r 'copy("https://composer.github.io/installer.sig", "p
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
ACTUAL_CHECKSUM="$(php -r "echo hash_file('sha384', 'composer-setup.php');")"
if [[ "$EXPECTED_CHECKSUM" != "$ACTUAL_CHECKSUM" ]]; then
if [ "$EXPECTED_CHECKSUM" != "$ACTUAL_CHECKSUM" ]; then
echo >&2 'ERROR: Invalid installer checksum'
rm composer-setup.php
exit 1
@@ -23,7 +23,7 @@ fi
php composer-setup.php --quiet
RESULT=$?
rm composer-setup.php
if [[ $RESULT -eq 0 ]]; then
if [ $RESULT -eq 0 ]; then
mv composer.phar ~/.local/bin/composer
fi
exit $RESULT

View File

@@ -14,44 +14,41 @@ fi
packages=(
# Build essentials (individual packages, group handled separately)
cmake # Cross-platform build system
pkgconfig # Helper for compiling against libraries
autoconf # Automatic configure script builder
automake # Makefile generator
libtool # Generic library support script
cmake # Cross-platform build system
pkgconfig # Helper for compiling against libraries
autoconf # Automatic configure script builder
automake # Makefile generator
libtool # Generic library support script
# Libraries for compiling languages
openssl-devel # SSL development headers
libffi-devel # Foreign function interface
zlib-devel # Compression library
readline-devel # Command-line editing
bzip2-devel # Bzip2 compression
sqlite-devel # SQLite database
ncurses-devel # Terminal UI library
openssl-devel # SSL development headers
libffi-devel # Foreign function interface
zlib-devel # Compression library
readline-devel # Command-line editing
bzip2-devel # Bzip2 compression
sqlite-devel # SQLite database
ncurses-devel # Terminal UI library
# CLI utilities (not in cargo/go/npm)
jq # JSON processor
tmux # Terminal multiplexer
tree # Directory listing
unzip # Archive extraction
ShellCheck # Shell script linter
socat # Multipurpose network relay
gnupg2 # GPG encryption/signing
jq # JSON processor
tmux # Terminal multiplexer
tree # Directory listing
unzip # Archive extraction
ShellCheck # Shell script linter
socat # Multipurpose network relay
gnupg2 # GPG encryption/signing
)
# Install the Development Tools dnf group
install_dev_tools_group()
{
if dnf group list installed 2> /dev/null | grep -q "Development Tools"; then
if dnf group list installed 2>/dev/null | grep -q "Development Tools"; then
msgr ok "@development-tools group already installed"
else
msgr run "Installing @development-tools group"
sudo dnf group install -y "Development Tools"
fi
return 0
}
# Install dnf packages that are not already present
install_packages()
{
local to_install=()
@@ -74,16 +71,13 @@ install_packages()
else
msgr ok "All packages already installed"
fi
return 0
}
# Install all dnf packages and report completion
main()
{
install_dev_tools_group
install_packages
msgr yay "dnf package installations complete"
return 0
}
main "$@"

View File

@@ -18,16 +18,11 @@ fonts=(
# Function to clone or update the NerdFonts repository
clone_or_update_repo()
{
if [[ ! -d "$TMP_PATH/.git" ]]; then
rm -rf "$TMP_PATH"
if [ ! -d "$TMP_PATH" ]; then
git clone --quiet --filter=blob:none --sparse --depth=1 "$GIT_REPO" "$TMP_PATH"
fi
cd "$TMP_PATH" || {
msgr err "No such folder $TMP_PATH"
exit 1
}
return 0
cd "$TMP_PATH" || { msgr err "No such folder $TMP_PATH"; exit 1; }
}
# Function to add fonts to sparse-checkout
@@ -43,7 +38,6 @@ add_fonts_to_sparse_checkout()
git sparse-checkout add "patched-fonts/$font"
echo ""
done
return 0
}
# Function to install NerdFonts
@@ -53,24 +47,19 @@ install_fonts()
# shellcheck disable=SC2048,SC2086
./install.sh -q -s ${fonts[*]}
msgr run_done "Done"
return 0
}
# Remove the temporary nerd-fonts clone directory
remove_tmp_path()
{
rm -rf "$TMP_PATH"
return 0
}
# Clone, sparse-checkout, install fonts, and clean up
main()
{
clone_or_update_repo
add_fonts_to_sparse_checkout
install_fonts
remove_tmp_path
return 0
}
main "$@"

View File

@@ -45,15 +45,12 @@ install_extensions()
gh extension install "$ext"
echo ""
done
return 0
}
# Install all GitHub CLI extensions
main()
{
install_extensions
msgr run_done "Done"
return 0
}
main "$@"

View File

@@ -15,15 +15,9 @@ if ! command -v git-crypt &> /dev/null; then
BUILD_PATH="$(mktemp -d)"
trap 'rm -rf "$BUILD_PATH"' EXIT
if [[ ! -f "$CHECK_PATH" ]]; then
git clone --depth 1 "$REPO_URL" "$BUILD_PATH" || {
msgr err "Failed to clone $REPO_URL"
exit 1
}
cd "$BUILD_PATH" || {
msgr err "$BUILD_PATH not found"
exit 1
}
if [ ! -f "$CHECK_PATH" ]; then
git clone --depth 1 "$REPO_URL" "$BUILD_PATH" || { msgr err "Failed to clone $REPO_URL"; exit 1; }
cd "$BUILD_PATH" || { msgr err "$BUILD_PATH not found"; exit 1; }
make && make install PREFIX="$HOME/.local"
else
msgr run_done "git-crypt ($CHECK_PATH) already installed"

View File

@@ -11,13 +11,13 @@ msgr run "Installing go packages"
# Go packages to install
packages=(
github.com/dotzero/git-profile@latest # Switch between git user profiles
github.com/google/yamlfmt/cmd/yamlfmt@latest # Format yaml files
github.com/cheat/cheat/cmd/cheat@latest # Interactive cheatsheets on the CLI
github.com/charmbracelet/glow@latest # Render markdown on the CLI
github.com/junegunn/fzf@latest # General-purpose fuzzy finder
github.com/charmbracelet/gum@latest # Glamorous shell scripts
github.com/joshmedeski/sesh/v2@latest # Terminal session manager
github.com/dotzero/git-profile@latest # Switch between git user profiles
github.com/google/yamlfmt/cmd/yamlfmt@latest # Format yaml files
github.com/cheat/cheat/cmd/cheat@latest # Interactive cheatsheets on the CLI
github.com/charmbracelet/glow@latest # Render markdown on the CLI
github.com/junegunn/fzf@latest # General-purpose fuzzy finder
github.com/charmbracelet/gum@latest # Glamorous shell scripts
github.com/joshmedeski/sesh/v2@latest # Terminal session manager
)
# Function to install go packages
@@ -33,7 +33,6 @@ install_packages()
go install "$pkg"
echo ""
done
return 0
}
# Function to install completions and run actions for selected packages
@@ -45,7 +44,6 @@ post_install()
git-profile completion zsh > "$ZSH_CUSTOM_COMPLETION_PATH/_git-profile" \
&& msgr run_done "Installed completions for git-profile"
fi
return 0
}
# Function to clear go cache
@@ -53,17 +51,14 @@ clear_go_cache()
{
msgr run "Clearing go cache"
go clean -cache -modcache
return 0
}
# Install go packages, completions, and clear cache
main()
{
install_packages
post_install
clear_go_cache
msgr run_done "Done"
return 0
}
main "$@"

View File

@@ -5,7 +5,7 @@ set -uo pipefail
# This script contains large portions from following scripts:
# - https://github.com/freekmurze/dotfiles/blob/main/macos/set-defaults.sh
[[ "$(uname)" != "Darwin" ]] && echo "Not a macOS system" && exit 0
[ "$(uname)" != "Darwin" ] && echo "Not a macOS system" && exit 0
# shellcheck source=shared.sh
source "$DOTFILES/config/shared.sh"

View File

@@ -13,10 +13,10 @@ if ! command -v npm &> /dev/null; then
fi
packages=(
editorconfig-checker # Check files against .editorconfig rules
github-release-notes # Create release notes from tags and issues
neovim # Neovim node client
corepack # Node.js package manager version management
editorconfig-checker # Check files against .editorconfig rules
github-release-notes # Create release notes from tags and issues
neovim # Neovim node client
corepack # Node.js package manager version management
)
# Function to install npm packages
@@ -36,16 +36,14 @@ install_packages()
fi
echo ""
done
return 0
}
# Function to upgrade all global npm packages
upgrade_global_packages()
{
msgr run "Upgrading all global packages"
npm -g --no-progress --no-timing --no-fund outdated || true
npm -g --no-progress --no-timing --no-fund outdated
npm -g --no-timing --no-fund upgrade
return 0
}
# Function to clean npm cache
@@ -55,17 +53,14 @@ clean_npm_cache()
npm cache verify
npm cache clean --force
npm cache verify
return 0
}
# Install, upgrade, and clean npm packages
main()
{
install_packages
upgrade_global_packages
clean_npm_cache
msgr yay "npm package installations complete"
return 0
}
main "$@"

View File

@@ -43,18 +43,15 @@ install_ntfy()
mkdir -p ~/.config/ntfy
# Copy config only if it does not exist
if [[ ! -f "$HOME/.config/ntfy/client.yml" ]]; then
if [ ! -f "$HOME/.config/ntfy/client.yml" ]; then
cp "$tmpdir/${NTFY_DIR}/client/client.yml" ~/.config/ntfy/client.yml
fi
return 0
}
# Download and install ntfy
main()
{
install_ntfy
msgr "done" "ntfy installation complete"
return 0
}
main "$@"

View File

@@ -16,15 +16,14 @@ fi
# CLI tools — installed isolated with `uv tool install`
tools=(
ansible # IT automation and configuration management
openapi-python-client # Generate Python API clients from OpenAPI specs
ruff # Fast Python linter and formatter
ansible # IT automation and configuration management
openapi-python-client # Generate Python API clients from OpenAPI specs
)
# Library packages — installed into system Python with `uv pip install --system`
libraries=(
libtmux # Python API for tmux
pynvim # Neovim Python client
libtmux # Python API for tmux
pynvim # Neovim Python client
)
# Function to install CLI tools via uv tool install
@@ -41,7 +40,6 @@ install_tools()
uv tool install --upgrade "$pkg"
echo ""
done
return 0
}
# Function to install library packages via uv pip install
@@ -58,7 +56,6 @@ install_libraries()
uv pip install --system --upgrade "$pkg"
echo ""
done
return 0
}
# Function to upgrade all uv-managed tools
@@ -66,17 +63,14 @@ upgrade_tools()
{
msgr run "Upgrading all uv-managed tools"
uv tool upgrade --all
return 0
}
# Install Python tools, libraries, and upgrade all
main()
{
install_tools
install_libraries
upgrade_tools
msgr yay "Python package installations complete"
return 0
}
main "$@"

View File

@@ -3,11 +3,9 @@ set -euo pipefail
# @description Install XCode CLI Tools with osascript magic.
# Ismo Vuorinen <https://github.com/ivuorinen> 2018
#
# shellcheck source=../config/shared.sh
source "${DOTFILES}/config/shared.sh"
# Check if the script is running on macOS
if [[ "$(uname)" != "Darwin" ]]; then
if [ "$(uname)" != "Darwin" ]; then
msgr warn "Not a macOS system"
exit 0
fi
@@ -29,7 +27,6 @@ keep_alive_sudo()
sleep 60
kill -0 "$$" || exit
done 2> /dev/null &
return 0
}
XCODE_TOOLS_PATH="$(xcode-select -p)"
@@ -43,13 +40,12 @@ prompt_xcode_install()
'tell app "System Events" to display dialog "Please click install when Command Line Developer Tools appears"'
)"
if [[ "$XCODE_MESSAGE" = "button returned:OK" ]]; then
if [ "$XCODE_MESSAGE" = "button returned:OK" ]; then
xcode-select --install
else
msgr warn "You have cancelled the installation, please rerun the installer."
exit 1
fi
return 0
}
# Main function
@@ -57,17 +53,16 @@ main()
{
keep_alive_sudo
if [[ -x "$XCODE_SWIFT_PATH" ]]; then
if [ -x "$XCODE_SWIFT_PATH" ]; then
msgr run "You have swift from xcode-select. Continuing..."
else
prompt_xcode_install
fi
until [[ -f "$XCODE_SWIFT_PATH" ]]; do
until [ -f "$XCODE_SWIFT_PATH" ]; do
echo -n "."
sleep 1
done
return 0
}
main "$@"

View File

@@ -14,20 +14,18 @@ clone_z_repo()
local git_path=$1
local bin_path=$2
if [[ ! -d "$bin_path" ]]; then
if [ ! -d "$bin_path" ]; then
git clone "$git_path" "$bin_path"
msgr run_done "z installed at $bin_path"
else
msgr ok "z ($bin_path/) already installed"
fi
return 0
}
# Main function
main()
{
clone_z_repo "$Z_GIT_PATH" "$Z_BIN_PATH"
return 0
}
main "$@"

View File

@@ -5,7 +5,7 @@
: "${VERBOSE:=0}"
# Source the main shared config if not already loaded
if [[ -z "${SHARED_SCRIPTS_SOURCED:-}" ]]; then
if [ -z "${SHARED_SCRIPTS_SOURCED:-}" ]; then
source "${DOTFILES}/config/shared.sh"
export SHARED_SCRIPTS_SOURCED=1
fi

View File

@@ -3,7 +3,7 @@
set -euo pipefail
if [[ -x "node_modules/bats/bin/bats" ]]; then
if [ -x "node_modules/bats/bin/bats" ]; then
git ls-files '*.bats' -z | xargs -0 node_modules/bats/bin/bats
elif command -v npx > /dev/null; then
git ls-files '*.bats' -z | xargs -0 npx --yes bats

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env bats
setup()
{
setup() {
export DOTFILES="$PWD"
}

221
yarn.lock
View File

@@ -96,22 +96,6 @@ __metadata:
languageName: node
linkType: hard
"@isaacs/balanced-match@npm:^4.0.1":
version: 4.0.1
resolution: "@isaacs/balanced-match@npm:4.0.1"
checksum: 10c0/7da011805b259ec5c955f01cee903da72ad97c5e6f01ca96197267d3f33103d5b2f8a1af192140f3aa64526c593c8d098ae366c2b11f7f17645d12387c2fd420
languageName: node
linkType: hard
"@isaacs/brace-expansion@npm:^5.0.1":
version: 5.0.1
resolution: "@isaacs/brace-expansion@npm:5.0.1"
dependencies:
"@isaacs/balanced-match": "npm:^4.0.1"
checksum: 10c0/e5d67c7bbf1f17b88132a35bc638af306d48acbb72810d48fa6e6edd8ab375854773108e8bf70f021f7ef6a8273455a6d1f0c3b5aa2aff06ce7894049ab77fb8
languageName: node
linkType: hard
"@types/node@npm:^24.0.1":
version: 24.10.9
resolution: "@types/node@npm:24.10.9"
@@ -130,25 +114,6 @@ __metadata:
languageName: node
linkType: hard
"debug@npm:^4.3.4":
version: 4.4.3
resolution: "debug@npm:4.4.3"
dependencies:
ms: "npm:^2.1.3"
peerDependenciesMeta:
supports-color:
optional: true
checksum: 10c0/d79136ec6c83ecbefd0f6a5593da6a9c91ec4d7ddc4b54c883d6e71ec9accb5f67a1a5e96d00a328196b5b5c86d365e98d8a3a70856aaf16b4e7b1985e67f5a6
languageName: node
linkType: hard
"deep-is@npm:^0.1.3":
version: 0.1.4
resolution: "deep-is@npm:0.1.4"
checksum: 10c0/7f0ee496e0dff14a573dc6127f14c95061b448b87b995fc96c017ce0a1e66af1675e73f1d6064407975bc4ea6ab679497a29fff7b5b9c4e99cb10797c1ad0b4c
languageName: node
linkType: hard
"editorconfig-checker@npm:^6.1.0":
version: 6.1.1
resolution: "editorconfig-checker@npm:6.1.1"
@@ -159,49 +124,6 @@ __metadata:
languageName: node
linkType: hard
"fast-levenshtein@npm:^2.0.6":
version: 2.0.6
resolution: "fast-levenshtein@npm:2.0.6"
checksum: 10c0/111972b37338bcb88f7d9e2c5907862c280ebf4234433b95bc611e518d192ccb2d38119c4ac86e26b668d75f7f3894f4ff5c4982899afced7ca78633b08287c4
languageName: node
linkType: hard
"find-package-json@npm:^1.2.0":
version: 1.2.0
resolution: "find-package-json@npm:1.2.0"
checksum: 10c0/85d6c97afb9f8f0deb0d344a1c4eb8027347cf4d61666c28d3ac3f913e916684441218682b3dd6f8ad570e5d43c96a7db521f70183d70df559d07e1f99cdc635
languageName: node
linkType: hard
"fs-extra@npm:^11.1.1":
version: 11.3.3
resolution: "fs-extra@npm:11.3.3"
dependencies:
graceful-fs: "npm:^4.2.0"
jsonfile: "npm:^6.0.1"
universalify: "npm:^2.0.0"
checksum: 10c0/984924ff4104e3e9f351b658a864bf3b354b2c90429f57aec0acd12d92c4e6b762cbacacdffb4e745b280adce882e1f980c485d9f02c453f769ab4e7fc646ce3
languageName: node
linkType: hard
"glob@npm:^13.0.0":
version: 13.0.1
resolution: "glob@npm:13.0.1"
dependencies:
minimatch: "npm:^10.1.2"
minipass: "npm:^7.1.2"
path-scurry: "npm:^2.0.0"
checksum: 10c0/af7b863dec8dff74f61d7d6e53104e1f6bbdd482157a196cade8ed857481e876ec35181b38a059b2a7b93ea3b08248f4ff0792fef6dc91814fd5097a716f48e4
languageName: node
linkType: hard
"graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0":
version: 4.2.11
resolution: "graceful-fs@npm:4.2.11"
checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2
languageName: node
linkType: hard
"ivuorinen-dotfiles@workspace:.":
version: 0.0.0-use.local
resolution: "ivuorinen-dotfiles@workspace:."
@@ -210,139 +132,10 @@ __metadata:
"@types/node": "npm:^24.0.1"
bats: "npm:^1.12.0"
editorconfig-checker: "npm:^6.1.0"
markdown-table-formatter: "npm:^1.7.0"
prettier: "npm:^3.8.1"
typescript: "npm:^5.8.3"
languageName: unknown
linkType: soft
"jsonfile@npm:^6.0.1":
version: 6.2.0
resolution: "jsonfile@npm:6.2.0"
dependencies:
graceful-fs: "npm:^4.1.6"
universalify: "npm:^2.0.0"
dependenciesMeta:
graceful-fs:
optional: true
checksum: 10c0/7f4f43b08d1869ded8a6822213d13ae3b99d651151d77efd1557ced0889c466296a7d9684e397bd126acf5eb2cfcb605808c3e681d0fdccd2fe5a04b47e76c0d
languageName: node
linkType: hard
"levn@npm:^0.4.1":
version: 0.4.1
resolution: "levn@npm:0.4.1"
dependencies:
prelude-ls: "npm:^1.2.1"
type-check: "npm:~0.4.0"
checksum: 10c0/effb03cad7c89dfa5bd4f6989364bfc79994c2042ec5966cb9b95990e2edee5cd8969ddf42616a0373ac49fac1403437deaf6e9050fbbaa3546093a59b9ac94e
languageName: node
linkType: hard
"lru-cache@npm:^11.0.0":
version: 11.2.5
resolution: "lru-cache@npm:11.2.5"
checksum: 10c0/cc98958d25dddf1c8a8cbdc49588bd3b24450e8dfa78f32168fd188a20d4a0331c7406d0f3250c86a46619ee288056fd7a1195e8df56dc8a9592397f4fbd8e1d
languageName: node
linkType: hard
"markdown-table-formatter@npm:^1.7.0":
version: 1.7.0
resolution: "markdown-table-formatter@npm:1.7.0"
dependencies:
debug: "npm:^4.3.4"
find-package-json: "npm:^1.2.0"
fs-extra: "npm:^11.1.1"
glob: "npm:^13.0.0"
markdown-table-prettify: "npm:^3.6.0"
optionator: "npm:^0.9.4"
bin:
markdown-table-formatter: lib/index.js
checksum: 10c0/0f0d5eaec2c3bb9c60328ffbb4652305845def5387f4c87dd6e83559ef793961353af64ae44bce9cda3394469e419e046ae42fe7e9cafd47414b42deaa28f3b7
languageName: node
linkType: hard
"markdown-table-prettify@npm:^3.6.0":
version: 3.7.0
resolution: "markdown-table-prettify@npm:3.7.0"
bin:
markdown-table-prettify: cli/index.js
checksum: 10c0/f387b1ca81ceaa201bda2ce1db8e4d392a4d4ac3d7bb3173c7d9e3d9ca389e31d247eee2ccd2fa30f3132ae2447dc51285fb68636cdaf825633a43a499f41cd6
languageName: node
linkType: hard
"minimatch@npm:^10.1.2":
version: 10.1.2
resolution: "minimatch@npm:10.1.2"
dependencies:
"@isaacs/brace-expansion": "npm:^5.0.1"
checksum: 10c0/0cccef3622201703de6ecf9d772c0be1d5513dcc038ed9feb866c20cf798243e678ac35605dac3f1a054650c28037486713fe9e9a34b184b9097959114daf086
languageName: node
linkType: hard
"minipass@npm:^7.1.2":
version: 7.1.2
resolution: "minipass@npm:7.1.2"
checksum: 10c0/b0fd20bb9fb56e5fa9a8bfac539e8915ae07430a619e4b86ff71f5fc757ef3924b23b2c4230393af1eda647ed3d75739e4e0acb250a6b1eb277cf7f8fe449557
languageName: node
linkType: hard
"ms@npm:^2.1.3":
version: 2.1.3
resolution: "ms@npm:2.1.3"
checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48
languageName: node
linkType: hard
"optionator@npm:^0.9.4":
version: 0.9.4
resolution: "optionator@npm:0.9.4"
dependencies:
deep-is: "npm:^0.1.3"
fast-levenshtein: "npm:^2.0.6"
levn: "npm:^0.4.1"
prelude-ls: "npm:^1.2.1"
type-check: "npm:^0.4.0"
word-wrap: "npm:^1.2.5"
checksum: 10c0/4afb687a059ee65b61df74dfe87d8d6815cd6883cb8b3d5883a910df72d0f5d029821f37025e4bccf4048873dbdb09acc6d303d27b8f76b1a80dd5a7d5334675
languageName: node
linkType: hard
"path-scurry@npm:^2.0.0":
version: 2.0.1
resolution: "path-scurry@npm:2.0.1"
dependencies:
lru-cache: "npm:^11.0.0"
minipass: "npm:^7.1.2"
checksum: 10c0/2a16ed0e81fbc43513e245aa5763354e25e787dab0d539581a6c3f0f967461a159ed6236b2559de23aa5b88e7dc32b469b6c47568833dd142a4b24b4f5cd2620
languageName: node
linkType: hard
"prelude-ls@npm:^1.2.1":
version: 1.2.1
resolution: "prelude-ls@npm:1.2.1"
checksum: 10c0/b00d617431e7886c520a6f498a2e14c75ec58f6d93ba48c3b639cf241b54232d90daa05d83a9e9b9fef6baa63cb7e1e4602c2372fea5bc169668401eb127d0cd
languageName: node
linkType: hard
"prettier@npm:^3.8.1":
version: 3.8.1
resolution: "prettier@npm:3.8.1"
bin:
prettier: bin/prettier.cjs
checksum: 10c0/33169b594009e48f570471271be7eac7cdcf88a209eed39ac3b8d6d78984039bfa9132f82b7e6ba3b06711f3bfe0222a62a1bfb87c43f50c25a83df1b78a2c42
languageName: node
linkType: hard
"type-check@npm:^0.4.0, type-check@npm:~0.4.0":
version: 0.4.0
resolution: "type-check@npm:0.4.0"
dependencies:
prelude-ls: "npm:^1.2.1"
checksum: 10c0/7b3fd0ed43891e2080bf0c5c504b418fbb3e5c7b9708d3d015037ba2e6323a28152ec163bcb65212741fa5d2022e3075ac3c76440dbd344c9035f818e8ecee58
languageName: node
linkType: hard
"typescript@npm:^5.8.3":
version: 5.9.3
resolution: "typescript@npm:5.9.3"
@@ -369,17 +162,3 @@ __metadata:
checksum: 10c0/3033e2f2b5c9f1504bdc5934646cb54e37ecaca0f9249c983f7b1fc2e87c6d18399ebb05dc7fd5419e02b2e915f734d872a65da2e3eeed1813951c427d33cc9a
languageName: node
linkType: hard
"universalify@npm:^2.0.0":
version: 2.0.1
resolution: "universalify@npm:2.0.1"
checksum: 10c0/73e8ee3809041ca8b818efb141801a1004e3fc0002727f1531f4de613ea281b494a40909596dae4a042a4fb6cd385af5d4db2e137b1362e0e91384b828effd3a
languageName: node
linkType: hard
"word-wrap@npm:^1.2.5":
version: 1.2.5
resolution: "word-wrap@npm:1.2.5"
checksum: 10c0/e0e4a1ca27599c92a6ca4c32260e8a92e8a44f4ef6ef93f803f8ed823f486e0889fc0b93be4db59c8d51b3064951d25e43d434e95dc8c960cc3a63d65d00ba20
languageName: node
linkType: hard