Mirror of https://github.com/ivuorinen/nvm-auto-use.fish.git (synced 2026-02-10 13:48:58 +00:00)
feat: refactor plugin architecture, enhance linting, CI & tooling
- Major refactor of core Fish functions for modularity, caching, and error handling
- Improved `.editorconfig` and Makefile for stricter formatting and linting standards
- Expanded linting support: added EditorConfig checks, auto-install for missing tools, and Makefile targets
- Enhanced CI workflow with clearer permissions and job steps in GitHub Actions
- Updated documentation in `README.md` and `CLAUDE.md` to reflect new features, advanced developer tools, and contribution guidelines
- Improved Node.js version manager detection, switching, and installation logic
- Added/updated utility functions for configuration, silent mode, notifications, and version extraction
- Various bug fixes, code quality improvements, and expanded test coverage
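The new unit tests below exercise the configuration, caching, and security helpers referenced above. As a rough sketch of the surface those tests assume (the `nvm_cache` and `nvm_security` entry points and the `_nvm_auto_use_config_*` helpers are the names the test code calls; the keys, arguments, and comments here are illustrative only, not documented behaviour):

    # Illustrative sketch only — commands as exercised by the unit tests in this commit
    _nvm_auto_use_config_silent on           # enable silent mode (sets $_nvm_auto_use_silent)
    _nvm_auto_use_config_debounce 1000       # set the debounce window in milliseconds
    nvm_cache set node_version 18.17.0       # store a value in the cache (hypothetical key)
    nvm_cache get node_version 300           # read it back with a 300-second TTL
    nvm_security check_version "18.17.0"     # validate a version string before switching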
tests/unit/test_async_helpers.fish (new file, 109 lines)
@@ -0,0 +1,109 @@
#!/usr/bin/env fish
# Unit tests for nvm_async helper functions

source tests/test_runner.fish

function test_async_version_check
    echo "Testing _nvm_async_version_check..."

    # Create a test version file
    echo "18.17.0" >async_test.nvmrc

    # Should return job id (background job)
    set -l job_id (_nvm_async_version_check "async_test.nvmrc")
    if test -n "$job_id"
        echo "✅ _nvm_async_version_check started job $job_id"
    else
        echo "❌ _nvm_async_version_check did not start a job"
        return 1
    end

    # Wait for job completion
    _nvm_async_wait "$job_id" 5
    and echo "✅ Async job completed"
    or echo "⚠️ Async job timed out"

    rm -f async_test.nvmrc
    return 0
end

function test_async_manager_check
    echo "Testing _nvm_async_manager_check..."

    # Should return job id (background job)
    set -l job_id (_nvm_async_manager_check "nvm")
    if test -n "$job_id"
        echo "✅ _nvm_async_manager_check started job $job_id"
    else
        echo "❌ _nvm_async_manager_check did not start a job"
        return 1
    end

    # Wait for job completion
    _nvm_async_wait "$job_id" 5
    and echo "✅ Async manager check job completed"
    or echo "⚠️ Async manager check job timed out"

    return 0
end

function test_async_cleanup
    echo "Testing _nvm_async_cleanup..."

    # Start a dummy background job
    sleep 2 &
    set -l job_id (jobs -l | tail -n 1 | grep -o '[0-9]*')
    if test -n "$job_id"
        echo "✅ Dummy job started: $job_id"
    else
        echo "❌ Failed to start dummy job"
        return 1
    end

    # Cleanup should not error
    _nvm_async_cleanup
    echo "✅ _nvm_async_cleanup executed"

    return 0
end

function test_async_wait
    echo "Testing _nvm_async_wait..."

    # Start a quick background job
    sleep 1 &
    set -l job_id (jobs -l | tail -n 1 | grep -o '[0-9]*')
    if test -n "$job_id"
        _nvm_async_wait "$job_id" 3
        and echo "✅ _nvm_async_wait completed for job $job_id"
        or echo "⚠️ _nvm_async_wait timed out for job $job_id"
    else
        echo "❌ Failed to start background job for wait test"
        return 1
    end

    return 0
end

function main
    setup_test_env

    set -l failed 0

    test_async_version_check; or set failed (math "$failed + 1")
    test_async_manager_check; or set failed (math "$failed + 1")
    test_async_cleanup; or set failed (math "$failed + 1")
    test_async_wait; or set failed (math "$failed + 1")

    cleanup_test_env

    if test $failed -eq 0
        echo "🎉 All async helper tests passed!"
        return 0
    else
        echo "💥 $failed async helper test(s) failed"
        return 1
    end
end

main
tests/unit/test_auto_use_config_helpers.fish (new file, 109 lines)
@@ -0,0 +1,109 @@
#!/usr/bin/env fish
# Unit tests for nvm_auto_use_config helper functions

source tests/test_runner.fish

function test_config_show
    echo "Testing _nvm_auto_use_config_show..."

    # Should print config summary (no error)
    _nvm_auto_use_config_show
    and echo "✅ Config show prints summary"
    or echo "❌ Config show failed"
end

function test_config_auto_install
    echo "Testing _nvm_auto_use_config_auto_install..."

    _nvm_auto_use_config_auto_install on
    test -z "$_nvm_auto_use_no_install"
    and echo "✅ Auto-install enabled"
    or echo "❌ Auto-install enable failed"

    _nvm_auto_use_config_auto_install off
    test -n "$_nvm_auto_use_no_install"
    and echo "✅ Auto-install disabled"
    or echo "❌ Auto-install disable failed"
end

function test_config_silent
    echo "Testing _nvm_auto_use_config_silent..."

    _nvm_auto_use_config_silent on
    test -n "$_nvm_auto_use_silent"
    and echo "✅ Silent mode enabled"
    or echo "❌ Silent mode enable failed"

    _nvm_auto_use_config_silent off
    test -z "$_nvm_auto_use_silent"
    and echo "✅ Silent mode disabled"
    or echo "❌ Silent mode disable failed"
end

function test_config_debounce
    echo "Testing _nvm_auto_use_config_debounce..."

    _nvm_auto_use_config_debounce 1234
    assert_equals "$_nvm_auto_use_debounce_ms" 1234 "Debounce set correctly"

    _nvm_auto_use_config_debounce ""
    assert_equals "$_nvm_auto_use_debounce_ms" 1234 "Debounce unchanged on invalid input"
end

function test_config_exclude_include
    echo "Testing _nvm_auto_use_config_exclude and _nvm_auto_use_config_include..."

    set -e _nvm_auto_use_excluded_dirs
    _nvm_auto_use_config_exclude testdir
    assert_contains "$_nvm_auto_use_excluded_dirs" testdir "Exclude added"

    _nvm_auto_use_config_include testdir
    assert_not_equals "$_nvm_auto_use_excluded_dirs" testdir "Exclude removed"
end

function test_config_manager
    echo "Testing _nvm_auto_use_config_manager..."

    _nvm_auto_use_config_manager nvm
    assert_equals "$_nvm_auto_use_preferred_manager" nvm "Manager set to nvm"

    _nvm_auto_use_config_manager ""
    test -z "$_nvm_auto_use_preferred_manager"
    and echo "✅ Manager reset to auto-detect"
    or echo "❌ Manager reset failed"

    _nvm_auto_use_config_manager invalid
    assert_not_equals "$_nvm_auto_use_preferred_manager" invalid "Invalid manager not set"
end

function test_config_reset
    echo "Testing _nvm_auto_use_config_reset..."

    set -g _nvm_auto_use_no_install 1
    set -g _nvm_auto_use_silent 1
    set -g _nvm_auto_use_debounce_ms 999
    set -g _nvm_auto_use_excluded_dirs foo
    set -g _nvm_auto_use_preferred_manager nvm

    _nvm_auto_use_config_reset

    test -z "$_nvm_auto_use_no_install"
    and test -z "$_nvm_auto_use_silent"
    and test -z "$_nvm_auto_use_debounce_ms"
    and test -z "$_nvm_auto_use_excluded_dirs"
    and test -z "$_nvm_auto_use_preferred_manager"
    and echo "✅ Config reset works"
    or echo "❌ Config reset failed"
end

function main
    test_config_show
    test_config_auto_install
    test_config_silent
    test_config_debounce
    test_config_exclude_include
    test_config_manager
    test_config_reset
end

main
tests/unit/test_auto_use_helpers.fish (new file, 134 lines)
@@ -0,0 +1,134 @@
#!/usr/bin/env fish
# Unit tests for nvm_auto_use helper functions

source tests/test_runner.fish

function test_select_manager
    echo "Testing _nvm_auto_use_select_manager..."

    # Mock nvm_compat_detect to return a list
    function nvm_compat_detect
        echo "nvm fnm volta"
    end

    set -e _nvm_auto_use_preferred_manager
    set -l manager (_nvm_auto_use_select_manager)
    assert_equals "$manager" nvm "Default manager selection returns first available"

    set -g _nvm_auto_use_preferred_manager volta
    set manager (_nvm_auto_use_select_manager)
    assert_equals "$manager" volta "Preferred manager selection works"

    set -e _nvm_auto_use_preferred_manager
    functions -e nvm_compat_detect
end

function test_should_debounce
    echo "Testing _nvm_auto_use_should_debounce..."

    set -e _nvm_auto_use_last_change
    set -g _nvm_auto_use_debounce_ms 1000

    # First call should set last_change and return 1 (not debounced)
    set result (_nvm_auto_use_should_debounce)
    assert_equals "$result" "" "First call not debounced"

    # Second call within debounce period should return 0 (debounced)
    set result (_nvm_auto_use_should_debounce)
    assert_equals "$result" "" "Second call debounced"

    set -e _nvm_auto_use_last_change
    set -e _nvm_auto_use_debounce_ms
end

function test_is_excluded_dir
    echo "Testing _nvm_auto_use_is_excluded_dir..."

    set -g _nvm_auto_use_excluded_dirs testdir
    set -l orig_pwd (pwd)
    cd /
    mkdir -p testdir
    cd testdir

    set result (_nvm_auto_use_is_excluded_dir)
    assert_equals "$result" "" "Excluded directory detected"

    cd "$orig_pwd"
    set -e _nvm_auto_use_excluded_dirs
end

function test_get_mtime
    echo "Testing _nvm_auto_use_get_mtime..."

    echo test >testfile
    set mtime (_nvm_auto_use_get_mtime "testfile")
    test -n "$mtime"
    and echo "✅ mtime returned: $mtime"
    or echo "❌ mtime not returned"

    rm -f testfile
end

function test_is_cache_valid
    echo "Testing _nvm_auto_use_is_cache_valid..."

    set -g _nvm_auto_use_cached_file foo
    set -g _nvm_auto_use_cached_mtime 123
    set result (_nvm_auto_use_is_cache_valid "foo" "123")
    assert_equals "$result" "" "Cache valid returns 0"

    set result (_nvm_auto_use_is_cache_valid "bar" "123")
    assert_equals "$result" "" "Cache invalid returns 1"

    set -e _nvm_auto_use_cached_file
    set -e _nvm_auto_use_cached_mtime
end

function test_clear_cache
    echo "Testing _nvm_auto_use_clear_cache..."

    set -g _nvm_auto_use_cached_file foo
    set -g _nvm_auto_use_cached_version bar
    set -g _nvm_auto_use_cached_mtime baz
    _nvm_auto_use_clear_cache
    if not set -q _nvm_auto_use_cached_file
        echo "✅ Cached file cleared"
    else
        echo "❌ Cached file not cleared"
    end
    if not set -q _nvm_auto_use_cached_version
        echo "✅ Cached version cleared"
    else
        echo "❌ Cached version not cleared"
    end
    if not set -q _nvm_auto_use_cached_mtime
        echo "✅ Cached mtime cleared"
    else
        echo "❌ Cached mtime not cleared"
    end
end

function main
    setup_test_env

    set -l failed 0

    test_select_manager; or set failed (math "$failed + 1")
    test_should_debounce; or set failed (math "$failed + 1")
    test_is_excluded_dir; or set failed (math "$failed + 1")
    test_get_mtime; or set failed (math "$failed + 1")
    test_is_cache_valid; or set failed (math "$failed + 1")
    test_clear_cache; or set failed (math "$failed + 1")

    cleanup_test_env

    if test $failed -eq 0
        echo "🎉 All nvm_auto_use helper tests passed!"
        return 0
    else
        echo "💥 $failed helper test(s) failed"
        return 1
    end
end

main
tests/unit/test_cache.fish (new executable file, 98 lines)
@@ -0,0 +1,98 @@
#!/usr/bin/env fish
# Unit tests for nvm_cache.fish

source tests/test_runner.fish

function test_cache_basic_operations
    echo "Testing basic cache operations..."

    # Test set and get
    nvm_cache set test_key test_value
    set -l result (nvm_cache get "test_key")
    assert_equals "$result" test_value "Cache set and get works"

    # Test delete
    nvm_cache delete test_key
    nvm_cache get test_key
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Cache delete works"
    or echo "❌ Cache delete failed"

    return 0
end

function test_cache_ttl
    echo "Testing cache TTL..."

    # Set with short TTL
    nvm_cache set ttl_key ttl_value

    # Should exist immediately
    set -l result (nvm_cache get "ttl_key" 10)
    assert_equals "$result" ttl_value "Cache value exists within TTL"

    # Mock expired cache by setting TTL to 0
    set -l result (nvm_cache get "ttl_key" 0)
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Cache TTL expiration works"
    or echo "❌ Cache TTL expiration failed"

    return 0
end

function test_cache_stats
    echo "Testing cache stats..."

    # Clear cache first
    nvm_cache clear

    # Add some items
    nvm_cache set stats_key1 value1
    nvm_cache set stats_key2 value2

    # Get stats
    set -l stats (nvm_cache stats)
    assert_contains "$stats" "Cache files: 2" "Cache stats shows correct file count"

    return 0
end

function test_cache_key_generation
    echo "Testing cache key generation..."

    # Test directory-based key
    set -l key1 (_nvm_cache_key "test_file.txt")
    set -l key2 (_nvm_cache_key "test_file.txt")
    assert_equals "$key1" "$key2" "Same file generates same cache key"

    # Test different files generate different keys
    set -l key3 (_nvm_cache_key "different_file.txt")
    assert_not_equals "$key1" "$key3" "Different files generate different cache keys"

    return 0
end

function main
    setup_test_env

    set -l failed 0

    test_cache_basic_operations; or set failed (math "$failed + 1")
    test_cache_ttl; or set failed (math "$failed + 1")
    test_cache_stats; or set failed (math "$failed + 1")
    test_cache_key_generation; or set failed (math "$failed + 1")

    cleanup_test_env

    if test $failed -eq 0
        echo "🎉 All cache tests passed!"
        return 0
    else
        echo "💥 $failed cache test(s) failed"
        return 1
    end
end

main
tests/unit/test_cache_helpers.fish (new file, 99 lines)
@@ -0,0 +1,99 @@
#!/usr/bin/env fish
# Unit tests for nvm_cache helper functions

source tests/test_runner.fish

function test_nvm_cache_get_set_delete
    echo "Testing _nvm_cache_set, _nvm_cache_get, and _nvm_cache_delete..."

    set -l key test_key
    set -l value test_value

    # Set cache value
    _nvm_cache_set $key $value
    set -l result (_nvm_cache_get $key 300)
    assert_equals "$result" "$value" "Cache set and get returns correct value"

    # Delete cache value
    _nvm_cache_delete $key
    set -l result (_nvm_cache_get $key 300)
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Cache delete works"
    or echo "❌ Cache delete failed"

    return 0
end

function test_nvm_cache_clear_and_stats
    echo "Testing _nvm_cache_clear and _nvm_cache_stats..."

    # Set multiple cache values
    _nvm_cache_set key1 value1
    _nvm_cache_set key2 value2

    # Stats should show at least 2 files
    set -l stats (_nvm_cache_stats)
    assert_contains "$stats" "Cache files:" "Cache stats reports file count"

    # Clear cache
    _nvm_cache_clear
    set -l stats_after (_nvm_cache_stats)
    assert_contains "$stats_after" "Cache files: 0" "Cache clear removes all files"

    return 0
end

function test_nvm_cache_ttl
    echo "Testing _nvm_cache_get TTL expiration..."

    set -l key ttl_key
    set -l value ttl_value

    _nvm_cache_set $key $value

    # Should exist immediately
    set -l result (_nvm_cache_get $key 10)
    assert_equals "$result" "$value" "Cache value exists within TTL"

    # Simulate expired cache by setting TTL to 0
    set -l result (_nvm_cache_get $key 0)
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Cache TTL expiration works"
    or echo "❌ Cache TTL expiration failed"

    _nvm_cache_delete $key

    return 0
end

function test_nvm_cache_dir
    echo "Testing _nvm_cache_dir returns a valid directory..."

    set -l dir (_nvm_cache_dir)
    test -n "$dir"
    and echo "✅ _nvm_cache_dir returns: $dir"
    or echo "❌ _nvm_cache_dir did not return a directory"

    return 0
end

function main
    set -l failed 0

    test_nvm_cache_get_set_delete; or set failed (math "$failed + 1")
    test_nvm_cache_clear_and_stats; or set failed (math "$failed + 1")
    test_nvm_cache_ttl; or set failed (math "$failed + 1")
    test_nvm_cache_dir; or set failed (math "$failed + 1")

    if test $failed -eq 0
        echo "🎉 All nvm_cache helper tests passed!"
        return 0
    else
        echo "💥 $failed nvm_cache helper test(s) failed"
        return 1
    end
end

main
tests/unit/test_security.fish (new executable file, 154 lines)
@@ -0,0 +1,154 @@
#!/usr/bin/env fish
# Unit tests for nvm_security.fish

source tests/test_runner.fish

function test_version_validation
    echo "Testing version validation..."

    # Valid versions
    nvm_security check_version "18.17.0"
    and echo "✅ Valid semver accepted"
    or echo "❌ Valid semver rejected"

    nvm_security check_version "v20.5.1"
    and echo "✅ Version with 'v' prefix accepted"
    or echo "❌ Version with 'v' prefix rejected"

    # Invalid versions
    nvm_security check_version "invalid.version"
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Invalid version rejected"
    or echo "❌ Invalid version accepted"

    # Suspicious characters
    nvm_security check_version "18.0.0; touch /tmp/nvm-auto-use-malicious-test"
    set status_code $status
    test $status_code -ne 0
    and echo "✅ Malicious version string rejected"
    or echo "❌ Malicious version string accepted"

    return 0
end

function test_security_policies
    echo "Testing security policies..."

    # Set minimum version policy
    nvm_security policy set min_version "16.0.0"
    set -l min_version (nvm_security policy get min_version)
    assert_equals "$min_version" "16.0.0" "Minimum version policy set correctly"

    # Test version below minimum
    nvm_security check_version "14.0.0"
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Version below minimum rejected"
    or echo "❌ Version below minimum accepted"

    # Set maximum version policy
    nvm_security policy set max_version "20.0.0"
    set -l max_version (nvm_security policy get max_version)
    assert_equals "$max_version" "20.0.0" "Maximum version policy set correctly"

    # Test version above maximum
    nvm_security check_version "21.0.0"
    set status_code $status
    test $status_code -ne 0
    and echo "✅ Version above maximum rejected"
    or echo "❌ Version above maximum accepted"

    # Reset policies
    nvm_security policy reset

    return 0
end

function test_version_comparison
    echo "Testing version comparison..."

    # Test less than
    _nvm_security_version_compare "16.0.0" "18.0.0" -lt
    and echo "✅ Version comparison (less than) works"
    or echo "❌ Version comparison (less than) failed"

    # Test greater than
    _nvm_security_version_compare "20.0.0" "18.0.0" -gt
    and echo "✅ Version comparison (greater than) works"
    or echo "❌ Version comparison (greater than) failed"

    # Test equal
    _nvm_security_version_compare "18.17.0" "18.17.0" -eq
    and echo "✅ Version comparison (equal) works"
    or echo "❌ Version comparison (equal) failed"

    return 0
end

function test_source_validation
    echo "Testing source file validation..."

    # Create test files
    echo "18.17.0" >test_nvmrc
    echo "18.0.0; touch /tmp/nvm-auto-use-malicious-test" >malicious_nvmrc

    # Test valid source
    nvm_security validate_source test_nvmrc
    and echo "✅ Valid source file accepted"
    or echo "❌ Valid source file rejected"

    # Test malicious source
    nvm_security validate_source malicious_nvmrc
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Malicious source file rejected"
    or echo "❌ Malicious source file accepted"

    # Cleanup
    rm -f test_nvmrc malicious_nvmrc

    return 0
end

function test_vulnerability_check
    echo "Testing vulnerability checking..."

    # Test known vulnerable version (if any in our test data)
    nvm_security check_cve "16.0.0"
    set -l status_code $status
    test $status_code -ne 0
    and echo "✅ Known vulnerable version flagged"
    or echo "ℹ️ No vulnerability data for test version"

    # Test presumably safe version
    nvm_security check_cve "18.17.0"
    and echo "✅ Safe version check completed"
    or echo "ℹ️ Vulnerability check completed with warnings"

    return 0
end

function main
    setup_test_env

    set -l failed 0

    test_version_validation; or set failed (math "$failed + 1")
    test_security_policies; or set failed (math "$failed + 1")
    test_version_comparison; or set failed (math "$failed + 1")
    test_source_validation; or set failed (math "$failed + 1")
    test_vulnerability_check; or set failed (math "$failed + 1")

    cleanup_test_env

    if test $failed -eq 0
        echo "🎉 All security tests passed!"
        return 0
    else
        echo "💥 $failed security test(s) failed"
        return 1
    end
end

main