loki-mode 6.20.0 → 6.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/SKILL.md +2 -2
- package/VERSION +1 -1
- package/autonomy/loki +854 -0
- package/completions/_loki +1 -0
- package/completions/loki.bash +17 -1
- package/dashboard/__init__.py +1 -1
- package/docs/INSTALLATION.md +1 -1
- package/mcp/__init__.py +1 -1
- package/package.json +1 -1
package/SKILL.md
CHANGED
|
@@ -3,7 +3,7 @@ name: loki-mode
|
|
|
3
3
|
description: Multi-agent autonomous startup system. Triggers on "Loki Mode". Takes PRD to deployed product with minimal human intervention. Requires --dangerously-skip-permissions flag.
|
|
4
4
|
---
|
|
5
5
|
|
|
6
|
-
# Loki Mode v6.
|
|
6
|
+
# Loki Mode v6.21.0
|
|
7
7
|
|
|
8
8
|
**You are an autonomous agent. You make decisions. You do not ask questions. You do not stop.**
|
|
9
9
|
|
|
@@ -267,4 +267,4 @@ The following features are documented in skill modules but not yet fully automat
|
|
|
267
267
|
| Quality gates 3-reviewer system | Implemented (v5.35.0) | 5 specialist reviewers in `skills/quality-gates.md`; execution in run.sh |
|
|
268
268
|
| Benchmarks (HumanEval, SWE-bench) | Infrastructure only | Runner scripts and datasets exist in `benchmarks/`; no published results |
|
|
269
269
|
|
|
270
|
-
**v6.
|
|
270
|
+
**v6.21.0 | [Autonomi](https://www.autonomi.dev/) flagship product | ~260 lines core**
|
package/VERSION
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
6.
|
|
1
|
+
6.21.0
|
package/autonomy/loki
CHANGED
|
@@ -436,6 +436,7 @@ show_help() {
|
|
|
436
436
|
echo " remote [PRD] Start remote session (connect from phone/browser, Claude Pro/Max)"
|
|
437
437
|
echo " trigger Event-driven autonomous execution (schedules, webhooks)"
|
|
438
438
|
echo " failover [cmd] Cross-provider auto-failover (status|--enable|--test|--chain)"
|
|
439
|
+
echo " onboard [path] Analyze a repo and generate CLAUDE.md (structure, conventions, commands)"
|
|
439
440
|
echo " plan <PRD> Dry-run PRD analysis: complexity, cost, and execution plan"
|
|
440
441
|
echo " version Show version"
|
|
441
442
|
echo " help Show this help"
|
|
@@ -9189,6 +9190,9 @@ main() {
|
|
|
9189
9190
|
failover)
|
|
9190
9191
|
cmd_failover "$@"
|
|
9191
9192
|
;;
|
|
9193
|
+
onboard)
|
|
9194
|
+
cmd_onboard "$@"
|
|
9195
|
+
;;
|
|
9192
9196
|
version|--version|-v)
|
|
9193
9197
|
cmd_version
|
|
9194
9198
|
;;
|
|
@@ -13827,4 +13831,854 @@ $diff"
|
|
|
13827
13831
|
esac
|
|
13828
13832
|
}
|
|
13829
13833
|
|
|
13834
|
+
# Project onboarding - analyze repo and generate CLAUDE.md (v6.21.0)
#
# cmd_onboard: inspect a repository and generate a project summary
# (CLAUDE.md by default) covering languages, frameworks, build/test
# tooling, CI systems, key files and common commands; at --depth 2 it
# also lists exported symbols, at --depth 3 import relationships.
#
# Usage:    cmd_onboard [path] [--depth N] [--format markdown|json|yaml]
#                       [--output PATH] [--stdout] [--update]
# Globals:  BOLD, NC (read; --help styling); log_info / log_error
#           (called; defined elsewhere in this script).
# Outputs:  report written to <repo>/.claude/CLAUDE.md (or --output),
#           or to stdout with --stdout (progress then goes to stderr so
#           stdout stays clean for the report).
# Returns:  0 on success, 1 on invalid arguments or missing directory.
cmd_onboard() {
    local target_path="."
    local depth=2
    local format="markdown"
    local output_path=""
    local use_stdout=false
    local update_mode=false

    # --- Parse arguments ---
    while [[ $# -gt 0 ]]; do
        case "$1" in
            --depth)
                depth="${2:-2}"
                # BUGFIX: a bare "shift 2" shifts NOTHING when the option
                # value is missing, leaving "$1" unchanged and spinning
                # this loop forever (e.g. "loki onboard --depth").
                shift; [[ $# -gt 0 ]] && shift
                ;;
            --format)
                format="${2:-markdown}"
                shift; [[ $# -gt 0 ]] && shift
                ;;
            --output)
                output_path="${2:-}"
                shift; [[ $# -gt 0 ]] && shift
                ;;
            --stdout)
                use_stdout=true
                shift
                ;;
            --update)
                update_mode=true
                shift
                ;;
            --help|-h)
                echo -e "${BOLD}loki onboard${NC} - Analyze a project and generate CLAUDE.md"
                echo ""
                echo "Usage: loki onboard [path] [options]"
                echo ""
                echo "Arguments:"
                echo "  path              Path to repository (default: current directory)"
                echo ""
                echo "Options:"
                echo "  --depth N         Analysis depth: 1=surface, 2=moderate, 3=deep (default: 2)"
                echo "  --format FORMAT   Output format: markdown, json, yaml (default: markdown)"
                echo "  --output PATH     Custom output file path"
                echo "  --stdout          Print to stdout instead of writing file"
                echo "  --update          Update existing CLAUDE.md with new findings"
                echo "  --help            Show this help"
                echo ""
                echo "Examples:"
                echo "  loki onboard                  # Analyze current directory"
                echo "  loki onboard ~/projects/myapp # Analyze specific repo"
                echo "  loki onboard --depth 3        # Deep analysis with dependency mapping"
                echo "  loki onboard --format json    # JSON output"
                echo "  loki onboard --stdout         # Print to terminal"
                return 0
                ;;
            -*)
                log_error "Unknown option: $1"
                echo "Run 'loki onboard --help' for usage."
                return 1
                ;;
            *)
                target_path="$1"
                shift
                ;;
        esac
    done

    # BUGFIX: a non-numeric --depth previously leaked into the numeric
    # "-ge" comparisons below and produced "integer expression expected"
    # errors mid-run; reject it up front instead.
    if ! [[ "$depth" =~ ^[0-9]+$ ]]; then
        log_error "Invalid --depth '$depth' (expected a number: 1, 2 or 3)"
        return 1
    fi

    # --- Validate and canonicalize target path ---
    if [ ! -d "$target_path" ]; then
        log_error "Directory not found: $target_path"
        return 1
    fi
    target_path="$(cd "$target_path" && pwd)"

    # When --stdout, send log messages to stderr to keep stdout clean
    if [ "$use_stdout" = true ]; then
        log_info "Analyzing project at: $target_path" >&2
        log_info "Depth: $depth | Format: $format" >&2
    else
        log_info "Analyzing project at: $target_path"
        log_info "Depth: $depth | Format: $format"
    fi

    # --- Detect project metadata ---
    local project_name
    project_name="$(basename "$target_path")"

    local languages=""
    local frameworks=""
    local build_system=""
    local test_framework=""
    local entry_points=""
    local package_manager=""
    local project_description=""
    local project_version=""
    local config_files=""
    local dep=""

    local pkg_json="$target_path/package.json"
    if [ -f "$pkg_json" ]; then
        config_files="$config_files package.json"
        languages="$languages JavaScript/TypeScript"
        package_manager="npm"
        if [ -f "$target_path/yarn.lock" ]; then
            package_manager="yarn"
        elif [ -f "$target_path/pnpm-lock.yaml" ]; then
            package_manager="pnpm"
        elif [ -f "$target_path/bun.lockb" ]; then
            package_manager="bun"
        fi
        # Extract metadata from package.json.
        # BUGFIX: the file path is passed as argv (sys.argv[1]) instead of
        # being interpolated into the python source, so a path containing a
        # quote or backslash can no longer break (or inject into) the snippet.
        if command -v python3 &>/dev/null; then
            local pkg_name
            pkg_name=$(python3 -c "
import json, sys
try:
    print(json.load(open(sys.argv[1])).get('name', ''))
except Exception: pass
" "$pkg_json" 2>/dev/null || true)
            if [ -n "$pkg_name" ]; then
                project_name="$pkg_name"
            fi
            project_description=$(python3 -c "
import json, sys
try:
    print(json.load(open(sys.argv[1])).get('description', ''))
except Exception: pass
" "$pkg_json" 2>/dev/null || true)
            project_version=$(python3 -c "
import json, sys
try:
    print(json.load(open(sys.argv[1])).get('version', ''))
except Exception: pass
" "$pkg_json" 2>/dev/null || true)
            entry_points=$(python3 -c "
import json, sys
try:
    d = json.load(open(sys.argv[1]))
    main = d.get('main', '')
    if main: print(main)
    scripts = d.get('scripts', {})
    if 'start' in scripts: print('scripts.start: ' + scripts['start'])
except Exception: pass
" "$pkg_json" 2>/dev/null || true)
        fi
        # Detect frameworks from dependencies (crude substring match on
        # the quoted package name anywhere in package.json).
        for dep in "react:React" "next:Next.js" "vue:Vue" "express:Express" "fastify:Fastify" "svelte:Svelte"; do
            if grep -q "\"${dep%%:*}\"" "$pkg_json" 2>/dev/null; then
                frameworks="$frameworks ${dep#*:}"
            fi
        done
        # Detect test frameworks the same way.
        for dep in jest vitest mocha playwright; do
            if grep -q "\"$dep\"" "$pkg_json" 2>/dev/null; then
                test_framework="$test_framework $dep"
            fi
        done
    fi

    if [ -f "$target_path/pyproject.toml" ]; then
        config_files="$config_files pyproject.toml"
        languages="$languages Python"
        package_manager="pip/poetry"
        for dep in "django:Django" "flask:Flask" "fastapi:FastAPI"; do
            if grep -q "${dep%%:*}" "$target_path/pyproject.toml" 2>/dev/null; then
                frameworks="$frameworks ${dep#*:}"
            fi
        done
        if grep -q "pytest" "$target_path/pyproject.toml" 2>/dev/null; then
            test_framework="$test_framework pytest"
        fi
    fi

    if [ -f "$target_path/setup.py" ] || [ -f "$target_path/setup.cfg" ]; then
        config_files="$config_files setup.py"
        languages="$languages Python"
        if [ -z "$package_manager" ]; then
            package_manager="pip"
        fi
    fi

    if [ -f "$target_path/requirements.txt" ]; then
        config_files="$config_files requirements.txt"
        languages="$languages Python"
        if [ -z "$package_manager" ]; then
            package_manager="pip"
        fi
    fi

    if [ -f "$target_path/Cargo.toml" ]; then
        config_files="$config_files Cargo.toml"
        languages="$languages Rust"
        package_manager="cargo"
        build_system="cargo"
        test_framework="$test_framework cargo-test"
    fi

    if [ -f "$target_path/go.mod" ]; then
        config_files="$config_files go.mod"
        languages="$languages Go"
        package_manager="go-modules"
        build_system="go"
        test_framework="$test_framework go-test"
    fi

    if [ -f "$target_path/Gemfile" ]; then
        config_files="$config_files Gemfile"
        languages="$languages Ruby"
        package_manager="bundler"
        if grep -q "rails" "$target_path/Gemfile" 2>/dev/null; then
            frameworks="$frameworks Rails"
        fi
        if grep -q "rspec" "$target_path/Gemfile" 2>/dev/null; then
            test_framework="$test_framework rspec"
        fi
    fi

    if [ -f "$target_path/pom.xml" ]; then
        config_files="$config_files pom.xml"
        languages="$languages Java"
        build_system="maven"
        package_manager="maven"
    fi

    if [ -f "$target_path/build.gradle" ] || [ -f "$target_path/build.gradle.kts" ]; then
        config_files="$config_files build.gradle"
        languages="$languages Java/Kotlin"
        build_system="gradle"
        package_manager="gradle"
    fi

    if [ -f "$target_path/Makefile" ]; then
        config_files="$config_files Makefile"
        if [ -z "$build_system" ]; then
            build_system="make"
        fi
    fi

    if [ -f "$target_path/CMakeLists.txt" ]; then
        config_files="$config_files CMakeLists.txt"
        languages="$languages C/C++"
        build_system="cmake"
    fi

    # Detect shell scripts near the repo root.
    local shell_count
    shell_count=$(find "$target_path" -maxdepth 2 -name "*.sh" -type f 2>/dev/null | wc -l | tr -d ' ')
    if [ "${shell_count:-0}" -gt 0 ]; then
        languages="$languages Bash"
    fi

    # Deduplicate (space-separated, no embedded spaces per item by design).
    languages=$(echo "$languages" | tr ' ' '\n' | sort -u | tr '\n' ' ' | sed 's/^ *//;s/ *$//')
    frameworks=$(echo "$frameworks" | tr ' ' '\n' | sort -u | tr '\n' ' ' | sed 's/^ *//;s/ *$//')
    test_framework=$(echo "$test_framework" | tr ' ' '\n' | sort -u | tr '\n' ' ' | sed 's/^ *//;s/ *$//')

    # --- Detect CI/CD ---
    local ci_system=""
    if [ -d "$target_path/.github/workflows" ]; then
        ci_system="GitHub Actions"
    fi
    if [ -f "$target_path/.gitlab-ci.yml" ]; then
        ci_system="$ci_system GitLab CI"
    fi
    if [ -f "$target_path/Jenkinsfile" ]; then
        ci_system="$ci_system Jenkins"
    fi
    if [ -f "$target_path/.circleci/config.yml" ]; then
        ci_system="$ci_system CircleCI"
    fi
    if [ -f "$target_path/.travis.yml" ]; then
        ci_system="$ci_system Travis CI"
    fi

    # --- Read README ---
    local readme_content=""
    local readme_file=""
    local f
    for f in README.md readme.md README.rst README README.txt; do
        if [ -f "$target_path/$f" ]; then
            readme_file="$f"
            readme_content=$(head -50 "$target_path/$f" 2>/dev/null || true)
            break
        fi
    done

    # Fallback description: first README line that is not a heading,
    # blank, link-reference or image.
    if [ -z "$project_description" ] && [ -n "$readme_content" ]; then
        project_description=$(echo "$readme_content" | grep -v '^#' | grep -v '^$' | grep -v '^\[' | grep -v '^!' | head -1 | sed 's/^ *//')
    fi

    # --- Build directory tree ---
    local tree_output=""
    if command -v git &>/dev/null && [ -d "$target_path/.git" ]; then
        # git ls-files respects .gitignore, giving an accurate tree.
        tree_output=$(cd "$target_path" && git ls-files 2>/dev/null | head -200 || true)
    else
        # Fallback: find with common exclusions.
        tree_output=$(find "$target_path" -maxdepth 4 -type f \
            -not -path '*/node_modules/*' \
            -not -path '*/.git/*' \
            -not -path '*/vendor/*' \
            -not -path '*/__pycache__/*' \
            -not -path '*/dist/*' \
            -not -path '*/build/*' \
            -not -path '*/.next/*' \
            -not -path '*/target/*' \
            2>/dev/null | sed "s|$target_path/||" | sort | head -200)
    fi

    # Categorize files (order matters: test/doc/ci patterns win over the
    # generic config and source-extension buckets).
    local src_files=""
    local test_files=""
    local doc_files=""
    local config_file_list=""
    local ci_files=""
    local other_files=""
    local file
    while IFS= read -r file; do
        [ -z "$file" ] && continue
        case "$file" in
            *.test.*|*.spec.*|*_test.*|*_spec.*|tests/*|test/*|__tests__/*|spec/*)
                test_files="$test_files $file"
                ;;
            *.md|*.rst|*.txt|docs/*|doc/*|wiki/*)
                doc_files="$doc_files $file"
                ;;
            .github/*|.gitlab-ci*|.circleci/*|Jenkinsfile|.travis*)
                ci_files="$ci_files $file"
                ;;
            package.json|pyproject.toml|Cargo.toml|go.mod|Gemfile|pom.xml|build.gradle*|Makefile|CMakeLists.txt|*.toml|*.cfg|*.ini|*.yml|*.yaml|Dockerfile*|docker-compose*|.env*|.eslint*|.prettier*|tsconfig*|jest.config*|vitest.config*)
                config_file_list="$config_file_list $file"
                ;;
            *.js|*.ts|*.tsx|*.jsx|*.py|*.rs|*.go|*.rb|*.java|*.kt|*.c|*.cpp|*.h|*.hpp|*.sh|*.swift|*.cs|*.php|*.lua|*.zig|*.el|*.clj)
                src_files="$src_files $file"
                ;;
            *)
                other_files="$other_files $file"
                ;;
        esac
    done <<< "$tree_output"

    # Split declaration from assignment so command failures are not masked.
    local src_count test_count doc_count
    src_count=$(echo "$src_files" | wc -w | tr -d ' ')
    test_count=$(echo "$test_files" | wc -w | tr -d ' ')
    doc_count=$(echo "$doc_files" | wc -w | tr -d ' ')

    # --- Depth 2+: scan source files for public symbols ---
    local key_exports=""
    local key_functions=""
    local key_classes=""

    if [ "$depth" -ge 2 ]; then
        if [ "$use_stdout" = true ]; then
            log_info "Depth 2: Scanning source files for exports, functions, classes..." >&2
        else
            log_info "Depth 2: Scanning source files for exports, functions, classes..."
        fi

        local src_file file_lines file_ext exports classes funcs
        for src_file in $src_files; do
            [ ! -f "$target_path/$src_file" ] && continue
            file_lines=$(wc -l < "$target_path/$src_file" 2>/dev/null | tr -d ' ')
            # Only scan files under 2000 lines for performance
            # (default to 0 if wc failed, rather than erroring in the test).
            [ "${file_lines:-0}" -gt 2000 ] && continue

            file_ext="${src_file##*.}"
            case "$file_ext" in
                js|ts|tsx|jsx)
                    exports=$(grep -n "^export " "$target_path/$src_file" 2>/dev/null | head -10 || true)
                    if [ -n "$exports" ]; then
                        key_exports="$key_exports
$src_file:
$exports"
                    fi
                    ;;
                py)
                    classes=$(grep -n "^class " "$target_path/$src_file" 2>/dev/null | head -5 || true)
                    funcs=$(grep -n "^def \|^async def " "$target_path/$src_file" 2>/dev/null | head -10 || true)
                    if [ -n "$classes" ]; then
                        key_classes="$key_classes
$src_file:
$classes"
                    fi
                    if [ -n "$funcs" ]; then
                        key_functions="$key_functions
$src_file:
$funcs"
                    fi
                    ;;
                go)
                    funcs=$(grep -n "^func " "$target_path/$src_file" 2>/dev/null | head -10 || true)
                    if [ -n "$funcs" ]; then
                        key_functions="$key_functions
$src_file:
$funcs"
                    fi
                    ;;
                rs)
                    funcs=$(grep -n "^pub fn \|^pub async fn " "$target_path/$src_file" 2>/dev/null | head -10 || true)
                    if [ -n "$funcs" ]; then
                        key_functions="$key_functions
$src_file:
$funcs"
                    fi
                    ;;
                sh)
                    funcs=$(grep -n "^[a-zA-Z_][a-zA-Z_0-9]*() {" "$target_path/$src_file" 2>/dev/null | head -10 || true)
                    if [ -n "$funcs" ]; then
                        key_functions="$key_functions
$src_file:
$funcs"
                    fi
                    ;;
                rb)
                    classes=$(grep -n "^class " "$target_path/$src_file" 2>/dev/null | head -5 || true)
                    funcs=$(grep -n "^  def " "$target_path/$src_file" 2>/dev/null | head -10 || true)
                    if [ -n "$classes" ]; then
                        key_classes="$key_classes
$src_file:
$classes"
                    fi
                    if [ -n "$funcs" ]; then
                        key_functions="$key_functions
$src_file:
$funcs"
                    fi
                    ;;
                java|kt)
                    classes=$(grep -n "^public class \|^class \|^data class " "$target_path/$src_file" 2>/dev/null | head -5 || true)
                    if [ -n "$classes" ]; then
                        key_classes="$key_classes
$src_file:
$classes"
                    fi
                    ;;
            esac
        done
    fi

    # --- Depth 3: dependency analysis (imports per source file) ---
    local dep_graph=""
    if [ "$depth" -ge 3 ]; then
        if [ "$use_stdout" = true ]; then
            log_info "Depth 3: Analyzing imports and dependencies..." >&2
        else
            log_info "Depth 3: Analyzing imports and dependencies..."
        fi

        local imports
        for src_file in $src_files; do
            [ ! -f "$target_path/$src_file" ] && continue
            file_lines=$(wc -l < "$target_path/$src_file" 2>/dev/null | tr -d ' ')
            [ "${file_lines:-0}" -gt 2000 ] && continue

            imports=""
            file_ext="${src_file##*.}"
            case "$file_ext" in
                js|ts|tsx|jsx)
                    imports=$(grep "^import " "$target_path/$src_file" 2>/dev/null | grep -v "node_modules" | head -15 || true)
                    ;;
                py)
                    imports=$(grep "^import \|^from " "$target_path/$src_file" 2>/dev/null | head -15 || true)
                    ;;
                go)
                    imports=$(sed -n '/^import (/,/^)/p' "$target_path/$src_file" 2>/dev/null | grep -v '^import\|^)' | head -15 || true)
                    ;;
                rs)
                    imports=$(grep "^use " "$target_path/$src_file" 2>/dev/null | head -15 || true)
                    ;;
            esac

            if [ -n "$imports" ]; then
                dep_graph="$dep_graph
$src_file:
$imports"
            fi
        done
    fi

    # --- Detect build/run/test commands ---
    local build_cmd=""
    local run_cmd=""
    local test_cmd=""

    if [ -f "$pkg_json" ]; then
        if command -v python3 &>/dev/null; then
            local scripts_json
            scripts_json=$(python3 -c "
import json, sys
try:
    d = json.load(open(sys.argv[1]))
    s = d.get('scripts', {})
    for k in ['build', 'dev', 'start', 'test', 'lint', 'format', 'check', 'typecheck']:
        if k in s:
            print(f'{k}: {s[k]}')
except Exception: pass
" "$pkg_json" 2>/dev/null || true)
            if echo "$scripts_json" | grep -q "^build:"; then
                build_cmd="${package_manager:-npm} run build"
            fi
            if echo "$scripts_json" | grep -q "^dev:"; then
                run_cmd="${package_manager:-npm} run dev"
            elif echo "$scripts_json" | grep -q "^start:"; then
                run_cmd="${package_manager:-npm} start"
            fi
            if echo "$scripts_json" | grep -q "^test:"; then
                test_cmd="${package_manager:-npm} test"
            fi
        fi
    fi

    if [ -f "$target_path/Makefile" ]; then
        if [ -z "$build_cmd" ]; then
            build_cmd="make"
        fi
        if grep -q "^test:" "$target_path/Makefile" 2>/dev/null && [ -z "$test_cmd" ]; then
            test_cmd="make test"
        fi
        if grep -q "^run:" "$target_path/Makefile" 2>/dev/null && [ -z "$run_cmd" ]; then
            run_cmd="make run"
        fi
    fi

    if [ -f "$target_path/Cargo.toml" ]; then
        build_cmd="cargo build"
        run_cmd="cargo run"
        test_cmd="cargo test"
    fi

    if [ -f "$target_path/go.mod" ]; then
        build_cmd="go build ./..."
        run_cmd="go run ."
        test_cmd="go test ./..."
    fi

    if [ -f "$target_path/pyproject.toml" ]; then
        if grep -q '\[tool.pytest' "$target_path/pyproject.toml" 2>/dev/null; then
            test_cmd="pytest"
        fi
        if grep -q '\[tool.poetry' "$target_path/pyproject.toml" 2>/dev/null; then
            build_cmd="poetry build"
            run_cmd="poetry run python -m ${project_name}"
        fi
    fi

    # --- Generate output ---
    local output=""

    if [ "$format" = "json" ]; then
        # JSON output.
        # BUGFIX: the description is JSON-escaped by passing it as argv to
        # json.dumps (the old code interpolated it into the python source,
        # where a single quote broke the snippet).
        # NOTE(review): name/path are still interpolated raw as before;
        # exotic characters there could yield invalid JSON — consider
        # escaping those the same way.
        local desc_json
        desc_json=$(python3 -c "import json, sys; print(json.dumps(sys.argv[1]))" "$project_description" 2>/dev/null) || desc_json="\"$project_description\""
        output=$(cat <<ENDJSON
{
  "project": {
    "name": "$project_name",
    "description": $desc_json,
    "version": "$project_version",
    "path": "$target_path"
  },
  "languages": "$languages",
  "frameworks": "$frameworks",
  "build_system": "$build_system",
  "package_manager": "$package_manager",
  "test_framework": "$test_framework",
  "ci": "$(echo $ci_system)",
  "files": {
    "source": $src_count,
    "test": $test_count,
    "docs": $doc_count
  },
  "commands": {
    "build": "$build_cmd",
    "run": "$run_cmd",
    "test": "$test_cmd"
  },
  "depth": $depth
}
ENDJSON
)
    elif [ "$format" = "yaml" ]; then
        # YAML output.
        # NOTE(review): description is wrapped in plain double quotes; a
        # description containing a double quote would produce invalid YAML.
        output=$(cat <<ENDYAML
project:
  name: $project_name
  description: "$project_description"
  version: "$project_version"
  path: $target_path
languages: $languages
frameworks: $frameworks
build_system: $build_system
package_manager: $package_manager
test_framework: $test_framework
ci: $(echo $ci_system)
files:
  source: $src_count
  test: $test_count
  docs: $doc_count
commands:
  build: "$build_cmd"
  run: "$run_cmd"
  test: "$test_cmd"
depth: $depth
ENDYAML
)
    else
        # Markdown output (CLAUDE.md format)
        output="# $project_name"
        if [ -n "$project_description" ]; then
            output="$output

$project_description"
        fi
        if [ -n "$project_version" ]; then
            output="$output

Version: $project_version"
        fi

        output="$output

## Overview

| Property | Value |
|----------|-------|
| Languages | ${languages:-N/A} |
| Frameworks | ${frameworks:-N/A} |
| Build System | ${build_system:-N/A} |
| Package Manager | ${package_manager:-N/A} |
| Test Framework | ${test_framework:-N/A} |
| CI/CD | ${ci_system:-N/A} |"

        # Commands section
        if [ -n "$build_cmd" ] || [ -n "$run_cmd" ] || [ -n "$test_cmd" ]; then
            output="$output

## Commands

\`\`\`bash"
            if [ -n "$build_cmd" ]; then
                output="$output
# Build
$build_cmd"
            fi
            if [ -n "$run_cmd" ]; then
                output="$output

# Run
$run_cmd"
            fi
            if [ -n "$test_cmd" ]; then
                output="$output

# Test
$test_cmd"
            fi
            output="$output
\`\`\`"
        fi

        # Project structure
        output="$output

## Project Structure

Files: $src_count source, $test_count test, $doc_count docs"

        # Show top-level entries (dirs annotated with file counts).
        local top_dirs
        top_dirs=$(echo "$tree_output" | sed 's|/.*||' | sort -u | head -30)
        if [ -n "$top_dirs" ]; then
            output="$output

\`\`\`"
            local dir dir_count
            while IFS= read -r dir; do
                [ -z "$dir" ] && continue
                if [ -d "$target_path/$dir" ]; then
                    dir_count=$(echo "$tree_output" | grep "^${dir}/" | wc -l | tr -d ' ')
                    output="$output
$dir/ ($dir_count files)"
                else
                    output="$output
$dir"
                fi
            done <<< "$top_dirs"
            output="$output
\`\`\`"
        fi

        # Key files
        if [ -n "$config_file_list" ]; then
            output="$output

## Key Files
"
            local cf
            for cf in $config_file_list; do
                output="$output
- \`$cf\`"
            done
        fi

        if [ -n "$entry_points" ]; then
            output="$output

## Entry Points

\`\`\`
$entry_points
\`\`\`"
        fi

        # Depth 2+: exports, functions, classes
        if [ "$depth" -ge 2 ]; then
            if [ -n "$key_classes" ]; then
                output="$output

## Key Classes
\`\`\`
$key_classes
\`\`\`"
            fi

            if [ -n "$key_functions" ]; then
                output="$output

## Key Functions
\`\`\`
$key_functions
\`\`\`"
            fi

            if [ -n "$key_exports" ]; then
                output="$output

## Exports
\`\`\`
$key_exports
\`\`\`"
            fi
        fi

        # Depth 3: dependency graph
        if [ "$depth" -ge 3 ] && [ -n "$dep_graph" ]; then
            output="$output

## Dependency Graph (Imports)
\`\`\`
$dep_graph
\`\`\`"
        fi

        # CI/CD section
        if [ -n "$ci_system" ] && [ -n "$ci_files" ]; then
            output="$output

## CI/CD ($ci_system)
"
            for cf in $ci_files; do
                output="$output
- \`$cf\`"
            done
        fi

        # Pointer to project documentation
        if [ -n "$readme_file" ]; then
            output="$output

## Documentation

See \`$readme_file\` for project documentation."
        fi

        output="$output

---
Generated by loki onboard (depth $depth) on $(date +%Y-%m-%d)"
    fi

    # --- Output ---
    if [ "$use_stdout" = true ]; then
        echo "$output"
        return 0
    fi

    # Determine output path
    if [ -z "$output_path" ]; then
        output_path="$target_path/.claude/CLAUDE.md"
    fi

    # Update mode: append the new report after a dated separator.
    if [ "$update_mode" = true ] && [ -f "$output_path" ]; then
        local timestamp
        timestamp=$(date +%Y-%m-%d)
        {
            cat "$output_path"
            echo ""
            echo "---"
            echo ""
            echo "## Updated: $timestamp"
            echo ""
            echo "$output"
        } > "${output_path}.tmp"
        mv "${output_path}.tmp" "$output_path"
        log_info "Updated: $output_path"
        return 0
    fi

    # Create directory and write
    local output_dir
    output_dir=$(dirname "$output_path")
    mkdir -p "$output_dir"

    echo "$output" > "$output_path"
    log_info "Generated: $output_path"
    log_info "Project: $project_name | $src_count source files | $test_count tests | $doc_count docs"

    if [ -n "$languages" ]; then
        log_info "Languages: $languages"
    fi
    if [ -n "$frameworks" ]; then
        log_info "Frameworks: $frameworks"
    fi
}
|
|
14683
|
+
|
|
13830
14684
|
main "$@"
|
package/completions/_loki
CHANGED
|
@@ -108,6 +108,7 @@ function _loki_commands {
|
|
|
108
108
|
'enterprise:Enterprise feature commands'
|
|
109
109
|
'voice:Voice input commands'
|
|
110
110
|
'doctor:Check system prerequisites'
|
|
111
|
+
'onboard:Analyze repo and generate CLAUDE.md'
|
|
111
112
|
'version:Show version'
|
|
112
113
|
'completions:Output shell completions'
|
|
113
114
|
'help:Show help'
|
package/completions/loki.bash
CHANGED
|
@@ -5,7 +5,7 @@ _loki_completion() {
|
|
|
5
5
|
_init_completion || return
|
|
6
6
|
|
|
7
7
|
# Main subcommands (must match autonomy/loki main case statement)
|
|
8
|
-
local main_commands="start quick demo init stop pause resume status dashboard logs serve api sandbox notify import github issue config provider reset memory compound checkpoint council dogfood projects enterprise voice secrets doctor watchdog audit metrics syslog version completions help"
|
|
8
|
+
local main_commands="start quick demo init stop pause resume status dashboard logs serve api sandbox notify import github issue config provider reset memory compound checkpoint council dogfood projects enterprise voice secrets doctor watchdog audit metrics syslog onboard version completions help"
|
|
9
9
|
|
|
10
10
|
# 1. If we are on the first argument (subcommand)
|
|
11
11
|
if [[ $cword -eq 1 ]]; then
|
|
@@ -128,6 +128,22 @@ _loki_completion() {
|
|
|
128
128
|
COMPREPLY=( $(compgen -W "${issue_cmds}" -- "$cur") )
|
|
129
129
|
;;
|
|
130
130
|
|
|
131
|
+
onboard)
|
|
132
|
+
if [[ "$cur" == -* ]]; then
|
|
133
|
+
COMPREPLY=( $(compgen -W "--depth --format --output --stdout --update --help" -- "$cur") )
|
|
134
|
+
return 0
|
|
135
|
+
fi
|
|
136
|
+
if [[ "$prev" == "--depth" ]]; then
|
|
137
|
+
COMPREPLY=( $(compgen -W "1 2 3" -- "$cur") )
|
|
138
|
+
return 0
|
|
139
|
+
fi
|
|
140
|
+
if [[ "$prev" == "--format" ]]; then
|
|
141
|
+
COMPREPLY=( $(compgen -W "markdown json yaml" -- "$cur") )
|
|
142
|
+
return 0
|
|
143
|
+
fi
|
|
144
|
+
_filedir -d
|
|
145
|
+
;;
|
|
146
|
+
|
|
131
147
|
completions)
|
|
132
148
|
COMPREPLY=( $(compgen -W "bash zsh" -- "$cur") )
|
|
133
149
|
;;
|
package/dashboard/__init__.py
CHANGED
package/docs/INSTALLATION.md
CHANGED
package/mcp/__init__.py
CHANGED