Skip to content

Commit 11c4cda

Browse files
committed
update: add project tree structure to output and simplify merge steps
1 parent 9b4ed92 commit 11c4cda

3 files changed

Lines changed: 265 additions & 43 deletions

File tree

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -205,3 +205,4 @@ cython_debug/
205205
marimo/_static/
206206
marimo/_lsp/
207207
__marimo__/
208+
merged_code_context.txt

.mergeignore

Lines changed: 211 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,211 @@
1+
# Byte-compiled / optimized / DLL files
2+
__pycache__/
3+
*.py[codz]
4+
*$py.class
5+
6+
# C extensions
7+
*.so
8+
9+
# Distribution / packaging
10+
.Python
11+
build/
12+
develop-eggs/
13+
dist/
14+
downloads/
15+
eggs/
16+
.eggs/
17+
lib/
18+
lib64/
19+
parts/
20+
sdist/
21+
var/
22+
wheels/
23+
share/python-wheels/
24+
*.egg-info/
25+
.installed.cfg
26+
*.egg
27+
MANIFEST
28+
29+
# PyInstaller
30+
# Usually these files are written by a python script from a template
31+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
32+
*.manifest
33+
*.spec
34+
35+
# Installer logs
36+
pip-log.txt
37+
pip-delete-this-directory.txt
38+
39+
# Unit test / coverage reports
40+
htmlcov/
41+
.tox/
42+
.nox/
43+
.coverage
44+
.coverage.*
45+
.cache
46+
nosetests.xml
47+
coverage.xml
48+
*.cover
49+
*.py.cover
50+
.hypothesis/
51+
.pytest_cache/
52+
cover/
53+
54+
# Translations
55+
*.mo
56+
*.pot
57+
58+
# Django stuff:
59+
*.log
60+
local_settings.py
61+
db.sqlite3
62+
db.sqlite3-journal
63+
64+
# Flask stuff:
65+
instance/
66+
.webassets-cache
67+
68+
# Scrapy stuff:
69+
.scrapy
70+
71+
# Sphinx documentation
72+
docs/_build/
73+
74+
# PyBuilder
75+
.pybuilder/
76+
target/
77+
78+
# Jupyter Notebook
79+
.ipynb_checkpoints
80+
81+
# IPython
82+
profile_default/
83+
ipython_config.py
84+
85+
# pyenv
86+
# For a library or package, you might want to ignore these files since the code is
87+
# intended to run in multiple environments; otherwise, check them in:
88+
# .python-version
89+
90+
# pipenv
91+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
93+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
94+
# install all needed dependencies.
95+
#Pipfile.lock
96+
97+
# UV
98+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99+
# This is especially recommended for binary packages to ensure reproducibility, and is more
100+
# commonly ignored for libraries.
101+
#uv.lock
102+
103+
# poetry
104+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105+
# This is especially recommended for binary packages to ensure reproducibility, and is more
106+
# commonly ignored for libraries.
107+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108+
#poetry.lock
109+
#poetry.toml
110+
111+
# pdm
112+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
113+
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
114+
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
115+
#pdm.lock
116+
#pdm.toml
117+
.pdm-python
118+
.pdm-build/
119+
120+
# pixi
121+
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
122+
#pixi.lock
123+
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
124+
# in the .venv directory. It is recommended not to include this directory in version control.
125+
.pixi
126+
127+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
128+
__pypackages__/
129+
130+
# Celery stuff
131+
celerybeat-schedule
132+
celerybeat.pid
133+
134+
# SageMath parsed files
135+
*.sage.py
136+
137+
# Environments
138+
.env
139+
.envrc
140+
.venv
141+
env/
142+
venv/
143+
ENV/
144+
env.bak/
145+
venv.bak/
146+
147+
# Spyder project settings
148+
.spyderproject
149+
.spyproject
150+
151+
# Rope project settings
152+
.ropeproject
153+
154+
# mkdocs documentation
155+
/site
156+
157+
# mypy
158+
.mypy_cache/
159+
.dmypy.json
160+
dmypy.json
161+
162+
# Pyre type checker
163+
.pyre/
164+
165+
# pytype static type analyzer
166+
.pytype/
167+
168+
# Cython debug symbols
169+
cython_debug/
170+
171+
# PyCharm
172+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
173+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
174+
# and can be added to the global gitignore or merged into this file. For a more nuclear
175+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
176+
#.idea/
177+
178+
# Abstra
179+
# Abstra is an AI-powered process automation framework.
180+
# Ignore directories containing user credentials, local state, and settings.
181+
# Learn more at https://abstra.io/docs
182+
.abstra/
183+
184+
# Visual Studio Code
185+
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
186+
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
187+
# and can be added to the global gitignore or merged into this file. However, if you prefer,
188+
# you could uncomment the following to ignore the entire vscode folder
189+
# .vscode/
190+
191+
# Ruff stuff:
192+
.ruff_cache/
193+
194+
# PyPI configuration file
195+
.pypirc
196+
197+
# Cursor
198+
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
199+
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
200+
# refer to https://docs.cursor.com/context/ignore-files
201+
.cursorignore
202+
.cursorindexingignore
203+
204+
# Marimo
205+
marimo/_static/
206+
marimo/_lsp/
207+
__marimo__/
208+
209+
# Exclude this tool's output
210+
merged_code_context.txt
211+

src/flatcode/cli.py

Lines changed: 53 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -126,6 +126,35 @@ def is_path_ignored(rel_path: Path, rules: List[Tuple[str, bool]]) -> bool:
126126

127127
return ignored
128128

129+
# --- Project Tree Generator ---
130+
131+
def generate_project_tree(file_paths: List[str], root_name: str) -> str:
132+
"""Generates a string representation of the project tree from a list of file paths."""
133+
tree_dict: Dict = {}
134+
for path in sorted(file_paths):
135+
parts = Path(path).parts
136+
current_level = tree_dict
137+
for part in parts:
138+
if part not in current_level:
139+
current_level[part] = {}
140+
current_level = current_level[part]
141+
142+
lines = [f"{root_name}/"]
143+
144+
def _generate_lines_recursive(subtree: Dict, prefix: str):
145+
entries = sorted(subtree.items())
146+
for i, (name, content) in enumerate(entries):
147+
is_last = (i == len(entries) - 1)
148+
connector = "└── " if is_last else "├── "
149+
lines.append(f"{prefix}{connector}{name}")
150+
151+
if content: # It's a directory, recurse
152+
new_prefix = prefix + (" " if is_last else "│ ")
153+
_generate_lines_recursive(content, new_prefix)
154+
155+
_generate_lines_recursive(tree_dict, "")
156+
return "\n".join(lines) + "\n"
157+
129158
# --- Token Estimator ---
130159

131160
# Initialize a global tokenizer to avoid reloading
@@ -214,10 +243,6 @@ def main():
214243
files_to_merge: List[Tuple[Path, str, int, str]] = []
215244
total_files_scanned = 0
216245

217-
print(f"\n--- Phase 1: Scanning & Estimating Tokens ---")
218-
print(f"Using rules from: {mergeignore_file.name}")
219-
print(f"Including extensions: {args.extensions}")
220-
221246
for path in root_dir.rglob("*"):
222247
total_files_scanned += 1
223248

@@ -238,7 +263,6 @@ def main():
238263
try:
239264
content = path.read_text(encoding="utf-8")
240265
tokens = estimate_tokens(content)
241-
print(f" > [Found] {rel_path.as_posix()} (~{tokens} tokens)")
242266
files_to_merge.append((path, rel_path.as_posix(), tokens, content))
243267
except Exception as e:
244268
print(f" > [Warning] Skipping {rel_path.as_posix()} (read error: {e})", file=sys.stderr)
@@ -247,7 +271,7 @@ def main():
247271
print(f"\nScan complete. No matching files found to merge (scanned {total_files_scanned} items).")
248272
return
249273

250-
# 4. Phase 1.5: Top 10 Review
274+
# 4. Phase 1.5: Top 10 Review and Tree Generation
251275
files_to_merge.sort(key=lambda x: x[2], reverse=True)
252276
total_tokens = sum(f[2] for f in files_to_merge)
253277

@@ -261,43 +285,29 @@ def main():
261285
print(f"Total files to merge: {len(files_to_merge)}")
262286
print(f"Total estimated tokens: {total_tokens}")
263287
print("-" * 70)
264-
265-
# 5. Phase 2: Confirm and Merge
266-
proceed = False
267-
if args.yes:
268-
print("Auto-confirming with --yes flag.")
269-
proceed = True
270-
else:
271-
try:
272-
choice = input(f"> Proceed with merging {len(files_to_merge)} files? (Y/n): ").strip().lower()
273-
if choice != 'n':
274-
proceed = True
275-
except KeyboardInterrupt:
276-
print("\nOperation cancelled.", file=sys.stderr)
277-
sys.exit(0)
278-
279-
if proceed:
280-
print(f"\n--- Phase 2: Merging files into {output_file.name} ---")
281-
try:
282-
with open(output_file, "w", encoding="utf-8") as f:
283-
f.write(f"# --- flatcode: Project Context Snapshot --- #\n")
284-
f.write(f"# Root: {root_dir}\n")
285-
f.write(f"# Files: {len(files_to_merge)}\n")
286-
f.write(f"# Est. Tokens: {total_tokens}\n")
287-
f.write(f"# --- Start of Context --- #\n\n")
288-
289-
for path, rel_path, tokens, content in files_to_merge:
290-
print(f" > Merging: {rel_path}")
291-
f.write(f"--- File: {rel_path} ---\n\n")
292-
f.write(content)
293-
f.write(f"\n\n--- End: {rel_path} ---\n\n")
294-
295-
print(f"\n--- Success! ---")
296-
print(f"Project context successfully merged into: {output_file}")
297-
except IOError as e:
298-
print(f"\n*** Error writing to output file: {e} ***", file=sys.stderr)
299-
else:
300-
print("Operation cancelled.")
288+
289+
# Generate the project tree string from the final list of files
290+
relative_paths_for_tree = [f[1] for f in files_to_merge]
291+
project_tree_str = generate_project_tree(relative_paths_for_tree, root_dir.name)
292+
293+
# 5. Phase 2: Merge files
294+
try:
295+
with open(output_file, "w", encoding="utf-8") as f:
296+
f.write(f"# --- flatcode: Project Context Snapshot --- #\n")
297+
f.write(f"# Root: {root_dir}\n")
298+
f.write(f"# Files: {len(files_to_merge)}\n")
299+
f.write(f"# Est. Tokens: {total_tokens}\n")
300+
f.write(f"# --- Project Tree --- #\n")
301+
f.write(project_tree_str)
302+
f.write(f"# --- Start of Context --- #\n\n")
303+
304+
for path, rel_path, tokens, content in files_to_merge:
305+
f.write(f"--- File: {rel_path} ---\n\n")
306+
f.write(content)
307+
f.write(f"\n\n--- End: {rel_path} ---\n\n")
308+
309+
except IOError as e:
310+
print(f"\n*** Error writing to output file: {e} ***", file=sys.stderr)
301311

302312
except KeyboardInterrupt:
303313
print("\nOperation cancelled by user.", file=sys.stderr)

0 commit comments

Comments
 (0)