Skip to content

Commit 64b748e

Browse files
committed
review fixes 1
1 parent 36f6ac5 commit 64b748e

File tree

5 files changed

+97
-20
lines changed

5 files changed

+97
-20
lines changed

README.md

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ ContextPacker is a desktop application designed to scrape websites, clone Git re
1818
* **Smart Filtering:** Automatically respects **`.gitignore`** rules and allows hiding common binary and image files.
1919
* **Customisability:**
2020
* **Customisable Settings:** Configure scraping options (depth, paths, speed) and file exclusions.
21-
* **External Configuration:** Key settings (`user_agents`, `default_local_excludes`, `binary_file_patterns`) can be modified in a **`config.json`** file created on first run.
21+
* **External Configuration:** Key settings (`user_agents`, `default_local_excludes`, `binary_file_patterns`) can be modified in a **`settings.json`** file created on first run.
2222
* **Cross-Platform:** Supports Light and Dark themes (detects system theme on Windows, macOS, and Linux).
2323

2424
-----
@@ -46,6 +46,25 @@ The application operates in two main modes, selected via radio buttons:
4646

4747
-----
4848

49+
## 🔧 Advanced Configuration (`settings.json`)
50+
51+
The application creates a `settings.json` file on first run in the application data directory (e.g., `%APPDATA%\ContextPacker` on Windows). This file contains settings that are read only once, at startup, and are intended to be user-managed.
52+
53+
| Key | Description | Default Value | Notes |
54+
| :--- | :--- | :--- | :--- |
55+
| `logging_level` | Sets the verbosity of the internal log output. | `"INFO"` | Options: `"DEBUG"`, `"INFO"`, `"WARNING"`, `"ERROR"`, `"CRITICAL"`. |
56+
| `log_max_size_mb` | Maximum size (in megabytes) of the `app.log` file before log rotation occurs. | `3` | |
57+
| `log_backup_count` | Number of backup log files to keep during rotation. | `5` | |
58+
| `user_agents` | A list of strings used by the web crawler to identify itself. | `[...]` | The application cycles through these. |
59+
| `default_output_format` | The default file extension selected in the Output panel. | `".md"` | Options: `".md"`, `".txt"`, `".xml"`. |
60+
| `default_local_excludes` | A list of global `fnmatch` patterns automatically applied to local directory scans. | `[".archive/", ".git/", ...]` | These are visible and editable in the 'Excludes' text area. |
61+
| `binary_file_patterns` | A list of `fnmatch` patterns that are considered binary/image files and can be toggled via the 'Hide Images + Binaries' checkbox. | `["*.png", "*.jpg", ...]` | |
62+
| `max_age_cache_days` | The number of days after which old, temporary session and cache directories are automatically deleted on startup. | `7` | Set to a very large value to effectively retain cache files. |
63+
64+
The file also contains window-state keys (`window_size`, `h_sash_state`, etc.) which are managed automatically by the application on close.
65+
66+
-----
67+
4968
## ⚙️ Installation & Setup
5069

5170
### Requirements

core/actions.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,10 @@ def clone_repo_worker(url, path, message_queue: queue.Queue, cancel_event: threa
5959

6060
try:
6161
resolved_path = Path(path).resolve()
62-
if not any(parent.name == "cache" for parent in resolved_path.parents):
63-
message_queue.put(StatusMessage(status=StatusType.ERROR, message="Invalid clone path detected."))
62+
app_cache_dir = Path(get_app_data_dir()) / "cache"
63+
# Ensure the resolved path is a sub-path of the application's cache directory
64+
if not resolved_path.is_relative_to(app_cache_dir):
65+
message_queue.put(StatusMessage(status=StatusType.ERROR, message="Invalid clone path detected: Path is outside the application cache directory."))
6466
return
6567
except Exception:
6668
message_queue.put(StatusMessage(status=StatusType.ERROR, message="Invalid path provided."))
@@ -116,6 +118,8 @@ def clone_repo_worker(url, path, message_queue: queue.Queue, cancel_event: threa
116118
if process:
117119
if reader_thread and reader_thread.is_alive():
118120
reader_thread.join(timeout=GIT_READER_THREAD_JOIN_TIMEOUT_SECONDS)
121+
if reader_thread.is_alive():
122+
error_handler.log_message("Warning: Git output reader thread did not terminate in time.")
119123
error_handler.handle_process_cleanup(process)
120124
error_handler.handle_stream_cleanup(process)
121125

@@ -125,16 +129,17 @@ def packaging_worker(source_dir, output_path, repomix_style, exclude_patterns, t
125129
logging.debug(f"Packaging worker started. Source: {source_dir}, Output: {output_path}")
126130

127131
class RepomixProgressHandler(logging.Handler):
128-
def __init__(self, msg_queue, total_files_count):
132+
def __init__(self, msg_queue, total_files_count, cancel_event: threading.Event):
129133
super().__init__()
130134
self.msg_queue = msg_queue
131135
self.total_files = total_files_count
132136
self.processed_count = 0
137+
self.cancel_event = cancel_event
133138
self.batch_size = REPOMIX_PROGRESS_UPDATE_BATCH_SIZE
134139
self.last_progress_value = -1
135140

136141
def emit(self, record):
137-
if cancel_event.is_set():
142+
if self.cancel_event.is_set():
138143
return
139144

140145
msg = self.format(record)
@@ -150,10 +155,11 @@ def emit(self, record):
150155

151156
repomix_logger = logging.getLogger("repomix")
152157
original_level = repomix_logger.level
153-
progress_handler = RepomixProgressHandler(message_queue, total_files)
158+
progress_handler = None
154159

155160
try:
156161
repomix_logger.setLevel(logging.INFO)
162+
progress_handler = RepomixProgressHandler(message_queue, total_files, cancel_event)
157163
repomix_logger.addHandler(progress_handler)
158164
run_repomix(
159165
source_dir,
@@ -164,7 +170,8 @@ def emit(self, record):
164170
exclude_patterns=exclude_patterns,
165171
)
166172
finally:
167-
repomix_logger.removeHandler(progress_handler)
173+
if progress_handler:
174+
repomix_logger.removeHandler(progress_handler)
168175
repomix_logger.setLevel(original_level)
169176
logging.debug(f"Packaging worker finished for source: {source_dir}")
170177

core/config_service.py

Lines changed: 55 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import json
22
import sys
33
from pathlib import Path
4+
from typing import List, Optional
45

56

67
class ConfigService:
@@ -66,6 +67,14 @@ def __init__(self):
6667
}
6768
self._config_dir = self._get_config_dir()
6869
self._config_path = self._config_dir / self._config_filename
70+
71+
# Separately store the mutable keys that the app changes
72+
self._mutable_keys = ["window_size", "window_pos", "h_sash_state", "v_sash_state"]
73+
# Separately store the user-editable keys that are not changed by the app at runtime
74+
self._static_keys = list(set(self._default_config.keys()) - set(self._mutable_keys))
75+
76+
# Read/Write config. All keys are initialized from defaults, then overwritten by file.
77+
# This dict holds the runtime values.
6978
self.config = self._load_config()
7079

7180
def _get_config_dir(self):
@@ -78,35 +87,69 @@ def _get_config_dir(self):
7887

7988
def _load_config(self):
8089
"""Loads settings.json, creating a default one if it doesn't exist."""
90+
# Use a copy of defaults for a foundation
91+
config = self._default_config.copy()
92+
8193
if not self._config_path.exists():
8294
try:
95+
# Write only the static keys to the initial file, letting mutable keys use defaults
96+
initial_config_content = {k: v for k, v in self._default_config.items() if k in self._static_keys}
8397
with open(self._config_path, "w", encoding="utf-8") as f:
84-
json.dump(self._default_config, f, indent=4)
85-
return self._default_config.copy()
98+
json.dump(initial_config_content, f, indent=4)
99+
# The runtime config remains the full default set (config.copy())
100+
return config
86101
except IOError as e:
87102
print(f"Warning: Could not create default config file: {e}")
88-
return self._default_config.copy()
103+
return config.copy()
89104

90105
try:
91106
with open(self._config_path, "r", encoding="utf-8") as f:
92107
loaded_config = json.load(f)
93-
# Merge loaded config with defaults to ensure all keys are present
94-
config = self._default_config.copy()
108+
# Merge loaded config with defaults.
109+
# This ensures all keys are present and respects user's static config.
95110
config.update(loaded_config)
96111
return config
97112
except (json.JSONDecodeError, IOError) as e:
98113
print(f"Warning: Could not load config.json, using defaults: {e}")
99-
return self._default_config.copy()
114+
return config.copy()
100115

101116
def get(self, key, default=None):
102117
"""Gets a configuration value by key."""
103118
return self.config.get(key, default)
104119

105-
def save_config(self):
106-
"""Saves the current configuration dictionary to settings.json."""
120+
def save_config(self, save_static=False):
121+
"""
122+
Saves the current configuration dictionary to settings.json.
123+
124+
Args:
125+
save_static (bool): If True, saves ALL keys (static and mutable).
126+
If False (default), only saves the mutable keys,
127+
merging them back into the existing file's content.
128+
"""
129+
# 1. Read the existing file content to keep static settings unless told to overwrite
130+
current_file_config = {}
131+
if self._config_path.exists():
132+
try:
133+
with open(self._config_path, "r", encoding="utf-8") as f:
134+
current_file_config = json.load(f)
135+
except (json.JSONDecodeError, IOError):
136+
pass # Use empty dict if file is corrupt/missing
137+
138+
# 2. Determine what to save
139+
if save_static:
140+
config_to_save = self.config.copy()
141+
else:
142+
# Only save the mutable keys from runtime config,
143+
# and merge with the current file content to keep static keys.
144+
config_to_save = current_file_config.copy()
145+
for key in self._mutable_keys:
146+
if key in self.config:
147+
config_to_save[key] = self.config[key]
148+
149+
# 3. Write the new content
107150
try:
108151
with open(self._config_path, "w", encoding="utf-8") as f:
109-
json.dump(self.config, f, indent=4)
152+
json.dump(config_to_save, f, indent=4)
110153
except IOError as e:
111154
print(f"Error: Could not save config file: {e}")
112155

@@ -118,4 +161,6 @@ def save_window_state(self, size, pos, h_splitter_state, v_splitter_state):
118161
v_sash_qba = v_splitter_state.toBase64()
119162
self.config["h_sash_state"] = bytes(h_sash_qba.data()).decode("utf-8")
120163
self.config["v_sash_state"] = bytes(v_sash_qba.data()).decode("utf-8")
121-
self.save_config()
164+
165+
# Only save mutable keys (window state)
166+
self.save_config(save_static=False)

core/error_handling.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -63,10 +63,15 @@ def handle_process_cleanup(self, process, timeout: int = PROCESS_CLEANUP_TIMEOUT
6363
self.log_message("Process terminated gracefully.")
6464
return True
6565
except subprocess.TimeoutExpired:
66+
# Graceful terminate failed, force kill
6667
process.kill()
67-
process.wait(timeout=PROCESS_FORCE_KILL_WAIT_SECONDS)
68-
self.log_message("Process killed forcefully.")
69-
return True
68+
try:
69+
process.wait(timeout=PROCESS_FORCE_KILL_WAIT_SECONDS)
70+
self.log_message("Process killed forcefully.")
71+
return True
72+
except subprocess.TimeoutExpired:
73+
self.log_message("Process force-kill timed out. Process may be a zombie.")
74+
return False # Indicate that cleanup failed
7075
except Exception as e:
7176
self.log_message(f"Warning during process cleanup: {e}")
7277
return False

ui/main_window.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,7 @@ def __init__(self, config_service):
6262
self.hide_binaries_check: QCheckBox
6363
self.dir_level_ctrl: QSpinBox
6464
self.list_group: QGroupBox
65+
self.list_stack_layout: QVBoxLayout
6566
self.standard_log_list: QTableWidget
6667
self.local_file_list: QTableWidget
6768
self.progress_gauge: QProgressBar

0 commit comments

Comments
 (0)