Fix batch size (20→5) and script detection in monitor
- Reduce embed batch to 5 — AnythingLLM hangs on batches >10
- Fix check_script_running() to properly detect the setup.py process (it was returning False because pgrep also matched monitor.py)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
11
monitor.py
11
monitor.py
@@ -85,8 +85,15 @@ def check_api(config):
|
||||
def check_script_running():
    """Report whether a setup.py process is currently running.

    Returns:
        True when a matching process (other than the monitor or pgrep
        itself) is found, False when none is found, and None when
        process inspection fails (e.g. pgrep is unavailable).
    """
    try:
        import subprocess
        proc = subprocess.run(
            ["pgrep", "-af", "setup.py"],
            capture_output=True, text=True
        )
        # -a includes the full command line in each output line, which
        # lets us drop false positives: the monitor process itself and
        # the pgrep invocation both mention "setup.py" in their args.
        candidates = proc.stdout.strip().split("\n")
        return any(
            entry and "monitor" not in entry and "pgrep" not in entry
            for entry in candidates
        )
    except Exception:
        # Best-effort probe: signal "unknown" rather than crash the monitor.
        return None
|
||||
|
||||
|
||||
4
setup.py
4
setup.py
@@ -532,8 +532,8 @@ def assign_to_workspaces(config, persona_folders, progress, batch_size, delay):
|
||||
|
||||
log.info(f"[{idx}/{total_personas}] → {codename} ({slug}): {len(new_docs)} docs to embed")
|
||||
|
||||
# Use smaller batches for embedding (10-20 is safer than 50)
|
||||
embed_batch = min(batch_size, 20)
|
||||
# Use small batches for embedding — AnythingLLM hangs on large batches
|
||||
embed_batch = min(batch_size, 5)
|
||||
persona_ok = 0
|
||||
persona_fail = 0
|
||||
|
||||
|
||||
Reference in New Issue
Block a user