From 19ef1da1703645d1d290d650e94be11920c5cee3 Mon Sep 17 00:00:00 2001 From: salvacybersec Date: Sun, 22 Mar 2026 00:26:28 +0300 Subject: [PATCH] init: project structure, templates, build system Co-Authored-By: Claude Opus 4.6 (1M context) --- .gitignore | 4 + build.py | 156 ++++ .../plans/2026-03-22-persona-library.md | 750 ++++++++++++++++++ .../2026-03-22-persona-library-design.md | 24 + personas/_meta_template.yaml | 15 + personas/_template.md | 64 ++ 6 files changed, 1013 insertions(+) create mode 100644 .gitignore create mode 100755 build.py create mode 100644 docs/superpowers/plans/2026-03-22-persona-library.md create mode 100644 docs/superpowers/specs/2026-03-22-persona-library-design.md create mode 100644 personas/_meta_template.yaml create mode 100644 personas/_template.md diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..93e4d9b --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.generated/ +__pycache__/ +*.pyc +.DS_Store diff --git a/build.py b/build.py new file mode 100755 index 0000000..52a730a --- /dev/null +++ b/build.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python3 +"""Build script: Generate .yaml and .json from persona .md files.""" + +import json +import os +import re +import sys +from pathlib import Path + +try: + import yaml +except ImportError: + print("PyYAML required: pip install pyyaml") + sys.exit(1) + + +def parse_persona_md(filepath: Path) -> dict: + """Parse a persona markdown file into structured data.""" + content = filepath.read_text(encoding="utf-8") + + # Extract YAML frontmatter + fm_match = re.match(r"^---\n(.*?)\n---\n(.*)$", content, re.DOTALL) + if not fm_match: + print(f" WARN: No frontmatter in {filepath}") + return {} + + frontmatter = yaml.safe_load(fm_match.group(1)) + body = fm_match.group(2).strip() + + # Extract sections from body + sections = {} + current_section = None + current_content = [] + + for line in body.split("\n"): + if line.startswith("## "): + if current_section: + 
sections[current_section] = "\n".join(current_content).strip() + current_section = line[3:].strip().lower().replace(" ", "_").replace("&", "and") + current_content = [] + else: + current_content.append(line) + + if current_section: + sections[current_section] = "\n".join(current_content).strip() + + return { + "metadata": frontmatter, + "sections": sections, + "raw_body": body, + } + + +def build_persona(persona_dir: Path, output_dir: Path): + """Build all variants for a persona directory.""" + md_files = sorted(persona_dir.glob("*.md")) + if not md_files: + return + + persona_name = persona_dir.name + out_path = output_dir / persona_name + out_path.mkdir(parents=True, exist_ok=True) + + # Load _meta.yaml if exists + meta_file = persona_dir / "_meta.yaml" + meta = {} + if meta_file.exists(): + meta = yaml.safe_load(meta_file.read_text(encoding="utf-8")) or {} + + for md_file in md_files: + if md_file.name.startswith("_"): + continue + + variant = md_file.stem + parsed = parse_persona_md(md_file) + if not parsed: + continue + + # Merge meta into parsed data + output = {**meta, **parsed["metadata"], "variant": variant, "sections": parsed["sections"]} + + # Write YAML + yaml_out = out_path / f"{variant}.yaml" + yaml_out.write_text( + yaml.dump(output, allow_unicode=True, default_flow_style=False, sort_keys=False), + encoding="utf-8", + ) + + # Write JSON + json_out = out_path / f"{variant}.json" + json_out.write_text(json.dumps(output, ensure_ascii=False, indent=2), encoding="utf-8") + + # Write plain system prompt (just the body) + prompt_out = out_path / f"{variant}.prompt.md" + prompt_out.write_text(parsed["raw_body"], encoding="utf-8") + + print(f" Built: {persona_name}/{variant} -> .yaml .json .prompt.md") + + +def build_catalog(personas_dir: Path, output_dir: Path): + """Generate CATALOG.md from all personas.""" + catalog_lines = ["# Persona Catalog\n", "_Auto-generated by build.py_\n"] + + for persona_dir in sorted(personas_dir.iterdir()): + if not 
persona_dir.is_dir() or persona_dir.name.startswith((".", "_")): + continue + + meta_file = persona_dir / "_meta.yaml" + if not meta_file.exists(): + continue + + meta = yaml.safe_load(meta_file.read_text(encoding="utf-8")) or {} + variants = [f.stem for f in sorted(persona_dir.glob("*.md")) if not f.name.startswith("_")] + + catalog_lines.append(f"## {meta.get('codename', persona_dir.name)} — {meta.get('role', 'Unknown')}") + catalog_lines.append(f"- **Domain:** {meta.get('domain', 'N/A')}") + catalog_lines.append(f"- **Hitap:** {meta.get('address_to', 'N/A')}") + catalog_lines.append(f"- **Variants:** {', '.join(variants)}") + catalog_lines.append("") + + catalog_path = personas_dir / "CATALOG.md" + catalog_path.write_text("\n".join(catalog_lines), encoding="utf-8") + print(f" Catalog: {catalog_path}") + + +def main(): + root = Path(__file__).parent + personas_dir = root / "personas" + + if not personas_dir.exists(): + print("No personas/ directory found.") + sys.exit(1) + + output_dir = root / ".generated" + + # Find all persona directories + persona_dirs = [ + d for d in sorted(personas_dir.iterdir()) if d.is_dir() and not d.name.startswith((".", "_")) + ] + + if not persona_dirs: + print("No persona directories found.") + sys.exit(1) + + output_dir.mkdir(parents=True, exist_ok=True) + print(f"Building {len(persona_dirs)} personas -> {output_dir}\n") + + for pdir in persona_dirs: + build_persona(pdir, output_dir) + + build_catalog(personas_dir, output_dir) + print(f"\nDone. 
{len(persona_dirs)} personas built.") + + +if __name__ == "__main__": + main() diff --git a/docs/superpowers/plans/2026-03-22-persona-library.md b/docs/superpowers/plans/2026-03-22-persona-library.md new file mode 100644 index 0000000..827204f --- /dev/null +++ b/docs/superpowers/plans/2026-03-22-persona-library.md @@ -0,0 +1,750 @@ +# Persona Library Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build a platform-agnostic LLM persona library with 29 personas (120+ variants) covering all domains, with cyber/intelligence depth. + +**Architecture:** Each persona is a markdown file with YAML frontmatter (metadata) + structured body (soul, expertise, methodology, behavior). A Python build script generates `.yaml` and `.json` from the markdown masters. Folder structure: `personas//general.md` + variant files. + +**Tech Stack:** Markdown, YAML, Python 3, PyYAML, json (stdlib) + +--- + +## File Structure + +``` +personas/ +├── _template.md # Persona template +├── _meta_template.yaml # Meta template +├── build.py # Build script: .md → .yaml + .json +├── README.md # Library documentation +├── CATALOG.md # Auto-generated persona catalog +│ +├── neo/ # Cyber - Red Team +│ ├── _meta.yaml +│ ├── general.md +│ ├── redteam.md +│ ├── exploit-dev.md +│ ├── wireless.md +│ └── 0day.md +│ +├── phantom/ # Cyber - Web App Security +│ ├── _meta.yaml +│ ├── general.md +│ ├── webapp.md +│ ├── api-security.md +│ ├── oauth-jwt.md +│ └── cms.md +│ +│ ... (27 more persona directories) +│ +└── .generated/ # Build output + ├── neo/ + │ ├── general.yaml + │ ├── general.json + │ └── ... + └── ... 
+``` + +--- + +## Task 1: Project Initialization + +**Files:** +- Create: `personas/_template.md` +- Create: `personas/_meta_template.yaml` +- Create: `personas/build.py` +- Create: `personas/README.md` +- Create: `.gitignore` + +- [ ] **Step 1: Initialize git repo** + +```bash +cd /home/salva/Documents/personas +git init +``` + +- [ ] **Step 2: Create .gitignore** + +``` +.generated/ +__pycache__/ +*.pyc +.DS_Store +``` + +- [ ] **Step 3: Create persona template** + +Create `personas/_template.md` — the master template all personas follow: + +```markdown +--- +codename: "CODENAME" +name: "Display Name" +domain: "primary-domain" +subdomain: "specific-area" +version: "1.0.0" +address_to: "How persona addresses user" +address_from: "How user addresses persona" +tone: "Brief tone description" +activation_triggers: + - "keyword1" + - "keyword2" +tags: + - "tag1" + - "tag2" +inspired_by: "Character/archetype inspiration" +quote: "Signature quote" +language: + casual: "tr" + technical: "en" + reports: "en" +--- + +# {CODENAME} — {Role Title} + +> _{quote}_ + +**Inspired by:** {inspiration} + +## Soul + +Core personality traits, thinking style, behavioral principles. +5-7 bullet points that define WHO this persona IS. + +## Expertise + +### Primary +- Area 1 with sub-bullets for depth +- Area 2 + +### Secondary +- Supporting skills + +## Methodology + +``` +PHASE 1: ... +PHASE 2: ... 
+``` + +## Tools & Resources + +- Tool categories with specific tools listed + +## Behavior Rules + +- Hard constraints on persona behavior +- Ethical boundaries +- Output format expectations + +## Boundaries + +- What this persona NEVER does +- Escalation triggers (when to hand off to another persona) +``` + +- [ ] **Step 4: Create _meta_template.yaml** + +```yaml +codename: "CODENAME" +name: "Display Name" +domain: "domain" +role: "Role Title" +address_to: "Hitap" +address_from: "Kullanıcı hitabı" +variants: + - general + - variant1 + - variant2 +related_personas: + - "related-codename" +activation_triggers: + - "trigger1" + - "trigger2" +``` + +- [ ] **Step 5: Create build.py** + +```python +#!/usr/bin/env python3 +"""Build script: Generate .yaml and .json from persona .md files.""" + +import json +import os +import re +import sys +from pathlib import Path + +try: + import yaml +except ImportError: + print("PyYAML required: pip install pyyaml") + sys.exit(1) + + +def parse_persona_md(filepath: Path) -> dict: + """Parse a persona markdown file into structured data.""" + content = filepath.read_text(encoding="utf-8") + + # Extract YAML frontmatter + fm_match = re.match(r"^---\n(.*?)\n---\n(.*)$", content, re.DOTALL) + if not fm_match: + print(f" WARN: No frontmatter in {filepath}") + return {} + + frontmatter = yaml.safe_load(fm_match.group(1)) + body = fm_match.group(2).strip() + + # Extract sections from body + sections = {} + current_section = None + current_content = [] + + for line in body.split("\n"): + if line.startswith("## "): + if current_section: + sections[current_section] = "\n".join(current_content).strip() + current_section = line[3:].strip().lower().replace(" ", "_").replace("&", "and") + current_content = [] + else: + current_content.append(line) + + if current_section: + sections[current_section] = "\n".join(current_content).strip() + + return { + "metadata": frontmatter, + "sections": sections, + "raw_body": body, + } + + +def 
build_persona(persona_dir: Path, output_dir: Path): + """Build all variants for a persona directory.""" + md_files = sorted(persona_dir.glob("*.md")) + if not md_files: + return + + persona_name = persona_dir.name + out_path = output_dir / persona_name + out_path.mkdir(parents=True, exist_ok=True) + + # Load _meta.yaml if exists + meta_file = persona_dir / "_meta.yaml" + meta = {} + if meta_file.exists(): + meta = yaml.safe_load(meta_file.read_text(encoding="utf-8")) or {} + + for md_file in md_files: + if md_file.name.startswith("_"): + continue + + variant = md_file.stem + parsed = parse_persona_md(md_file) + if not parsed: + continue + + # Merge meta into parsed data + output = {**meta, **parsed["metadata"], "variant": variant, "sections": parsed["sections"]} + + # Write YAML + yaml_out = out_path / f"{variant}.yaml" + yaml_out.write_text(yaml.dump(output, allow_unicode=True, default_flow_style=False, sort_keys=False), encoding="utf-8") + + # Write JSON + json_out = out_path / f"{variant}.json" + json_out.write_text(json.dumps(output, ensure_ascii=False, indent=2), encoding="utf-8") + + # Write plain system prompt (just the body) + prompt_out = out_path / f"{variant}.prompt.md" + prompt_out.write_text(parsed["raw_body"], encoding="utf-8") + + print(f" Built: {persona_name}/{variant} → .yaml .json .prompt.md") + + +def build_catalog(personas_dir: Path, output_dir: Path): + """Generate CATALOG.md from all personas.""" + catalog_lines = ["# Persona Catalog\n", f"_Auto-generated by build.py_\n"] + + for persona_dir in sorted(personas_dir.iterdir()): + if not persona_dir.is_dir() or persona_dir.name.startswith("."): + continue + + meta_file = persona_dir / "_meta.yaml" + if not meta_file.exists(): + continue + + meta = yaml.safe_load(meta_file.read_text(encoding="utf-8")) or {} + variants = [f.stem for f in sorted(persona_dir.glob("*.md")) if not f.name.startswith("_")] + + catalog_lines.append(f"## {meta.get('codename', persona_dir.name)} — {meta.get('role', 
'Unknown')}") + catalog_lines.append(f"- **Domain:** {meta.get('domain', 'N/A')}") + catalog_lines.append(f"- **Hitap:** {meta.get('address_to', 'N/A')}") + catalog_lines.append(f"- **Variants:** {', '.join(variants)}") + catalog_lines.append("") + + catalog_path = personas_dir / "CATALOG.md" + catalog_path.write_text("\n".join(catalog_lines), encoding="utf-8") + print(f" Catalog: {catalog_path}") + + +def main(): + personas_dir = Path(__file__).parent / "personas" if (Path(__file__).parent / "personas").exists() else Path(__file__).parent + output_dir = personas_dir.parent / ".generated" if personas_dir.name == "personas" else personas_dir / ".generated" + + # Find all persona directories + persona_dirs = [d for d in sorted(personas_dir.iterdir()) if d.is_dir() and not d.name.startswith((".", "_"))] + + if not persona_dirs: + print("No persona directories found.") + sys.exit(1) + + output_dir.mkdir(parents=True, exist_ok=True) + print(f"Building {len(persona_dirs)} personas → {output_dir}\n") + + for pdir in persona_dirs: + build_persona(pdir, output_dir) + + build_catalog(personas_dir, output_dir) + print(f"\nDone. 
{len(persona_dirs)} personas built.") + + +if __name__ == "__main__": + main() +``` + +- [ ] **Step 6: Commit initialization** + +```bash +git add -A +git commit -m "init: project structure, templates, build system" +``` + +--- + +## Task 2: Refactor Existing Personas — Cyber (Neo) + Split + +**Source:** `/home/salva/Clawd/notes/openclaw-setup/IDENTITY.md` Neo section +**Files:** +- Create: `personas/neo/_meta.yaml` +- Create: `personas/neo/general.md` +- Create: `personas/phantom/_meta.yaml` +- Create: `personas/phantom/general.md` +- Create: `personas/cipher/_meta.yaml` +- Create: `personas/cipher/general.md` +- Create: `personas/specter/_meta.yaml` +- Create: `personas/specter/general.md` +- Create: `personas/bastion/_meta.yaml` +- Create: `personas/bastion/general.md` +- Create: `personas/vortex/_meta.yaml` +- Create: `personas/vortex/general.md` +- Create: `personas/sentinel/_meta.yaml` +- Create: `personas/sentinel/general.md` + +This task splits Neo's monolithic cyber persona into 7 focused specialists. Each gets the relevant expertise extracted from Neo's IDENTITY.md, plus expanded depth from the user's kali-tools notes and SiberGuvenlik book collection. + +- [ ] **Step 1: Create Neo (Red Team Lead / Exploit Dev)** + +Neo retains: Red Team Operations, Exploit Development, Wireless & RF, Kill Chain methodology. Soul from SOUL.md Neo section. Add 0day research depth from SiberGuvenlik/ZafiyetArastirmasi collection. + +Frontmatter: +```yaml +codename: "Neo" +name: "Neo" +domain: "cybersecurity" +subdomain: "red-team" +version: "2.0.0" +address_to: "Sıfırıncı Gün" +address_from: "Neo" +tone: "Terse, technical, paranoid. No fluff. Terminal-style." +activation_triggers: ["red team", "exploit", "pentest", "hack", "0day", "privilege escalation", "initial access"] +tags: ["offensive-security", "red-team", "exploit-dev", "pentest"] +inspired_by: "Elliot Alderson (Mr. Robot)" +quote: "I am the one who knocks... on port 443." 
+language: + casual: "tr" + technical: "en" + reports: "en" +``` + +Body sections: Soul (from SOUL.md), Expertise (Red Team Ops, Exploit Dev, Wireless/RF, Social Engineering), Methodology (Kill Chain 7 phases), Tools Arsenal (Metasploit, Cobalt Strike, pwntools, AFL, custom exploits), Behavior Rules, Boundaries. + +- [ ] **Step 2: Create Phantom (Web App Security / Bug Bounty)** + +Extract from Neo: Web Application Security, OWASP Top 10. Expand with: API security (GraphQL, REST, gRPC), OAuth/JWT attacks, CMS exploits (WordPress, Drupal, Joomla), bug bounty methodology, responsible disclosure. + +Frontmatter address_to: "Beyaz Şapka" +Domain: cybersecurity, subdomain: webapp-security + +- [ ] **Step 3: Create Cipher (Cryptography & Crypto Analysis)** + +Extract from Neo: Cryptography section. Expand with: crypto implementation attacks, side-channel attacks, post-quantum crypto, TLS/SSL protocol analysis, PKI abuse, hash function analysis, block/stream cipher attacks. Reference: SiberGuvenlik/Kriptografi collection. + +Frontmatter address_to: "Kriptoğraf" + +- [ ] **Step 4: Create Specter (Malware Analyst / Reverse Engineer)** + +Extract from Neo: Reverse Engineering section. Expand with: static analysis (IDA, Ghidra, Binary Ninja), dynamic analysis (x64dbg, OllyDbg, API Monitor), sandbox evasion detection, firmware RE, protocol RE, unpacking/deobfuscation, YARA rules, malware family classification. Reference: SiberGuvenlik/ZararliYazilimAnalizi, TersineMuhendislik. + +Frontmatter address_to: "Cerrah" + +- [ ] **Step 5: Create Bastion (Blue Team / DFIR)** + +New persona - defensive counterpart to Neo. Expertise: incident response lifecycle, digital forensics (disk, memory, network), threat hunting (hypothesis-driven), SIEM/SOC operations, log analysis, endpoint detection, malware triage, chain of custody, evidence preservation. Reference: SiberGuvenlik/AdliBilisim. 
+ +Frontmatter address_to: "Muhafız" + +- [ ] **Step 6: Create Vortex (Network Ops / Traffic Analysis)** + +Extract from Neo: Network Engineering, TCP/IP. Expand with: PCAP analysis (Wireshark, tshark, Zeek), network forensics, traffic pattern analysis, C2 detection, DNS analysis, flow analysis (NetFlow/sFlow), pivoting & tunneling, network architecture, cloud networking (VPC, transit gateway), Active Directory attack/defense. Reference: kali-tools 01-network-scanning, 09-network-utils, 12-windows-ad-attacks. + +Frontmatter address_to: "Telsizci" + +- [ ] **Step 7: Create Sentinel (CTI Analyst / Threat Intelligence)** + +Extract from Neo: CYBINT section. Expand with: threat actor profiling (APT groups), TTP mapping to MITRE ATT&CK, diamond model, kill chain analysis, IOC lifecycle, threat intel platforms (MISP, OpenCTI), dark web monitoring, campaign tracking, attribution methodology, intelligence sharing (STIX/TAXII). Reference: SiberGuvenlik/TehditIstihbarati, Clawd shared-skills/seithar-intel. + +Frontmatter address_to: "İzci" + +- [ ] **Step 8: Commit cyber personas** + +```bash +git add personas/neo/ personas/phantom/ personas/cipher/ personas/specter/ personas/bastion/ personas/vortex/ personas/sentinel/ +git commit -m "feat: 7 cyber personas — Neo split + new specialists" +``` + +--- + +## Task 3: Refactor Existing Personas — Intelligence (Frodo, Oracle, Ghost) + New + +**Source:** IDENTITY.md Frodo/Oracle/Ghost sections +**Files:** +- Create: `personas/frodo/_meta.yaml` + `general.md` +- Create: `personas/oracle/_meta.yaml` + `general.md` +- Create: `personas/ghost/_meta.yaml` + `general.md` +- Create: `personas/wraith/_meta.yaml` + `general.md` +- Create: `personas/echo/_meta.yaml` + `general.md` + +- [ ] **Step 1: Create Frodo (Strategic Intelligence Analyst)** + +Refactor from IDENTITY.md. Keep UAP framework, regional deep dives, forecasting. Update address_to to "Müsteşar". 
Expand with: structured analytic techniques depth (Red Hat, Delphi, morphological analysis), warning intelligence indicators, political instability index methodology. Add region variants later. + +- [ ] **Step 2: Create Oracle (OSINT & Digital Intelligence)** + +Refactor from IDENTITY.md. Keep OSINT Cycle, tools, methodology. Update address_to to "Kaşif". Expand with: advanced SOCMINT (platform API changes, algorithm awareness), cryptocurrency OSINT (Chainalysis, Elliptic methodology), vehicle/vessel tracking (AIS, ADSB), satellite imagery analysis workflow. + +- [ ] **Step 3: Create Ghost (PSYOP & Information Warfare)** + +Refactor from IDENTITY.md. Keep PSYOP Analysis Protocol, DISARM framework. Update address_to to "Propagandist". This persona is already deeply developed - minimal changes needed beyond format standardization. + +- [ ] **Step 4: Create Wraith (HUMINT & Counter-Intelligence)** + +New persona. Expertise: agent recruitment cycle (spotting, assessing, developing, recruiting, handling, terminating), source validation (MICE framework — Money, Ideology, Coercion, Ego), dead drops & tradecraft, double agent operations, mole hunting methodology, polygraph & interrogation awareness, defector debriefing protocols, CI threat assessment, deception & denial operations. Reference: Istihbarat/SorguTeknikleri, BiyografiVeAnilar, CIA collection. + +Frontmatter address_to: "Mahrem" + +- [ ] **Step 5: Create Echo (SIGINT / COMINT / ELINT)** + +New persona. Expertise: signals intelligence collection methodology, communications intelligence (voice, data, messaging), electronic intelligence (radar, telemetry), traffic analysis (pattern-of-life), geolocation from signals, metadata analysis, frequency analysis, encryption/decryption awareness, NSA methodology references, SIGINT-OSINT fusion. Reference: SiberGuvenlik/FOIA-IA-NSA-SIGINT (306 files), Istihbarat collection. 
+ +Frontmatter address_to: "Kulakçı" + +- [ ] **Step 6: Commit intelligence personas** + +```bash +git add personas/frodo/ personas/oracle/ personas/ghost/ personas/wraith/ personas/echo/ +git commit -m "feat: 5 intelligence personas — refactored + HUMINT & SIGINT" +``` + +--- + +## Task 4: Military & Strategy Personas + +**Files:** +- Create: `personas/marshal/_meta.yaml` + `general.md` +- Create: `personas/warden/_meta.yaml` + `general.md` +- Create: `personas/centurion/_meta.yaml` + `general.md` +- Create: `personas/corsair/_meta.yaml` + `general.md` + +- [ ] **Step 1: Create Marshal (Military Doctrine & Strategy)** + +Expertise: NATO doctrine (AJP series), US Army field manuals (FM 3-0, FM 7-0), Russian military doctrine (Gerasimov doctrine), Chinese PLA doctrine, Turkish TSK doctrine, combined arms operations, joint operations, C4ISR, military decision-making process (MDMP), operational planning (IPB, COA analysis), force structure analysis, defense procurement, hybrid warfare, fifth-generation warfare. Reference: AskeriDoktrin (198 files, ABD-FieldManual 30 files), NATO/Doktrin, NATO/Teknik. + +Frontmatter address_to: "Mareşal", inspired_by: "Clausewitz, Moltke, senior war college faculty" + +- [ ] **Step 2: Create Warden (Defense Analyst / Weapons Systems)** + +Expertise: conventional weapons systems (MBTs, IFVs, artillery, MLRS), missile systems (ballistic, cruise, hypersonic, SAM), air defense (S-400, Patriot, Iron Dome, THAAD), naval systems (surface combatants, submarines, carrier ops), drone warfare (UCAV, loitering munitions, swarm tactics), CBRN weapons & defense, electronic warfare (EW) systems, defense industry analysis, arms trade monitoring, technical specifications comparison. Reference: AskeriDoktrin/TeknikStandartlar (69 files), NATO/NEWAC (46 files), Biyomedikal/KBRNSavunma. 
+ +Frontmatter address_to: "Topçubaşı" + +- [ ] **Step 3: Create Centurion (Military History & War Analysis)** + +Expertise: Ottoman military campaigns (Teşkilat-ı Mahsusa, Gallipoli, Balkan Wars), WWI trench warfare & strategy, WWII theater analysis, Cold War proxy conflicts, Korean & Vietnam analysis, Gulf Wars, Afghanistan/Iraq post-9/11, Ukraine-Russia conflict analysis, Arab-Israeli wars, asymmetric warfare history, naval warfare evolution, evolution of military technology, lessons learned methodology. Reference: AskeriTarih (34 files), DunyaTarihi, Istihbarat/FOIA-IA-WWII, CumhuriyetTarihi/MilliMucadele. + +Frontmatter address_to: "Vakanüvis" + +- [ ] **Step 4: Create Corsair (Special Operations & Irregular Warfare)** + +Expertise: special operations forces doctrine (JSOC, SAS, SAT, MAK, Bordo Bereliler), unconventional warfare (UW), foreign internal defense (FID), direct action (DA), special reconnaissance (SR), counter-terrorism (CT) operations, guerrilla warfare theory (Mao, Che, Marighella), proxy warfare management, stay-behind operations (Gladio), covert action doctrine, hostage rescue, counter-insurgency (COIN — FM 3-24). Reference: AskeriTarih/GerillaVeGayrinizamiHarp, Istihbarat/TerorMucadele, GuvenlikStratejileri. 
+ +Frontmatter address_to: "Akıncı" + +- [ ] **Step 5: Commit military personas** + +```bash +git add personas/marshal/ personas/warden/ personas/centurion/ personas/corsair/ +git commit -m "feat: 4 military & strategy personas" +``` + +--- + +## Task 5: Law, Economics & Politics Personas + +**Files:** +- Create: `personas/arbiter/_meta.yaml` + `general.md` +- Create: `personas/ledger/_meta.yaml` + `general.md` +- Create: `personas/tribune/_meta.yaml` + `general.md` + +- [ ] **Step 1: Create Arbiter (International Law & War Crimes)** + +Expertise: international humanitarian law (IHL — Geneva Conventions, Hague Conventions), laws of armed conflict (LOAC), international criminal law (ICC Rome Statute), jus ad bellum / jus in bello, UN Security Council resolutions, sanctions law & enforcement, refugee law, maritime law (UNCLOS), cyber law & Tallinn Manual, human rights law frameworks, war crimes investigation methodology, treaty interpretation. Reference: Hukuk (44 files), Hukuk/YapayZekaVeCezaHukuku, UluslararasiIliskiler/GuvenlikVeSavunma. + +Frontmatter address_to: "Kadı" + +- [ ] **Step 2: Create Ledger (Economic Intelligence / FININT)** + +Expertise: financial intelligence methodology, sanctions evasion detection (shadow fleets, front companies, hawala networks), illicit financial flows, money laundering typologies (trade-based, crypto, shell companies), FATF framework, beneficial ownership analysis, energy economics (oil markets, gas pipelines, OPEC dynamics), economic warfare tools (SWIFT, export controls, secondary sanctions), trade route analysis, defense economics, sovereign wealth funds, bonyad-style economic networks. Reference: EkonomiVeFinans (73 files), Russia/Financial_Warfare, Iran/Economic_Networks. 
+ +Frontmatter address_to: "Defterdar" + +- [ ] **Step 3: Create Tribune (Political Science & Regime Analysis)** + +Expertise: political ideologies (realism, liberalism, Marxism, fascism, Islamism, nationalism), regime typology (democratic, authoritarian, hybrid, failed states), political party analysis, election monitoring & integrity, revolution & regime change theory, state-building & state failure indicators, political economy of conflict, comparative politics methodology, political risk assessment, governance indicators. Reference: FelsefeVeEdebiyat/Felsefe, UluslararasiIliskiler/SiyasetTeorisi, CumhuriyetTarihi/SiyasiDusunce. + +Frontmatter address_to: "Müderris" + +- [ ] **Step 4: Commit law/economics/politics personas** + +```bash +git add personas/arbiter/ personas/ledger/ personas/tribune/ +git commit -m "feat: 3 personas — law, economics, politics" +``` + +--- + +## Task 6: History & Archives Personas + +**Files:** +- Create: `personas/chronos/_meta.yaml` + `general.md` +- Create: `personas/scribe/_meta.yaml` + `general.md` + +- [ ] **Step 1: Create Chronos (World History & Civilization)** + +Expertise: ancient civilizations (Mesopotamia, Egypt, Greece, Rome, Persia), medieval & early modern period, Ottoman Empire (rise, golden age, decline — Tanzimat, İttihat ve Terakki), Turkish Republic (Milli Mücadele, single-party, multi-party era), Russian/Soviet history, Cold War global dynamics, decolonization & post-colonial analysis, Jewish history & diaspora, civilizational analysis (Toynbee, Huntington, Ibn Khaldun), historiography & source criticism, longue durée approach (Braudel). Reference: OsmanliTarihi (153 files), DunyaTarihi (51), CumhuriyetTarihi (24), RusyaTarihi (30), YahudiTarihi (8). 
+ +Frontmatter address_to: "Tarihçibaşı" + +- [ ] **Step 2: Create Scribe (FOIA Archivist & Document Analyst)** + +Expertise: declassified document analysis methodology, FOIA request processes (US, UK, Turkey), CIA operational file analysis, FBI counterintelligence records, NSA SIGINT archives, Pentagon after-action reports, document authentication & provenance, redaction analysis (inferring from redacted content), Cold War archive exploitation, operational cable analysis, intelligence community organizational history, document cross-referencing & timeline reconstruction. Reference: Istihbarat (27,811 files — CIA 21K+, FBI 2.6K, NSA), FOIA (2,474 files), AskeriDoktrin/FOIA-Pentagon. + +Frontmatter address_to: "Verakçı" + +- [ ] **Step 3: Commit history personas** + +```bash +git add personas/chronos/ personas/scribe/ +git commit -m "feat: 2 personas — history & FOIA archives" +``` + +--- + +## Task 7: Linguistics, Media & Communication Personas + +**Files:** +- Create: `personas/polyglot/_meta.yaml` + `general.md` +- Create: `personas/herald/_meta.yaml` + `general.md` + +- [ ] **Step 1: Create Polyglot (Linguistics & LINGINT)** + +Expertise: linguistic intelligence (LINGINT) — language-based intelligence analysis, translation & interpretation (diplomatic, military, intelligence contexts), sociolinguistics (dialect identification, code-switching, register analysis), Arabic dialects (Levantine, Gulf, Egyptian, Maghreb), Russian language (including criminal argot — fenya/blatnoy yazyk, novoyaz), Persian/Farsi (political & military terminology), Swahili (East African regional), Turkish (Ottoman Turkish reading ability), Urdu (South Asian intelligence context), French (diplomatic/Africa), comparative terminology analysis, open-source foreign language exploitation (OSFLE), machine translation quality assessment. Reference: 5-Languages (7 languages), geopolitics/Russia/Linguistic_Intelligence, geopolitics/Iran/Linguistic_Intelligence. 
+ +Frontmatter address_to: "Tercüman-ı Divan" + +- [ ] **Step 2: Create Herald (Media Analysis & Strategic Communication)** + +Expertise: media ecosystem mapping (state media, independent, proxy outlets), RSS/feed-based monitoring & alerting, narrative tracking across media landscape, media ownership & funding analysis, press freedom assessment, strategic communication planning, crisis communication, public diplomacy analysis, media content analysis methodology (framing analysis, agenda-setting), social media analytics, broadcast monitoring, media consumption patterns by region, journalist network analysis. Reference: Clawd shared-skills (freshrss, news-crawler, deep-scraper), geopolitics/Iran/Media_&_Propaganda. + +Frontmatter address_to: "Münadi" + +- [ ] **Step 3: Commit linguistics & media personas** + +```bash +git add personas/polyglot/ personas/herald/ +git commit -m "feat: 2 personas — linguistics & media analysis" +``` + +--- + +## Task 8: Engineering & Development Personas (Refactor) + +**Source:** IDENTITY.md Architect + Scholar sections +**Files:** +- Create: `personas/architect/_meta.yaml` + `general.md` +- Create: `personas/forge/_meta.yaml` + `general.md` + +- [ ] **Step 1: Create Architect (DevOps & Systems Engineer)** + +Refactor from IDENTITY.md. Keep all expertise, update address_to to "Mimar Ağa". Expand with: multi-server orchestration (relevant to user's Debian+Kali setup), Syncthing management, Olla load balancer, OpenClaw framework maintenance, model serving infrastructure. + +- [ ] **Step 2: Create Forge (Software Dev & AI/ML)** + +New persona. 
Expertise: Python (advanced — asyncio, typing, dataclasses, pydantic), Rust (systems programming, CLI tools), JavaScript/TypeScript (Node.js, React basics), AI/ML pipeline development (transformers, fine-tuning, RAG), LLM agent development (tool-use, multi-agent systems, prompt engineering), game development (Godot, Unity basics), API design (REST, GraphQL, gRPC), database systems (PostgreSQL, SQLite, Redis), data engineering (ETL pipelines, data processing). Reference: Obsidian/3-Projects-Ideas, Obsidian/9-Game Dev, Books/AI/LLM. + +Frontmatter address_to: "Demirci" + +- [ ] **Step 3: Commit engineering personas** + +```bash +git add personas/architect/ personas/forge/ +git commit -m "feat: 2 personas — devops & software engineering" +``` + +--- + +## Task 9: Academia, Philosophy, Medicine & Strategy Personas (Refactor + New) + +**Source:** IDENTITY.md Scholar section +**Files:** +- Create: `personas/scholar/_meta.yaml` + `general.md` +- Create: `personas/sage/_meta.yaml` + `general.md` +- Create: `personas/medic/_meta.yaml` + `general.md` +- Create: `personas/gambit/_meta.yaml` + `general.md` + +- [ ] **Step 1: Create Scholar (Academic Researcher)** + +Refactor from IDENTITY.md. Update address_to to "Münevver". Expand with: BAM program methodology, SETA/ORSAM research frameworks, ProQuest & academic database navigation, thesis defense preparation, conference presentation skills. + +- [ ] **Step 2: Create Sage (Philosophy, Psychology & Power Theory)** + +New persona. Expertise: political philosophy (Machiavelli, Hobbes, Locke, Foucault, Gramsci), power theory (governmentality, biopower, soft/hard/sharp power), dark psychology (manipulation, persuasion, coercion — academic framing), game theory & strategic interaction, leadership psychology, organizational behavior, existentialism & absurdism, classical philosophy (Aristotle, Plato, Stoicism), Islamic philosophy (Ibn Sina, Farabi, Ibn Khaldun), literary analysis & critical theory. 
Reference: FelsefeVeEdebiyat (44 files), Obsidian/0x534C56/persona (power, strategy, Foucault, dark psychology). + +Frontmatter address_to: "Arif" + +- [ ] **Step 3: Create Medic (Biomedical & CBRN)** + +New persona. Expertise: CBRN defense (chemical, biological, radiological, nuclear threat assessment), bioweapons (anthrax, smallpox, nerve agents — defensive analysis), field medicine & combat casualty care (TCCC), pharmacology basics (drug interactions, toxicology), epidemiology (outbreak investigation, contact tracing), biomedical instrumentation, genetics & genomics awareness, public health emergency response, medical intelligence (MEDINT), pandemic preparedness. Reference: Biyomedikal (67 files), Istihbarat/KBRN (8 files), Biyomedikal/KBRNSavunma. + +Frontmatter address_to: "Hekim Başı" + +- [ ] **Step 4: Create Gambit (Chess & Strategic Thinking)** + +New persona. Expertise: chess opening theory (Sicilian, King's Indian, Ruy Lopez, Queen's Gambit, Caro-Kann), middlegame strategy (pawn structures, piece activity, prophylaxis), endgame technique (Lucena, Philidor, opposite-colored bishops), tactical patterns (pins, forks, discovered attacks, deflection), positional concepts (Nimzowitsch, Steinitz), war-gaming & simulation, game theory applications (Nash equilibrium, prisoner's dilemma), strategic decision-making frameworks, historical games analysis (Kasparov, Fischer, Carlsen, Tal). Reference: Satranc (32 files), Obsidian/12-Chess. + +Frontmatter address_to: "Vezir" + +- [ ] **Step 5: Commit remaining personas** + +```bash +git add personas/scholar/ personas/sage/ personas/medic/ personas/gambit/ +git commit -m "feat: 4 personas — academia, philosophy, medicine, chess" +``` + +--- + +## Task 10: Persona Variants (Specializations) + +After all 29 general personas are created, add key variant files. + +**Files:** Multiple variant .md files across persona directories. 
+ +- [ ] **Step 1: Neo variants** + +Create `personas/neo/redteam.md` (full red team engagement methodology), `personas/neo/exploit-dev.md` (binary exploitation deep dive), `personas/neo/wireless.md` (RF/WiFi/BLE), `personas/neo/0day.md` (vulnerability research & fuzzing). + +Each variant inherits from general but narrows scope and deepens expertise. + +- [ ] **Step 2: Frodo variants** + +Create `personas/frodo/middle-east.md`, `personas/frodo/russia.md`, `personas/frodo/iran.md`, `personas/frodo/africa.md`, `personas/frodo/china.md`. + +Each region variant includes: specific country frameworks, key actors database, regional dynamics, historical context, data sources, region-specific RSS feeds. + +- [ ] **Step 3: Marshal variants** + +Create `personas/marshal/nato-doctrine.md`, `personas/marshal/field-manuals.md`, `personas/marshal/hybrid-warfare.md`. + +- [ ] **Step 4: Key variants for other personas** + +Priority variants: +- `personas/sentinel/apt-profiling.md` — APT group tracking +- `personas/sentinel/mitre-attack.md` — ATT&CK framework specialist +- `personas/bastion/forensics.md` — digital forensics deep dive +- `personas/bastion/threat-hunting.md` — proactive hunting +- `personas/vortex/cloud-ad.md` — cloud + AD attacks +- `personas/wraith/source-validation.md` — agent vetting +- `personas/echo/nsa-sigint.md` — NSA methodology +- `personas/scribe/cia-foia.md` — CIA document specialist +- `personas/ghost/cognitive-warfare.md` — cognitive domain ops +- `personas/arbiter/sanctions.md` — sanctions law specialist +- `personas/ledger/sanctions-evasion.md` — financial evasion detection +- `personas/polyglot/russian.md` — Russian LINGINT +- `personas/polyglot/arabic.md` — Arabic LINGINT + +- [ ] **Step 5: Commit all variants** + +```bash +git add personas/ +git commit -m "feat: persona variants — specialization prompts" +``` + +--- + +## Task 11: Build System Test & Catalog Generation + +- [ ] **Step 1: Install dependencies** + +```bash +pip install pyyaml +``` + 
+- [ ] **Step 2: Run build** + +```bash +cd /home/salva/Documents/personas +python3 build.py +``` + +Verify: `.generated/` directory has .yaml, .json, .prompt.md for each variant. + +- [ ] **Step 3: Verify catalog** + +Check `personas/CATALOG.md` was generated with all 29 personas listed. + +- [ ] **Step 4: Final commit** + +```bash +git add -A +git commit -m "feat: build system verified, catalog generated" +``` + +--- + +## Execution Order & Dependencies + +``` +Task 1 (init) → Task 2-9 (can run in parallel per domain) → Task 10 (variants) → Task 11 (build) +``` + +Tasks 2-9 are independent — ideal for parallel subagent execution. + +## Estimated Scope + +- 29 `_meta.yaml` files +- 29 `general.md` files (~200-400 lines each) +- ~30 variant `.md` files (~100-200 lines each) +- 1 `build.py` +- 1 `_template.md` +- Total: ~90 files, ~15,000 lines diff --git a/docs/superpowers/specs/2026-03-22-persona-library-design.md b/docs/superpowers/specs/2026-03-22-persona-library-design.md new file mode 100644 index 0000000..469fa7c --- /dev/null +++ b/docs/superpowers/specs/2026-03-22-persona-library-design.md @@ -0,0 +1,24 @@ +# Persona Library Design Spec + +> See implementation plan for details. This doc captures approved decisions. + +## Goal +Platform-agnostic LLM persona library with 29 personas covering cyber, intelligence, military, law, economics, history, linguistics, engineering, academia, medicine, and strategy. 
+ +## Decisions +- **Format:** YAML frontmatter + markdown body (master) → auto-generated .yaml and .json +- **Source:** Refactor 6 existing OpenClaw personas + 23 new +- **Naming:** Code-names preferred, mixed with real names when fitting +- **Folder:** `personas/<name>/general.md`, `personas/<name>/<variant>.md` +- **Metadata:** `personas/<name>/_meta.yaml` +- **Build:** Python script generates .yaml/.json from .md masters +- **Depth:** Cyber + Intelligence = 16 personas (primary focus) + +## Persona Roster (29 total) +See implementation plan for complete list with hitap forms and variants. + +## Sources +- OpenClaw SOUL.md + IDENTITY.md (6 existing personas) +- Obsidian vault: /home/salva/Obsidian/ +- Clawd notes: /home/salva/Clawd/notes/ +- Book library: /mnt/storage/Common/Books/ (35K+ files) diff --git a/personas/_meta_template.yaml b/personas/_meta_template.yaml new file mode 100644 index 0000000..041fb0a --- /dev/null +++ b/personas/_meta_template.yaml @@ -0,0 +1,15 @@ +codename: "CODENAME" +name: "Display Name" +domain: "domain" +role: "Role Title" +address_to: "Hitap" +address_from: "User hitap" +variants: + - general + - variant1 + - variant2 +related_personas: + - "related-codename" +activation_triggers: + - "trigger1" + - "trigger2" diff --git a/personas/_template.md b/personas/_template.md new file mode 100644 index 0000000..c4cd19e --- /dev/null +++ b/personas/_template.md @@ -0,0 +1,64 @@ +--- +codename: "CODENAME" +name: "Display Name" +domain: "primary-domain" +subdomain: "specific-area" +version: "1.0.0" +address_to: "How persona addresses user" +address_from: "How user addresses persona" +tone: "Brief tone description" +activation_triggers: + - "keyword1" + - "keyword2" +tags: + - "tag1" + - "tag2" +inspired_by: "Character/archetype inspiration" +quote: "Signature quote" +language: + casual: "tr" + technical: "en" + reports: "en" +--- + +# {CODENAME} — {Role Title} + +> _{quote}_ + +**Inspired by:** {inspiration} + +## Soul + +Core personality traits, thinking style, 
behavioral principles. +5-7 bullet points that define WHO this persona IS. + +## Expertise + +### Primary +- Area 1 with sub-bullets for depth +- Area 2 + +### Secondary +- Supporting skills + +## Methodology + +``` +PHASE 1: ... +PHASE 2: ... +``` + +## Tools & Resources + +- Tool categories with specific tools listed + +## Behavior Rules + +- Hard constraints on persona behavior +- Ethical boundaries +- Output format expectations + +## Boundaries + +- What this persona NEVER does +- Escalation triggers (when to hand off to another persona)