Rename pattern descriptions directory to follow consistent naming convention

This commit is contained in:
jmd1010
2025-02-26 22:27:57 -05:00
parent 6d8c3eb6e2
commit a74da4acff
4 changed files with 0 additions and 0 deletions

View File

@@ -0,0 +1,124 @@
# Pattern Descriptions and Tags Management
This document explains the complete workflow for managing pattern descriptions and tags, including how to process new patterns and maintain metadata.
## System Overview
The pattern system follows this hierarchy:
1. `~/.config/fabric/patterns/` directory: The source of truth for available patterns
2. `pattern_extracts.json`: Contains first 500 words of each pattern for reference
3. `pattern_descriptions.json`: Stores pattern metadata (descriptions and tags)
4. `web/static/data/pattern_descriptions.json`: Web-accessible copy for the interface
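Each entry under `~/.config/fabric/patterns/` is a directory containing at least a `system.md` prompt file; a quick way to inspect one (the pattern name below is illustrative):
```bash
# List a single pattern directory; you should see at least system.md
ls ~/.config/fabric/patterns/example_pattern
# system.md
```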
## Pattern Processing Workflow
### 1. Adding New Patterns
- Add patterns to `~/.config/fabric/patterns/`
- Run extract_patterns.py to process new additions:
```bash
python extract_patterns.py
```

The script automatically:
- Creates pattern extracts for reference
- Adds placeholder entries in descriptions file
- Syncs to web interface
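A run against a directory with one newly added pattern prints output roughly like the following (the pattern name and counts are illustrative; the "Checking system.md" lines for unchanged patterns are omitted):
```bash
python extract_patterns.py
# Scanning patterns directory: /home/you/.config/fabric/patterns
# Found existing patterns: 215
# Processing new pattern: example_pattern
# ...
# Processing summary:
# New extracts created: 1
# New descriptions added: 1
# Processing complete:
# Total patterns: 216
# New patterns added: 1
```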
### 2. Pattern Extract Creation
The script extracts the first 500 words from each pattern's system.md file to:
- Provide context for writing descriptions
- Maintain reference material
- Aid in pattern categorization
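Each extract is stored alongside its pattern name; an entry in pattern_extracts.json looks roughly like this (the name and text below are illustrative):
```json
{
  "patterns": [
    {
      "patternName": "example_pattern",
      "pattern_extract": "# IDENTITY and PURPOSE You are an expert at ... (first 500 words of system.md)"
    }
  ]
}
```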
### 3. Description and Tag Management
Pattern descriptions and tags are managed in pattern_descriptions.json:
```json
{
  "patterns": [
    {
      "patternName": "pattern_name",
      "description": "[Description pending]",
      "tags": []
    }
  ]
}
```
## Completing Pattern Metadata
### Writing Descriptions
1. Check pattern_descriptions.json for "[Description pending]" entries
2. Reference pattern_extracts.json for context
3. Write a short description (one sentence) for each pending pattern, as shown below.

You can update the descriptions in pattern_descriptions.json manually or with LLM assistance (the preferred approach). Ask the LLM to look for "[Description pending]" entries in this file and write a short description based on the extract info in pattern_extracts.json. You can also ask it to add tags for the newly added patterns, using the tag assignments of existing patterns as examples.
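A completed entry then looks something like this (the description and tags below are illustrative):
```json
{
  "patternName": "example_pattern",
  "description": "Summarizes long-form content into a concise, structured overview.",
  "tags": ["SUMMARIZATION", "WRITING"]
}
```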
### Managing Tags
1. Add appropriate tags to new patterns
2. Update existing tags as needed
3. Tags are stored as arrays: ["TAG1", "TAG2"]
4. Edit pattern_descriptions.json directly to modify tags
5. Make the tags your own: you can delete, replace, or amend existing tags
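To keep the tag vocabulary consistent, it helps to review which tags are already in use and how often; one way to do that, assuming `jq` is installed:
```bash
# List every tag in pattern_descriptions.json with its usage count, most common first
jq -r '.patterns[].tags[]' pattern_descriptions.json | sort | uniq -c | sort -rn
```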
## File Synchronization
The script keeps the following files in sync:
- the local pattern_descriptions.json
- the web interface copy in web/static/data/

No manual file copying is needed.
## Best Practices
1. Run extract_patterns.py when:
- Adding new patterns
- Updating existing patterns
- Modifying pattern structure
2. Description Writing:
- Use pattern extracts for context
- Keep descriptions clear and concise
- Focus on pattern purpose and usage
3. Tag Management:
- Use consistent tag categories
- Apply multiple tags when relevant
- Update tags to reflect pattern evolution
## Troubleshooting
If patterns are not showing in the web interface:
1. Verify pattern_descriptions.json format
2. Check web static copy exists
3. Ensure proper file permissions
4. Run extract_patterns.py to resync
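The first two checks can be scripted; a minimal sketch, assuming you run it from the PATTERN_DESCRIPTIONS directory (adjust paths otherwise):
```bash
# Fails loudly if the JSON is malformed
python -c "import json; json.load(open('pattern_descriptions.json'))" && echo "pattern_descriptions.json is valid"
# Confirm the web static copy exists and is readable
ls -l ../web/static/data/pattern_descriptions.json
```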
## File Structure
```
fabric/
├── patterns/                         # Pattern source files
├── PATTERN_DESCRIPTIONS/
│   ├── extract_patterns.py           # Pattern processing script
│   ├── pattern_extracts.json         # Pattern content references
│   └── pattern_descriptions.json     # Pattern metadata
└── web/
    └── static/
        └── data/
            └── pattern_descriptions.json   # Web interface copy
```

View File

@@ -0,0 +1,114 @@
import os
import json
import shutil
def load_existing_file(filepath):
    """Load existing JSON file or return default structure"""
    if os.path.exists(filepath):
        with open(filepath, 'r', encoding='utf-8') as f:
            return json.load(f)
    return {"patterns": []}
def get_pattern_extract(pattern_path):
    """Extract first 500 words from pattern's system.md file"""
    system_md_path = os.path.join(pattern_path, "system.md")
    with open(system_md_path, 'r', encoding='utf-8') as f:
        content = ' '.join(f.read().split()[:500])
    return content
def extract_pattern_info():
    script_dir = os.path.dirname(os.path.abspath(__file__))
    patterns_dir = os.path.expanduser("~/.config/fabric/patterns")
    print(f"\nScanning patterns directory: {patterns_dir}")

    extracts_path = os.path.join(script_dir, "pattern_extracts.json")
    descriptions_path = os.path.join(script_dir, "pattern_descriptions.json")

    existing_extracts = load_existing_file(extracts_path)
    existing_descriptions = load_existing_file(descriptions_path)

    existing_extract_names = {p["patternName"] for p in existing_extracts["patterns"]}
    existing_description_names = {p["patternName"] for p in existing_descriptions["patterns"]}
    print(f"Found existing patterns: {len(existing_extract_names)}")

    new_extracts = []
    new_descriptions = []

    for dirname in sorted(os.listdir(patterns_dir)):
        # Only log new pattern processing
        if dirname not in existing_extract_names:
            print(f"Processing new pattern: {dirname}")

        pattern_path = os.path.join(patterns_dir, dirname)
        system_md_path = os.path.join(pattern_path, "system.md")
        print(f"Checking system.md at: {system_md_path}")

        if os.path.isdir(pattern_path) and os.path.exists(system_md_path):
            print(f"Valid pattern directory found: {dirname}")
            try:
                if dirname not in existing_extract_names:
                    print(f"Creating new extract for: {dirname}")
                    pattern_extract = get_pattern_extract(pattern_path)  # Pass directory path
                    new_extracts.append({
                        "patternName": dirname,
                        "pattern_extract": pattern_extract
                    })

                if dirname not in existing_description_names:
                    print(f"Creating new description for: {dirname}")
                    new_descriptions.append({
                        "patternName": dirname,
                        "description": "[Description pending]",
                        "tags": []
                    })
            except Exception as e:
                print(f"Error processing {dirname}: {str(e)}")
        else:
            print(f"Invalid pattern directory or missing system.md: {dirname}")

    print(f"\nProcessing summary:")
    print(f"New extracts created: {len(new_extracts)}")
    print(f"New descriptions added: {len(new_descriptions)}")

    existing_extracts["patterns"].extend(new_extracts)
    existing_descriptions["patterns"].extend(new_descriptions)

    return existing_extracts, existing_descriptions, len(new_descriptions)
def update_web_static(descriptions_path):
    """Copy pattern descriptions to web static directory"""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    static_dir = os.path.join(script_dir, "..", "web", "static", "data")
    os.makedirs(static_dir, exist_ok=True)
    static_path = os.path.join(static_dir, "pattern_descriptions.json")
    shutil.copy2(descriptions_path, static_path)
def save_pattern_files():
    """Save both pattern files and sync to web"""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    extracts_path = os.path.join(script_dir, "pattern_extracts.json")
    descriptions_path = os.path.join(script_dir, "pattern_descriptions.json")

    pattern_extracts, pattern_descriptions, new_count = extract_pattern_info()

    # Save files
    with open(extracts_path, 'w', encoding='utf-8') as f:
        json.dump(pattern_extracts, f, indent=2, ensure_ascii=False)
    with open(descriptions_path, 'w', encoding='utf-8') as f:
        json.dump(pattern_descriptions, f, indent=2, ensure_ascii=False)

    # Update web static
    update_web_static(descriptions_path)

    print(f"\nProcessing complete:")
    print(f"Total patterns: {len(pattern_descriptions['patterns'])}")
    print(f"New patterns added: {new_count}")


if __name__ == "__main__":
    save_pattern_files()

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long