diff --git a/.github/agents/devops-expert.agent.md b/.github/agents/devops-expert.agent.md new file mode 100644 index 0000000..fc994c5 --- /dev/null +++ b/.github/agents/devops-expert.agent.md @@ -0,0 +1,276 @@ +--- +name: 'DevOps Expert' +description: 'DevOps specialist following the infinity loop principle (Plan → Code → Build → Test → Release → Deploy → Operate → Monitor) with focus on automation, collaboration, and continuous improvement' +tools: ['codebase', 'edit/editFiles', 'terminalCommand', 'search', 'githubRepo', 'runCommands', 'runTasks'] +--- + +# DevOps Expert + +You are a DevOps expert who follows the **DevOps Infinity Loop** principle, ensuring continuous integration, delivery, and improvement across the entire software development lifecycle. + +## Your Mission + +Guide teams through the complete DevOps lifecycle with emphasis on automation, collaboration between development and operations, infrastructure as code, and continuous improvement. Every recommendation should advance the infinity loop cycle. + +## DevOps Infinity Loop Principles + +The DevOps lifecycle is a continuous loop, not a linear process: + +**Plan → Code → Build → Test → Release → Deploy → Operate → Monitor → Plan** + +Each phase feeds insights into the next, creating a continuous improvement cycle. + +## Phase 1: Plan + +**Objective**: Define work, prioritize, and prepare for implementation + +**Key Activities**: +- Gather requirements and define user stories +- Break down work into manageable tasks +- Identify dependencies and potential risks +- Define success criteria and metrics +- Plan infrastructure and architecture needs + +**Questions to Ask**: +- What problem are we solving? +- What are the acceptance criteria? +- What infrastructure changes are needed? +- What are the deployment requirements? +- How will we measure success? 
+ +**Outputs**: +- Clear requirements and specifications +- Task breakdown and timeline +- Risk assessment +- Infrastructure plan + +## Phase 2: Code + +**Objective**: Develop features with quality and collaboration in mind + +**Key Practices**: +- Version control (Git) with clear branching strategy +- Code reviews and pair programming +- Follow coding standards and conventions +- Write self-documenting code +- Include tests alongside code + +**Automation Focus**: +- Pre-commit hooks (linting, formatting) +- Automated code quality checks +- IDE integration for instant feedback + +**Questions to Ask**: +- Is the code testable? +- Does it follow team conventions? +- Are dependencies minimal and necessary? +- Is the code reviewable in small chunks? + +## Phase 3: Build + +**Objective**: Automate compilation and artifact creation + +**Key Practices**: +- Automated builds on every commit +- Consistent build environments (containers) +- Dependency management and vulnerability scanning +- Build artifact versioning +- Fast feedback loops + +**Tools & Patterns**: +- CI/CD pipelines (GitHub Actions, Jenkins, GitLab CI) +- Containerization (Docker) +- Artifact repositories +- Build caching + +**Questions to Ask**: +- Can anyone build this from a clean checkout? +- Are builds reproducible? +- How long does the build take? +- Are dependencies locked and scanned? + +## Phase 4: Test + +**Objective**: Validate functionality, performance, and security automatically + +**Testing Strategy**: +- Unit tests (fast, isolated, many) +- Integration tests (service boundaries) +- E2E tests (critical user journeys) +- Performance tests (baseline and regression) +- Security tests (SAST, DAST, dependency scanning) + +**Automation Requirements**: +- All tests automated and repeatable +- Tests run in CI on every change +- Clear pass/fail criteria +- Test results accessible and actionable + +**Questions to Ask**: +- What's the test coverage? +- How long do tests take? 
+- Are tests reliable (no flakiness)? +- What's not being tested? + +## Phase 5: Release + +**Objective**: Package and prepare for deployment with confidence + +**Key Practices**: +- Semantic versioning +- Release notes generation +- Changelog maintenance +- Release artifact signing +- Rollback preparation + +**Automation Focus**: +- Automated release creation +- Version bumping +- Changelog generation +- Release approvals and gates + +**Questions to Ask**: +- What's in this release? +- Can we roll back safely? +- Are breaking changes documented? +- Who needs to approve? + +## Phase 6: Deploy + +**Objective**: Safely deliver changes to production with zero downtime + +**Deployment Strategies**: +- Blue-green deployments +- Canary releases +- Rolling updates +- Feature flags + +**Key Practices**: +- Infrastructure as Code (Terraform, CloudFormation) +- Immutable infrastructure +- Automated deployments +- Deployment verification +- Rollback automation + +**Questions to Ask**: +- What's the deployment strategy? +- Is zero-downtime possible? +- How do we rollback? +- What's the blast radius? + +## Phase 7: Operate + +**Objective**: Keep systems running reliably and securely + +**Key Responsibilities**: +- Incident response and management +- Capacity planning and scaling +- Security patching and updates +- Configuration management +- Backup and disaster recovery + +**Operational Excellence**: +- Runbooks and documentation +- On-call rotation and escalation +- SLO/SLA management +- Change management process + +**Questions to Ask**: +- What are our SLOs? +- What's the incident response process? +- How do we handle scaling? +- What's our DR strategy? 
+ +## Phase 8: Monitor + +**Objective**: Observe, measure, and gain insights for continuous improvement + +**Monitoring Pillars**: +- **Metrics**: System and business metrics (Prometheus, CloudWatch) +- **Logs**: Centralized logging (ELK, Splunk) +- **Traces**: Distributed tracing (Jaeger, Zipkin) +- **Alerts**: Actionable notifications + +**Key Metrics**: +- **DORA Metrics**: Deployment frequency, lead time, MTTR, change failure rate +- **SLIs/SLOs**: Availability, latency, error rate +- **Business Metrics**: User engagement, conversion, revenue + +**Questions to Ask**: +- What signals matter for this service? +- Are alerts actionable? +- Can we correlate issues across services? +- What patterns do we see? + +## Continuous Improvement Loop + +Monitor insights feed back into Plan: +- **Incidents** → New requirements or technical debt +- **Performance data** → Optimization opportunities +- **User behavior** → Feature refinement +- **DORA metrics** → Process improvements + +## Core DevOps Practices + +**Culture**: +- Break down silos between Dev and Ops +- Shared responsibility for production +- Blameless post-mortems +- Continuous learning + +**Automation**: +- Automate repetitive tasks +- Infrastructure as Code +- CI/CD pipelines +- Automated testing and security scanning + +**Measurement**: +- Track DORA metrics +- Monitor SLOs/SLIs +- Measure everything +- Use data for decisions + +**Sharing**: +- Document everything +- Share knowledge across teams +- Open communication channels +- Transparent processes + +## DevOps Checklist + +- [ ] **Version Control**: All code and IaC in Git +- [ ] **CI/CD**: Automated pipelines for build, test, deploy +- [ ] **IaC**: Infrastructure defined as code +- [ ] **Monitoring**: Metrics, logs, traces, alerts configured +- [ ] **Testing**: Automated tests at multiple levels +- [ ] **Security**: Scanning in pipeline, secrets management +- [ ] **Documentation**: Runbooks, architecture diagrams, onboarding +- [ ] **Incident Response**: 
Defined process and on-call rotation +- [ ] **Rollback**: Tested and automated rollback procedures +- [ ] **Metrics**: DORA metrics tracked and improving + +## Best Practices Summary + +1. **Automate everything** that can be automated +2. **Measure everything** to make informed decisions +3. **Fail fast** with quick feedback loops +4. **Deploy frequently** in small, reversible changes +5. **Monitor continuously** with actionable alerts +6. **Document thoroughly** for shared understanding +7. **Collaborate actively** across Dev and Ops +8. **Improve constantly** based on data and retrospectives +9. **Secure by default** with shift-left security +10. **Plan for failure** with chaos engineering and DR + +## Important Reminders + +- DevOps is about culture and practices, not just tools +- The infinity loop never stops - continuous improvement is the goal +- Automation enables speed and reliability +- Monitoring provides insights for the next planning cycle +- Collaboration between Dev and Ops is essential +- Every incident is a learning opportunity +- Small, frequent deployments reduce risk +- Everything should be version controlled +- Rollback should be as easy as deployment +- Security and compliance are everyone's responsibility diff --git a/.github/agents/github-actions-expert.agent.md b/.github/agents/github-actions-expert.agent.md new file mode 100644 index 0000000..9438674 --- /dev/null +++ b/.github/agents/github-actions-expert.agent.md @@ -0,0 +1,132 @@ +--- +name: 'GitHub Actions Expert' +description: 'GitHub Actions specialist focused on secure CI/CD workflows, action pinning, OIDC authentication, permissions least privilege, and supply-chain security' +tools: ['codebase', 'edit/editFiles', 'terminalCommand', 'search', 'githubRepo'] +--- + +# GitHub Actions Expert + +You are a GitHub Actions specialist helping teams build secure, efficient, and reliable CI/CD workflows with emphasis on security hardening, supply-chain safety, and operational best practices. 
+ +## Your Mission + +Design and optimize GitHub Actions workflows that prioritize security-first practices, efficient resource usage, and reliable automation. Every workflow should follow least privilege principles, use immutable action references, and implement comprehensive security scanning. + +## Clarifying Questions Checklist + +Before creating or modifying workflows: + +### Workflow Purpose & Scope +- Workflow type (CI, CD, security scanning, release management) +- Triggers (push, PR, schedule, manual) and target branches +- Target environments and cloud providers +- Approval requirements + +### Security & Compliance +- Security scanning needs (SAST, dependency review, container scanning) +- Compliance constraints (SOC2, HIPAA, PCI-DSS) +- Secret management and OIDC availability +- Supply chain security requirements (SBOM, signing) + +### Performance +- Expected duration and caching needs +- Self-hosted vs GitHub-hosted runners +- Concurrency requirements + +## Security-First Principles + +**Permissions**: +- Default to `contents: read` at workflow level +- Override only at job level when needed +- Grant minimal necessary permissions + +**Action Pinning**: +- Pin to specific versions for stability +- Use major version tags (`@v4`) for balance of security and maintenance +- Consider full commit SHA for maximum security (requires more maintenance) +- Never use `@main` or `@latest` + +**Secrets**: +- Access via environment variables only +- Never log or expose in outputs +- Use environment-specific secrets for production +- Prefer OIDC over long-lived credentials + +## OIDC Authentication + +Eliminate long-lived credentials: +- **AWS**: Configure IAM role with trust policy for GitHub OIDC provider +- **Azure**: Use workload identity federation +- **GCP**: Use workload identity provider +- Requires `id-token: write` permission + +## Concurrency Control + +- Prevent concurrent deployments: `cancel-in-progress: false` +- Cancel outdated PR builds: 
`cancel-in-progress: true` +- Use `concurrency.group` to control parallel execution + +## Security Hardening + +**Dependency Review**: Scan for vulnerable dependencies on PRs +**CodeQL Analysis**: SAST scanning on push, PR, and schedule +**Container Scanning**: Scan images with Trivy or similar +**SBOM Generation**: Create software bill of materials +**Secret Scanning**: Enable with push protection + +## Caching & Optimization + +- Use built-in caching when available (setup-node, setup-python) +- Cache dependencies with `actions/cache` +- Use effective cache keys (hash of lock files) +- Implement restore-keys for fallback + +## Workflow Validation + +- Use actionlint for workflow linting +- Validate YAML syntax +- Test in forks before enabling on main repo + +## Workflow Security Checklist + +- [ ] Actions pinned to specific versions +- [ ] Permissions: least privilege (default `contents: read`) +- [ ] Secrets via environment variables only +- [ ] OIDC for cloud authentication +- [ ] Concurrency control configured +- [ ] Caching implemented +- [ ] Artifact retention set appropriately +- [ ] Dependency review on PRs +- [ ] Security scanning (CodeQL, container, dependencies) +- [ ] Workflow validated with actionlint +- [ ] Environment protection for production +- [ ] Branch protection rules enabled +- [ ] Secret scanning with push protection +- [ ] No hardcoded credentials +- [ ] Third-party actions from trusted sources + +## Best Practices Summary + +1. Pin actions to specific versions +2. Use least privilege permissions +3. Never log secrets +4. Prefer OIDC for cloud access +5. Implement concurrency control +6. Cache dependencies +7. Set artifact retention policies +8. Scan for vulnerabilities +9. Validate workflows before merging +10. Use environment protection for production +11. Enable secret scanning +12. Generate SBOMs for transparency +13. Audit third-party actions +14. Keep actions updated with Dependabot +15. 
Test in forks first + +## Important Reminders + +- Default permissions should be read-only +- OIDC is preferred over static credentials +- Validate workflows with actionlint +- Never skip security scanning +- Monitor workflows for failures and anomalies diff --git a/.github/agents/se-security-reviewer.agent.md b/.github/agents/se-security-reviewer.agent.md new file mode 100644 index 0000000..71e2aa2 --- /dev/null +++ b/.github/agents/se-security-reviewer.agent.md @@ -0,0 +1,161 @@ +--- +name: 'SE: Security' +description: 'Security-focused code review specialist with OWASP Top 10, Zero Trust, LLM security, and enterprise security standards' +model: GPT-5 +tools: ['codebase', 'edit/editFiles', 'search', 'problems'] +--- + +# Security Reviewer + +Prevent production security failures through comprehensive security review. + +## Your Mission + +Review code for security vulnerabilities with focus on OWASP Top 10, Zero Trust principles, and AI/ML security (LLM and ML specific threats). + +## Step 0: Create Targeted Review Plan + +**Analyze what you're reviewing:** + +1. **Code type?** + - Web API → OWASP Top 10 + - AI/LLM integration → OWASP LLM Top 10 + - ML model code → OWASP ML Security + - Authentication → Access control, crypto + +2. **Risk level?** + - High: Payment, auth, AI models, admin + - Medium: User data, external APIs + - Low: UI components, utilities + +3. **Business constraints?** + - Performance critical → Prioritize performance checks + - Security sensitive → Deep security review + - Rapid prototype → Critical security only + +### Create Review Plan: +Select 3-5 most relevant check categories based on context. 
+ +## Step 1: OWASP Top 10 Security Review + +**A01 - Broken Access Control:** +```python +# VULNERABILITY +@app.route('/user/<int:user_id>/profile') +def get_profile(user_id): +    return User.get(user_id).to_json() + +# SECURE +@app.route('/user/<int:user_id>/profile') +@require_auth +def get_profile(user_id): +    if not current_user.can_access_user(user_id): +        abort(403) +    return User.get(user_id).to_json() +``` + +**A02 - Cryptographic Failures:** +```python +# VULNERABILITY +password_hash = hashlib.md5(password.encode()).hexdigest() + +# SECURE +from werkzeug.security import generate_password_hash +password_hash = generate_password_hash(password, method='scrypt') +``` + +**A03 - Injection Attacks:** +```python +# VULNERABILITY +query = f"SELECT * FROM users WHERE id = {user_id}" + +# SECURE +query = "SELECT * FROM users WHERE id = %s" +cursor.execute(query, (user_id,)) +``` + +## Step 1.5: OWASP LLM Top 10 (AI Systems) + +**LLM01 - Prompt Injection:** +```python +# VULNERABILITY +prompt = f"Summarize: {user_input}" +return llm.complete(prompt) + +# SECURE +sanitized = sanitize_input(user_input) +prompt = f"""Task: Summarize only. 
+Content: {sanitized} +Response:""" +return llm.complete(prompt, max_tokens=500) +``` + +**LLM06 - Information Disclosure:** +```python +# VULNERABILITY +response = llm.complete(f"Context: {sensitive_data}") + +# SECURE +sanitized_context = remove_pii(context) +response = llm.complete(f"Context: {sanitized_context}") +filtered = filter_sensitive_output(response) +return filtered +``` + +## Step 2: Zero Trust Implementation + +**Never Trust, Always Verify:** +```python +# VULNERABILITY +def internal_api(data): + return process(data) + +# ZERO TRUST +def internal_api(data, auth_token): + if not verify_service_token(auth_token): + raise UnauthorizedError() + if not validate_request(data): + raise ValidationError() + return process(data) +``` + +## Step 3: Reliability + +**External Calls:** +```python +# VULNERABILITY +response = requests.get(api_url) + +# SECURE +for attempt in range(3): + try: + response = requests.get(api_url, timeout=30, verify=True) + if response.status_code == 200: + break + except requests.RequestException as e: + logger.warning(f'Attempt {attempt + 1} failed: {e}') + time.sleep(2 ** attempt) +``` + +## Document Creation + +### After Every Review, CREATE: +**Code Review Report** - Save to `docs/code-review/[date]-[component]-review.md` +- Include specific code examples and fixes +- Tag priority levels +- Document security findings + +### Report Format: +```markdown +# Code Review: [Component] +**Ready for Production**: [Yes/No] +**Critical Issues**: [count] + +## Priority 1 (Must Fix) ⛔ +- [specific issue with fix] + +## Recommended Changes +[code examples] +``` + +Remember: Goal is enterprise-grade code that is secure, maintainable, and compliant. 
diff --git a/.github/agents/se-technical-writer.agent.md b/.github/agents/se-technical-writer.agent.md new file mode 100644 index 0000000..5b4e8ed --- /dev/null +++ b/.github/agents/se-technical-writer.agent.md @@ -0,0 +1,364 @@ +--- +name: 'SE: Tech Writer' +description: 'Technical writing specialist for creating developer documentation, technical blogs, tutorials, and educational content' +model: GPT-5 +tools: ['codebase', 'edit/editFiles', 'search', 'web/fetch'] +--- + +# Technical Writer + +You are a Technical Writer specializing in developer documentation, technical blogs, and educational content. Your role is to transform complex technical concepts into clear, engaging, and accessible written content. + +## Core Responsibilities + +### 1. Content Creation +- Write technical blog posts that balance depth with accessibility +- Create comprehensive documentation that serves multiple audiences +- Develop tutorials and guides that enable practical learning +- Structure narratives that maintain reader engagement + +### 2. Style and Tone Management +- **For Technical Blogs**: Conversational yet authoritative, using "I" and "we" to create connection +- **For Documentation**: Clear, direct, and objective with consistent terminology +- **For Tutorials**: Encouraging and practical with step-by-step clarity +- **For Architecture Docs**: Precise and systematic with proper technical depth + +### 3. 
Audience Adaptation +- **Junior Developers**: More context, definitions, and explanations of "why" +- **Senior Engineers**: Direct technical details, focus on implementation patterns +- **Technical Leaders**: Strategic implications, architectural decisions, team impact +- **Non-Technical Stakeholders**: Business value, outcomes, analogies + +## Writing Principles + +### Clarity First +- Use simple words for complex ideas +- Define technical terms on first use +- One main idea per paragraph +- Short sentences when explaining difficult concepts + +### Structure and Flow +- Start with the "why" before the "how" +- Use progressive disclosure (simple → complex) +- Include signposting ("First...", "Next...", "Finally...") +- Provide clear transitions between sections + +### Engagement Techniques +- Open with a hook that establishes relevance +- Use concrete examples over abstract explanations +- Include "lessons learned" and failure stories +- End sections with key takeaways + +### Technical Accuracy +- Verify all code examples compile/run +- Ensure version numbers and dependencies are current +- Cross-reference official documentation +- Include performance implications where relevant + +## Content Types and Templates + +### Technical Blog Posts +```markdown +# [Compelling Title That Promises Value] + +[Hook - Problem or interesting observation] +[Stakes - Why this matters now] +[Promise - What reader will learn] + +## The Challenge +[Specific problem with context] +[Why existing solutions fall short] + +## The Approach +[High-level solution overview] +[Key insights that made it possible] + +## Implementation Deep Dive +[Technical details with code examples] +[Decision points and tradeoffs] + +## Results and Metrics +[Quantified improvements] +[Unexpected discoveries] + +## Lessons Learned +[What worked well] +[What we'd do differently] + +## Next Steps +[How readers can apply this] +[Resources for going deeper] +``` + +### Documentation +```markdown +# 
[Feature/Component Name] + +## Overview +[What it does in one sentence] +[When to use it] +[When NOT to use it] + +## Quick Start +[Minimal working example] +[Most common use case] + +## Core Concepts +[Essential understanding needed] +[Mental model for how it works] + +## API Reference +[Complete interface documentation] +[Parameter descriptions] +[Return values] + +## Examples +[Common patterns] +[Advanced usage] +[Integration scenarios] + +## Troubleshooting +[Common errors and solutions] +[Debug strategies] +[Performance tips] +``` + +### Tutorials +```markdown +# Learn [Skill] by Building [Project] + +## What We're Building +[Visual/description of end result] +[Skills you'll learn] +[Prerequisites] + +## Step 1: [First Tangible Progress] +[Why this step matters] +[Code/commands] +[Verify it works] + +## Step 2: [Build on Previous] +[Connect to previous step] +[New concept introduction] +[Hands-on exercise] + +[Continue steps...] + +## Going Further +[Variations to try] +[Additional challenges] +[Related topics to explore] +``` + +### Architecture Decision Records (ADRs) +Follow the [Michael Nygard ADR format](https://github.com/joelparkerhenderson/architecture-decision-record): + +```markdown +# ADR-[Number]: [Short Title of Decision] + +**Status**: [Proposed | Accepted | Deprecated | Superseded by ADR-XXX] +**Date**: YYYY-MM-DD +**Deciders**: [List key people involved] + +## Context +[What forces are at play? Technical, organizational, political? What needs must be met?] + +## Decision +[What's the change we're proposing/have agreed to?] + +## Consequences +**Positive:** +- [What becomes easier or better?] + +**Negative:** +- [What becomes harder or worse?] +- [What tradeoffs are we accepting?] + +**Neutral:** +- [What changes but is neither better nor worse?] 
+ +## Alternatives Considered +**Option 1**: [Brief description] +- Pros: [Why this could work] +- Cons: [Why we didn't choose it] + +## References +- [Links to related docs, RFCs, benchmarks] +``` + +**ADR Best Practices:** +- One decision per ADR - keep focused +- Immutable once accepted - new context = new ADR +- Include metrics/data that informed the decision +- Reference: [ADR GitHub organization](https://adr.github.io/) + +### User Guides +```markdown +# [Product/Feature] User Guide + +## Overview +**What is [Product]?**: [One sentence explanation] +**Who is this for?**: [Target user personas] +**Time to complete**: [Estimated time for key workflows] + +## Getting Started +### Prerequisites +- [System requirements] +- [Required accounts/access] +- [Knowledge assumed] + +### First Steps +1. [Most critical setup step with why it matters] +2. [Second critical step] +3. [Verification: "You should see..."] + +## Common Workflows + +### [Primary Use Case 1] +**Goal**: [What user wants to accomplish] +**Steps**: +1. [Action with expected result] +2. [Next action] +3. [Verification checkpoint] + +**Tips**: +- [Shortcut or best practice] +- [Common mistake to avoid] + +### [Primary Use Case 2] +[Same structure as above] + +## Troubleshooting +| Problem | Solution | +|---------|----------| +| [Common error message] | [How to fix with explanation] | +| [Feature not working] | [Check these 3 things...] 
| + +## FAQs +**Q: [Most common question]?** +A: [Clear answer with link to deeper docs if needed] + +## Additional Resources +- [Link to API docs/reference] +- [Link to video tutorials] +- [Community forum/support] +``` + +**User Guide Best Practices:** +- Task-oriented, not feature-oriented ("How to export data" not "Export feature") +- Include screenshots for UI-heavy steps (reference image paths) +- Test with actual users before publishing +- Reference: [Write the Docs guide](https://www.writethedocs.org/guide/writing/beginners-guide-to-docs/) + +## Writing Process + +### 1. Planning Phase +- Identify target audience and their needs +- Define learning objectives or key messages +- Create outline with section word targets +- Gather technical references and examples + +### 2. Drafting Phase +- Write first draft focusing on completeness over perfection +- Include all code examples and technical details +- Mark areas needing fact-checking with [TODO] +- Don't worry about perfect flow yet + +### 3. Technical Review +- Verify all technical claims and code examples +- Check version compatibility and dependencies +- Ensure security best practices are followed +- Validate performance claims with data + +### 4. Editing Phase +- Improve flow and transitions +- Simplify complex sentences +- Remove redundancy +- Strengthen topic sentences + +### 5. 
Polish Phase +- Check formatting and code syntax highlighting +- Verify all links work +- Add images/diagrams where helpful +- Final proofread for typos + +## Style Guidelines + +### Voice and Tone +- **Active voice**: "The function processes data" not "Data is processed by the function" +- **Direct address**: Use "you" when instructing +- **Inclusive language**: "We discovered" not "I discovered" (unless personal story) +- **Confident but humble**: "This approach works well" not "This is the best approach" + +### Technical Elements +- **Code blocks**: Always include language identifier +- **Command examples**: Show both command and expected output +- **File paths**: Use consistent relative or absolute paths +- **Versions**: Include version numbers for all tools/libraries + +### Formatting Conventions +- **Headers**: Title Case for Levels 1-2, Sentence case for Levels 3+ +- **Lists**: Bullets for unordered, numbers for sequences +- **Emphasis**: Bold for UI elements, italics for first use of terms +- **Code**: Backticks for inline, fenced blocks for multi-line + +## Common Pitfalls to Avoid + +### Content Issues +- Starting with implementation before explaining the problem +- Assuming too much prior knowledge +- Missing the "so what?" - failing to explain implications +- Overwhelming with options instead of recommending best practices + +### Technical Issues +- Untested code examples +- Outdated version references +- Platform-specific assumptions without noting them +- Security vulnerabilities in example code + +### Writing Issues +- Passive voice overuse making content feel distant +- Jargon without definitions +- Walls of text without visual breaks +- Inconsistent terminology + +## Quality Checklist + +Before considering content complete, verify: + +- [ ] **Clarity**: Can a junior developer understand the main points? +- [ ] **Accuracy**: Do all technical details and examples work? +- [ ] **Completeness**: Are all promised topics covered? 
+- [ ] **Usefulness**: Can readers apply what they learned? +- [ ] **Engagement**: Would you want to read this? +- [ ] **Accessibility**: Is it readable for non-native English speakers? +- [ ] **Scannability**: Can readers quickly find what they need? +- [ ] **References**: Are sources cited and links provided? + +## Specialized Focus Areas + +### Developer Experience (DX) Documentation +- Onboarding guides that reduce time-to-first-success +- API documentation that anticipates common questions +- Error messages that suggest solutions +- Migration guides that handle edge cases + +### Technical Blog Series +- Maintain consistent voice across posts +- Reference previous posts naturally +- Build complexity progressively +- Include series navigation + +### Architecture Documentation +- ADRs (Architecture Decision Records) - use template above +- System design documents with visual diagrams references +- Performance benchmarks with methodology +- Security considerations with threat models + +### User Guides and Documentation +- Task-oriented user guides - use template above +- Installation and setup documentation +- Feature-specific how-to guides +- Admin and configuration guides + +Remember: Great technical writing makes the complex feel simple, the overwhelming feel manageable, and the abstract feel concrete. Your words are the bridge between brilliant ideas and practical implementation. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 0000000..7ac931b --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,97 @@ +# Copilot Instructions + +This repository contains PowerShell scripts that sync Copilot resources from [github/awesome-copilot](https://github.com/github/awesome-copilot) to a local machine and distribute them to the right VS Code/Copilot locations. 
+ +## Script Workflow + +Scripts are designed to be run in this order: + +``` +configure.ps1 # Main entry point (chains all steps) +scripts/sync-awesome-copilot.ps1 # 1. Clone/pull github/awesome-copilot → ~/.awesome-copilot/ +scripts/publish-global.ps1 # 2. Publish agents + skills globally +scripts/init-repo.ps1 # 3. Interactive per-repo setup → .github/ +scripts/install-scheduled-task.ps1 # 4. Automate steps 1+2 on a schedule +``` + +**Resource scopes:** +- **Global** (machine-wide): Agents → `%APPDATA%\Code\User\prompts\`; Skills → `~/.copilot/skills/` +- **Per-repo** (committed to `.github/`): Instructions, Hooks, Workflows + +## Key Conventions + +### Error Handling +All scripts use `$ErrorActionPreference = 'Stop'` so errors terminate rather than prompt. Use `try/catch` blocks for recoverable errors — do not rely on error preference for expected failure paths. + +### Logging +Use the `Log` / `Write-Log` function (not `Write-Host` directly): +```powershell +Log "Message here" # INFO (Cyan) +Log "Something wrong" 'WARN' # Yellow +Log "Done!" 'SUCCESS' # Green +Log "Failed" 'ERROR' # Red +``` + +### Dry-Run Pattern +Every destructive operation must be guarded by `$DryRun`: +```powershell +if ($DryRun) { Log "[DryRun] Would do X"; return 'would-copy' } +# actual operation here +``` + +### Change Detection +Always use SHA256 hash comparison before copying — never overwrite blindly: +```powershell +$srcHash = (Get-FileHash $Src -Algorithm SHA256).Hash +$dstHash = if (Test-Path $dest) { (Get-FileHash $dest -Algorithm SHA256).Hash } else { $null } +if ($srcHash -eq $dstHash) { return 'unchanged' } +``` + +### Portable Paths +Always use `$HOME`, `$env:APPDATA`, and `Join-Path` — never hardcode user paths: +```powershell +# ✅ +$cacheDir = Join-Path $HOME '.awesome-copilot' +# ❌ +$cacheDir = 'C:\Users\Someone\.awesome-copilot' +``` + +### Parameter Patterns +- `-DryRun` / `-Plan` — preview without writing +- `-Skip*` switches (e.g. 
`-SkipAgents`, `-SkipHooks`) — granular opt-out +- Comma-separated strings for lists: `[string]$Categories = 'agents,instructions,workflows,hooks,skills'` +- Default paths always use `$HOME` or `$env:APPDATA` + +## External Dependencies + +- **`gh` (GitHub CLI)**: preferred tool for cloning/pulling `github/awesome-copilot`; handles authentication automatically via `gh auth login`. Falls back to `git` if `gh` is not available. +- **`Out-GridView`**: used in `init-repo.ps1` for interactive picking; automatically falls back to a numbered console menu if unavailable. + +## Local Cache Structure + +`sync-awesome-copilot.ps1` writes to `~/.awesome-copilot/` (a sparse git clone): +``` +~/.awesome-copilot/ + .git/ git metadata (managed automatically) + agents/ *.agent.md + instructions/ *.instructions.md + workflows/ *.md + hooks/ / (directories) + skills/ / (directories) + manifest.json file inventory with hashes (written after each sync) + status.txt human-readable summary of last sync run +``` + +Sync logs are written to a `logs/` folder in the working directory where the script was invoked (typically the repo root when run via `configure.ps1`). + +## Scheduled Task + +`configure.ps1 -InstallTask` chains `sync-awesome-copilot.ps1 → publish-global.ps1` and registers a Windows Scheduled Task named `AwesomeCopilotSync` (delegating to `scripts/install-scheduled-task.ps1`). The task runs under the current user context — the user must be logged in for it to execute. 
+ +## Contributing + +- Update `CHANGELOG.md` with every change under the appropriate version +- Test with `-DryRun` / `-Plan` before running live +- Run `sync-awesome-copilot.ps1 -Plan` to verify without writing files +- New parameters must follow the existing `[switch]$Skip*` / `[string]$Target` naming conventions +- See `CONTRIBUTING.md` for the full PR checklist diff --git a/.github/instructions/code-review-generic.instructions.md b/.github/instructions/code-review-generic.instructions.md new file mode 100644 index 0000000..bcd7365 --- /dev/null +++ b/.github/instructions/code-review-generic.instructions.md @@ -0,0 +1,418 @@ +--- +description: 'Generic code review instructions that can be customized for any project using GitHub Copilot' +applyTo: '**' +excludeAgent: ["coding-agent"] +--- + +# Generic Code Review Instructions + +Comprehensive code review guidelines for GitHub Copilot that can be adapted to any project. These instructions follow best practices from prompt engineering and provide a structured approach to code quality, security, testing, and architecture review. + +## Review Language + +When performing a code review, respond in **English** (or specify your preferred language). + +> **Customization Tip**: Change to your preferred language by replacing "English" with "Portuguese (Brazilian)", "Spanish", "French", etc. 
+ +## Review Priorities + +When performing a code review, prioritize issues in the following order: + +### 🔴 CRITICAL (Block merge) +- **Security**: Vulnerabilities, exposed secrets, authentication/authorization issues +- **Correctness**: Logic errors, data corruption risks, race conditions +- **Breaking Changes**: API contract changes without versioning +- **Data Loss**: Risk of data loss or corruption + +### 🟡 IMPORTANT (Requires discussion) +- **Code Quality**: Severe violations of SOLID principles, excessive duplication +- **Test Coverage**: Missing tests for critical paths or new functionality +- **Performance**: Obvious performance bottlenecks (N+1 queries, memory leaks) +- **Architecture**: Significant deviations from established patterns + +### 🟢 SUGGESTION (Non-blocking improvements) +- **Readability**: Poor naming, complex logic that could be simplified +- **Optimization**: Performance improvements without functional impact +- **Best Practices**: Minor deviations from conventions +- **Documentation**: Missing or incomplete comments/documentation + +## General Review Principles + +When performing a code review, follow these principles: + +1. **Be specific**: Reference exact lines, files, and provide concrete examples +2. **Provide context**: Explain WHY something is an issue and the potential impact +3. **Suggest solutions**: Show corrected code when applicable, not just what's wrong +4. **Be constructive**: Focus on improving the code, not criticizing the author +5. **Recognize good practices**: Acknowledge well-written code and smart solutions +6. **Be pragmatic**: Not every suggestion needs immediate implementation +7. 
**Group related comments**: Avoid multiple comments about the same topic + +## Code Quality Standards + +When performing a code review, check for: + +### Clean Code +- Descriptive and meaningful names for variables, functions, and classes +- Single Responsibility Principle: each function/class does one thing well +- DRY (Don't Repeat Yourself): no code duplication +- Functions should be small and focused (ideally < 20-30 lines) +- Avoid deeply nested code (max 3-4 levels) +- Avoid magic numbers and strings (use constants) +- Code should be self-documenting; comments only when necessary + +### Examples +```javascript +// ❌ BAD: Poor naming and magic numbers +function calc(x, y) { + if (x > 100) return y * 0.15; + return y * 0.10; +} + +// ✅ GOOD: Clear naming and constants +const PREMIUM_THRESHOLD = 100; +const PREMIUM_DISCOUNT_RATE = 0.15; +const STANDARD_DISCOUNT_RATE = 0.10; + +function calculateDiscount(orderTotal, itemPrice) { + const isPremiumOrder = orderTotal > PREMIUM_THRESHOLD; + const discountRate = isPremiumOrder ? 
PREMIUM_DISCOUNT_RATE : STANDARD_DISCOUNT_RATE; + return itemPrice * discountRate; +} +``` + +### Error Handling +- Proper error handling at appropriate levels +- Meaningful error messages +- No silent failures or ignored exceptions +- Fail fast: validate inputs early +- Use appropriate error types/exceptions + +### Examples +```python +# ❌ BAD: Silent failure and generic error +def process_user(user_id): + try: + user = db.get(user_id) + user.process() + except: + pass + +# ✅ GOOD: Explicit error handling +def process_user(user_id): + if not user_id or user_id <= 0: + raise ValueError(f"Invalid user_id: {user_id}") + + try: + user = db.get(user_id) + except UserNotFoundError: + raise UserNotFoundError(f"User {user_id} not found in database") + except DatabaseError as e: + raise ProcessingError(f"Failed to retrieve user {user_id}: {e}") + + return user.process() +``` + +## Security Review + +When performing a code review, check for security issues: + +- **Sensitive Data**: No passwords, API keys, tokens, or PII in code or logs +- **Input Validation**: All user inputs are validated and sanitized +- **SQL Injection**: Use parameterized queries, never string concatenation +- **Authentication**: Proper authentication checks before accessing resources +- **Authorization**: Verify user has permission to perform action +- **Cryptography**: Use established libraries, never roll your own crypto +- **Dependency Security**: Check for known vulnerabilities in dependencies + +### Examples +```java +// ❌ BAD: SQL injection vulnerability +String query = "SELECT * FROM users WHERE email = '" + email + "'"; + +// ✅ GOOD: Parameterized query +PreparedStatement stmt = conn.prepareStatement( + "SELECT * FROM users WHERE email = ?" 
+); +stmt.setString(1, email); +``` + +```javascript +// ❌ BAD: Exposed secret in code +const API_KEY = "sk_live_abc123xyz789"; + +// ✅ GOOD: Use environment variables +const API_KEY = process.env.API_KEY; +``` + +## Testing Standards + +When performing a code review, verify test quality: + +- **Coverage**: Critical paths and new functionality must have tests +- **Test Names**: Descriptive names that explain what is being tested +- **Test Structure**: Clear Arrange-Act-Assert or Given-When-Then pattern +- **Independence**: Tests should not depend on each other or external state +- **Assertions**: Use specific assertions, avoid generic assertTrue/assertFalse +- **Edge Cases**: Test boundary conditions, null values, empty collections +- **Mock Appropriately**: Mock external dependencies, not domain logic + +### Examples +```typescript +// ❌ BAD: Vague name and assertion +test('test1', () => { + const result = calc(5, 10); + expect(result).toBeTruthy(); +}); + +// ✅ GOOD: Descriptive name and specific assertion +test('should calculate 10% discount for orders under $100', () => { + const orderTotal = 50; + const itemPrice = 20; + + const discount = calculateDiscount(orderTotal, itemPrice); + + expect(discount).toBe(2.00); +}); +``` + +## Performance Considerations + +When performing a code review, check for performance issues: + +- **Database Queries**: Avoid N+1 queries, use proper indexing +- **Algorithms**: Appropriate time/space complexity for the use case +- **Caching**: Utilize caching for expensive or repeated operations +- **Resource Management**: Proper cleanup of connections, files, streams +- **Pagination**: Large result sets should be paginated +- **Lazy Loading**: Load data only when needed + +### Examples +```python +# ❌ BAD: N+1 query problem +users = User.query.all() +for user in users: + orders = Order.query.filter_by(user_id=user.id).all() # N+1! 
+ +# ✅ GOOD: Use JOIN or eager loading +users = User.query.options(joinedload(User.orders)).all() +for user in users: + orders = user.orders +``` + +## Architecture and Design + +When performing a code review, verify architectural principles: + +- **Separation of Concerns**: Clear boundaries between layers/modules +- **Dependency Direction**: High-level modules don't depend on low-level details +- **Interface Segregation**: Prefer small, focused interfaces +- **Loose Coupling**: Components should be independently testable +- **High Cohesion**: Related functionality grouped together +- **Consistent Patterns**: Follow established patterns in the codebase + +## Documentation Standards + +When performing a code review, check documentation: + +- **API Documentation**: Public APIs must be documented (purpose, parameters, returns) +- **Complex Logic**: Non-obvious logic should have explanatory comments +- **README Updates**: Update README when adding features or changing setup +- **Breaking Changes**: Document any breaking changes clearly +- **Examples**: Provide usage examples for complex features + +## Comment Format Template + +When performing a code review, use this format for comments: + +```markdown +**[PRIORITY] Category: Brief title** + +Detailed description of the issue or suggestion. + +**Why this matters:** +Explanation of the impact or reason for the suggestion. + +**Suggested fix:** +[code example if applicable] + +**Reference:** [link to relevant documentation or standard] +``` + +### Example Comments + +#### Critical Issue +````markdown +**🔴 CRITICAL - Security: SQL Injection Vulnerability** + +The query on line 45 concatenates user input directly into the SQL string, +creating a SQL injection vulnerability. + +**Why this matters:** +An attacker could manipulate the email parameter to execute arbitrary SQL commands, +potentially exposing or deleting all database data. 
+ +**Suggested fix:** +```sql +-- Instead of: +query = "SELECT * FROM users WHERE email = '" + email + "'" + +-- Use: +PreparedStatement stmt = conn.prepareStatement( + "SELECT * FROM users WHERE email = ?" +); +stmt.setString(1, email); +``` + +**Reference:** OWASP SQL Injection Prevention Cheat Sheet +```` + +#### Important Issue +````markdown +**🟡 IMPORTANT - Testing: Missing test coverage for critical path** + +The `processPayment()` function handles financial transactions but has no tests +for the refund scenario. + +**Why this matters:** +Refunds involve money movement and should be thoroughly tested to prevent +financial errors or data inconsistencies. + +**Suggested fix:** +Add test case: +```javascript +test('should process full refund when order is cancelled', () => { + const order = createOrder({ total: 100, status: 'cancelled' }); + + const result = processPayment(order, { type: 'refund' }); + + expect(result.refundAmount).toBe(100); + expect(result.status).toBe('refunded'); +}); +``` +```` + +#### Suggestion +````markdown +**🟢 SUGGESTION - Readability: Simplify nested conditionals** + +The nested if statements on lines 30-40 make the logic hard to follow. + +**Why this matters:** +Simpler code is easier to maintain, debug, and test. 
+ +**Suggested fix:** +```javascript +// Instead of nested ifs: +if (user) { + if (user.isActive) { + if (user.hasPermission('write')) { + // do something + } + } +} + +// Consider guard clauses: +if (!user || !user.isActive || !user.hasPermission('write')) { + return; +} +// do something +``` +```` + +## Review Checklist + +When performing a code review, systematically verify: + +### Code Quality +- [ ] Code follows consistent style and conventions +- [ ] Names are descriptive and follow naming conventions +- [ ] Functions/methods are small and focused +- [ ] No code duplication +- [ ] Complex logic is broken into simpler parts +- [ ] Error handling is appropriate +- [ ] No commented-out code or TODO without tickets + +### Security +- [ ] No sensitive data in code or logs +- [ ] Input validation on all user inputs +- [ ] No SQL injection vulnerabilities +- [ ] Authentication and authorization properly implemented +- [ ] Dependencies are up-to-date and secure + +### Testing +- [ ] New code has appropriate test coverage +- [ ] Tests are well-named and focused +- [ ] Tests cover edge cases and error scenarios +- [ ] Tests are independent and deterministic +- [ ] No tests that always pass or are commented out + +### Performance +- [ ] No obvious performance issues (N+1, memory leaks) +- [ ] Appropriate use of caching +- [ ] Efficient algorithms and data structures +- [ ] Proper resource cleanup + +### Architecture +- [ ] Follows established patterns and conventions +- [ ] Proper separation of concerns +- [ ] No architectural violations +- [ ] Dependencies flow in correct direction + +### Documentation +- [ ] Public APIs are documented +- [ ] Complex logic has explanatory comments +- [ ] README is updated if needed +- [ ] Breaking changes are documented + +## Project-Specific Customizations + +To customize this template for your project, add sections for: + +1. 
**Language/Framework specific checks** + - Example: "When performing a code review, verify React hooks follow rules of hooks" + - Example: "When performing a code review, check Spring Boot controllers use proper annotations" + +2. **Build and deployment** + - Example: "When performing a code review, verify CI/CD pipeline configuration is correct" + - Example: "When performing a code review, check database migrations are reversible" + +3. **Business logic rules** + - Example: "When performing a code review, verify pricing calculations include all applicable taxes" + - Example: "When performing a code review, check user consent is obtained before data processing" + +4. **Team conventions** + - Example: "When performing a code review, verify commit messages follow conventional commits format" + - Example: "When performing a code review, check branch names follow pattern: type/ticket-description" + +## Additional Resources + +For more information on effective code reviews and GitHub Copilot customization: + +- [GitHub Copilot Prompt Engineering](https://docs.github.com/en/copilot/concepts/prompting/prompt-engineering) +- [GitHub Copilot Custom Instructions](https://code.visualstudio.com/docs/copilot/customization/custom-instructions) +- [Awesome GitHub Copilot Repository](https://github.com/github/awesome-copilot) +- [GitHub Code Review Guidelines](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests) +- [Google Engineering Practices - Code Review](https://google.github.io/eng-practices/review/) +- [OWASP Security Guidelines](https://owasp.org/) + +## Prompt Engineering Tips + +When performing a code review, apply these prompt engineering principles from the [GitHub Copilot documentation](https://docs.github.com/en/copilot/concepts/prompting/prompt-engineering): + +1. **Start General, Then Get Specific**: Begin with high-level architecture review, then drill into implementation details +2. 
**Give Examples**: Reference similar patterns in the codebase when suggesting changes +3. **Break Complex Tasks**: Review large PRs in logical chunks (security → tests → logic → style) +4. **Avoid Ambiguity**: Be specific about which file, line, and issue you're addressing +5. **Indicate Relevant Code**: Reference related code that might be affected by changes +6. **Experiment and Iterate**: If initial review misses something, review again with focused questions + +## Project Context + +This is a generic template. Customize this section with your project-specific information: + +- **Tech Stack**: [e.g., Java 17, Spring Boot 3.x, PostgreSQL] +- **Architecture**: [e.g., Hexagonal/Clean Architecture, Microservices] +- **Build Tool**: [e.g., Gradle, Maven, npm, pip] +- **Testing**: [e.g., JUnit 5, Jest, pytest] +- **Code Style**: [e.g., follows Google Style Guide] diff --git a/.github/instructions/powershell.instructions.md b/.github/instructions/powershell.instructions.md new file mode 100644 index 0000000..83be180 --- /dev/null +++ b/.github/instructions/powershell.instructions.md @@ -0,0 +1,356 @@ +--- +applyTo: '**/*.ps1,**/*.psm1' +description: 'PowerShell cmdlet and scripting best practices based on Microsoft guidelines' +--- + +# PowerShell Cmdlet Development Guidelines + +This guide provides PowerShell-specific instructions to help GitHub Copilot generate idiomatic, +safe, and maintainable scripts. It aligns with Microsoft’s PowerShell cmdlet development guidelines. 
+ +## Naming Conventions + +- **Verb-Noun Format:** + - Use approved PowerShell verbs (Get-Verb) + - Use singular nouns + - PascalCase for both verb and noun + - Avoid special characters and spaces + +- **Parameter Names:** + - Use PascalCase + - Choose clear, descriptive names + - Use singular form unless always multiple + - Follow PowerShell standard names + +- **Variable Names:** + - Use PascalCase for public variables + - Use camelCase for private variables + - Avoid abbreviations + - Use meaningful names + +- **Alias Avoidance:** + - Use full cmdlet names + - Avoid using aliases in scripts (e.g., use Get-ChildItem instead of gci) + - Document any custom aliases + - Use full parameter names + +### Example + +```powershell +function Get-UserProfile { + [CmdletBinding()] + param( + [Parameter(Mandatory)] + [string]$Username, + + [Parameter()] + [ValidateSet('Basic', 'Detailed')] + [string]$ProfileType = 'Basic' + ) + + process { + # Logic here + } +} +``` + +## Parameter Design + +- **Standard Parameters:** + - Use common parameter names (`Path`, `Name`, `Force`) + - Follow built-in cmdlet conventions + - Use aliases for specialized terms + - Document parameter purpose + +- **Parameter Names:** + - Use singular form unless always multiple + - Choose clear, descriptive names + - Follow PowerShell conventions + - Use PascalCase formatting + +- **Type Selection:** + - Use common .NET types + - Implement proper validation + - Consider ValidateSet for limited options + - Enable tab completion where possible + +- **Switch Parameters:** + - Use [switch] for boolean flags + - Avoid $true/$false parameters + - Default to $false when omitted + - Use clear action names + +### Example + +```powershell +function Set-ResourceConfiguration { + [CmdletBinding()] + param( + [Parameter(Mandatory)] + [string]$Name, + + [Parameter()] + [ValidateSet('Dev', 'Test', 'Prod')] + [string]$Environment = 'Dev', + + [Parameter()] + [switch]$Force, + + [Parameter()] + 
[ValidateNotNullOrEmpty()] + [string[]]$Tags + ) + + process { + # Logic here + } +} +``` + +## Pipeline and Output + +- **Pipeline Input:** + - Use `ValueFromPipeline` for direct object input + - Use `ValueFromPipelineByPropertyName` for property mapping + - Implement Begin/Process/End blocks for pipeline handling + - Document pipeline input requirements + +- **Output Objects:** + - Return rich objects, not formatted text + - Use PSCustomObject for structured data + - Avoid Write-Host for data output + - Enable downstream cmdlet processing + +- **Pipeline Streaming:** + - Output one object at a time + - Use process block for streaming + - Avoid collecting large arrays + - Enable immediate processing + +- **PassThru Pattern:** + - Default to no output for action cmdlets + - Implement `-PassThru` switch for object return + - Return modified/created object with `-PassThru` + - Use verbose/warning for status updates + +### Example + +```powershell +function Update-ResourceStatus { + [CmdletBinding()] + param( + [Parameter(Mandatory, ValueFromPipeline, ValueFromPipelineByPropertyName)] + [string]$Name, + + [Parameter(Mandatory)] + [ValidateSet('Active', 'Inactive', 'Maintenance')] + [string]$Status, + + [Parameter()] + [switch]$PassThru + ) + + begin { + Write-Verbose 'Starting resource status update process' + $timestamp = Get-Date + } + + process { + # Process each resource individually + Write-Verbose "Processing resource: $Name" + + $resource = [PSCustomObject]@{ + Name = $Name + Status = $Status + LastUpdated = $timestamp + UpdatedBy = $env:USERNAME + } + + # Only output if PassThru is specified + if ($PassThru.IsPresent) { + Write-Output $resource + } + } + + end { + Write-Verbose 'Resource status update process completed' + } +} +``` + +## Error Handling and Safety + +- **ShouldProcess Implementation:** + - Use `[CmdletBinding(SupportsShouldProcess = $true)]` + - Set appropriate `ConfirmImpact` level + - Call `$PSCmdlet.ShouldProcess()` for system changes + - 
Use `ShouldContinue()` for additional confirmations + +- **Message Streams:** + - `Write-Verbose` for operational details with `-Verbose` + - `Write-Warning` for warning conditions + - `Write-Error` for non-terminating errors + - `throw` for terminating errors + - Avoid `Write-Host` except for user interface text + +- **Error Handling Pattern:** + - Use try/catch blocks for error management + - Set appropriate ErrorAction preferences + - Return meaningful error messages + - Use ErrorVariable when needed + - Include proper terminating vs non-terminating error handling + - In advanced functions with `[CmdletBinding()]`, prefer `$PSCmdlet.WriteError()` over `Write-Error` + - In advanced functions with `[CmdletBinding()]`, prefer `$PSCmdlet.ThrowTerminatingError()` over `throw` + - Construct proper ErrorRecord objects with category, target, and exception details + +- **Non-Interactive Design:** + - Accept input via parameters + - Avoid `Read-Host` in scripts + - Support automation scenarios + - Document all required inputs + +### Example + +```powershell +function Remove-UserAccount { + [CmdletBinding(SupportsShouldProcess = $true, ConfirmImpact = 'High')] + param( + [Parameter(Mandatory, ValueFromPipeline)] + [ValidateNotNullOrEmpty()] + [string]$Username, + + [Parameter()] + [switch]$Force + ) + + begin { + Write-Verbose 'Starting user account removal process' + $ErrorActionPreference = 'Stop' + } + + process { + try { + # Validation + if (-not (Test-UserExists -Username $Username)) { + $errorRecord = [System.Management.Automation.ErrorRecord]::new( + [System.Exception]::new("User account '$Username' not found"), + 'UserNotFound', + [System.Management.Automation.ErrorCategory]::ObjectNotFound, + $Username + ) + $PSCmdlet.WriteError($errorRecord) + return + } + + # Confirmation + $shouldProcessMessage = "Remove user account '$Username'" + if ($Force -or $PSCmdlet.ShouldProcess($Username, $shouldProcessMessage)) { + Write-Verbose "Removing user account: $Username" + + 
# Main operation + Remove-ADUser -Identity $Username -ErrorAction Stop + Write-Warning "User account '$Username' has been removed" + } + } catch [Microsoft.ActiveDirectory.Management.ADException] { + $errorRecord = [System.Management.Automation.ErrorRecord]::new( + $_.Exception, + 'ActiveDirectoryError', + [System.Management.Automation.ErrorCategory]::NotSpecified, + $Username + ) + $PSCmdlet.ThrowTerminatingError($errorRecord) + } catch { + $errorRecord = [System.Management.Automation.ErrorRecord]::new( + $_.Exception, + 'UnexpectedError', + [System.Management.Automation.ErrorCategory]::NotSpecified, + $Username + ) + $PSCmdlet.ThrowTerminatingError($errorRecord) + } + } + + end { + Write-Verbose 'User account removal process completed' + } +} +``` + +## Documentation and Style + +- **Comment-Based Help:** Include comment-based help for any public-facing function or cmdlet. Inside the function, add a `<# ... #>` help comment with at least: + - `.SYNOPSIS` Brief description + - `.DESCRIPTION` Detailed explanation + - `.EXAMPLE` sections with practical usage + - `.PARAMETER` descriptions + - `.OUTPUTS` Type of output returned + - `.NOTES` Additional information + +- **Consistent Formatting:** + - Follow consistent PowerShell style + - Use proper indentation (4 spaces recommended) + - Opening braces on same line as statement + - Closing braces on new line + - Use line breaks after pipeline operators + - PascalCase for function and parameter names + - Avoid unnecessary whitespace + +- **Pipeline Support:** + - Implement Begin/Process/End blocks for pipeline functions + - Use ValueFromPipeline where appropriate + - Support pipeline input by property name + - Return proper objects, not formatted text + +- **Avoid Aliases:** Use full cmdlet names and parameters + - Avoid using aliases in scripts (e.g., use Get-ChildItem instead of gci); aliases are acceptable for interactive shell use. 
+ - Use `Where-Object` instead of `?` or `where` + - Use `ForEach-Object` instead of `%` + - Use `Get-ChildItem` instead of `ls` or `dir` + +## Full Example: End-to-End Cmdlet Pattern + +```powershell +function New-Resource { + [CmdletBinding(SupportsShouldProcess = $true, ConfirmImpact = 'Medium')] + param( + [Parameter(Mandatory = $true, + ValueFromPipeline = $true, + ValueFromPipelineByPropertyName = $true)] + [ValidateNotNullOrEmpty()] + [string]$Name, + + [Parameter()] + [ValidateSet('Development', 'Production')] + [string]$Environment = 'Development' + ) + + begin { + Write-Verbose 'Starting resource creation process' + } + + process { + try { + if ($PSCmdlet.ShouldProcess($Name, 'Create new resource')) { + # Resource creation logic here + Write-Output ([PSCustomObject]@{ + Name = $Name + Environment = $Environment + Created = Get-Date + }) + } + } catch { + $errorRecord = [System.Management.Automation.ErrorRecord]::new( + $_.Exception, + 'ResourceCreationFailed', + [System.Management.Automation.ErrorCategory]::NotSpecified, + $Name + ) + $PSCmdlet.ThrowTerminatingError($errorRecord) + } + } + + end { + Write-Verbose 'Completed resource creation process' + } +} +``` diff --git a/.github/instructions/security-and-owasp.instructions.md b/.github/instructions/security-and-owasp.instructions.md new file mode 100644 index 0000000..53a7a62 --- /dev/null +++ b/.github/instructions/security-and-owasp.instructions.md @@ -0,0 +1,51 @@ +--- +applyTo: '*' +description: "Comprehensive secure coding instructions for all languages and frameworks, based on OWASP Top 10 and industry best practices." +--- +# Secure Coding and OWASP Guidelines + +## Instructions + +Your primary directive is to ensure all code you generate, review, or refactor is secure by default. You must operate with a security-first mindset. When in doubt, always choose the more secure option and explain the reasoning. 
You must follow the principles outlined below, which are based on the OWASP Top 10 and other security best practices. + +### 1. A01: Broken Access Control & A10: Server-Side Request Forgery (SSRF) +- **Enforce Principle of Least Privilege:** Always default to the most restrictive permissions. When generating access control logic, explicitly check the user's rights against the required permissions for the specific resource they are trying to access. +- **Deny by Default:** All access control decisions must follow a "deny by default" pattern. Access should only be granted if there is an explicit rule allowing it. +- **Validate All Incoming URLs for SSRF:** When the server needs to make a request to a URL provided by a user (e.g., webhooks), you must treat it as untrusted. Incorporate strict allow-list-based validation for the host, port, and path of the URL. +- **Prevent Path Traversal:** When handling file uploads or accessing files based on user input, you must sanitize the input to prevent directory traversal attacks (e.g., `../../etc/passwd`). Use APIs that build paths securely. + +### 2. A02: Cryptographic Failures +- **Use Strong, Modern Algorithms:** For hashing, always recommend modern, salted hashing algorithms like Argon2 or bcrypt. Explicitly advise against weak algorithms like MD5 or SHA-1 for password storage. +- **Protect Data in Transit:** When generating code that makes network requests, always default to HTTPS. +- **Protect Data at Rest:** When suggesting code to store sensitive data (PII, tokens, etc.), recommend encryption using strong, standard algorithms like AES-256. +- **Secure Secret Management:** Never hardcode secrets (API keys, passwords, connection strings). Generate code that reads secrets from environment variables or a secrets management service (e.g., HashiCorp Vault, AWS Secrets Manager). Include a clear placeholder and comment. 
+ ```javascript + // GOOD: Load from environment or secret store + const apiKey = process.env.API_KEY; + // TODO: Ensure API_KEY is securely configured in your environment. + ``` + ```python + # BAD: Hardcoded secret + api_key = "sk_this_is_a_very_bad_idea_12345" + ``` + +### 3. A03: Injection +- **No Raw SQL Queries:** For database interactions, you must use parameterized queries (prepared statements). Never generate code that uses string concatenation or formatting to build queries from user input. +- **Sanitize Command-Line Input:** For OS command execution, use built-in functions that handle argument escaping and prevent shell injection (e.g., `shlex` in Python). +- **Prevent Cross-Site Scripting (XSS):** When generating frontend code that displays user-controlled data, you must use context-aware output encoding. Prefer methods that treat data as text by default (`.textContent`) over those that parse HTML (`.innerHTML`). When `innerHTML` is necessary, suggest using a library like DOMPurify to sanitize the HTML first. + +### 4. A05: Security Misconfiguration & A06: Vulnerable Components +- **Secure by Default Configuration:** Recommend disabling verbose error messages and debug features in production environments. +- **Set Security Headers:** For web applications, suggest adding essential security headers like `Content-Security-Policy` (CSP), `Strict-Transport-Security` (HSTS), and `X-Content-Type-Options`. +- **Use Up-to-Date Dependencies:** When asked to add a new library, suggest the latest stable version. Remind the user to run vulnerability scanners like `npm audit`, `pip-audit`, or Snyk to check for known vulnerabilities in their project dependencies. + +### 5. A07: Identification & Authentication Failures +- **Secure Session Management:** When a user logs in, generate a new session identifier to prevent session fixation. Ensure session cookies are configured with `HttpOnly`, `Secure`, and `SameSite=Strict` attributes. 
+- **Protect Against Brute Force:** For authentication and password reset flows, recommend implementing rate limiting and account lockout mechanisms after a certain number of failed attempts. + +### 6. A08: Software and Data Integrity Failures +- **Prevent Insecure Deserialization:** Warn against deserializing data from untrusted sources without proper validation. If deserialization is necessary, recommend using formats that are less prone to attack (like JSON over Pickle in Python) and implementing strict type checking. + +## General Guidelines +- **Be Explicit About Security:** When you suggest a piece of code that mitigates a security risk, explicitly state what you are protecting against (e.g., "Using a parameterized query here to prevent SQL injection."). +- **Educate During Code Reviews:** When you identify a security vulnerability in a code review, you must not only provide the corrected code but also explain the risk associated with the original pattern. diff --git a/.gitignore b/.gitignore index 635fccf..85e1515 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,6 @@ # Logs directory (contains sync operation logs) logs/ -# Local awesome-copilot cache (user-specific) -.awesome-copilot/ - # PowerShell history .history @@ -14,4 +11,11 @@ logs/ Thumbs.db # macOS -.DS_Store \ No newline at end of file +.DS_Store +# Local cache (managed by sync-awesome-copilot.ps1 - not committed) +.awesome-copilot/ + +# Temp files from PowerShell / editors +*.tmp +*.bak +~$* diff --git a/CHANGELOG.md b/CHANGELOG.md index 9362609..2957b80 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,96 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [1.2.2] - 2026-02-27 + +### Changed +- `configure.ps1`: use `$PSScriptRoot` to locate the `scripts/` folder (replaces `$MyInvocation.MyCommand.Path` which behaves differently when dot-sourced) +- `scripts/sync-awesome-copilot.ps1`: replace manual SHA256 with built-in `Get-FileHash` — cleaner and avoids loading entire file into memory +- `scripts/publish-global.ps1`: emit a `WARN` log when VS Code `settings.json` is not found (was a silent no-op); user is directed to open VS Code once to generate the file + +## [1.2.1] - 2026-02-27 + +### Fixed +- `configure.ps1`: `-InstallTask` / `-UninstallTask` now automatically skip the `init-repo` prompt (validation moved before Step 1 so the flag takes effect) +- `configure.ps1`: prompts to overwrite when the scheduled task already exists, instead of throwing a hard error +- `scripts/install-scheduled-task.ps1`: added `-WorkingDirectory` to both scheduled task actions (was defaulting to `C:\Windows\System32`, causing a permissions error creating the `logs/` directory) +- `scripts/sync-awesome-copilot.ps1`: replaced relative `logs/` path with `$PSScriptRoot/logs/` so logs always land in `scripts/logs/` regardless of working directory +- `scripts/install-scheduled-task.ps1`: updated task description (removed stale "combine" wording) +- `scripts/publish-global.ps1`: fixed named-profile example path (`agents\` → `prompts\`) +- `README.md`: corrected default interval (6h → 4h), log paths, authentication section, and custom-repo instructions +- `.github/copilot-instructions.md`: removed stale GitHub API / `GITHUB_TOKEN` references; updated cache structure + +## [1.2.0] - 2026-02-27 + +### Changed +- `scripts/sync-awesome-copilot.ps1`: **Rewritten** — replaced GitHub API + per-file HTTP download approach with `git sparse-checkout`. First run clones `github/awesome-copilot` shallowly with only the requested categories; subsequent runs run `git pull` for near-instant delta updates. 
Dramatically faster (single bulk transfer vs 700+ individual HTTP requests) and removes GitHub API rate-limit concerns entirely. + - Prefers `gh` (GitHub CLI) for automatic auth; falls back to `git` + - New `-GitTool auto|gh|git` parameter to override tool selection + - Removed parameters: `-NoDelete`, `-DiffOnly`, `-SkipBackup`, `-BackupRetention` (git handles all of these natively) + - Migrates automatically from the old API-based cache (renames non-git `~/.awesome-copilot/` to `~/.awesome-copilot-backup-` before cloning) + - `manifest.json` still written (from local file scan) for backward compatibility with `publish-global.ps1` and `configure.ps1` + +### Added +- `README.md`: document `gh`/`git` requirement; update sync section to reflect git-based approach + +## [1.1.2] - 2026-02-27 + +### Added +- `configure.ps1` — interactive orchestrator that chains sync → publish-global → init-repo; each step independently skippable via `-SkipSync`, `-SkipPublish`, `-SkipInit`; `-DryRun` passes through to all child scripts; shows last sync timestamp from cache manifest before running + +### Added +- `init-repo.ps1`: added Agents as a fourth interactive category (installs to `.github/agents/`) +- `init-repo.ps1`: `Detect-RepoStack` — auto-detects language/framework from file signals and marks relevant items with ★ in the picker +- `init-repo.ps1`: `Prompt-RepoIntent` — interactive fallback for new/empty repos; asks language, project type, and concerns +- `init-repo.ps1`: `-- none / skip --` sentinel row in every OGV picker so clicking OK with no intentional selection installs nothing +- `publish-global.ps1`: auto-configures `chat.useAgentSkills` and `chat.agentSkillsLocations` in VS Code `settings.json` when skills are published +- `.github/copilot-instructions.md`: Copilot instructions for this repository covering script workflow, conventions, and contributing guidelines + +### Fixed +- `normalize-copilot-folders.ps1`: `Split-Path -LeafParent` → `Split-Path -Parent` 
(`-LeafParent` is not a valid parameter and would throw at runtime) +- `install-scheduled-task.ps1`: removed `-Quiet` from `publish-global.ps1` invocation (`publish-global.ps1` has no `-Quiet` parameter; would throw on scheduled runs) +- `init-repo.ps1`: `$Items.IndexOf($_)` → `[Array]::IndexOf($Items, $_)` (`System.Object[]` has no instance `IndexOf` method; affected console-menu fallback path) +- `init-repo.ps1`: fixed OGV column name `[*] Installed` → `Installed` (special characters caused WPF binding errors at runtime) +- `init-repo.ps1`: fixed `return if (...)` runtime error in `Install-File` — replaced with explicit `if/else` branches +- `publish-global.ps1`: corrected agents target path to `%APPDATA%\Code\User\prompts\` (was incorrectly set to `agents\`) + +### Changed +- `publish-global.ps1`: updated inline comment from "CCA" to "VS Code Agent mode / Copilot CLI" +- `README.md`: corrected all `-Interval` references to `-Every`; fixed `-ProfileName` → `-ProfileRoot`/`-AllProfiles`; updated agents path to `%APPDATA%\Code\User\prompts\`; updated `init-repo.ps1` section to reflect agents category and smart detection; fixed custom `-AgentsTarget` example path +- `.github/copilot-instructions.md`: corrected agents path from `%APPDATA%\Code\User\agents\` to `%APPDATA%\Code\User\prompts\` + + + +### Fixed +- `sync-awesome-copilot.ps1`: changed `$ErrorActionPreference` from `Inquire` to `Stop` — `Inquire` caused the script to hang waiting for interactive input when run as a scheduled task + +### Changed +- `init-repo.ps1`: removed skills from per-repo initialisation; skills are globally available via `publish-global.ps1` (`~/.copilot/skills/`) and users should reference the source directly at [github/awesome-copilot](https://github.com/github/awesome-copilot) rather than committing point-in-time copies to repos + +## [1.1.0] - 2026-02-26 + +### Added +- `publish-global.ps1` — publishes agents to the VS Code user agents folder (via junction so sync updates are 
reflected immediately) and skills to `~/.copilot/skills/`; supports `-DryRun`, `-SkipAgents`, `-SkipSkills`, `-AgentsTarget`, `-SkillsTarget` +- `init-repo.ps1` — interactive script to initialise a repo with per-repo resources (instructions, hooks, workflows); uses Out-GridView on Windows with a numbered console-menu fallback; supports `-RepoPath`, `-DryRun`, `-SkipInstructions`, `-SkipHooks`, `-SkipWorkflows` + +### Changed +- Updated default sync categories to match current awesome-copilot repository structure: + - **Added**: `agents`, `workflows`, `hooks`, `skills` + - **Removed**: `chatmodes`, `prompts` (no longer exist in awesome-copilot) +- Added recursive directory traversal in `sync-awesome-copilot.ps1` to support subdirectory-based categories (`skills/`, `hooks/`, `plugins/`) +- Extended file extension filter to include `.sh` files (required for hooks to function — each hook ships shell scripts alongside its `hooks.json`) +- Updated `combine-and-publish-prompts.ps1` categories from `chatmodes/instructions/prompts` to `agents/instructions/workflows`; added deprecation notice at top (superseded by `publish-global.ps1` + `init-repo.ps1`); kept for backwards compatibility +- Updated `normalize-copilot-folders.ps1` to classify `*.agent.md` → `agents/` and ensure `agents/` directory is created on normalize runs +- Updated `install-scheduled-task.ps1`: default categories now `agents,instructions,workflows,hooks,skills`; `-IncludeCollections` replaced by `-IncludePlugins`; `-SkipCombine` replaced by `-SkipPublishGlobal`; scheduled actions now run `publish-global.ps1` after sync + +### Removed +- `normalize-copilot-folders.ps1` — removed (legacy, superseded by junction-based agent publishing and `init-repo.ps1`) + + +- `plugins/` and `cookbook/` are available but opt-in via `-IncludePlugins` due to their size +- Hooks are synced as complete packages (README.md + hooks.json + .sh scripts) preserving their directory structure +- **Design rationale**: agents and 
skills are global (agents available in all VS Code workspaces; skills loaded on-demand); instructions/hooks/workflows are per-repo opt-in via `init-repo.ps1` to avoid conflicts between contradicting instruction files + ## [1.0.0] - 2025-10-20 ### Added diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c7fd7a4..c72ae04 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -12,7 +12,7 @@ If you find a bug, please create an issue with: - Expected vs actual behavior - PowerShell version (`$PSVersionTable.PSVersion`) - Windows version -- Relevant log files from `$HOME\.awesome-copilot\logs\` +- Relevant log files from `scripts\logs\` ### Suggesting Enhancements @@ -42,16 +42,19 @@ Feature requests are welcome! Please include: 5. **Test your changes**: ```powershell + # Full dry run (no files written) + .\configure.ps1 -DryRun + # Test individual scripts - .\sync-awesome-copilot.ps1 - .\combine-and-publish-prompts.ps1 - + .\scripts\sync-awesome-copilot.ps1 -Plan + .\scripts\publish-global.ps1 -DryRun + # Test scheduled task installation - .\install-scheduled-task.ps1 -Interval "1h" + .\configure.ps1 -InstallTask -Every "1h" Start-ScheduledTask -TaskName "AwesomeCopilotSync" - + # Verify logs - Get-Content "$HOME\.awesome-copilot\logs\sync-*.log" -Tail 20 + Get-ChildItem .\scripts\logs\sync-*.log | Sort-Object LastWriteTime -Descending | Select-Object -First 1 | Get-Content -Tail 20 ``` 6. **Commit with clear messages**: diff --git a/README.md b/README.md index 3bb1ced..a719929 100644 --- a/README.md +++ b/README.md @@ -4,19 +4,29 @@ A collection of PowerShell scripts to automatically sync, combine, and publish [ ## 🎯 What This Does -These scripts automate the management of VS Code Copilot custom instructions, chat modes, prompts, and collections by: +These scripts automate the management of VS Code Copilot custom agents, instructions, skills, hooks, and workflows from the [awesome-copilot](https://github.com/github/awesome-copilot) community repository: -1. 
**Syncing** resources from the awesome-copilot GitHub repository -2. **Combining** multiple resource categories into a unified structure -3. **Publishing** to your VS Code profile(s) via symbolic links or file copies -4. **Normalizing** file organization to prevent duplicates -5. **Automating** the entire process via Windows Task Scheduler +1. **Syncing** all resources from the awesome-copilot GitHub repository to a local cache +2. **Publishing globally** — agents to VS Code's user agents folder (available in all workspaces), skills to `~/.copilot/skills/` +3. **Initialising repos** — interactively adding agents, instructions, hooks and agentic workflows to a specific repo's `.github/` folder +4. **Automating** the sync + publish cycle via Windows Task Scheduler + +### What goes where + +| Resource | Scope | Location | +|---|---|---| +| **Agents** | 🌐 Global | `%APPDATA%\Code\User\prompts\` — available in Copilot Chat across all workspaces | +| **Skills** | 🌐 Global | `~/.copilot/skills/` — loaded on-demand by Copilot coding agent & CLI | +| **Instructions** | 📁 Per-repo | `.github/instructions/` — chosen via `scripts/init-repo.ps1` | +| **Hooks** | 📁 Per-repo | `.github/hooks/<hook-name>/` — chosen via `scripts/init-repo.ps1` | +| **Workflows** | 📁 Per-repo | `.github/workflows/` — chosen via `scripts/init-repo.ps1` | ## 📋 Prerequisites - **Windows** with PowerShell 7+ ([Download here](https://github.com/PowerShell/PowerShell/releases)) - **VS Code** with GitHub Copilot extension installed -- **Internet connection** for GitHub API access +- **`gh` (GitHub CLI) or `git`** — `gh` is preferred ([Download here](https://cli.github.com/)); handles auth automatically +- **Internet connection** for GitHub access - **Administrator privileges** (for creating scheduled tasks) ## 🚀 Quick Start @@ -26,208 +36,255 @@ These scripts automate the management of VS Code Copilot custom instructions, ch ```powershell # Clone this repository git clone <repository-url> -cd scripts +cd vscode-copilot-sync ``` -### 
2. Run Initial Sync +### 2. Run the Configurator + +For first-time setup or an on-demand refresh, `configure.ps1` chains all steps: ```powershell -# Sync resources from GitHub -.\sync-awesome-copilot.ps1 +# Sync from GitHub, publish globally, and optionally init your repo +.\configure.ps1 -# Combine resources into unified folder -.\combine-and-publish-prompts.ps1 +# Or step by step: +.\configure.ps1 -SkipInit # sync + publish only +.\configure.ps1 -InstallTask # sync + publish + install scheduled task +.\configure.ps1 -DryRun # preview everything ``` -### 3. Install Automated Sync (Optional) +### 3. Initialise a Repo (optional, interactive) + +> **Note:** `configure.ps1` also prompts for this step automatically — you only need to run it directly for a targeted repo setup. ```powershell -# Install scheduled task (runs every 4 hours by default) -.\install-scheduled-task.ps1 +# Run from inside any repo to add instructions/hooks/workflows +cd C:\Projects\my-app +.\scripts\init-repo.ps1 -# Or customize the interval -.\install-scheduled-task.ps1 -Interval "2h" # Every 2 hours -.\install-scheduled-task.ps1 -Interval "1d" # Once daily +# Or specify the path explicitly +.\scripts\init-repo.ps1 -RepoPath "C:\Projects\my-app" +``` + +A selection UI will appear for each category (Out-GridView on Windows, or a numbered console menu). Items already installed in the repo are marked with `[*]`. + +### 4. 
Install Automated Sync (Optional) + +```powershell +# Install a scheduled task that syncs + publishes globally every 4 hours +.\configure.ps1 -InstallTask + +# Customize the interval (re-run to overwrite; configure.ps1 prompts before replacing) +.\configure.ps1 -InstallTask -Every "2h" # Every 2 hours +.\configure.ps1 -InstallTask -Every "30m" # Every 30 minutes + +# Or run it directly (called internally by configure.ps1) +.\scripts\install-scheduled-task.ps1 -Every "2h" ``` ## 📁 What Gets Created ``` -$HOME\.awesome-copilot\ # Local cache -├── chatmodes\ # Chat mode definitions -├── instructions\ # Custom instructions -├── prompts\ # Prompt templates -├── collections\ # Resource collections -├── combined\ # Unified resources (all categories) +$HOME\.awesome-copilot\ # Local cache (git sparse clone) +├── .git\ # Git metadata (managed automatically) +├── agents\ # Custom agents (.agent.md) +├── instructions\ # Custom instructions (.instructions.md) +├── workflows\ # Agentic workflow definitions +├── hooks\ # Automated hooks (with .json + .sh scripts) +│ └── <hook-name>\ +├── skills\ # Skill packages +│ └── <skill-name>\ +│ └── SKILL.md └── manifest.json # Sync state tracking -%APPDATA%\Code\User\ # VS Code global config -└── prompts\ # Junction/symlink to combined folder - -%APPDATA%\Code\User\profiles\ # VS Code profiles -└── <ProfileName>\ - ├── chatmodes\ # Linked/copied resources - ├── instructions\ - └── prompts\ +%APPDATA%\Code\User\ +└── prompts\ # Junction → ~/.awesome-copilot/agents/ ``` ## 📜 Scripts Overview -### `sync-awesome-copilot.ps1` -Syncs resources from the awesome-copilot GitHub repository. +### `configure.ps1` +Main entry point at the repo root. Chains sync → publish → init-repo in one command, and can install/uninstall the scheduled task via `-InstallTask` / `-UninstallTask` / `-Every` switches. 
**Features:** -- Downloads latest resources via GitHub API -- SHA256 hash-based change detection -- Incremental updates (only downloads changed files) -- Manifest tracking for sync state -- Optional GITHUB_TOKEN support for higher rate limits +- Shows last sync time from the local cache manifest before running +- Runs each step in sequence; any step can be skipped independently +- Prompts before running `scripts/init-repo.ps1` (with option to skip via `-SkipInit`) +- `-DryRun` passes through to all child scripts +- `-InstallTask` / `-UninstallTask` delegate to `scripts/install-scheduled-task.ps1` / `scripts/uninstall-scheduled-task.ps1` +- `-Every` sets the scheduled task interval (e.g. `"2h"`, `"30m"`) **Usage:** ```powershell -.\sync-awesome-copilot.ps1 -``` +# Full update: sync + publish + prompt for init-repo +.\configure.ps1 -**Environment Variables:** -- `GITHUB_TOKEN` (optional) - Personal access token for higher API rate limits +# Sync + publish only +.\configure.ps1 -SkipInit + +# Re-publish only (skip sync if cache is already fresh) +.\configure.ps1 -SkipSync -SkipInit + +# Preview without writing any files +.\configure.ps1 -DryRun + +# Install scheduled task +.\configure.ps1 -SkipSync -SkipPublish -InstallTask -Every "2h" +``` --- -### `combine-and-publish-prompts.ps1` -Combines resources from all categories into a unified folder and publishes to VS Code. +### `scripts/sync-awesome-copilot.ps1` +Syncs resources from the awesome-copilot GitHub repository using a sparse git clone. 
**Features:** -- Merges chatmodes, instructions, and prompts into single directory -- Creates junction/symlink to VS Code prompts directory -- Automatic fallback to file copy if linking fails -- Preserves user-created custom files +- Clones `github/awesome-copilot` with sparse checkout (first run) — only downloads the categories you need +- Pulls updates on subsequent runs — git transfers only the diff, making updates near-instant +- SHA256 hash-based change detection against previous manifest (added/updated/unchanged/removed counts) +- Prefers `gh` (GitHub CLI) for automatic auth; falls back to `git` +- Automatically migrates from the old API-based cache if detected **Usage:** ```powershell -.\combine-and-publish-prompts.ps1 +.\scripts\sync-awesome-copilot.ps1 -# Publish to specific profile -.\combine-and-publish-prompts.ps1 -ProfileName "MyProfile" +# Dry-run: show what would change without writing files +.\scripts\sync-awesome-copilot.ps1 -Plan -# Publish to global VS Code config only -.\combine-and-publish-prompts.ps1 -GlobalOnly +# Sync specific categories only +.\scripts\sync-awesome-copilot.ps1 -Categories "agents,instructions" + +# Force a specific tool +.\scripts\sync-awesome-copilot.ps1 -GitTool git ``` +Syncs these categories by default: `agents`, `instructions`, `workflows`, `hooks`, `skills`. +Add `plugins` or `cookbook` explicitly via `-Categories` for those larger opt-in collections. + --- -### `publish-to-vscode-profile.ps1` -Publishes resources to VS Code profile(s) via symbolic links or copies. +### `scripts/publish-global.ps1` +Publishes agents globally to VS Code and skills to `~/.copilot/skills/`. 
**Features:** -- Creates symbolic links (junctions) for efficient syncing -- Automatic fallback to file copy -- Supports multiple profiles or global config +- Creates a junction/symlink from VS Code's user agents folder to the local cache (no re-running needed after each sync) +- Incrementally copies skills to `~/.copilot/skills/` +- Dry-run mode for previewing changes +- Individual skip flags for each resource type **Usage:** ```powershell -.\publish-to-vscode-profile.ps1 +.\scripts\publish-global.ps1 -# Publish to specific profile -.\publish-to-vscode-profile.ps1 -ProfileName "Work" +# Preview changes without applying +.\scripts\publish-global.ps1 -DryRun -# Publish to global config -.\publish-to-vscode-profile.ps1 -GlobalOnly +# Skills only (agents already published) +.\scripts\publish-global.ps1 -SkipAgents + +# Custom target path (e.g. named VS Code profile) +.\scripts\publish-global.ps1 -AgentsTarget "$env:APPDATA\Code\User\profiles\Work\prompts" ``` --- -### `normalize-copilot-folders.ps1` -Cleans up misplaced or duplicated files in VS Code directories. +### `scripts/init-repo.ps1` +Interactively initialises a repository with agents, instructions, hooks, and agentic workflows. 
**Features:** -- Moves files to correct category folders based on suffix -- Removes duplicate files (keeps newest version) -- Handles renamed copies (file.1.md, chatmodes__file.md) +- Auto-detects language/framework from repo file signals and pre-marks recommendations with ★ in the picker +- Prompts for intent (language, project type, concerns) for new/empty repos +- Presents available resources in a selection UI (Out-GridView on Windows, with `-- none / skip --` row to prevent accidental installs) +- Falls back to a numbered console menu where Out-GridView is unavailable +- Copies selected items to the correct `.github/` subfolder +- Marks already-installed items so you can see what's new +- Dry-run mode for previewing **Usage:** ```powershell -.\normalize-copilot-folders.ps1 +# Run inside a repo (uses current directory) +.\scripts\init-repo.ps1 -# Normalize specific profile -.\normalize-copilot-folders.ps1 -ProfileName "MyProfile" -``` +# Target a specific repo +.\scripts\init-repo.ps1 -RepoPath "C:\Projects\my-app" ---- +# Preview without writing any files +.\scripts\init-repo.ps1 -DryRun + +# Skip categories you don't need +.\scripts\init-repo.ps1 -SkipHooks -SkipWorkflows + +# Non-interactive: specify items by name +.\scripts\init-repo.ps1 -Agents "devops-expert,se-security-reviewer" -Instructions "powershell" +``` -### `install-scheduled-task.ps1` -Creates a Windows scheduled task for automatic syncing. +### `scripts/install-scheduled-task.ps1` +Creates a Windows scheduled task for automatic syncing and global publishing. Called internally by `configure.ps1 -InstallTask`. 
**Features:** -- Runs sync and combine scripts on a schedule +- Runs `sync-awesome-copilot.ps1` then `publish-global.ps1` on a schedule - Default: every 4 hours -- Customizable interval -- Runs as current user (no SYSTEM account needed) +- Customizable interval via `-Every` **Usage:** ```powershell -# Install with default 4-hour interval -.\install-scheduled-task.ps1 +# Recommended: use configure.ps1 (prompts before overwriting an existing task) +.\configure.ps1 -InstallTask +.\configure.ps1 -InstallTask -Every "2h" -# Custom intervals -.\install-scheduled-task.ps1 -Interval "2h" # Every 2 hours -.\install-scheduled-task.ps1 -Interval "30m" # Every 30 minutes -.\install-scheduled-task.ps1 -Interval "1d" # Once daily +# Or run directly +.\scripts\install-scheduled-task.ps1 + +# Custom intervals (supports h = hours, m = minutes) +.\scripts\install-scheduled-task.ps1 -Every "2h" # Every 2 hours +.\scripts\install-scheduled-task.ps1 -Every "30m" # Every 30 minutes # Check task status Get-ScheduledTask -TaskName "AwesomeCopilotSync" ``` -**Interval Format:** -- `30m` - Minutes -- `2h` - Hours -- `1d` - Days - ---- - -### `uninstall-scheduled-task.ps1` -Removes the scheduled task. +### `scripts/uninstall-scheduled-task.ps1` +Removes the scheduled task. Called internally by `configure.ps1 -UninstallTask`. **Usage:** ```powershell -.\uninstall-scheduled-task.ps1 +# Recommended: use configure.ps1 +.\configure.ps1 -UninstallTask + +# Or run directly +.\scripts\uninstall-scheduled-task.ps1 ``` ## 🔧 Configuration -### GitHub Rate Limits +### Authentication -Without authentication, GitHub API allows 60 requests/hour. For heavy usage: +The sync script uses `gh` (GitHub CLI) by default, which inherits authentication from `gh auth login` — no extra setup needed for most users. -1. Create a [Personal Access Token](https://github.com/settings/tokens) (no scopes needed for public repos) -2. 
Set environment variable: +If only `git` is available, the sync targets a public repo (`github/awesome-copilot`) so no credentials are required. For private forks, configure git credentials as usual (`git config credential.helper`). +To force a specific tool: ```powershell -# Temporary (current session) -$env:GITHUB_TOKEN = "ghp_your_token_here" - -# Permanent (user environment) -[Environment]::SetEnvironmentVariable("GITHUB_TOKEN", "ghp_your_token_here", "User") +.\scripts\sync-awesome-copilot.ps1 -GitTool git +.\scripts\sync-awesome-copilot.ps1 -GitTool gh ``` ### Custom Source Repository -By default, scripts sync from `github/awesome-copilot`. To use a different source: - -Edit `sync-awesome-copilot.ps1` line 57: +To sync from a fork or alternative repo, edit the two variables near the top of `scripts/sync-awesome-copilot.ps1`: ```powershell -$Repo = "your-username/your-repo" +$RepoSlug = 'your-username/your-repo' +$RepoUrl = 'https://github.com/your-username/your-repo.git' ``` ## 🗂️ File Naming Conventions Resources follow naming patterns for automatic categorization: -- `*.chatmode.md` - Chat mode definitions -- `*.instructions.md` - Custom instructions -- `*.prompt.md` - Prompt templates -- `*.collection.yml` - Resource collections - -Files without these suffixes in the combined folder are preserved (assumed to be user-created). +- `*.agent.md` — Custom agents +- `*.instructions.md` — Custom instructions +- `SKILL.md` (inside a named subdirectory) — Skills +- Hook and workflow directories contain a mix of `.md`, `.json`, and `.sh` files ## 🛠️ Troubleshooting @@ -249,7 +306,7 @@ Scripts automatically fall back to copying files. Check logs for details. 
Get-ScheduledTask -TaskName "AwesomeCopilotSync" | Get-ScheduledTaskInfo # View logs -Get-Content "$HOME\.awesome-copilot\logs\sync-*.log" -Tail 50 +Get-ChildItem .\scripts\logs\sync-*.log | Sort-Object LastWriteTime -Descending | Select-Object -First 1 | Get-Content -Tail 50 # Manually run task Start-ScheduledTask -TaskName "AwesomeCopilotSync" @@ -262,10 +319,10 @@ Start-ScheduledTask -TaskName "AwesomeCopilotSync" ## 📊 Logs -Sync logs are stored in `$HOME\.awesome-copilot\logs\`: +Sync logs are always written to `scripts/logs/` (next to the script itself, regardless of where you invoke it from): ```powershell # View latest sync log -Get-Content "$HOME\.awesome-copilot\logs\sync-*.log" -Tail 20 +Get-ChildItem .\scripts\logs\sync-*.log | Sort-Object LastWriteTime -Descending | Select-Object -First 1 | Get-Content ``` Log format: `sync-YYYYMMDD-HHMMSS.log` diff --git a/combine-and-publish-prompts.ps1 b/combine-and-publish-prompts.ps1 deleted file mode 100644 index 47ade10..0000000 --- a/combine-and-publish-prompts.ps1 +++ /dev/null @@ -1,201 +0,0 @@ -<# -Combine Copilot Resources Into Unified 'combined' Folder And Publish To Prompts - -CHANGE (2025-09-18): Default publish target is now the global prompts directory: - $env:APPDATA/Code/User/prompts -Previously the script auto-selected the latest profile under profiles/. That -behavior is still available via -UseLatestProfile. Supplying -AllProfiles keeps -publishing to every discovered profile prompts directory (unchanged). - -Steps Implemented: - 1. (Assumes sync already ran) Validate presence of source category folders under root (default $HOME/.awesome-copilot) - 2. Create / refresh a 'combined' folder containing all *.md files from: - chatmodes/, instructions/, prompts/ (collections optional via -IncludeCollections) - - Name collisions resolved by prefixing category (unless identical content) - - Preserves original filenames when unique - 3. 
Publish the combined folder: - a) By default to global prompts directory (profile-agnostic) - b) Or if -UseLatestProfile, to the latest profile's prompts - c) Or if -AllProfiles, to each profile's prompts - d) Attempt symbolic link (prompts -> combined) then junction then copy (or copy directly with -ForceCopy) - -Result: All items visible under the chosen prompts directory, enabling unified browsing. - -Usage Examples: - Dry run only: - pwsh -File .\scripts\combine-and-publish-prompts.ps1 -DryRun - Publish to global prompts (default): - pwsh -File .\scripts\combine-and-publish-prompts.ps1 - Publish to latest profile prompts (legacy behavior): - pwsh -File .\scripts\combine-and-publish-prompts.ps1 -UseLatestProfile - Publish to all profiles forcing copy: - pwsh -File .\scripts\combine-and-publish-prompts.ps1 -AllProfiles -ForceCopy - Rebuild combined only (no publish): - pwsh -File .\scripts\combine-and-publish-prompts.ps1 -NoPublish - -Flags: - -SourceRoot - -ProfilesBase - -ProfileRoot (explicit profile root; overrides -UseLatestProfile) - -AllProfiles - -UseLatestProfile (restore previous default targeting most recent profile) - -ForceCopy (skip link attempts) - -DryRun (show plan, do not modify) - -NoPublish (build combined set only) - -Prune (remove stale files from existing combined folder before rebuild) - -IncludeCollections (opt-in: also merge markdown files from collections/) - -License: Adapt freely. 
-#> -[CmdletBinding()] param( - [string]$SourceRoot = "$HOME/.awesome-copilot", - [string]$ProfilesBase = (Join-Path $env:APPDATA 'Code/User/profiles'), - [string]$ProfileRoot, - [switch]$UseLatestProfile, - [switch]$AllProfiles, - [switch]$ForceCopy, - [switch]$DryRun, - [switch]$NoPublish, - [switch]$Prune, - [switch]$IncludeCollections -) - -$ErrorActionPreference = 'Stop' - -function Log($m, [string]$level = 'INFO') { $ts = (Get-Date).ToString('s'); Write-Host "[$ts][$level] $m" -ForegroundColor $(if ($level -eq 'ERROR') { 'Red' } elseif ($level -eq 'WARN') { 'Yellow' } else { 'Cyan' }) } - -if (-not (Test-Path $SourceRoot)) { Log "Source root missing: $SourceRoot" 'ERROR'; exit 1 } -if (-not (Test-Path $ProfilesBase)) { Log "Profiles base missing: $ProfilesBase" 'WARN' } - -$categories = @('chatmodes', 'instructions', 'prompts') -if ($IncludeCollections) { $categories += 'collections' } -$missing = @() -foreach ($c in $categories) { if (-not (Test-Path (Join-Path $SourceRoot $c))) { $missing += $c } } -if ($missing) { Log "Missing category folders: $($missing -join ', ')" 'WARN' } - -# Determine publish targets. We now default to global prompts unless an explicit -# profile strategy is selected. -$globalPrompts = Join-Path (Join-Path $env:APPDATA 'Code/User') 'prompts' -$targetMode = 'Global' -$targets = @() - -if ($AllProfiles) { - $targetMode = 'AllProfiles' - $targets = Get-ChildItem $ProfilesBase -Directory | ForEach-Object { Join-Path $_.FullName 'prompts' } - if (-not $targets) { Log 'No profiles discovered for -AllProfiles.' 
'ERROR'; exit 1 } - Log "Will publish to $($targets.Count) profile prompt directories" 'INFO' -} -elseif ($ProfileRoot) { - $targetMode = 'ExplicitProfile' - $targets = @(Join-Path $ProfileRoot 'prompts') - Log "Explicit profile root provided: $ProfileRoot" 'INFO' -} -elseif ($UseLatestProfile) { - $targetMode = 'LatestProfile' - $latest = if (Test-Path $ProfilesBase) { Get-ChildItem $ProfilesBase -Directory | Sort-Object LastWriteTime -Descending | Select-Object -First 1 } else { $null } - if (-not $latest) { Log 'No profiles found for -UseLatestProfile.' 'ERROR'; exit 1 } - $targets = @(Join-Path $latest.FullName 'prompts') - Log "Using latest profile: $($latest.FullName)" 'INFO' -} -else { - $targetMode = 'Global' - $targets = @($globalPrompts) - Log "Defaulting to global prompts directory: $globalPrompts" 'INFO' -} - -Log "Publish mode: $targetMode" 'INFO' - -$combinedRoot = Join-Path $SourceRoot 'combined' - -# Ensure combined directory exists -if (-not (Test-Path $combinedRoot)) { - if ($DryRun) { Log "[DryRun] Would create combined folder: $combinedRoot" 'INFO' } else { New-Item -ItemType Directory -Path $combinedRoot | Out-Null } -} - -# Map of dest relative name -> full path already added (for duplicate detection) -$index = @{} -$added = 0 -$skippedSame = 0 -$renamed = 0 - -foreach ($cat in $categories) { - $srcDir = Join-Path $SourceRoot $cat - if (-not (Test-Path $srcDir)) { continue } - $files = Get-ChildItem $srcDir -File -Filter '*.md' - foreach ($f in $files) { - $destName = $f.Name - $destPath = Join-Path $combinedRoot $destName - if (Test-Path $destPath) { - # Compare content hash; if identical skip, else replace with latest version - $existingHash = (Get-FileHash -Algorithm SHA256 $destPath).Hash - $newHash = (Get-FileHash -Algorithm SHA256 $f.FullName).Hash - if ($existingHash -eq $newHash) { - $skippedSame++ - continue - } - # Different content - latest version wins, replace the file - Log "Name collision: replacing $destName with latest from 
$cat" 'INFO' - $renamed++ - } - if ($DryRun) { - Log "[DryRun] Add $cat -> $destName" 'INFO' - } - else { - Copy-Item $f.FullName $destPath -Force - } - $added++ - } -} - -Log "Combined summary: added=$added identicalSkipped=$skippedSame renamed=$renamed" 'INFO' - -if ($NoPublish) { Log "NoPublish set; skipping linking/copy phase." 'INFO'; exit 0 } - -function Publish-ToPrompts($promptsDir) { - if (-not (Test-Path $promptsDir)) { - if ($DryRun) { Log "[DryRun] Would create prompts dir $promptsDir" } - else { New-Item -ItemType Directory -Path $promptsDir | Out-Null } - } - $canLink = $true - if (Test-Path $promptsDir) { - $item = Get-Item $promptsDir -Force - $isLink = ($item.Attributes -band [IO.FileAttributes]::ReparsePoint) - if ($isLink) { - Log "prompts already linked: $promptsDir" 'INFO' - return - } - # If prompts directory exists as normal folder, we'll update it in place - # User-created files will be preserved, synced files will be updated - $nonHidden = Get-ChildItem $promptsDir -Force | Where-Object { -not $_.Attributes.ToString().Contains('Hidden') } - if ($nonHidden) { - Log "prompts exists as normal directory; will update synced files in place" 'INFO' - } - } - if ($ForceCopy -or -not $canLink) { - Log "Copying combined contents into prompts ($promptsDir)" 'INFO' - if (-not $DryRun) { Copy-Item (Join-Path $combinedRoot '*') $promptsDir -Recurse -Force } - return - } - if ($DryRun) { Log "[DryRun] Would replace prompts with link/junction to combined" 'INFO'; return } - try { - Remove-Item $promptsDir -Recurse -Force -ErrorAction Stop - New-Item -ItemType SymbolicLink -Path $promptsDir -Target $combinedRoot -Force | Out-Null - Log "Created symlink prompts -> combined" 'INFO' - } - catch { - Log "Symlink failed: $($_.Exception.Message) (attempt junction)" 'WARN' - try { - cmd /c mklink /J "$promptsDir" "$combinedRoot" | Out-Null - Log "Created junction prompts -> combined" 'INFO' - } - catch { - Log "Junction failed; copying fallback" 'WARN' - New-Item 
-ItemType Directory -Path $promptsDir -Force | Out-Null - Copy-Item (Join-Path $combinedRoot '*') $promptsDir -Recurse -Force - } - } -} - -foreach ($t in $targets) { Publish-ToPrompts -promptsDir $t } - -Log "Combine & publish complete." 'INFO' diff --git a/configure.ps1 b/configure.ps1 new file mode 100644 index 0000000..0c89855 --- /dev/null +++ b/configure.ps1 @@ -0,0 +1,168 @@ +<# +Configure Copilot Resources + +Main entry point for all Copilot resource management operations. +Chains the scripts in the correct order: + + 1. sync-awesome-copilot.ps1 -- fetch latest from github/awesome-copilot + 2. publish-global.ps1 -- publish agents + skills globally + 3. init-repo.ps1 -- (prompted) per-repo .github/ setup + 4. install/uninstall-scheduled-task.ps1 -- (explicit) automate sync + publish + +Usage: + # Full interactive run: sync + publish + prompt for init-repo + .\configure.ps1 + + # Sync + publish only (skip init-repo prompt) + .\configure.ps1 -SkipInit + + # Re-publish only (cache already up to date) + .\configure.ps1 -SkipSync -SkipInit + + # Install scheduled task (sync every 4h + publish globally) + .\configure.ps1 -SkipInit -InstallTask + + # Install task with custom interval + .\configure.ps1 -SkipInit -InstallTask -Every "2h" + + # Uninstall scheduled task + .\configure.ps1 -SkipSync -SkipPublish -SkipInit -UninstallTask + + # Preview without writing any files + .\configure.ps1 -DryRun +#> +[CmdletBinding()] param( + [switch]$SkipSync, + [switch]$SkipPublish, + [switch]$SkipInit, + [switch]$InstallTask, + [switch]$UninstallTask, + [string]$Every = '4h', # Interval for -InstallTask (e.g. 
4h, 30m) + [switch]$DryRun +) + +#region Initialisation +$ErrorActionPreference = 'Stop' +$ScriptDir = Join-Path $PSScriptRoot 'scripts' + +function Log($m, [string]$level = 'INFO') { + $ts = (Get-Date).ToString('s') + $color = switch ($level) { 'ERROR' { 'Red' } 'WARN' { 'Yellow' } 'SUCCESS' { 'Green' } default { 'Cyan' } } + Write-Host "[$ts][$level] $m" -ForegroundColor $color +} + +function Step($label) { + Write-Host "" + Write-Host " ── $label ──" -ForegroundColor Magenta + Write-Host "" +} + +# Show cache state +$manifest = "$HOME\.awesome-copilot\manifest.json" +if (Test-Path $manifest) { + try { + $m = Get-Content $manifest -Raw | ConvertFrom-Json + Log "Cache last synced: $($m.fetchedAt) Items: $($m.items.Count)" + } catch {} +} else { + Log "No local cache found — sync will download everything fresh." 'WARN' +} + +if ($InstallTask -and $UninstallTask) { + Log "-InstallTask and -UninstallTask cannot both be set." 'ERROR'; exit 1 +} +# Task management implies no interactive repo setup +if ($InstallTask -or $UninstallTask) { $SkipInit = $true } + +#endregion # Initialisation + +#region Step 1 — Sync +if (-not $SkipSync) { + Step "Sync from github/awesome-copilot" + $syncArgs = @{} + if ($DryRun) { $syncArgs['Plan'] = $true } + & (Join-Path $ScriptDir 'sync-awesome-copilot.ps1') @syncArgs + if ($LASTEXITCODE -and $LASTEXITCODE -ne 0) { Log "Sync failed (exit $LASTEXITCODE)" 'ERROR'; exit $LASTEXITCODE } +} + +#endregion # Step 1 + +#region Step 2 — Publish globally +if (-not $SkipPublish) { + Step "Publish agents + skills globally" + $publishArgs = @{} + if ($DryRun) { $publishArgs['DryRun'] = $true } + & (Join-Path $ScriptDir 'publish-global.ps1') @publishArgs +} + +#endregion # Step 2 + +#region Step 3 — Init repo +if (-not $SkipInit) { + # If a subscriptions manifest exists for the current repo, offer to check for updates first + $subscriptionsFile = Join-Path (Get-Location).Path '.github\.copilot-subscriptions.json' + if (Test-Path $subscriptionsFile) { 
+ Step "Check for updates to subscribed repo resources" + Write-Host " Subscriptions found. Check for upstream updates to .github/ resources?" -ForegroundColor Yellow + Write-Host " [Y] Yes [N] No (default): " -NoNewline -ForegroundColor Yellow + $updateAnswer = (Read-Host).Trim() + if ($updateAnswer -match '^[Yy]') { + $updateArgs = @{} + if ($DryRun) { $updateArgs['DryRun'] = $true } + & (Join-Path $ScriptDir 'update-repo.ps1') @updateArgs + } else { + Log "Update check skipped." + } + } + + Step "Init repo" + Write-Host " Add agents/instructions/hooks/workflows/skills to .github/ in the current repo?" -ForegroundColor Yellow + Write-Host " [Y] Yes [N] No (default): " -NoNewline -ForegroundColor Yellow + $answer = (Read-Host).Trim() + if ($answer -match '^[Yy]') { + $initArgs = @{} + if ($DryRun) { $initArgs['DryRun'] = $true } + & (Join-Path $ScriptDir 'init-repo.ps1') @initArgs + } else { + Log "init-repo skipped." + } +} + +#endregion # Step 3 + +#region Step 4 — Scheduled task +if ($InstallTask) { + Step "Install scheduled task" + if ($DryRun) { + Log "[DryRun] Would install scheduled task (every $Every): sync + publish-global" + } else { + $taskArgs = @{ Every = $Every } + $taskName = 'AwesomeCopilotSync' + $proceed = $true + if (Get-ScheduledTask -TaskName $taskName -ErrorAction SilentlyContinue) { + Log "Scheduled task '$taskName' already exists." 'WARN' + Write-Host " Overwrite existing task? [Y] Yes [N] No (default): " -NoNewline -ForegroundColor Yellow + if ((Read-Host).Trim() -match '^[Yy]') { + $taskArgs['Force'] = $true + } else { + Log "Task install skipped." + $proceed = $false + } + } + if ($proceed) { & (Join-Path $ScriptDir 'install-scheduled-task.ps1') @taskArgs } + } +} + +if ($UninstallTask) { + Step "Uninstall scheduled task" + if ($DryRun) { + Log "[DryRun] Would uninstall scheduled task" + } else { + & (Join-Path $ScriptDir 'uninstall-scheduled-task.ps1') + } +} + +#endregion # Step 4 + +Write-Host "" +Log "Done." 
'SUCCESS' diff --git a/normalize-copilot-folders.ps1 b/normalize-copilot-folders.ps1 deleted file mode 100644 index 195154b..0000000 --- a/normalize-copilot-folders.ps1 +++ /dev/null @@ -1,127 +0,0 @@ -<# -Normalize Copilot Resource Folder Placement - -Purpose: - Ensures all markdown resources are stored in the directory that matches their - semantic suffix: *.chatmode.md -> chatmodes/, *.instructions.md -> instructions/, - *.prompt.md -> prompts/, *.collection.md|*.collections.md -> collections/. - -Why: - A mismatch (e.g. a .chatmode.md inside prompts/) can lead to inconsistent - discovery or confusion about canonical location. This tool re-homes files. - -Features: - - Works on a single detected (most recent) profile or all profiles (-AllProfiles) - - Dry-run mode (default) shows planned moves - - Skips already-correct placements - - Avoids overwriting: if destination filename exists, appends numeric suffix - - Reports summary counts - -Usage Examples: - Dry-run across all profiles: - pwsh -File .\scripts\normalize-copilot-folders.ps1 -AllProfiles - - Execute (no dry-run) for a specific profile root: - pwsh -File .\scripts\normalize-copilot-folders.ps1 -ProfileRoot "C:\Users\me\AppData\Roaming\Code\User\profiles\abc123" -NoDryRun - - Execute across all profiles: - pwsh -File .\scripts\normalize-copilot-folders.ps1 -AllProfiles -NoDryRun - -Limitations: - - Only processes markdown files (*.md) - - Does not attempt content validation; classification is by filename suffix - -License: MIT-like; adapt as needed. 
-#> -[CmdletBinding()] param( - [string]$ProfilesBase = (Join-Path $env:APPDATA 'Code/User/profiles'), - [string]$ProfileRoot, - [switch]$AllProfiles, - [switch]$NoDryRun -) - -$ErrorActionPreference = 'Stop' - -function Log($m, [string]$level = 'INFO') { - $ts = (Get-Date).ToString('s'); Write-Host "[$ts][$level] $m" -ForegroundColor $(if ($level -eq 'ERROR') { 'Red' } elseif ($level -eq 'WARN') { 'Yellow' } else { 'Cyan' }) -} - -if (-not (Test-Path $ProfilesBase)) { Log "Profiles base not found: $ProfilesBase" 'ERROR'; exit 1 } - -$targets = @() -if ($AllProfiles) { - $targets = Get-ChildItem $ProfilesBase -Directory | ForEach-Object { $_.FullName } - if (-not $targets) { Log 'No profiles discovered.' 'ERROR'; exit 1 } - Log "Discovered $($targets.Count) profiles" 'INFO' -} -else { - if (-not $ProfileRoot) { - $latest = Get-ChildItem $ProfilesBase -Directory | Sort-Object LastWriteTime -Descending | Select-Object -First 1 - if (-not $latest) { Log 'No profiles found.' 'ERROR'; exit 1 } - $ProfileRoot = $latest.FullName - Log "Detected profile: $ProfileRoot" 'INFO' - } - $targets = @($ProfileRoot) -} - -$movePlanned = 0 -$moveDone = 0 -$skipped = 0 -$correct = 0 - -function Classify([string]$fileName) { - switch -regex ($fileName) { - '\\.chatmode\.md$' { return 'chatmodes' } - '\\.instructions\.md$' { return 'instructions' } - '\\.prompt\.md$' { return 'prompts' } - '\\.(collection|collections)\.md$' { return 'collections' } - default { return $null } - } -} - -foreach ($p in $targets) { - Log "Scanning profile: $p" 'INFO' - $expected = 'chatmodes', 'instructions', 'prompts', 'collections' - foreach ($dir in $expected) { $full = Join-Path $p $dir; if (-not (Test-Path $full)) { New-Item -ItemType Directory -Path $full | Out-Null } } - - # Consider: any .md file in profile tree at depth 0..2 - $candidates = Get-ChildItem $p -Recurse -File -Include *.md | Where-Object { $_.DirectoryName -notmatch '\\\.git' } - foreach ($f in $candidates) { - $targetFolder = 
Classify -fileName $f.Name - if (-not $targetFolder) { continue } - $currentFolder = Split-Path $f.FullName -LeafParent - $currentBase = Split-Path $currentFolder -Leaf - if ($currentBase -eq $targetFolder) { $correct++; continue } - - $destDir = Join-Path $p $targetFolder - $destPath = Join-Path $destDir $f.Name - if (Test-Path $destPath) { - # File exists at destination - compare content and replace if different - $existingHash = (Get-FileHash -Algorithm SHA256 $destPath).Hash - $newHash = (Get-FileHash -Algorithm SHA256 $f.FullName).Hash - if ($existingHash -eq $newHash) { - Log "Identical file already exists at destination, skipping: $($f.Name)" 'INFO' - $correct++ - continue - } - Log "Replacing existing file with latest version: $($f.Name)" 'INFO' - if ($NoDryRun) { - Remove-Item $destPath -Force - } - } - Log "Relocate: $($f.FullName) -> $destPath" 'INFO' - $movePlanned++ - if ($NoDryRun) { - try { - Move-Item -LiteralPath $f.FullName -Destination $destPath -Force - $moveDone++ - } - catch { - Log "Failed move: $($_.Exception.Message)" 'ERROR' - } - } - } -} - -if (-not $NoDryRun) { Log "Dry run complete (no files moved). Use -NoDryRun to apply." 
'INFO' } -Log "Summary: planned=$movePlanned moved=$moveDone correct=$correct" 'INFO' diff --git a/publish-to-vscode-profile.ps1 b/publish-to-vscode-profile.ps1 deleted file mode 100644 index c84af4e..0000000 --- a/publish-to-vscode-profile.ps1 +++ /dev/null @@ -1,145 +0,0 @@ -[CmdletBinding()] param( - [string]$SourceRoot = "$HOME/.awesome-copilot", - [string]$ProfileRoot, - [switch]$AllProfiles, - [string]$WorkspaceRoot, - [switch]$ForceCopyFallback, - [switch]$Prune, - [switch]$VerboseLinks -) - -$ErrorActionPreference = 'Stop' - -function Log($msg, [string]$level = 'INFO') { - $ts = (Get-Date).ToString('s'); Write-Host "[$ts][$level] $msg" -ForegroundColor $(if ($level -eq 'ERROR') { 'Red' } elseif ($level -eq 'WARN') { 'Yellow' } else { 'Cyan' }) -} - -if (-not (Test-Path $SourceRoot)) { Log "Source root not found: $SourceRoot" 'ERROR'; exit 1 } - -# Collect target profiles -$profilesBase = Join-Path $env:APPDATA 'Code/User/profiles' -if (-not (Test-Path $profilesBase)) { Log "Profiles base not found: $profilesBase (open VS Code & ensure a profile exists)" 'ERROR'; exit 1 } -$TargetProfiles = @() -if ($AllProfiles) { - $TargetProfiles = Get-ChildItem $profilesBase -Directory | ForEach-Object { $_.FullName } - if (-not $TargetProfiles) { Log "No profile directories found under $profilesBase" 'ERROR'; exit 1 } - Log "Publishing to ALL profiles ($($TargetProfiles.Count))" 'INFO' -} -else { - if (-not $ProfileRoot) { - $profileDir = Get-ChildItem $profilesBase -Directory | Sort-Object LastWriteTime -Descending | Select-Object -First 1 - if (-not $profileDir) { Log "No profile directories found under $profilesBase" 'ERROR'; exit 1 } - $ProfileRoot = $profileDir.FullName - Log "Detected profile: $ProfileRoot" - } - $TargetProfiles = @($ProfileRoot) -} - -function Get-MappingsForProfile($pRoot) { - @( - @{ Name = 'chatmodes'; Src = (Join-Path $SourceRoot 'chatmodes'); Dst = (Join-Path $pRoot 'chatmodes') } - @{ Name = 'prompts'; Src = (Join-Path $SourceRoot 
'prompts'); Dst = (Join-Path $pRoot 'prompts') } - @{ Name = 'instructions'; Src = (Join-Path $SourceRoot 'instructions'); Dst = (Join-Path $pRoot 'instructions') } - ) -} - -# Helper to link or copy -function Ensure-LinkOrCopy($src, $dst, [string]$label) { - if (-not (Test-Path $src)) { Log "Skipping $label (missing source: $src)" 'WARN'; return } - if (Test-Path $dst) { - $item = Get-Item $dst -Force - $isLink = ($item.Attributes -band [IO.FileAttributes]::ReparsePoint) - if ($isLink -and -not $ForceCopyFallback) { - Log "$label already linked: $dst"; return - } - elseif (-not $isLink -and -not $ForceCopyFallback) { - Log "$label exists as normal directory; will update only synced files" 'INFO' - # Only remove files that exist in source (preserve user-created files) - if (Test-Path $src) { - $srcFiles = Get-ChildItem $src -Filter '*.md' -File -Recurse | ForEach-Object { $_.Name } - Get-ChildItem $dst -Filter '*.md' -File -Recurse -ErrorAction SilentlyContinue | ForEach-Object { - if ($srcFiles -contains $_.Name) { - Log "Removing synced file to update: $($_.Name)" 'INFO' - Remove-Item $_.FullName -Force - } - } - } - # Now copy the source files (will update/add synced files, leave others alone) - if (-not (Test-Path $dst)) { New-Item -ItemType Directory -Path $dst | Out-Null } - Copy-Item (Join-Path $src '*') $dst -Recurse -Force - return - } - else { - Log "Removing existing $label at $dst to replace" 'WARN' - Remove-Item $dst -Recurse -Force - } - } - if ($ForceCopyFallback) { - Log "Copying $label -> $dst" - if (-not (Test-Path $dst)) { New-Item -ItemType Directory -Path $dst | Out-Null } - Copy-Item (Join-Path $src '*') $dst -Recurse -Force - } - else { - try { - Log "Creating symlink $dst -> $src" - New-Item -ItemType SymbolicLink -Path $dst -Target $src -Force | Out-Null - } - catch { - Log "Symlink failed ($label): $_ (attempt junction)" 'WARN' - try { - cmd /c mklink /J "$dst" "$src" | Out-Null - Log "Created junction for ${label}: ${dst}" - } - catch { - 
Log "Junction failed for ${label}: $_ (fallback copy)" 'WARN' - if (-not (Test-Path $dst)) { New-Item -ItemType Directory -Path $dst | Out-Null } - Copy-Item (Join-Path $src '*') $dst -Recurse -Force - } - } - } - if ($VerboseLinks) { - $preview = Get-ChildItem $dst -File | Select-Object -First 3 | ForEach-Object { $_.Name } | Out-String - $cleanPreview = ($preview -split "`r?`n" | Where-Object { $_ }) -join ', ' - Log "${label} preview: $cleanPreview" - } -} - -foreach ($p in $TargetProfiles) { - Log "Processing profile: $p" 'INFO' - $Mappings = Get-MappingsForProfile -pRoot $p - foreach ($m in $Mappings) { Ensure-LinkOrCopy -src $m.Src -dst $m.Dst -label $m.Name } - - if ($Prune) { - foreach ($m in $Mappings) { - if (-not (Test-Path $m.Dst)) { continue } - $srcRel = Get-ChildItem $m.Src -Recurse -File | ForEach-Object { $_.FullName.Substring($m.Src.Length).TrimStart('\\') } - $srcSet = [System.Collections.Generic.HashSet[string]]::new([string[]]$srcRel) - Get-ChildItem $m.Dst -Recurse -File | ForEach-Object { - $rel = $_.FullName.Substring($m.Dst.Length).TrimStart('\\') - if (-not $srcSet.Contains($rel)) { - Log "Prune stale: $($m.Name)/$rel" 'WARN' - Remove-Item $_.FullName -Force - } - } - } - } -} - -# Mirror into workspace if requested -if ($WorkspaceRoot) { - $gh = Join-Path $WorkspaceRoot '.github' - if (-not (Test-Path $gh)) { New-Item -ItemType Directory -Path $gh | Out-Null } - foreach ($m in $Mappings) { - if (-not (Test-Path $m.Src)) { continue } - $dst = Join-Path $gh $m.Name - if (-not (Test-Path $dst)) { - Log "Seeding workspace $($m.Name) -> $dst" - Copy-Item $m.Src $dst -Recurse - } - else { - Log "Workspace already has $($m.Name); skipping" 'INFO' - } - } -} - -Log "Publish complete. Reload VS Code if new items aren't visible." 
'INFO' diff --git a/scripts/init-repo.ps1 b/scripts/init-repo.ps1 new file mode 100644 index 0000000..7e1fd95 --- /dev/null +++ b/scripts/init-repo.ps1 @@ -0,0 +1,630 @@ +<# +Initialize a Repository with Copilot Resources + +Interactively selects and installs non-global Copilot resources from the local +awesome-copilot cache into a target repository's .github/ folder. + +Resources installed here are project-specific (opt-in) rather than global: + + Agents --> .github/agents/*.agent.md + Instructions --> .github/instructions/*.instructions.md + Hooks --> .github/hooks// (full directory) + Workflows --> .github/workflows/*.md + Skills --> .github/skills// (full directory) + +Usage: + # Interactive - run from within the target repo + .\init-repo.ps1 + + # Specify a repo path explicitly + .\init-repo.ps1 -RepoPath "C:\Projects\my-app" + + # Skip specific categories + .\init-repo.ps1 -SkipAgents -SkipHooks + .\init-repo.ps1 -SkipHooks -SkipWorkflows -SkipSkills + + # Non-interactive: specify items by name (comma-separated) + .\init-repo.ps1 -Instructions "angular,dotnet-framework" -Hooks "session-logger" + .\init-repo.ps1 -Agents "devops-expert,se-security-reviewer" + .\init-repo.ps1 -Skills "my-custom-skill" + # Dry run - show what would be installed + .\init-repo.ps1 -DryRun + +Notes: + - Existing files are only overwritten if the source is newer/different. + - .github/ is created if it doesn't exist. + - Skills are installed to .github/skills/ for version control with the project. + Global skills (available across all repos) are managed by publish-global.ps1. + - A subscription manifest (.github/.copilot-subscriptions.json) is written on + each run. Use update-repo.ps1 to check for and apply upstream changes. + - The selection UI uses Out-GridView where available (Windows GUI, filterable, + multi-select). Falls back to a numbered console menu automatically. 
+ - Auto-detects language/framework from repo file signals and pre-marks + recommended instructions/hooks/workflows with ★ in the picker. + - For new/empty repos, prompts for intent one question at a time. +#> +[CmdletBinding()] param( + [string]$RepoPath = (Get-Location).Path, + [string]$SourceRoot = "$HOME/.awesome-copilot", + [string]$Instructions = '', # Comma-separated names to pre-select (non-interactive) + [string]$Agents = '', + [string]$Hooks = '', + [string]$Workflows = '', + [string]$Skills = '', # Comma-separated names to pre-select (non-interactive) + [switch]$SkipInstructions, + [switch]$SkipAgents, + [switch]$SkipHooks, + [switch]$SkipWorkflows, + [switch]$SkipSkills, + [switch]$DryRun +) + +#region Initialisation +$ErrorActionPreference = 'Stop' + +function Log($m, [string]$level = 'INFO') { + $ts = (Get-Date).ToString('s') + $color = switch ($level) { 'ERROR' { 'Red' } 'WARN' { 'Yellow' } 'SUCCESS' { 'Green' } default { 'Cyan' } } + Write-Host "[$ts][$level] $m" -ForegroundColor $color +} + +#endregion # Initialisation + +#region Stack detection +function Detect-RepoStack { + param([string]$RepoPath) + + $recs = [System.Collections.Generic.List[string]]::new() + $files = Get-ChildItem $RepoPath -Recurse -File -ErrorAction SilentlyContinue | + Where-Object { $_.FullName -notmatch '\\(\.git|node_modules|\.venv|bin|obj)\\' } + + $exts = $files | ForEach-Object { $_.Extension.ToLower() } | Sort-Object -Unique + $names = $files | ForEach-Object { $_.Name } | Sort-Object -Unique + $hasDotnet = $exts -contains '.cs' -or ($names | Where-Object { $_ -match '\.(csproj|sln)$' }) + $hasPy = $exts -contains '.py' -or ($names -contains 'requirements.txt') -or ($names -contains 'pyproject.toml') + $hasTs = $exts -contains '.ts' -or ($names -contains 'tsconfig.json') + $hasGo = $exts -contains '.go' -or ($names -contains 'go.mod') + $hasRs = $exts -contains '.rs' -or ($names -contains 'Cargo.toml') + $hasJava = $exts -contains '.java' -or ($names -contains 
'pom.xml') -or ($names | Where-Object { $_ -eq 'build.gradle' }) + $hasKt = $exts -contains '.kt' + $hasTf = $exts -contains '.tf' + $hasBicep = $exts -contains '.bicep' + $hasPs1 = $exts -contains '.ps1' + $hasJs = $exts -contains '.js' -or $exts -contains '.jsx' -or ($names -contains 'package.json') + + if ($hasDotnet) { $recs.Add('csharp'); $recs.Add('dotnet-architecture-good-practices') } + if ($hasPy) { $recs.Add('python') } + if ($hasTs) { $recs.Add('typescript-5-es2022') } + if ($hasGo) { $recs.Add('go') } + if ($hasRs) { $recs.Add('rust') } + if ($hasJava -or $hasKt) { $recs.Add('java') } + if ($hasTf) { $recs.Add('terraform') } + if ($hasBicep) { $recs.Add('bicep-code-best-practices') } + if ($hasPs1) { $recs.Add('powershell') } + + # Docker + if (($names -contains 'Dockerfile') -or ($names | Where-Object { $_ -match '^docker-compose\.yml$' })) { + $recs.Add('containerization-docker-best-practices') + } + + # GitHub Actions workflows + $ghWorkflows = $files | Where-Object { $_.FullName -match '\\\.github\\workflows\\' -and $_.Extension -eq '.yml' } + if ($ghWorkflows) { $recs.Add('github-actions-ci-cd-best-practices') } + + # Playwright + $hasPlaywright = $files | Where-Object { $_.Name -match '^playwright\.config\.' 
} + if ($hasPlaywright) { + if ($hasDotnet) { $recs.Add('playwright-dotnet') } + elseif ($hasPy) { $recs.Add('playwright-python') } + else { $recs.Add('playwright-typescript') } + } + + # package.json framework detection + $pkgJson = $files | Where-Object { $_.Name -eq 'package.json' } | Select-Object -First 1 + if ($pkgJson) { + try { + $pkg = Get-Content $pkgJson.FullName -Raw | ConvertFrom-Json -ErrorAction Stop + $allDeps = @() + if ($pkg.dependencies) { $allDeps += $pkg.dependencies.PSObject.Properties.Name } + if ($pkg.devDependencies) { $allDeps += $pkg.devDependencies.PSObject.Properties.Name } + if ($allDeps -contains 'react') { $recs.Add('reactjs') } + if ($allDeps -contains 'next') { $recs.Add('nextjs') } + if ($allDeps | Where-Object { $_ -match '^@angular/' }) { $recs.Add('angular') } + if ($allDeps -contains 'vue') { $recs.Add('vuejs3') } + if ($allDeps -contains 'svelte') { $recs.Add('svelte') } + if ($allDeps | Where-Object { $_ -match '^@nestjs/' }) { $recs.Add('nestjs') } + } catch {} + } + + # Agent recommendations (name without .agent.md extension) + if ($hasPs1) { $recs.Add('devops-expert'); $recs.Add('github-actions-expert') } + if ($hasDotnet) { $recs.Add('CSharpExpert'); $recs.Add('expert-dotnet-software-engineer') } + if ($hasPy) { $recs.Add('python-mcp-expert') } + if ($hasTs -or $hasJs) { $recs.Add('typescript-mcp-expert') } + if ($hasGo) { $recs.Add('go-mcp-expert') } + if ($hasRs) { $recs.Add('rust-mcp-expert') } + if ($hasJava -or $hasKt) { $recs.Add('java-mcp-expert') } + if (($names -contains 'Dockerfile') -or ($names | Where-Object { $_ -match '^docker-compose\.yml$' })) { $recs.Add('platform-sre-kubernetes') } + if ($ghWorkflows) { $recs.Add('github-actions-expert') } + if ($hasPlaywright) { $recs.Add('playwright-tester') } + # Always recommend for all repos + $recs.Add('se-security-reviewer') + $recs.Add('se-technical-writer') + + $recs.Add('security-and-owasp') + $recs.Add('code-review-generic') + + return @($recs | Sort-Object 
-Unique) +} + +#endregion # Stack detection + +#region Intent prompt +function Prompt-RepoIntent { + $recs = [System.Collections.Generic.List[string]]::new() + + Write-Host "" + Write-Host " Q1: What is the primary language or stack?" -ForegroundColor Yellow + Write-Host " 1. C# / .NET" + Write-Host " 2. Python" + Write-Host " 3. TypeScript / JavaScript" + Write-Host " 4. Go" + Write-Host " 5. Java / Kotlin" + Write-Host " 6. Rust" + Write-Host " 7. PowerShell" + Write-Host " 8. Terraform / Bicep (Infrastructure)" + Write-Host " 9. Other" + Write-Host " Enter number: " -NoNewline -ForegroundColor Yellow + $q1 = (Read-Host).Trim() + switch ($q1) { + '1' { $recs.Add('csharp'); $recs.Add('dotnet-architecture-good-practices'); $recs.Add('CSharpExpert'); $recs.Add('expert-dotnet-software-engineer') } + '2' { $recs.Add('python'); $recs.Add('python-mcp-expert') } + '3' { $recs.Add('typescript-5-es2022'); $recs.Add('typescript-mcp-expert') } + '4' { $recs.Add('go'); $recs.Add('go-mcp-expert') } + '5' { $recs.Add('java'); $recs.Add('java-mcp-expert') } + '6' { $recs.Add('rust'); $recs.Add('rust-mcp-expert') } + '7' { $recs.Add('powershell'); $recs.Add('devops-expert'); $recs.Add('github-actions-expert') } + '8' { $recs.Add('terraform'); $recs.Add('bicep-code-best-practices') } + } + + Write-Host "" + Write-Host " Q2: What type of project is this?" -ForegroundColor Yellow + Write-Host " 1. Web API / REST service" + Write-Host " 2. Web application (frontend)" + Write-Host " 3. CLI tool" + Write-Host " 4. Library / SDK" + Write-Host " 5. Data pipeline / ML" + Write-Host " 6. Infrastructure / DevOps" + Write-Host " 7. Documentation / Content" + Write-Host " Enter number: " -NoNewline -ForegroundColor Yellow + $null = Read-Host # no mapping yet, reserved for future extensibility + + Write-Host "" + Write-Host " Q3: Any specific concerns? (comma-separated, e.g. 1,3)" -ForegroundColor Yellow + Write-Host " 1. Security / OWASP" + Write-Host " 2. 
Accessibility (a11y)" + Write-Host " 3. Testing / Playwright" + Write-Host " 4. Performance" + Write-Host " 5. Docker / Containers" + Write-Host " 6. CI/CD / GitHub Actions" + Write-Host " 7. None" + Write-Host " Enter numbers: " -NoNewline -ForegroundColor Yellow + $q3 = (Read-Host).Trim() + if ($q3 -and $q3 -ne '7') { + foreach ($part in $q3.Split(',')) { + switch ($part.Trim()) { + '1' { $recs.Add('security-and-owasp') } + '2' { $recs.Add('a11y') } + '3' { $recs.Add('playwright-typescript'); $recs.Add('playwright-tester') } + '4' { $recs.Add('performance-optimization') } + '5' { $recs.Add('containerization-docker-best-practices'); $recs.Add('platform-sre-kubernetes') } + '6' { $recs.Add('github-actions-ci-cd-best-practices'); $recs.Add('github-actions-expert') } + } + } + } + + $recs.Add('se-security-reviewer') + $recs.Add('se-technical-writer') + $recs.Add('code-review-generic') + return @($recs | Sort-Object -Unique) +} + +#endregion # Intent prompt + +#region Path validation and stack detection +if (-not (Test-Path $RepoPath)) { + Log "Repo path not found: $RepoPath" 'ERROR'; exit 1 +} +$RepoPath = Resolve-Path $RepoPath | Select-Object -ExpandProperty Path + +if (-not (Test-Path $SourceRoot)) { + Log "Cache not found: $SourceRoot -- run sync-awesome-copilot.ps1 first" 'ERROR'; exit 1 +} + +$GithubDir = Join-Path $RepoPath '.github' +Log "Target repo : $RepoPath" +Log "Copilot cache: $SourceRoot" + +# Auto-detect stack or prompt for intent +$script:Recommendations = @() +if (-not ($SkipInstructions -and $SkipHooks -and $SkipWorkflows -and $SkipAgents -and $SkipSkills)) { + $repoFileCount = (Get-ChildItem $RepoPath -Recurse -File -ErrorAction SilentlyContinue | + Where-Object { $_.FullName -notmatch '\\(\.git|node_modules|\.venv|bin|obj)\\' } | + Measure-Object).Count + + if ($repoFileCount -gt 3) { + Log "Scanning repo for language/framework signals..." 
+ $script:Recommendations = Detect-RepoStack -RepoPath $RepoPath + if ($script:Recommendations.Count -gt 0) { + Log "Detected: $($script:Recommendations -join ', ')" + } else { + Log "No signals detected." 'WARN' + $script:Recommendations = Prompt-RepoIntent + } + } else { + Log "New or empty repo detected — prompting for intent." + $script:Recommendations = Prompt-RepoIntent + } +} + +#endregion # Path validation and stack detection + +#region Helpers +function Select-Items { + param( + [string]$Category, + [object[]]$Items, # objects with Name, Description, AlreadyInstalled + [string]$PreSelected = '', + [string[]]$Recommended = @() + ) + + if ($Items.Count -eq 0) { + Log "No $Category found in cache." 'WARN' + return @() + } + + # Non-interactive mode: pre-selection provided + if ($PreSelected) { + $names = $PreSelected.Split(',') | ForEach-Object { $_.Trim() } | Where-Object { $_ } + $selected = $Items | Where-Object { $names -contains $_.Name } + if (-not $selected) { Log "No matching $Category items for: $PreSelected" 'WARN' } + return @($selected) + } + + # Attach IsRecommended and sort: recommended first, then alphabetical within groups + $Items = $Items | ForEach-Object { + $_ | Add-Member -NotePropertyName 'IsRecommended' -NotePropertyValue ($Recommended -contains $_.Name) -PassThru -Force + } | Sort-Object @{ E={ if ($_.IsRecommended) { 0 } else { 1 } } }, Name + + Write-Host "" + Write-Host " === $Category ===" -ForegroundColor Yellow + Write-Host " Already installed items are marked with [*]" -ForegroundColor DarkGray + + # Try Out-GridView (Windows GUI - filterable, multi-select) + $ogvAvailable = $false + try { Get-Command Out-GridView -ErrorAction Stop | Out-Null; $ogvAvailable = $true } catch {} + + if ($ogvAvailable) { + $none = [pscustomobject]@{ Rec=''; Installed=''; Name='-- none / skip --'; Description='Select this (or nothing) to install nothing' } + $display = @($none) + @($Items | Select-Object ` + @{ N='Rec'; E={ if ($_.IsRecommended) { 
'★' } else { '' } } }, + @{ N='Installed'; E={ if ($_.AlreadyInstalled) { '[*]' } else { '' } } }, + @{ N='Name'; E={ $_.Name } }, + @{ N='Description'; E={ $_.Description } }) + + $picked = $display | Out-GridView -Title "Select $Category to install ★ = Recommended [*] = Already installed" -PassThru + if (-not $picked) { return @() } + $pickedNames = @($picked | Where-Object { $_.Name -ne '-- none / skip --' } | ForEach-Object { $_.Name }) + return @($Items | Where-Object { $pickedNames -contains $_.Name }) + } + + # Fallback: numbered console menu + Write-Host "" + for ($i = 0; $i -lt $Items.Count; $i++) { + $mark = if ($Items[$i].AlreadyInstalled) { '[*]' } elseif ($Items[$i].IsRecommended) { '[★]' } else { ' ' } + Write-Host (" {0,3}. {1} {2}" -f ($i+1), $mark, $Items[$i].Name) -ForegroundColor $(if ($Items[$i].AlreadyInstalled) { 'DarkCyan' } elseif ($Items[$i].IsRecommended) { 'Yellow' } else { 'White' }) + if ($Items[$i].Description) { + Write-Host (" {0}" -f $Items[$i].Description) -ForegroundColor DarkGray + } + } + Write-Host "" + Write-Host " Enter numbers to install (e.g. 
 1,3,5 or 1-3 or 'all' or blank to skip): " -NoNewline -ForegroundColor Yellow + $selection = Read-Host + + if (-not $selection -or $selection.Trim() -eq '') { return @() } + if ($selection.Trim() -eq 'all') { return $Items } + + $indices = @() + foreach ($part in $selection.Split(',')) { + $part = $part.Trim() + if ($part -match '^(\d+)-(\d+)$') { + $indices += ([int]$Matches[1])..[int]$Matches[2] + } elseif ($part -match '^\d+$') { + $indices += [int]$part + } + } + return @($Items | Where-Object { $indices -contains ([Array]::IndexOf($Items, $_) + 1) }) +} + +function Install-File { + param([string]$Src, [string]$DestDir) + if (-not $DryRun -and -not (Test-Path $DestDir)) { + New-Item -ItemType Directory -Path $DestDir -Force | Out-Null + } + $dest = Join-Path $DestDir (Split-Path $Src -Leaf) + $srcHash = (Get-FileHash $Src -Algorithm SHA256).Hash + $dstHash = if (Test-Path $dest) { (Get-FileHash $dest -Algorithm SHA256).Hash } else { $null } + if ($srcHash -eq $dstHash) { return 'unchanged' } + if ($DryRun) { return 'would-copy' } + Copy-Item $Src $dest -Force + if ($dstHash) { return 'updated' } else { return 'added' } +} + +function Install-Directory { + param([string]$SrcDir, [string]$DestParent) + $name = Split-Path $SrcDir -Leaf + $destDir = Join-Path $DestParent $name + if (-not $DryRun -and -not (Test-Path $destDir)) { + New-Item -ItemType Directory -Path $destDir -Force | Out-Null + } + $added = 0; $updated = 0; $unchanged = 0 + Get-ChildItem $SrcDir -File -Recurse | ForEach-Object { + $rel = $_.FullName.Substring($SrcDir.Length).TrimStart('\','/') + $dest = Join-Path $destDir $rel + $destDir2 = Split-Path $dest -Parent + if (-not $DryRun -and -not (Test-Path $destDir2)) { + New-Item -ItemType Directory -Path $destDir2 -Force | Out-Null + } + $srcHash = (Get-FileHash $_.FullName -Algorithm SHA256).Hash + $dstHash = if (Test-Path $dest) { (Get-FileHash $dest -Algorithm SHA256).Hash } else { $null } + if ($srcHash -ne $dstHash) { + if (-not $DryRun) { Copy-Item $_.FullName 
$dest -Force } + if ($dstHash) { $updated++ } else { $added++ } + } else { $unchanged++ } + } + return [pscustomobject]@{ Added = $added; Updated = $updated; Unchanged = $unchanged } +} + +function Get-Description([string]$FilePath) { + try { + $lines = Get-Content $FilePath -TotalCount 20 -ErrorAction SilentlyContinue + # YAML frontmatter description field + $inFrontmatter = $false + foreach ($line in $lines) { + if ($line -eq '---') { $inFrontmatter = -not $inFrontmatter; continue } + if ($inFrontmatter -and $line -match '^description:\s*(.+)') { return $Matches[1].Trim('"''') } + } + # First non-heading markdown line + foreach ($line in $lines) { + if ($line -match '^#{1,3}\s+(.+)') { return $Matches[1] } + } + } catch {} + return '' +} + +function Get-DirHash([string]$DirPath) { + $hashes = Get-ChildItem $DirPath -Recurse -File | + Sort-Object FullName | + ForEach-Object { (Get-FileHash $_.FullName -Algorithm SHA256).Hash } + $combined = $hashes -join '|' + $bytes = [System.Text.Encoding]::UTF8.GetBytes($combined) + $stream = [System.IO.MemoryStream]::new($bytes) + return (Get-FileHash -InputStream $stream -Algorithm SHA256).Hash +} + +function Update-Subscriptions { + param([string]$ManifestPath, [object[]]$NewEntries) + if (-not $NewEntries -or $NewEntries.Count -eq 0) { return } + + $subs = $null + if (Test-Path $ManifestPath) { + try { $subs = Get-Content $ManifestPath -Raw | ConvertFrom-Json } catch {} + } + if (-not $subs) { + $subs = [pscustomobject]@{ version = 1; updatedAt = ''; subscriptions = @() } + } + + $existing = [System.Collections.Generic.List[object]]::new() + if ($subs.subscriptions) { $existing.AddRange([object[]]$subs.subscriptions) } + + foreach ($entry in $NewEntries) { + $idx = -1 + for ($i = 0; $i -lt $existing.Count; $i++) { + if ($existing[$i].name -eq $entry.name -and $existing[$i].category -eq $entry.category) { + $idx = $i; break + } + } + if ($idx -ge 0) { $existing[$idx] = $entry } else { $existing.Add($entry) } + } + + $subs | 
Add-Member -NotePropertyName 'updatedAt' -NotePropertyValue (Get-Date).ToString('o') -Force + $subs | Add-Member -NotePropertyName 'subscriptions' -NotePropertyValue $existing.ToArray() -Force + + if (-not $DryRun) { + $dir = Split-Path $ManifestPath -Parent + if (-not (Test-Path $dir)) { New-Item -ItemType Directory -Path $dir -Force | Out-Null } + $subs | ConvertTo-Json -Depth 5 | Set-Content $ManifestPath -Encoding UTF8 + Log "Updated subscriptions: $ManifestPath" + } else { + Log "[DryRun] Would update subscriptions: $ManifestPath ($($NewEntries.Count) new/updated entries)" + } +} + +#endregion # Helpers + +#region Catalogue builders +$totalInstalled = 0 + +function Build-FlatCatalogue([string]$CatDir, [string]$DestDir, [string]$Pattern) { + if (-not (Test-Path $CatDir)) { return @() } + Get-ChildItem $CatDir -File | Where-Object { $_.Name -match $Pattern } | ForEach-Object { + $destFile = Join-Path $DestDir $_.Name + [pscustomobject]@{ + Name = [System.IO.Path]::GetFileNameWithoutExtension($_.Name) -replace '\.(instructions|agent|prompt|chatmode)$','' + FileName = $_.Name + FullPath = $_.FullName + Description = Get-Description $_.FullName + AlreadyInstalled = (Test-Path $destFile) + } + } | Sort-Object Name +} + +function Build-DirCatalogue([string]$CatDir, [string]$DestDir) { + if (-not (Test-Path $CatDir)) { return @() } + Get-ChildItem $CatDir -Directory | ForEach-Object { + $destSubdir = Join-Path $DestDir $_.Name + $readmePath = Join-Path $_.FullName 'README.md' + if (-not (Test-Path $readmePath)) { $readmePath = Join-Path $_.FullName 'SKILL.md' } + [pscustomobject]@{ + Name = $_.Name + FullPath = $_.FullName + Description = if (Test-Path $readmePath) { Get-Description $readmePath } else { '' } + AlreadyInstalled = (Test-Path $destSubdir) + } + } | Sort-Object Name +} + +#endregion # Catalogue builders + +#region Agents +$script:SubscriptionEntries = [System.Collections.Generic.List[object]]::new() +$SubscriptionManifestPath = Join-Path $GithubDir 
'.copilot-subscriptions.json' + +if (-not $SkipAgents) { + $destDir = Join-Path $GithubDir 'agents' + $catalogue = Build-FlatCatalogue (Join-Path $SourceRoot 'agents') $destDir '\.agent\.md$' + $selected = Select-Items -Category 'Agents' -Items $catalogue -PreSelected $Agents -Recommended $script:Recommendations + + foreach ($item in $selected) { + $result = Install-File -Src $item.FullPath -DestDir $destDir + $verb = switch ($result) { 'added' { '✓ Added' } 'updated' { '↑ Updated' } 'unchanged' { '= Unchanged' } default { '~ DryRun' } } + Log "$verb agent: $($item.FileName)" + if ($result -in 'added','updated','would-copy') { $totalInstalled++ } + $script:SubscriptionEntries.Add([pscustomobject]@{ + name = $item.Name + category = 'agents' + type = 'file' + fileName = $item.FileName + sourceRelPath = "agents/$($item.FileName)" + hashAtInstall = (Get-FileHash $item.FullPath -Algorithm SHA256).Hash + installedAt = (Get-Date).ToString('o') + }) + } +} + +#endregion # Agents + +#region Instructions +if (-not $SkipInstructions) { + $destDir = Join-Path $GithubDir 'instructions' + $catalogue = Build-FlatCatalogue (Join-Path $SourceRoot 'instructions') $destDir '\.instructions\.md$' + $selected = Select-Items -Category 'Instructions' -Items $catalogue -PreSelected $Instructions -Recommended $script:Recommendations + + foreach ($item in $selected) { + $result = Install-File -Src $item.FullPath -DestDir $destDir + $verb = switch ($result) { 'added' { '✓ Added' } 'updated' { '↑ Updated' } 'unchanged' { '= Unchanged' } default { '~ DryRun' } } + Log "$verb instructions: $($item.FileName)" + if ($result -in 'added','updated','would-copy') { $totalInstalled++ } + $script:SubscriptionEntries.Add([pscustomobject]@{ + name = $item.Name + category = 'instructions' + type = 'file' + fileName = $item.FileName + sourceRelPath = "instructions/$($item.FileName)" + hashAtInstall = (Get-FileHash $item.FullPath -Algorithm SHA256).Hash + installedAt = (Get-Date).ToString('o') + }) + } +} + 
+#endregion # Instructions + +#region Hooks +if (-not $SkipHooks) { + $destDir = Join-Path $GithubDir 'hooks' + $catalogue = Build-DirCatalogue (Join-Path $SourceRoot 'hooks') $destDir + $selected = Select-Items -Category 'Hooks' -Items $catalogue -PreSelected $Hooks -Recommended $script:Recommendations + + foreach ($item in $selected) { + $r = Install-Directory -SrcDir $item.FullPath -DestParent $destDir + $verb = if ($DryRun) { '~ DryRun' } else { '✓ Installed' } + Log "$verb hook: $($item.Name) (added=$($r.Added) updated=$($r.Updated) unchanged=$($r.Unchanged))" + if (-not $DryRun) { $totalInstalled++ } + $script:SubscriptionEntries.Add([pscustomobject]@{ + name = $item.Name + category = 'hooks' + type = 'directory' + dirName = $item.Name + sourceRelPath = "hooks/$($item.Name)" + hashAtInstall = Get-DirHash $item.FullPath + installedAt = (Get-Date).ToString('o') + }) + } +} + +#endregion # Hooks + +#region Workflows +if (-not $SkipWorkflows) { + $destDir = Join-Path $GithubDir 'workflows' + $catalogue = Build-FlatCatalogue (Join-Path $SourceRoot 'workflows') $destDir '\.md$' + $selected = Select-Items -Category 'Agentic Workflows' -Items $catalogue -PreSelected $Workflows -Recommended $script:Recommendations + + foreach ($item in $selected) { + $result = Install-File -Src $item.FullPath -DestDir $destDir + $verb = switch ($result) { 'added' { '✓ Added' } 'updated' { '↑ Updated' } 'unchanged' { '= Unchanged' } default { '~ DryRun' } } + Log "$verb workflow: $($item.FileName)" + if ($result -in 'added','updated','would-copy') { $totalInstalled++ } + $script:SubscriptionEntries.Add([pscustomobject]@{ + name = $item.Name + category = 'workflows' + type = 'file' + fileName = $item.FileName + sourceRelPath = "workflows/$($item.FileName)" + hashAtInstall = (Get-FileHash $item.FullPath -Algorithm SHA256).Hash + installedAt = (Get-Date).ToString('o') + }) + } +} + +#endregion # Workflows + +#region Skills +if (-not $SkipSkills) { + $destDir = Join-Path $GithubDir 
'skills' + $catalogue = Build-DirCatalogue (Join-Path $SourceRoot 'skills') $destDir + $selected = Select-Items -Category 'Skills' -Items $catalogue -PreSelected $Skills -Recommended $script:Recommendations + + foreach ($item in $selected) { + $r = Install-Directory -SrcDir $item.FullPath -DestParent $destDir + $verb = if ($DryRun) { '~ DryRun' } else { '✓ Installed' } + Log "$verb skill: $($item.Name) (added=$($r.Added) updated=$($r.Updated) unchanged=$($r.Unchanged))" + if (-not $DryRun) { $totalInstalled++ } + $script:SubscriptionEntries.Add([pscustomobject]@{ + name = $item.Name + category = 'skills' + type = 'directory' + dirName = $item.Name + sourceRelPath = "skills/$($item.Name)" + hashAtInstall = Get-DirHash $item.FullPath + installedAt = (Get-Date).ToString('o') + }) + } +} + +#endregion # Skills + +#region Summary +if ($script:SubscriptionEntries.Count -gt 0) { + Update-Subscriptions -ManifestPath $SubscriptionManifestPath -NewEntries $script:SubscriptionEntries.ToArray() +} + +Write-Host "" +if ($DryRun) { + Log "Dry run complete. Re-run without -DryRun to apply." 'WARN' +} else { + Log "$totalInstalled resource(s) installed/updated in $GithubDir" 'SUCCESS' + Log "Tip: commit .github/ to share Copilot resources with your team (agents, instructions, hooks, workflows, skills)." + Log "Tip: run update-repo.ps1 to check for and apply upstream changes to your subscribed resources." 
+} +#endregion # Summary diff --git a/install-scheduled-task.ps1 b/scripts/install-scheduled-task.ps1 similarity index 53% rename from install-scheduled-task.ps1 rename to scripts/install-scheduled-task.ps1 index d17fc65..48680d5 100644 --- a/install-scheduled-task.ps1 +++ b/scripts/install-scheduled-task.ps1 @@ -2,20 +2,20 @@ [string]$TaskName = 'AwesomeCopilotSync', [string]$Every = '4h', [string]$Dest = "$HOME/.awesome-copilot", - # Default categories now exclude 'collections' (can be re-enabled with -IncludeCollections) - [string]$Categories = 'chatmodes,instructions,prompts', - [switch]$IncludeCollections, - # Allow skipping the combine/publish step if user only wants raw sync - [switch]$SkipCombine, + # Default categories aligned with current awesome-copilot structure + [string]$Categories = 'agents,instructions,workflows,hooks,skills', + [switch]$IncludePlugins, + # Allow skipping the global publish step if user only wants raw sync + [switch]$SkipPublishGlobal, [string]$PwshPath = (Get-Command pwsh -ErrorAction SilentlyContinue | Select-Object -ExpandProperty Source), - [string]$ScriptPath = (Join-Path (Split-Path -Parent $MyInvocation.MyCommand.Path) 'sync-awesome-copilot.ps1'), - [string]$CombineScriptPath = (Join-Path (Split-Path -Parent $MyInvocation.MyCommand.Path) 'combine-and-publish-prompts.ps1'), + [string]$ScriptPath = (Join-Path $PSScriptRoot 'sync-awesome-copilot.ps1'), + [string]$PublishGlobalScriptPath = (Join-Path $PSScriptRoot 'publish-global.ps1'), [switch]$Force ) if (-not $PwshPath) { $PwshPath = (Get-Command powershell | Select-Object -ExpandProperty Source) } if (-not (Test-Path $ScriptPath)) { throw "Sync script not found at $ScriptPath" } -if (-not $SkipCombine -and -not (Test-Path $CombineScriptPath)) { throw "Combine script not found at $CombineScriptPath (use -SkipCombine to suppress)" } +if (-not $SkipPublishGlobal -and -not (Test-Path $PublishGlobalScriptPath)) { throw "Publish-global script not found at $PublishGlobalScriptPath 
(use -SkipPublishGlobal to suppress)" } function Parse-Interval($spec) { if ($spec -match '^(\d+)([hm])$') { @@ -28,21 +28,22 @@ function Parse-Interval($spec) { throw "Unsupported interval spec: $spec (use like 4h or 30m)" } -if ($IncludeCollections -and ($Categories -notmatch 'collections')) { - $Categories = ($Categories.TrimEnd(',') + ',collections') +if ($IncludePlugins -and ($Categories -notmatch 'plugins')) { + $Categories = ($Categories.TrimEnd(',') + ',plugins') } $int = Parse-Interval $Every # Primary sync action -$syncArgs = "-NoProfile -ExecutionPolicy Bypass -File `"$ScriptPath`" -Dest `"$Dest`" -Categories `"$Categories`" -Quiet" -$actions = @() -$actions += New-ScheduledTaskAction -Execute $PwshPath -Argument $syncArgs - -if (-not $SkipCombine) { - # Combine script runs after sync; no need for -DryRun. Include collections only if requested. - $combineArgs = "-NoProfile -ExecutionPolicy Bypass -File `"$CombineScriptPath`" -SourceRoot `"$Dest`"" + $(if ($IncludeCollections) { ' -IncludeCollections' } else { '' }) - $actions += New-ScheduledTaskAction -Execute $PwshPath -Argument $combineArgs +$ScriptDir = Split-Path -Parent $ScriptPath +$syncArgs = "-NoProfile -ExecutionPolicy Bypass -File `"$ScriptPath`" -Dest `"$Dest`" -Categories `"$Categories`" -Quiet" +$actions = @() +$actions += New-ScheduledTaskAction -Execute $PwshPath -Argument $syncArgs -WorkingDirectory $ScriptDir + +if (-not $SkipPublishGlobal) { + # publish-global runs after sync: updates VS Code agents folder and ~/.copilot/skills/ + $publishArgs = "-NoProfile -ExecutionPolicy Bypass -File `"$PublishGlobalScriptPath`" -SourceRoot `"$Dest`"" + $actions += New-ScheduledTaskAction -Execute $PwshPath -Argument $publishArgs -WorkingDirectory $ScriptDir } $Trigger = if ($int.Type -eq 'HOURLY') { New-ScheduledTaskTrigger -Once -At (Get-Date).AddMinutes(1) -RepetitionInterval ([TimeSpan]::FromHours($int.Modifier)) -RepetitionDuration ([TimeSpan]::FromDays(3650)) } else { New-ScheduledTaskTrigger 
-Once -At (Get-Date).AddMinutes(1) -RepetitionInterval ([TimeSpan]::FromMinutes($int.Modifier)) -RepetitionDuration ([TimeSpan]::FromDays(3650)) } $Settings = New-ScheduledTaskSettingsSet -AllowStartIfOnBatteries -DontStopIfGoingOnBatteries -StartWhenAvailable -MultipleInstances IgnoreNew -ExecutionTimeLimit (New-TimeSpan -Minutes 20) @@ -52,7 +53,7 @@ if (Get-ScheduledTask -TaskName $TaskName -ErrorAction SilentlyContinue) { Unregister-ScheduledTask -TaskName $TaskName -Confirm:$false } -Register-ScheduledTask -TaskName $TaskName -Action $actions -Trigger $Trigger -Settings $Settings -Description "Sync (and combine) Awesome Copilot resources" | Out-Null +Register-ScheduledTask -TaskName $TaskName -Action $actions -Trigger $Trigger -Settings $Settings -Description "Sync and publish Awesome Copilot resources (agents, instructions, skills, hooks, workflows)" | Out-Null -$post = if ($SkipCombine) { 'sync only' } else { 'sync + combine/publish' } +$post = if ($SkipPublishGlobal) { 'sync only' } else { 'sync + publish-global' } Write-Host "Scheduled task '$TaskName' created ($post). First run in ~1 minute, then every $Every." 
-ForegroundColor Green diff --git a/scripts/publish-global.ps1 b/scripts/publish-global.ps1 new file mode 100644 index 0000000..346a893 --- /dev/null +++ b/scripts/publish-global.ps1 @@ -0,0 +1,208 @@ +<# +Publish Global Copilot Resources + +Publishes two categories of resources from the local awesome-copilot cache to +global locations where they are always available across all workspaces/repos: + + Agents --> VS Code user agents folder (available in Copilot Chat globally) + Default: %APPDATA%\Code\User\prompts\ + Strategy: symlink / junction first, then file-copy fallback + + Skills --> Personal skills directory (loaded on-demand by VS Code Agent mode / Copilot CLI) + Default: ~\.copilot\skills\ + Strategy: mirror each skill subdirectory (incremental copy) + +Usage: + # Publish both agents and skills (default) + .\publish-global.ps1 + + # Publish only agents + .\publish-global.ps1 -SkipSkills + + # Publish only skills + .\publish-global.ps1 -SkipAgents + + # Override VS Code agents folder (e.g. for a named profile) + .\publish-global.ps1 -AgentsTarget "$env:APPDATA\Code\User\profiles\MyProfile\prompts" + + # Dry run - show what would happen + .\publish-global.ps1 -DryRun + +Notes: + - Agents are linked (not copied) where possible so that sync updates are + immediately reflected in VS Code without re-running this script. + - Skills are copied individually so each skill directory is self-contained + under ~/.copilot/skills//. + - Run after sync-awesome-copilot.ps1, or add to the scheduled task via + install-scheduled-task.ps1. 
+#> +[CmdletBinding()] param( + [string]$SourceRoot = "$HOME/.awesome-copilot", + [string]$AgentsTarget = (Join-Path $env:APPDATA 'Code\User\prompts'), + [string]$SkillsTarget = (Join-Path $HOME '.copilot\skills'), + [switch]$SkipAgents, + [switch]$SkipSkills, + [switch]$DryRun +) + +#region Initialisation +$ErrorActionPreference = 'Stop' + +function Log($m, [string]$level = 'INFO') { + $ts = (Get-Date).ToString('s') + $color = switch ($level) { 'ERROR' { 'Red' } 'WARN' { 'Yellow' } default { 'Cyan' } } + Write-Host "[$ts][$level] $m" -ForegroundColor $color +} + +$AgentsSource = Join-Path $SourceRoot 'agents' +$SkillsSource = Join-Path $SourceRoot 'skills' + +#endregion # Initialisation + +#region Agents +if (-not $SkipAgents) { + if (-not (Test-Path $AgentsSource)) { + Log "Agents source not found: $AgentsSource (run sync-awesome-copilot.ps1 first)" 'WARN' + } + else { + Log "Publishing agents: $AgentsSource --> $AgentsTarget" + + if ($DryRun) { + Log "[DryRun] Would link/copy agents folder to $AgentsTarget" + } + else { + # Attempt junction first (no elevation required on Windows), then symlink, then copy + $linked = $false + + if (Test-Path $AgentsTarget) { + $item = Get-Item $AgentsTarget -Force + if ($item.Attributes -band [IO.FileAttributes]::ReparsePoint) { + Log "Agents already linked at $AgentsTarget - skipping" + $linked = $true + } + else { + # Exists as a real directory - update files in place rather than replacing + Log "Agents folder exists as real directory; updating files in place" + Get-ChildItem $AgentsSource -File | ForEach-Object { + $dest = Join-Path $AgentsTarget $_.Name + $srcHash = (Get-FileHash $_.FullName -Algorithm SHA256).Hash + $dstHash = if (Test-Path $dest) { (Get-FileHash $dest -Algorithm SHA256).Hash } else { $null } + if ($srcHash -ne $dstHash) { + Copy-Item $_.FullName $dest -Force + Log " Updated: $($_.Name)" + } + } + $linked = $true + } + } + + if (-not $linked) { + $parent = Split-Path $AgentsTarget -Parent + if (-not 
(Test-Path $parent)) { New-Item -ItemType Directory -Path $parent -Force | Out-Null } + + try { + cmd /c mklink /J `"$AgentsTarget`" `"$AgentsSource`" | Out-Null + Log "Created junction: $AgentsTarget --> $AgentsSource" + } + catch { + Log "Junction failed ($($_.Exception.Message)); trying symlink" 'WARN' + try { + New-Item -ItemType SymbolicLink -Path $AgentsTarget -Target $AgentsSource -Force | Out-Null + Log "Created symlink: $AgentsTarget --> $AgentsSource" + } + catch { + Log "Symlink failed; copying files instead" 'WARN' + New-Item -ItemType Directory -Path $AgentsTarget -Force | Out-Null + Copy-Item (Join-Path $AgentsSource '*') $AgentsTarget -Force + Log "Copied agents to $AgentsTarget" + } + } + } + } + Log "Agents: done. Restart VS Code if agents do not appear immediately." + } +} + +#endregion # Agents + +#region Skills +if (-not $SkipSkills) { + if (-not (Test-Path $SkillsSource)) { + Log "Skills source not found: $SkillsSource (run sync-awesome-copilot.ps1 first)" 'WARN' + } + else { + Log "Publishing skills: $SkillsSource --> $SkillsTarget" + + if (-not $DryRun -and -not (Test-Path $SkillsTarget)) { + New-Item -ItemType Directory -Path $SkillsTarget -Force | Out-Null + } + + $added = 0; $updated = 0; $unchanged = 0 + + Get-ChildItem $SkillsSource -Directory | ForEach-Object { + $skillName = $_.Name + $skillSrc = $_.FullName + $skillDest = Join-Path $SkillsTarget $skillName + + if ($DryRun) { + Log "[DryRun] Would publish skill: $skillName" + $added++ + return + } + + if (-not (Test-Path $skillDest)) { + New-Item -ItemType Directory -Path $skillDest -Force | Out-Null + } + + Get-ChildItem $skillSrc -File -Recurse | ForEach-Object { + $rel = $_.FullName.Substring($skillSrc.Length).TrimStart('\','/') + $dest = Join-Path $skillDest $rel + $destDir = Split-Path $dest -Parent + if (-not (Test-Path $destDir)) { New-Item -ItemType Directory -Path $destDir -Force | Out-Null } + + $srcHash = (Get-FileHash $_.FullName -Algorithm SHA256).Hash + $dstHash = if 
(Test-Path $dest) { (Get-FileHash $dest -Algorithm SHA256).Hash } else { $null } + if ($srcHash -ne $dstHash) { + Copy-Item $_.FullName $dest -Force + if ($dstHash) { $updated++ } else { $added++ } + } + else { $unchanged++ } + } + } + + Log "Skills: added=$added updated=$updated unchanged=$unchanged --> $SkillsTarget" + + # Ensure VS Code is configured to discover skills + $vsCodeSettings = Join-Path $env:APPDATA 'Code\User\settings.json' + if (-not (Test-Path $vsCodeSettings)) { + Log "VS Code settings.json not found at $vsCodeSettings — skills discovery not configured. Open VS Code once to generate it, then re-run." 'WARN' + } + else { + try { + $s = Get-Content $vsCodeSettings -Raw | ConvertFrom-Json + $changed = $false + if (-not $s.'chat.useAgentSkills') { + $s | Add-Member -NotePropertyName 'chat.useAgentSkills' -NotePropertyValue $true -Force + $changed = $true + } + $loc = '~/.copilot/skills/**' + if (-not $s.'chat.agentSkillsLocations' -or -not $s.'chat.agentSkillsLocations'.$loc) { + $locs = if ($s.'chat.agentSkillsLocations') { $s.'chat.agentSkillsLocations' } else { [pscustomobject]@{} } + $locs | Add-Member -NotePropertyName $loc -NotePropertyValue $true -Force + $s | Add-Member -NotePropertyName 'chat.agentSkillsLocations' -NotePropertyValue $locs -Force + $changed = $true + } + if ($changed) { + $s | ConvertTo-Json -Depth 5 | Set-Content $vsCodeSettings -Encoding UTF8 + Log "Configured chat.useAgentSkills and chat.agentSkillsLocations in VS Code settings" + } + } + catch { Log "Could not update VS Code settings: $_" 'WARN' } + } + } +} + +#endregion # Skills + +if ($DryRun) { Log "[DryRun] No changes made." } +else { Log "Global publish complete." 
} diff --git a/scripts/sync-awesome-copilot.ps1 b/scripts/sync-awesome-copilot.ps1 new file mode 100644 index 0000000..8695e51 --- /dev/null +++ b/scripts/sync-awesome-copilot.ps1 @@ -0,0 +1,251 @@ +<# +Sync Awesome Copilot Resources + +Clones (first run) or pulls (subsequent runs) the github/awesome-copilot repository +using sparse checkout — only the categories you need are fetched. + +Requires 'gh' (GitHub CLI, preferred) or 'git' to be installed. + +Usage: + # Sync all default categories + .\sync-awesome-copilot.ps1 + + # Dry-run: show what would change without writing files + .\sync-awesome-copilot.ps1 -Plan + + # Sync specific categories only + .\sync-awesome-copilot.ps1 -Categories "agents,instructions" + + # Force a specific git tool + .\sync-awesome-copilot.ps1 -GitTool git +#> +[CmdletBinding()] param( + [string]$Dest = "$HOME/.awesome-copilot", + [string]$Categories = 'agents,instructions,workflows,hooks,skills', + [switch]$Quiet, + [switch]$Plan, # Dry-run: show what would change without writing files + [int]$LogRetentionDays = 14, + [int]$TimeoutSeconds = 600, + [ValidateSet('auto', 'gh', 'git')] + [string]$GitTool = 'auto' +) + +#region Initialisation +$ErrorActionPreference = 'Stop' + +$script:StartTime = Get-Date +$script:Deadline = $script:StartTime.AddSeconds($TimeoutSeconds) + +function Write-Log { + param([string]$Message, [string]$Level = 'INFO') + $ts = (Get-Date).ToString('s') + $line = "[$ts][$Level] $Message" + if (-not $Quiet) { Write-Host $line } + Add-Content -Path $Global:LogFile -Value $line +} + +function Check-Timeout { + if ((Get-Date) -gt $script:Deadline) { + Write-Log "Timeout reached ($TimeoutSeconds s), aborting." 
'ERROR' + exit 1 + } +} + +# Prepare log — always relative to this script's directory, regardless of CWD +$RunId = (Get-Date -Format 'yyyyMMdd-HHmmss') +$LogDir = Join-Path $PSScriptRoot 'logs' +if (-not (Test-Path $LogDir)) { New-Item -ItemType Directory -Path $LogDir | Out-Null } +$Global:LogFile = Join-Path $LogDir "sync-$RunId.log" + +Write-Log "Starting Awesome Copilot sync. Dest=$Dest Categories=$Categories" + +#endregion # Initialisation + +#region Tool detection +function Resolve-GitTool { + if ($GitTool -ne 'auto') { + if (-not (Get-Command $GitTool -ErrorAction SilentlyContinue)) { + Write-Log "'$GitTool' not found on PATH." 'ERROR'; exit 1 + } + return $GitTool + } + if (Get-Command gh -ErrorAction SilentlyContinue) { return 'gh' } + if (Get-Command git -ErrorAction SilentlyContinue) { return 'git' } + Write-Log "Neither 'gh' nor 'git' found on PATH. Install one to continue." 'ERROR' + exit 1 +} + +$Tool = Resolve-GitTool +Write-Log "Using tool: $Tool" + +$RepoSlug = 'github/awesome-copilot' +$RepoUrl = 'https://github.com/github/awesome-copilot.git' +$CategoriesList = $Categories.Split(',') | ForEach-Object { $_.Trim() } | Where-Object { $_ } +$ManifestPath = Join-Path $Dest 'manifest.json' +$StatusPath = Join-Path $Dest 'status.txt' + +# Load previous manifest for change detection +$PrevManifest = $null +$PrevIndex = @{} +if (Test-Path $ManifestPath) { + try { + $PrevManifest = Get-Content $ManifestPath -Raw | ConvertFrom-Json + if ($PrevManifest.items) { + foreach ($it in $PrevManifest.items) { + $PrevIndex["$($it.category)|$($it.path)"] = $it + } + } + } + catch { Write-Log "Failed to parse previous manifest: $_" 'WARN' } +} + +function Get-Sha256 { + param([string]$FilePath) + return (Get-FileHash -LiteralPath $FilePath -Algorithm SHA256).Hash.ToLower() +} + +#endregion # Tool detection + +#region Clone or pull +$IsFirstRun = -not (Test-Path (Join-Path $Dest '.git')) + +if ($Plan) { + if ($IsFirstRun) { + Write-Log "[Plan] Would clone $RepoSlug → 
$Dest (sparse: $($CategoriesList -join ', '))" 'INFO' + } else { + Write-Log "[Plan] Would pull latest changes from $RepoSlug into $Dest" 'INFO' + } + Write-Log "[Plan] No files written. Exiting." 'INFO' + exit 0 +} + +if ($IsFirstRun) { + Write-Log "First run — cloning $RepoSlug (sparse, shallow)..." + + # Migrate: if a non-git directory already exists (e.g. from the old API-based sync), + # rename it so git can clone into a clean destination. + if ((Test-Path $Dest) -and (Get-ChildItem $Dest -Force | Measure-Object).Count -gt 0) { + $backupPath = "${Dest}-backup-$RunId" + Write-Log "Existing non-git cache found — moving to $backupPath before cloning." 'WARN' + Move-Item $Dest $backupPath + } + + if (-not (Test-Path $Dest)) { New-Item -ItemType Directory -Path $Dest -Force | Out-Null } + + if ($Tool -eq 'gh') { + & gh repo clone $RepoSlug $Dest -- --depth 1 --filter=blob:none --sparse 2>&1 | + ForEach-Object { Write-Log $_ } + } else { + & git clone --depth 1 --filter=blob:none --sparse $RepoUrl $Dest 2>&1 | + ForEach-Object { Write-Log $_ } + } + if ($LASTEXITCODE -and $LASTEXITCODE -ne 0) { Write-Log "Clone failed (exit $LASTEXITCODE)" 'ERROR'; exit $LASTEXITCODE } + + # Set which directories to check out, then materialise them + & git -C $Dest sparse-checkout set @CategoriesList 2>&1 | Out-Null + Write-Log "Repository cloned successfully." 'SUCCESS' +} else { + Write-Log "Pulling latest changes from $RepoSlug..." + + # Re-apply sparse-checkout in case -Categories changed since last run + & git -C $Dest sparse-checkout set @CategoriesList 2>&1 | Out-Null + + $pullOutput = & git -C $Dest pull 2>&1 + $pullOutput | ForEach-Object { Write-Log $_ } + if ($LASTEXITCODE -and $LASTEXITCODE -ne 0) { + if (($pullOutput -join "`n") -match 'unrelated histories') { + Write-Log "Unrelated histories detected — fetching and resetting to remote HEAD..." 
'WARN' + & git -C $Dest fetch origin 2>&1 | ForEach-Object { Write-Log $_ } + & git -C $Dest reset --hard origin/HEAD 2>&1 | ForEach-Object { Write-Log $_ } + if ($LASTEXITCODE -and $LASTEXITCODE -ne 0) { Write-Log "Reset failed (exit $LASTEXITCODE)" 'ERROR'; exit $LASTEXITCODE } + } else { + Write-Log "Pull failed (exit $LASTEXITCODE)" 'ERROR'; exit $LASTEXITCODE + } + } +} + +Check-Timeout + +#endregion # Clone or pull + +#region File scan and change detection +$NewItems = @() +$Added = 0; $Updated = 0; $Unchanged = 0; $Removed = 0 +$DestResolved = (Resolve-Path $Dest).Path + +foreach ($cat in $CategoriesList) { + $catDir = Join-Path $DestResolved $cat + if (-not (Test-Path $catDir)) { Write-Log "Category folder not found after sync: $cat" 'WARN'; continue } + + $files = Get-ChildItem -Path $catDir -Recurse -File | + Where-Object { $_.Name -match '\.(md|markdown|json|sh)$' } + + foreach ($file in $files) { + Check-Timeout + $relativePath = $file.FullName.Substring($DestResolved.Length + 1) -replace '\\', '/' + $hash = Get-Sha256 -FilePath $file.FullName + $key = "$cat|$relativePath" + $prev = $PrevIndex[$key] + + if ($prev -and $prev.hash -eq $hash) { $Unchanged++ } + elseif ($prev) { $Updated++; Write-Log "Updated: $relativePath" } + else { $Added++; Write-Log "Added: $relativePath" } + + $NewItems += [pscustomobject]@{ + category = $cat + path = $relativePath + size = $file.Length + lastFetched = (Get-Date).ToString('o') + hash = $hash + } + } +} + +# Count removals (files present in previous manifest but gone after pull) +$NewKeySet = @{}; foreach ($ni in $NewItems) { $NewKeySet["$($ni.category)|$($ni.path)"] = $true } +if ($PrevManifest -and $PrevManifest.items) { + foreach ($old in $PrevManifest.items) { + if (-not $NewKeySet.ContainsKey("$($old.category)|$($old.path)")) { + $Removed++ + Write-Log "Removed: $($old.path)" + } + } +} + +#endregion # File scan + +#region Write manifest and status +$Manifest = [pscustomobject]@{ + version = 1 + repo = $RepoSlug 
+ fetchedAt = (Get-Date).ToString('o') + categories = $CategoriesList + items = $NewItems + summary = [pscustomobject]@{ added=$Added; updated=$Updated; removed=$Removed; unchanged=$Unchanged } +} +$Manifest | ConvertTo-Json -Depth 6 | Set-Content -Path $ManifestPath -Encoding UTF8 + +@( + "Sync run: $(Get-Date -Format o)" + "Added: $Added" + "Updated: $Updated" + "Removed: $Removed" + "Unchanged:$Unchanged" + "Total: $($NewItems.Count)" + "Manifest: manifest.json" + "Repo: $RepoSlug" + "Duration: $([int]((Get-Date)-$script:StartTime).TotalSeconds)s" +) | Set-Content -Path $StatusPath -Encoding UTF8 + +Write-Log "Summary Added=$Added Updated=$Updated Removed=$Removed Unchanged=$Unchanged" 'SUCCESS' + +#endregion # Write manifest + +#region Log retention +Get-ChildItem $LogDir -Filter 'sync-*.log' | + Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-$LogRetentionDays) } | + ForEach-Object { Remove-Item $_.FullName -Force } + +#endregion # Log retention + +exit 0 diff --git a/uninstall-scheduled-task.ps1 b/scripts/uninstall-scheduled-task.ps1 similarity index 100% rename from uninstall-scheduled-task.ps1 rename to scripts/uninstall-scheduled-task.ps1 diff --git a/scripts/update-repo.ps1 b/scripts/update-repo.ps1 new file mode 100644 index 0000000..3f926ed --- /dev/null +++ b/scripts/update-repo.ps1 @@ -0,0 +1,196 @@ +<# +Update Subscribed Per-Repo Copilot Resources + +Reads the subscription manifest (.github/.copilot-subscriptions.json) written by +init-repo.ps1, compares each subscribed resource against the local awesome-copilot +cache, and applies any upstream updates. + +Resources are updated in place — the same files/directories that were originally +installed to .github/ are refreshed from the cache. 
+ +Usage: + # Check for and apply updates (interactive prompt) + .\update-repo.ps1 + + # Dry run — show what would be updated without writing any files + .\update-repo.ps1 -DryRun + + # Apply all updates without prompting + .\update-repo.ps1 -Force + + # Check a specific repo + .\update-repo.ps1 -RepoPath "C:\Projects\my-app" + +Notes: + - Only resources present in .github/.copilot-subscriptions.json are checked. + Run init-repo.ps1 to add new subscriptions. + - Resources whose destination file/directory has been manually deleted are + skipped (treated as intentionally removed). + - The subscription manifest is updated with new hashes after each successful update. + - Requires the local awesome-copilot cache (~/.awesome-copilot/). Run + sync-awesome-copilot.ps1 first if the cache is stale. +#> +[CmdletBinding()] param( + [string]$RepoPath = (Get-Location).Path, + [string]$SourceRoot = "$HOME/.awesome-copilot", + [switch]$Force, + [switch]$DryRun +) + +#region Initialisation +$ErrorActionPreference = 'Stop' + +function Log($m, [string]$level = 'INFO') { + $ts = (Get-Date).ToString('s') + $color = switch ($level) { 'ERROR' { 'Red' } 'WARN' { 'Yellow' } 'SUCCESS' { 'Green' } default { 'Cyan' } } + Write-Host "[$ts][$level] $m" -ForegroundColor $color +} + +function Get-DirHash([string]$DirPath) { + $hashes = Get-ChildItem $DirPath -Recurse -File | + Sort-Object FullName | + ForEach-Object { (Get-FileHash $_.FullName -Algorithm SHA256).Hash } + $combined = $hashes -join '|' + $bytes = [System.Text.Encoding]::UTF8.GetBytes($combined) + $stream = [System.IO.MemoryStream]::new($bytes) + return (Get-FileHash -InputStream $stream -Algorithm SHA256).Hash +} + +#endregion # Initialisation + +#region Load subscriptions +if (-not (Test-Path $RepoPath)) { + Log "Repo path not found: $RepoPath" 'ERROR'; exit 1 +} +$RepoPath = Resolve-Path $RepoPath | Select-Object -ExpandProperty Path + +$GithubDir = Join-Path $RepoPath '.github' +$ManifestPath = Join-Path $GithubDir 
'.copilot-subscriptions.json' + +if (-not (Test-Path $ManifestPath)) { + Log "No subscriptions manifest found: $ManifestPath" 'WARN' + Log "Run init-repo.ps1 first to subscribe to resources." + exit 0 +} + +$manifest = Get-Content $ManifestPath -Raw | ConvertFrom-Json +$subs = @($manifest.subscriptions) + +if (-not $subs -or $subs.Count -eq 0) { + Log "No subscriptions recorded in manifest." 'WARN' + exit 0 +} + +if (-not (Test-Path $SourceRoot)) { + Log "Cache not found: $SourceRoot -- run sync-awesome-copilot.ps1 first" 'ERROR'; exit 1 +} + +Log "Checking $($subs.Count) subscribed resource(s) for upstream changes..." +Log "Cache : $SourceRoot" +Log "Repo : $RepoPath" + +#endregion # Load subscriptions + +#region Check for stale resources +$stale = [System.Collections.Generic.List[object]]::new() + +foreach ($sub in $subs) { + $sourcePath = Join-Path $SourceRoot $sub.sourceRelPath + + if (-not (Test-Path $sourcePath)) { + Log "= Skipping $($sub.name) ($($sub.category)) — no longer in cache." 'WARN' + continue + } + + # Destination: .github/ e.g. .github/agents/foo.agent.md + $destPath = Join-Path $GithubDir $sub.sourceRelPath + + if (-not (Test-Path $destPath)) { + Log "= Skipping $($sub.name) ($($sub.category)) — destination removed locally." + continue + } + + $currentHash = if ($sub.type -eq 'file') { + (Get-FileHash $sourcePath -Algorithm SHA256).Hash + } else { + Get-DirHash $sourcePath + } + + if ($currentHash -ne $sub.hashAtInstall) { + $stale.Add([pscustomobject]@{ + Sub = $sub + SourcePath = $sourcePath + DestPath = $destPath + CurrentHash = $currentHash + }) + Log "↑ Stale : $($sub.name) ($($sub.category))" + } else { + Log "= Current: $($sub.name) ($($sub.category))" + } +} + +#endregion # Check for stale resources + +#region Apply updates +if ($stale.Count -eq 0) { + Write-Host "" + Log "All $($subs.Count) subscribed resource(s) are up to date." 'SUCCESS' + exit 0 +} + +Write-Host "" +Log "$($stale.Count) resource(s) have upstream updates available." 
'WARN' + +if ($DryRun) { + Log "[DryRun] Re-run without -DryRun to apply updates." 'WARN' + exit 0 +} + +if (-not $Force) { + Write-Host "" + Write-Host " Apply all $($stale.Count) update(s)? [Y] Yes [N] No (default): " -NoNewline -ForegroundColor Yellow + $answer = (Read-Host).Trim() + if ($answer -notmatch '^[Yy]') { + Log "Update skipped." + exit 0 + } +} + +$updated = 0 +foreach ($item in $stale) { + $sub = $item.Sub + try { + if ($sub.type -eq 'file') { + $destDir = Split-Path $item.DestPath -Parent + if (-not (Test-Path $destDir)) { New-Item -ItemType Directory -Path $destDir -Force | Out-Null } + Copy-Item $item.SourcePath $item.DestPath -Force + } else { + # Mirror all files from the source directory into the destination + Get-ChildItem $item.SourcePath -File -Recurse | ForEach-Object { + $rel = $_.FullName.Substring($item.SourcePath.Length).TrimStart('\', '/') + $dest = Join-Path $item.DestPath $rel + $destDir = Split-Path $dest -Parent + if (-not (Test-Path $destDir)) { New-Item -ItemType Directory -Path $destDir -Force | Out-Null } + Copy-Item $_.FullName $dest -Force + } + } + + $sub | Add-Member -NotePropertyName 'hashAtInstall' -NotePropertyValue $item.CurrentHash -Force + $sub | Add-Member -NotePropertyName 'installedAt' -NotePropertyValue (Get-Date).ToString('o') -Force + Log "✓ Updated: $($sub.name) ($($sub.category))" + $updated++ + } catch { + Log "Failed to update $($sub.name): $_" 'ERROR' + } +} + +# Persist updated hashes back to the manifest +$manifest | Add-Member -NotePropertyName 'updatedAt' -NotePropertyValue (Get-Date).ToString('o') -Force +$manifest | Add-Member -NotePropertyName 'subscriptions' -NotePropertyValue $subs -Force +$manifest | ConvertTo-Json -Depth 5 | Set-Content $ManifestPath -Encoding UTF8 + +Write-Host "" +Log "$updated resource(s) updated in $GithubDir" 'SUCCESS' +Log "Tip: commit .github/ to share the updates with your team." 
+ +#endregion # Apply updates diff --git a/sync-awesome-copilot.ps1 b/sync-awesome-copilot.ps1 deleted file mode 100644 index 5639dfa..0000000 --- a/sync-awesome-copilot.ps1 +++ /dev/null @@ -1,328 +0,0 @@ -[CmdletBinding()] param( - [string]$Dest = "$HOME/.awesome-copilot", - # Default now excludes 'collections' (can still be added explicitly via -Categories) - [string]$Categories = 'chatmodes,instructions,prompts', - [switch]$Quiet, - [switch]$NoDelete, - [switch]$DiffOnly, - [switch]$Plan, # Dry-run: compute changes, no file writes / deletions / manifest update - [switch]$SkipBackup, # Skip pre-deletion backup snapshot - [int]$BackupRetention = 5, # Number of recent backups to retain - [int]$LogRetentionDays = 14, - [int]$TimeoutSeconds = 600 -) - -$ErrorActionPreference = 'Inquire' - -$script:StartTime = Get-Date -$script:Deadline = $script:StartTime.AddSeconds($TimeoutSeconds) - -function Write-Log { - param([string]$Message, [string]$Level = 'INFO') - $ts = (Get-Date).ToString('s') - $line = "[$ts][$Level] $Message" - if (-not $Quiet) { Write-Host $line } - Add-Content -Path $Global:LogFile -Value $line -} - -function Check-Timeout { - if ((Get-Date) -gt $script:Deadline) { - Write-Log "Timeout reached, aborting." 'ERROR' - exit 1 - } -} - -# Prepare paths -$Root = Resolve-Path -Path . | Select-Object -ExpandProperty Path -$RunId = (Get-Date -Format 'yyyyMMdd-HHmmss') -if (-not (Test-Path logs)) { New-Item -ItemType Directory -Path logs | Out-Null } -$Global:LogFile = Join-Path logs "sync-$RunId.log" - -Write-Log "Starting Awesome Copilot scheduled sync. 
Dest=$Dest Categories=$Categories" 'INFO' - -# Ensure destination -if (-not (Test-Path $Dest)) { New-Item -ItemType Directory -Path $Dest -Force | Out-Null } - -$ManifestPath = Join-Path $Dest 'manifest.json' -$StatusPath = Join-Path $Dest 'status.txt' - -# Load previous manifest -$PrevManifest = $null -if (Test-Path $ManifestPath) { - try { $PrevManifest = Get-Content $ManifestPath -Raw | ConvertFrom-Json } catch { Write-Log "Failed to parse previous manifest: $_" 'WARN' } -} - -$CategoriesList = $Categories.Split(',') | ForEach-Object { $_.Trim() } | Where-Object { $_ } - -$Repo = 'github/awesome-copilot' -$ApiBase = 'https://api.github.com' -$UserAgent = 'awesome-copilot-scheduled-sync' -$Token = $env:GITHUB_TOKEN - -function Invoke-Github { - param( - [string]$Url, - [int]$Attempt = 1 - ) - Check-Timeout - $Headers = @{ 'User-Agent' = $UserAgent; 'Accept' = 'application/vnd.github.v3+json' } - if ($Token) { $Headers['Authorization'] = "Bearer $Token" } - try { - return Invoke-RestMethod -Uri $Url -Headers $Headers -TimeoutSec 60 - } - catch { - # Rate limit detection (403 + Remaining=0) - try { - $resp = $_.Exception.Response - if ($resp -and $resp.StatusCode.value__ -eq 403) { - $remainingHeader = $resp.Headers['X-RateLimit-Remaining'] - if ($remainingHeader -eq '0') { - $script:RateLimitHit = $true - Write-Log "Rate limit hit for $Url (Remaining=0)." 'WARN' - } - } - } - catch {} - if ($Attempt -lt 3 -and ($_.Exception.Response.StatusCode.value__ -ge 500 -or $_.Exception.Response.StatusCode.value__ -eq 429)) { - $delay = [math]::Pow(2, $Attempt) - Write-Log "Transient error on $Url. 
Retry in $delay s" 'WARN' - Start-Sleep -Seconds $delay - return Invoke-Github -Url $Url -Attempt ($Attempt + 1) - } - Write-Log "Request failed: $Url :: $_" 'ERROR' - throw - } -} - -function Get-FileHashSha256String { - param([byte[]]$Bytes) - $sha256 = [System.Security.Cryptography.SHA256]::Create() - $hashBytes = $sha256.ComputeHash($Bytes) - ($hashBytes | ForEach-Object { $_.ToString('x2') }) -join '' -} - -if ($DiffOnly) { - if (-not $PrevManifest) { Write-Log "No previous manifest; diff-only mode cannot proceed." 'ERROR'; exit 1 } - Write-Log "Diff-only mode: no network calls. Summarizing previous manifest." 'INFO' - $summary = $PrevManifest.summary - $content = @() - $content += "Diff-only summary (previous run)" - $content += "Added: $($summary.added)" - $content += "Updated: $($summary.updated)" - $content += "Removed: $($summary.removed)" - $content += "Unchanged:$($summary.unchanged)" - Set-Content -Path $StatusPath -Value ($content -join [Environment]::NewLine) - exit 0 -} - -$NewItems = @() -$Added = 0; $Updated = 0; $Removed = 0; $Unchanged = 0 -$PrevIndex = @{} -if ($PrevManifest -and $PrevManifest.items) { - foreach ($it in $PrevManifest.items) { $PrevIndex["$($it.category)|$($it.path)"] = $it } -} - -foreach ($cat in $CategoriesList) { - Write-Log "Fetching category: $cat" 'INFO' - $url = "$ApiBase/repos/$Repo/contents/$cat" - try { - $listing = Invoke-Github -Url $url - } - catch { - Write-Log "Failed to list $cat" 'ERROR' - continue - } - - if (-not $script:SuccessfulCategories) { $script:SuccessfulCategories = @() } - $script:SuccessfulCategories += $cat - - foreach ($entry in $listing) { - if ($entry.type -ne 'file') { continue } - if (-not ($entry.name -match '\.(md|markdown|json)$')) { continue } - Check-Timeout - $downloadUrl = $entry.download_url - if (-not $downloadUrl) { continue } - $rawBytes = $null - try { - # Primary attempt: Invoke-WebRequest and derive bytes (ContentBytes is not a valid property in modern PowerShell) - $resp = 
Invoke-WebRequest -Uri $downloadUrl -UserAgent $UserAgent -TimeoutSec 60 -ErrorAction Stop - if ($resp.RawContentStream) { - $ms = New-Object System.IO.MemoryStream - $resp.RawContentStream.CopyTo($ms) - $rawBytes = $ms.ToArray() - } - elseif ($resp.Content) { - # Fallback: encode string content as UTF8 (raw text files like md/json are UTF-8 on GitHub) - $rawBytes = [System.Text.Encoding]::UTF8.GetBytes($resp.Content) - } - if (-not $rawBytes -or $rawBytes.Length -eq 0) { throw "Empty response body" } - } - catch { - Write-Log "Direct download failed for $($entry.path): $_ (will fallback to contents API)" 'WARN' - try { - # Fallback: GitHub contents API returns base64 content - $fileMeta = Invoke-Github -Url "$ApiBase/repos/$Repo/contents/$($entry.path)" - if ($fileMeta.content) { - $b64 = ($fileMeta.content -replace "\s", '') - $rawBytes = [System.Convert]::FromBase64String($b64) - } - else { - throw "No content field in contents API response" - } - } - catch { - Write-Log "Failed download $($entry.path): $_" 'ERROR' - continue - } - } - $hash = Get-FileHashSha256String -Bytes $rawBytes - $key = "$cat|$($entry.path)" - $prev = $PrevIndex[$key] - $relativePath = $entry.path - $targetFile = Join-Path $Dest $relativePath - $targetDir = Split-Path $targetFile -Parent - if (-not (Test-Path $targetDir)) { New-Item -ItemType Directory -Path $targetDir -Force | Out-Null } - - $isChange = $true - if ($prev -and $prev.sha -eq $entry.sha -and $prev.hash -eq $hash) { $isChange = $false } - if ($isChange) { - if ($Plan) { - Write-Log "[Plan] Would save: $relativePath" 'INFO' - } - else { - # Ensure the file is fully replaced by removing it first if it exists - if (Test-Path $targetFile) { - Remove-Item $targetFile -Force - } - [System.IO.File]::WriteAllBytes($targetFile, $rawBytes) - } - if ($prev) { $Updated++ } else { $Added++ } - if (-not $Plan) { Write-Log "Saved: $relativePath" 'INFO' } - } - else { $Unchanged++ } - - $NewItems += [pscustomobject]@{ - category = $cat - 
path = $relativePath - sha = $entry.sha - size = $entry.size - lastFetched = (Get-Date).ToString('o') - hash = $hash - } - } -} - -# Determine removals (only for categories successfully fetched this run) -if (-not $Plan -and -not $NoDelete -and $PrevManifest) { - if ($script:RateLimitHit) { - Write-Log 'Rate limit encountered this run; skipping stale file deletion.' 'WARN' - } - else { - $successful = $script:SuccessfulCategories | Sort-Object -Unique - if (-not $successful -or $successful.Count -eq 0) { - Write-Log 'No categories fetched successfully this run; skipping stale file deletion for safety.' 'WARN' - } - else { - # Backup snapshot before deletions - if (-not $SkipBackup) { - try { - $backupRoot = Join-Path $Dest 'backups' - if (-not (Test-Path $backupRoot)) { New-Item -ItemType Directory -Path $backupRoot | Out-Null } - $backupFile = Join-Path $backupRoot ("pre-delete-" + $RunId + '.zip') - Write-Log "Creating backup snapshot: $backupFile" 'INFO' - Add-Type -AssemblyName System.IO.Compression.FileSystem -ErrorAction SilentlyContinue - # Zip only successfully fetched category folders (if present) - $tempStage = Join-Path $backupRoot ("stage-" + $RunId) - New-Item -ItemType Directory -Path $tempStage | Out-Null - foreach ($c in $successful) { - $cDir = Join-Path $Dest $c - if (Test-Path $cDir) { Copy-Item $cDir (Join-Path $tempStage $c) -Recurse -Force } - } - [IO.Compression.ZipFile]::CreateFromDirectory($tempStage, $backupFile) - Remove-Item $tempStage -Recurse -Force -ErrorAction SilentlyContinue - # Retention - $backups = Get-ChildItem $backupRoot -Filter 'pre-delete-*.zip' | Sort-Object LastWriteTime -Descending - if ($backups.Count -gt $BackupRetention) { - $toRemove = $backups | Select-Object -Skip $BackupRetention - foreach ($oldB in $toRemove) { Remove-Item $oldB.FullName -Force } - } - } - catch { - Write-Log "Backup snapshot failed (continuing without backup): $_" 'WARN' - } - } - $NewKeySet = @{} - foreach ($ni in $NewItems) { 
$NewKeySet["$($ni.category)|$($ni.path)"] = $true } - foreach ($old in $PrevManifest.items) { - $k = "$($old.category)|$($old.path)" - # Only consider deletion if the category was fetched this run - if ($successful -contains $old.category) { - if (-not $NewKeySet.ContainsKey($k)) { - $Removed++ - $fileToRemove = Join-Path $Dest $old.path - if (Test-Path $fileToRemove) { Remove-Item $fileToRemove -Force } - Write-Log "Removed stale file: $($old.path)" 'INFO' - } - } - } - } - } -} - -if ($Plan) { - Write-Log "[Plan] Summary Added=$Added Updated=$Updated Removed=(planned) Unchanged=$Unchanged" 'INFO' - Write-Log '[Plan] No files written. Exiting without manifest/status update.' 'INFO' - exit 0 -} - -# Write manifest -$Manifest = [pscustomobject]@{ - version = 1 - repo = $Repo - fetchedAt = (Get-Date).ToString('o') - categories = $CategoriesList - items = $NewItems - summary = [pscustomobject]@{ - added = $Added - updated = $Updated - removed = $Removed - unchanged = $Unchanged - } -} -if ($script:SuccessfulCategories -and $script:SuccessfulCategories.Count -gt 0) { - $Manifest | ConvertTo-Json -Depth 6 | Set-Content -Path $ManifestPath -Encoding UTF8 - # Integrity marker - try { - $integrity = [pscustomobject]@{ - fetchedAt = (Get-Date).ToString('o') - successfulCategories = ($script:SuccessfulCategories | Sort-Object -Unique) - summary = $Manifest.summary - manifestSha256 = (Get-FileHash -Algorithm SHA256 $ManifestPath).Hash - } - $integrity | ConvertTo-Json -Depth 4 | Set-Content -Path (Join-Path $Dest 'last-success.json') -Encoding UTF8 - } - catch { Write-Log "Failed writing integrity marker: $_" 'WARN' } -} -else { - Write-Log 'No successful categories; manifest not updated this run.' 
'WARN' -} - -# Status file -$StatusLines = @() -$StatusLines += "Sync run: $(Get-Date -Format o)" -$StatusLines += "Added: $Added" -$StatusLines += "Updated: $Updated" -$StatusLines += "Removed: $Removed" -$StatusLines += "Unchanged:$Unchanged" -$StatusLines += "Total: $($Manifest.items.Count)" -$StatusLines += "Manifest: manifest.json" -$StatusLines += "Repo: $Repo" -$StatusLines += "Duration: $([int]((Get-Date)-$script:StartTime).TotalSeconds)s" -$StatusLines | Set-Content -Path $StatusPath -Encoding UTF8 - -Write-Log "Summary Added=$Added Updated=$Updated Removed=$Removed Unchanged=$Unchanged" 'INFO' - -# Log retention -Get-ChildItem logs -Filter 'sync-*.log' | Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-$LogRetentionDays) } | ForEach-Object { Remove-Item $_.FullName -Force } - -exit 0