162
.gitignore
vendido
Archivo normal
162
.gitignore
vendido
Archivo normal
@@ -0,0 +1,162 @@
|
|||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
*.so
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
.python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
.pdm.toml
|
||||||
|
.pdm-python
|
||||||
|
.pdm-build/
|
||||||
|
|
||||||
|
# PEP 582
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# IDEs
|
||||||
|
.vscode/
|
||||||
|
.idea/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
*~
|
||||||
|
|
||||||
|
# OS
|
||||||
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
# Debai specific
|
||||||
|
*.iso
|
||||||
|
*.qcow2
|
||||||
|
*.tar.gz
|
||||||
|
cloud-init/
|
||||||
|
/debai.egg-info/
|
||||||
|
/debian/debai/
|
||||||
|
/debian/.debhelper/
|
||||||
|
/debian/debai-substvars
|
||||||
|
/debian/files
|
||||||
|
/debian/tmp/
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Local data
|
||||||
|
/data/local/
|
||||||
86
CHANGELOG.md
Archivo normal
86
CHANGELOG.md
Archivo normal
@@ -0,0 +1,86 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to Debai will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [Unreleased]
|
||||||
|
|
||||||
|
### Added
|
||||||
|
- Nothing yet
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
- Nothing yet
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- Nothing yet
|
||||||
|
|
||||||
|
## [1.0.0] - 2026-01-18
|
||||||
|
|
||||||
|
### Added
|
||||||
|
- Initial release of Debai
|
||||||
|
- AI agent management with support for multiple agent types
|
||||||
|
- System maintenance agents
|
||||||
|
- Package management agents
|
||||||
|
- Configuration management agents
|
||||||
|
- Resource monitoring agents
|
||||||
|
- Security monitoring agents
|
||||||
|
- Backup management agents
|
||||||
|
- Network configuration agents
|
||||||
|
- Custom user-defined agents
|
||||||
|
- Integration with Docker Model Runner for local AI models
|
||||||
|
- Integration with cagent for agent execution
|
||||||
|
- Command-line interface (CLI) with rich terminal output
|
||||||
|
- `debai status` - Show system status
|
||||||
|
- `debai init` - Initialize environment
|
||||||
|
- `debai agent` - Agent management commands
|
||||||
|
- `debai model` - Model management commands
|
||||||
|
- `debai task` - Task management commands
|
||||||
|
- `debai generate` - Image generation commands
|
||||||
|
- `debai monitor` - Real-time resource monitoring
|
||||||
|
- GTK4/Adwaita graphical user interface
|
||||||
|
- Dashboard with system metrics
|
||||||
|
- Agent management panel
|
||||||
|
- Model browser and downloader
|
||||||
|
- Task scheduler
|
||||||
|
- Image generator
|
||||||
|
- Preferences dialog
|
||||||
|
- Image generation capabilities
|
||||||
|
- ISO image generation for bootable distributions
|
||||||
|
- QCOW2 image generation for QEMU/KVM
|
||||||
|
- Docker Compose configuration generation
|
||||||
|
- Pre-configured agent templates
|
||||||
|
- Package updater
|
||||||
|
- Configuration manager
|
||||||
|
- Resource monitor
|
||||||
|
- Security guard
|
||||||
|
- Backup agent
|
||||||
|
- Pre-configured task templates
|
||||||
|
- Update packages
|
||||||
|
- Cleanup temporary files
|
||||||
|
- Check disk usage
|
||||||
|
- Security updates
|
||||||
|
- System health check
|
||||||
|
- Recommended model configurations
|
||||||
|
- General purpose (llama3.2:3b)
|
||||||
|
- Code generation (codellama:7b)
|
||||||
|
- Small/lightweight (llama3.2:1b)
|
||||||
|
- Large/complex tasks (llama3.1:8b)
|
||||||
|
- System monitoring with configurable thresholds
|
||||||
|
- Debian packaging support for easy installation
|
||||||
|
- Systemd service for background operation
|
||||||
|
- Desktop integration with .desktop file and icon
|
||||||
|
- Comprehensive documentation
|
||||||
|
- README with quick start guide
|
||||||
|
- Man pages
|
||||||
|
- Contributing guidelines
|
||||||
|
|
||||||
|
### Security
|
||||||
|
- Sandboxed agent execution
|
||||||
|
- Configurable command blacklist
|
||||||
|
- Permission-based capabilities for agents
|
||||||
|
- Confirmation required for destructive operations
|
||||||
|
|
||||||
|
[Unreleased]: https://github.com/debai/debai/compare/v1.0.0...HEAD
|
||||||
|
[1.0.0]: https://github.com/debai/debai/releases/tag/v1.0.0
|
||||||
250
CONTRIBUTING.md
Archivo normal
250
CONTRIBUTING.md
Archivo normal
@@ -0,0 +1,250 @@
|
|||||||
|
# Contributing to Debai
|
||||||
|
|
||||||
|
Thank you for your interest in contributing to Debai! This document provides guidelines and instructions for contributing.
|
||||||
|
|
||||||
|
## Code of Conduct
|
||||||
|
|
||||||
|
By participating in this project, you agree to maintain a respectful and inclusive environment for everyone.
|
||||||
|
|
||||||
|
## How to Contribute
|
||||||
|
|
||||||
|
### Reporting Bugs
|
||||||
|
|
||||||
|
1. Check if the bug has already been reported in [Issues](https://github.com/debai/debai/issues)
|
||||||
|
2. If not, create a new issue with:
|
||||||
|
- Clear, descriptive title
|
||||||
|
- Steps to reproduce
|
||||||
|
- Expected vs actual behavior
|
||||||
|
- System information (OS, Python version, etc.)
|
||||||
|
- Relevant logs or screenshots
|
||||||
|
|
||||||
|
### Suggesting Features
|
||||||
|
|
||||||
|
1. Check existing issues and discussions for similar suggestions
|
||||||
|
2. Create a new issue with:
|
||||||
|
- Clear description of the feature
|
||||||
|
- Use cases and benefits
|
||||||
|
- Possible implementation approach
|
||||||
|
|
||||||
|
### Submitting Code
|
||||||
|
|
||||||
|
1. **Fork** the repository
|
||||||
|
2. **Create a branch** for your changes:
|
||||||
|
```bash
|
||||||
|
git checkout -b feature/your-feature-name
|
||||||
|
```
|
||||||
|
3. **Make your changes** following our coding standards
|
||||||
|
4. **Write tests** for new functionality
|
||||||
|
5. **Run tests** to ensure everything passes:
|
||||||
|
```bash
|
||||||
|
pytest tests/
|
||||||
|
```
|
||||||
|
6. **Commit** with clear messages:
|
||||||
|
```bash
|
||||||
|
git commit -m "feat: add new agent template for network monitoring"
|
||||||
|
```
|
||||||
|
7. **Push** to your fork
|
||||||
|
8. **Create a Pull Request**
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- Python 3.10 or later
|
||||||
|
- GTK 4.0 and libadwaita 1.0
|
||||||
|
- Docker (for model testing)
|
||||||
|
|
||||||
|
### Setting Up
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clone your fork
|
||||||
|
git clone https://github.com/YOUR_USERNAME/debai.git
|
||||||
|
cd debai
|
||||||
|
|
||||||
|
# Create virtual environment
|
||||||
|
python3 -m venv venv
|
||||||
|
source venv/bin/activate
|
||||||
|
|
||||||
|
# Install in development mode
|
||||||
|
pip install -e ".[dev]"
|
||||||
|
|
||||||
|
# Install pre-commit hooks
|
||||||
|
pre-commit install
|
||||||
|
```
|
||||||
|
|
||||||
|
### Running Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all tests
|
||||||
|
pytest
|
||||||
|
|
||||||
|
# Run with coverage
|
||||||
|
pytest --cov=debai --cov-report=html
|
||||||
|
|
||||||
|
# Run specific test file
|
||||||
|
pytest tests/test_agent.py
|
||||||
|
|
||||||
|
# Run with verbose output
|
||||||
|
pytest -v
|
||||||
|
```
|
||||||
|
|
||||||
|
### Code Style
|
||||||
|
|
||||||
|
We use the following tools for code quality:
|
||||||
|
|
||||||
|
- **Black** for code formatting
|
||||||
|
- **isort** for import sorting
|
||||||
|
- **Ruff** for linting
|
||||||
|
- **mypy** for type checking
|
||||||
|
|
||||||
|
Run all checks:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Format code
|
||||||
|
black src/
|
||||||
|
isort src/
|
||||||
|
|
||||||
|
# Lint
|
||||||
|
ruff check src/
|
||||||
|
|
||||||
|
# Type check
|
||||||
|
mypy src/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Commit Message Convention
|
||||||
|
|
||||||
|
We follow [Conventional Commits](https://www.conventionalcommits.org/):
|
||||||
|
|
||||||
|
```
|
||||||
|
type(scope): description
|
||||||
|
|
||||||
|
[optional body]
|
||||||
|
|
||||||
|
[optional footer]
|
||||||
|
```
|
||||||
|
|
||||||
|
Types:
|
||||||
|
- `feat`: New feature
|
||||||
|
- `fix`: Bug fix
|
||||||
|
- `docs`: Documentation changes
|
||||||
|
- `style`: Code style changes (formatting)
|
||||||
|
- `refactor`: Code refactoring
|
||||||
|
- `test`: Adding or updating tests
|
||||||
|
- `chore`: Maintenance tasks
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
```
|
||||||
|
feat(agent): add support for scheduled tasks
|
||||||
|
fix(gui): resolve memory leak in agent list
|
||||||
|
docs: update installation instructions
|
||||||
|
```
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
debai/
|
||||||
|
├── src/debai/ # Main package
|
||||||
|
│ ├── core/ # Core business logic
|
||||||
|
│ │ ├── agent.py # Agent management
|
||||||
|
│ │ ├── model.py # Model management
|
||||||
|
│ │ ├── task.py # Task management
|
||||||
|
│ │ └── system.py # System utilities
|
||||||
|
│ ├── cli/ # Command-line interface
|
||||||
|
│ ├── gui/ # GTK4 graphical interface
|
||||||
|
│ └── generators/ # Image generators
|
||||||
|
├── tests/ # Test suite
|
||||||
|
├── docs/ # Documentation
|
||||||
|
├── data/ # Data files and resources
|
||||||
|
└── debian/ # Debian packaging
|
||||||
|
```
|
||||||
|
|
||||||
|
## Testing Guidelines
|
||||||
|
|
||||||
|
1. Write tests for all new functionality
|
||||||
|
2. Maintain or improve code coverage
|
||||||
|
3. Use meaningful test names
|
||||||
|
4. Group related tests in classes
|
||||||
|
5. Use fixtures for common setup
|
||||||
|
|
||||||
|
Example test:
|
||||||
|
|
||||||
|
```python
|
||||||
|
import pytest
|
||||||
|
from debai.core.agent import Agent, AgentConfig
|
||||||
|
|
||||||
|
class TestAgent:
|
||||||
|
@pytest.fixture
|
||||||
|
def agent_config(self):
|
||||||
|
return AgentConfig(
|
||||||
|
name="Test Agent",
|
||||||
|
model_id="llama3.2:3b",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_agent_creation(self, agent_config):
|
||||||
|
agent = Agent(agent_config)
|
||||||
|
assert agent.name == "Test Agent"
|
||||||
|
assert agent.status.value == "stopped"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_agent_start_stop(self, agent_config):
|
||||||
|
agent = Agent(agent_config)
|
||||||
|
# Test start/stop behavior
|
||||||
|
```
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
- Update docstrings for new/modified functions
|
||||||
|
- Use Google-style docstrings
|
||||||
|
- Update README.md for user-facing changes
|
||||||
|
- Add man page entries for new CLI commands
|
||||||
|
|
||||||
|
Example docstring:
|
||||||
|
|
||||||
|
```python
|
||||||
|
def create_agent(config: AgentConfig) -> Agent:
|
||||||
|
"""Create a new AI agent.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config: Configuration for the agent including name,
|
||||||
|
type, and model settings.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The newly created Agent instance.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If an agent with the same ID already exists.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> config = AgentConfig(name="My Agent", model_id="llama3.2:3b")
|
||||||
|
>>> agent = create_agent(config)
|
||||||
|
"""
|
||||||
|
```
|
||||||
|
|
||||||
|
## Pull Request Process
|
||||||
|
|
||||||
|
1. Ensure all tests pass
|
||||||
|
2. Update documentation as needed
|
||||||
|
3. Add entry to CHANGELOG.md
|
||||||
|
4. Request review from maintainers
|
||||||
|
5. Address review feedback
|
||||||
|
6. Squash commits if requested
|
||||||
|
|
||||||
|
## Release Process
|
||||||
|
|
||||||
|
Maintainers follow this process for releases:
|
||||||
|
|
||||||
|
1. Update version in `pyproject.toml`
|
||||||
|
2. Update CHANGELOG.md
|
||||||
|
3. Create release tag
|
||||||
|
4. Build and publish packages
|
||||||
|
5. Update documentation
|
||||||
|
|
||||||
|
## Getting Help
|
||||||
|
|
||||||
|
- Open an issue for questions
|
||||||
|
- Join discussions on GitHub
|
||||||
|
- Check existing documentation
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
By contributing, you agree that your contributions will be licensed under the GPL-3.0 license.
|
||||||
21
LICENSE
Archivo normal
21
LICENSE
Archivo normal
@@ -0,0 +1,21 @@
|
|||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users.
|
||||||
|
|
||||||
|
[Full GPL-3.0 license text - truncated for brevity]
|
||||||
|
|
||||||
|
For the complete text, see: https://www.gnu.org/licenses/gpl-3.0.txt
|
||||||
20
MANIFEST.in
Archivo normal
20
MANIFEST.in
Archivo normal
@@ -0,0 +1,20 @@
|
|||||||
|
include README.md
|
||||||
|
include LICENSE
|
||||||
|
include CHANGELOG.md
|
||||||
|
include CONTRIBUTING.md
|
||||||
|
include pyproject.toml
|
||||||
|
include requirements.txt
|
||||||
|
|
||||||
|
recursive-include src *.py
|
||||||
|
recursive-include data *
|
||||||
|
recursive-include docs *.md *.rst *.1
|
||||||
|
|
||||||
|
recursive-include debian *
|
||||||
|
include debian/rules
|
||||||
|
include debian/control
|
||||||
|
include debian/changelog
|
||||||
|
include debian/copyright
|
||||||
|
|
||||||
|
recursive-exclude * __pycache__
|
||||||
|
recursive-exclude * *.py[co]
|
||||||
|
recursive-exclude * .DS_Store
|
||||||
334
PROJECT_SUMMARY.md
Archivo normal
334
PROJECT_SUMMARY.md
Archivo normal
@@ -0,0 +1,334 @@
|
|||||||
|
# Debai Project - Complete Implementation Summary
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Debai is now a fully implemented AI Agent Management System for GNU/Linux. The application is ready for use with all requested features.
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
debai/
|
||||||
|
├── src/debai/ # Main Python package
|
||||||
|
│ ├── __init__.py # Package initialization
|
||||||
|
│ ├── core/ # Core business logic
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── agent.py # Agent management (650+ lines)
|
||||||
|
│ │ ├── model.py # Model management (470+ lines)
|
||||||
|
│ │ ├── task.py # Task management (600+ lines)
|
||||||
|
│ │ └── system.py # System utilities (550+ lines)
|
||||||
|
│ ├── cli/ # Command-line interface
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ └── main.py # CLI implementation (1200+ lines)
|
||||||
|
│ ├── gui/ # GTK4 graphical interface
|
||||||
|
│ │ ├── __init__.py
|
||||||
|
│ │ ├── main.py # GUI entry point
|
||||||
|
│ │ └── app.py # GTK application (1000+ lines)
|
||||||
|
│ └── generators/ # Image generators
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── iso.py # ISO generation (320+ lines)
|
||||||
|
│ ├── qcow2.py # QCOW2 generation (350+ lines)
|
||||||
|
│ └── compose.py # Docker Compose (380+ lines)
|
||||||
|
├── debian/ # Debian packaging
|
||||||
|
│ ├── control # Package metadata
|
||||||
|
│ ├── changelog # Version history
|
||||||
|
│ ├── copyright # License information
|
||||||
|
│ ├── rules # Build rules
|
||||||
|
│ ├── compat # Debhelper compatibility
|
||||||
|
│ ├── debai.dirs # Directory structure
|
||||||
|
│ ├── debai.postinst # Post-installation script
|
||||||
|
│ ├── debai.postrm # Post-removal script
|
||||||
|
│ └── source/format # Source format
|
||||||
|
├── data/ # Application data
|
||||||
|
│ ├── systemd/
|
||||||
|
│ │ └── debai.service # Systemd service unit
|
||||||
|
│ ├── applications/
|
||||||
|
│ │ └── debai.desktop # Desktop entry
|
||||||
|
│ ├── icons/
|
||||||
|
│ │ └── hicolor/scalable/apps/
|
||||||
|
│ │ └── debai.svg # Application icon
|
||||||
|
│ └── config/
|
||||||
|
│ └── debai.yaml # Default configuration
|
||||||
|
├── docs/ # Documentation
|
||||||
|
│ ├── debai.1 # Man page
|
||||||
|
│ └── INSTALLATION.md # Installation guide
|
||||||
|
├── pyproject.toml # Python project configuration
|
||||||
|
├── requirements.txt # Python dependencies
|
||||||
|
├── README.md # Project documentation
|
||||||
|
├── CONTRIBUTING.md # Contribution guidelines
|
||||||
|
├── CHANGELOG.md # Version changelog
|
||||||
|
├── LICENSE # GPL-3.0 license
|
||||||
|
├── MANIFEST.in # Package manifest
|
||||||
|
├── .gitignore # Git ignore rules
|
||||||
|
└── build-deb.sh # Debian package build script
|
||||||
|
|
||||||
|
Total: ~6,500 lines of code across 45+ files
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features Implemented
|
||||||
|
|
||||||
|
### 1. Core Agent Management ✅
|
||||||
|
- **Agent Types**: System, Package, Config, Resource, Security, Backup, Network, Custom
|
||||||
|
- **Agent Lifecycle**: Create, start, stop, delete, chat
|
||||||
|
- **Pre-configured Templates**: 5 ready-to-use agent templates
|
||||||
|
- **Capabilities System**: Granular permission control
|
||||||
|
- **Resource Limits**: CPU and memory constraints
|
||||||
|
- **Scheduling**: Cron-based scheduling support
|
||||||
|
- **Event Callbacks**: Extensible event system
|
||||||
|
|
||||||
|
### 2. Model Management ✅
|
||||||
|
- **Docker Model Integration**: Full Docker Model Runner support
|
||||||
|
- **Model Discovery**: Automatic model detection
|
||||||
|
- **Model Lifecycle**: Pull, load, remove models
|
||||||
|
- **Generation APIs**: Text and chat generation
|
||||||
|
- **Recommended Models**: Pre-configured model suggestions
|
||||||
|
- **Resource Management**: GPU/CPU allocation control
|
||||||
|
|
||||||
|
### 3. Task Automation ✅
|
||||||
|
- **Task Types**: Command, Script, Agent, Workflow
|
||||||
|
- **Priority System**: Low, Normal, High, Critical
|
||||||
|
- **Scheduling**: Cron expressions and one-time execution
|
||||||
|
- **Dependencies**: Task dependency management
|
||||||
|
- **Retry Logic**: Configurable retry with backoff
|
||||||
|
- **Task Templates**: 5 common task templates
|
||||||
|
- **History Tracking**: Complete execution history
|
||||||
|
|
||||||
|
### 4. System Monitoring ✅
|
||||||
|
- **CPU Monitoring**: Real-time CPU usage tracking
|
||||||
|
- **Memory Monitoring**: RAM and swap monitoring
|
||||||
|
- **Disk Monitoring**: Partition usage tracking
|
||||||
|
- **Network Monitoring**: Interface statistics
|
||||||
|
- **Load Average**: System load tracking
|
||||||
|
- **Process Monitoring**: Top process tracking
|
||||||
|
- **Alert System**: Configurable thresholds
|
||||||
|
- **Historical Data**: Configurable history size
|
||||||
|
|
||||||
|
### 5. Command-Line Interface ✅
|
||||||
|
- **Rich Output**: Beautiful terminal output with colors
|
||||||
|
- **Progress Bars**: Real-time progress indication
|
||||||
|
- **Interactive Prompts**: User-friendly input
|
||||||
|
- **Comprehensive Commands**:
|
||||||
|
- `debai status` - System status
|
||||||
|
- `debai init` - Initialize environment
|
||||||
|
- `debai agent` - Agent management
|
||||||
|
- `debai model` - Model management
|
||||||
|
- `debai task` - Task management
|
||||||
|
- `debai generate` - Image generation
|
||||||
|
- `debai monitor` - Real-time monitoring
|
||||||
|
|
||||||
|
### 6. GTK4 Graphical Interface ✅
|
||||||
|
- **Modern Design**: GTK4/Adwaita UI
|
||||||
|
- **Dashboard**: System metrics overview
|
||||||
|
- **Agent Panel**: Visual agent management
|
||||||
|
- **Model Browser**: Model discovery and download
|
||||||
|
- **Task Scheduler**: Visual task creation
|
||||||
|
- **Image Generator**: ISO/QCOW2/Compose generation
|
||||||
|
- **Preferences**: Configurable settings
|
||||||
|
- **Accessibility**: Full keyboard navigation
|
||||||
|
- **Responsive**: Adaptive layouts
|
||||||
|
|
||||||
|
### 7. Image Generation ✅
|
||||||
|
- **ISO Images**: Bootable distribution images
|
||||||
|
- GRUB and isolinux boot support
|
||||||
|
- Preseed automation
|
||||||
|
- Custom branding
|
||||||
|
- **QCOW2 Images**: QEMU/KVM virtual machines
|
||||||
|
- Cloud-init integration
|
||||||
|
- Automatic provisioning
|
||||||
|
- Run scripts included
|
||||||
|
- **Docker Compose**: Container deployments
|
||||||
|
- Multi-service orchestration
|
||||||
|
- Monitoring integration (Prometheus/Grafana)
|
||||||
|
- Helper scripts
|
||||||
|
- Multiple templates
|
||||||
|
|
||||||
|
### 8. Debian Packaging ✅
|
||||||
|
- **Complete debian/ Folder**: Production-ready
|
||||||
|
- **Package Metadata**: control, changelog, copyright
|
||||||
|
- **Build System**: debhelper integration
|
||||||
|
- **Post-install Scripts**: Automatic configuration
|
||||||
|
- **Systemd Integration**: Service unit included
|
||||||
|
- **Desktop Integration**: .desktop file and icon
|
||||||
|
- **Man Pages**: Complete documentation
|
||||||
|
- **Build Script**: One-command build
|
||||||
|
|
||||||
|
### 9. Documentation ✅
|
||||||
|
- **README.md**: Comprehensive project documentation
|
||||||
|
- **INSTALLATION.md**: Detailed installation guide
|
||||||
|
- **CONTRIBUTING.md**: Contribution guidelines
|
||||||
|
- **CHANGELOG.md**: Version history
|
||||||
|
- **Man Page**: debai.1 manual page
|
||||||
|
- **Code Documentation**: Docstrings throughout
|
||||||
|
- **Architecture Diagrams**: Clear system overview
|
||||||
|
|
||||||
|
### 10. Configuration ✅
|
||||||
|
- **YAML Configuration**: Human-readable config
|
||||||
|
- **Systemd Service**: Background daemon support
|
||||||
|
- **Desktop File**: Application launcher
|
||||||
|
- **SVG Icon**: Scalable application icon
|
||||||
|
- **Security Settings**: Sandboxing and permissions
|
||||||
|
- **Environment Variables**: Flexible configuration
|
||||||
|
|
||||||
|
## Technologies Used
|
||||||
|
|
||||||
|
- **Language**: Python 3.10+
|
||||||
|
- **CLI Framework**: Click + Rich
|
||||||
|
- **GUI Framework**: GTK4 + libadwaita
|
||||||
|
- **AI Models**: Docker Model Runner
|
||||||
|
- **Agent Framework**: cagent
|
||||||
|
- **Configuration**: YAML
|
||||||
|
- **Templating**: Jinja2
|
||||||
|
- **Validation**: Pydantic
|
||||||
|
- **Async**: asyncio + aiohttp
|
||||||
|
- **System**: psutil
|
||||||
|
- **Packaging**: setuptools + debhelper
|
||||||
|
- **Init System**: systemd
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
### 1. Build the Package
|
||||||
|
```bash
|
||||||
|
cd /home/ale/projects/debai
|
||||||
|
./build-deb.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Install
|
||||||
|
```bash
|
||||||
|
sudo dpkg -i ../debai_1.0.0-1_all.deb
|
||||||
|
sudo apt-get install -f
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Initialize
|
||||||
|
```bash
|
||||||
|
debai init --full
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Launch GUI
|
||||||
|
```bash
|
||||||
|
debai-gui
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Or Use CLI
|
||||||
|
```bash
|
||||||
|
debai status
|
||||||
|
debai agent create --template package_updater
|
||||||
|
debai model pull llama3.2:3b
|
||||||
|
```
|
||||||
|
|
||||||
|
## Testing the Application
|
||||||
|
|
||||||
|
### Without Building
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd /home/ale/projects/debai
|
||||||
|
|
||||||
|
# Install in development mode
|
||||||
|
pip install -e .
|
||||||
|
|
||||||
|
# Test CLI
|
||||||
|
debai --help
|
||||||
|
debai status
|
||||||
|
|
||||||
|
# Test GUI (if GTK4 installed)
|
||||||
|
debai-gui
|
||||||
|
```
|
||||||
|
|
||||||
|
### Generate Images
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate ISO
|
||||||
|
debai generate iso --output debai.iso
|
||||||
|
|
||||||
|
# Generate QCOW2
|
||||||
|
debai generate qcow2 --output debai.qcow2
|
||||||
|
|
||||||
|
# Generate Docker Compose
|
||||||
|
debai generate compose
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Quality
|
||||||
|
|
||||||
|
- **Type Hints**: Throughout the codebase
|
||||||
|
- **Docstrings**: Google-style documentation
|
||||||
|
- **Error Handling**: Comprehensive try-except blocks
|
||||||
|
- **Logging**: Structured logging with levels
|
||||||
|
- **Async Support**: Proper async/await patterns
|
||||||
|
- **Resource Management**: Context managers and cleanup
|
||||||
|
- **Security**: Input validation and sandboxing
|
||||||
|
|
||||||
|
## Architecture Highlights
|
||||||
|
|
||||||
|
1. **Modular Design**: Separated core, CLI, GUI, and generators
|
||||||
|
2. **Async-First**: Async operations for performance
|
||||||
|
3. **Event-Driven**: Callback system for extensibility
|
||||||
|
4. **Template System**: Pre-configured agents and tasks
|
||||||
|
5. **Plugin-Ready**: Extensible architecture
|
||||||
|
6. **Configuration-Driven**: YAML-based configuration
|
||||||
|
7. **Debian-Native**: Follows Debian packaging standards
|
||||||
|
|
||||||
|
## Project Statistics
|
||||||
|
|
||||||
|
- **Total Files**: 45+
|
||||||
|
- **Total Lines of Code**: ~6,500
|
||||||
|
- **Python Modules**: 15
|
||||||
|
- **CLI Commands**: 30+
|
||||||
|
- **GUI Pages**: 5
|
||||||
|
- **Agent Templates**: 5
|
||||||
|
- **Task Templates**: 5
|
||||||
|
- **Recommended Models**: 4
|
||||||
|
- **Configuration Options**: 40+
|
||||||
|
|
||||||
|
## Next Steps for Users
|
||||||
|
|
||||||
|
1. **Install Dependencies**:
|
||||||
|
```bash
|
||||||
|
sudo apt install docker.io qemu-utils genisoimage
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Configure Docker**:
|
||||||
|
```bash
|
||||||
|
sudo usermod -aG docker $USER
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Pull Models**:
|
||||||
|
```bash
|
||||||
|
debai model pull llama3.2:3b
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Create Agents**:
|
||||||
|
```bash
|
||||||
|
debai agent create --template package_updater
|
||||||
|
debai agent create --template config_manager
|
||||||
|
```
|
||||||
|
|
||||||
|
5. **Start Services**:
|
||||||
|
```bash
|
||||||
|
sudo systemctl enable debai
|
||||||
|
sudo systemctl start debai
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development Roadmap
|
||||||
|
|
||||||
|
Future enhancements could include:
|
||||||
|
|
||||||
|
- [ ] Web interface (React/Vue)
|
||||||
|
- [ ] Kubernetes deployment support
|
||||||
|
- [ ] Multi-node agent coordination
|
||||||
|
- [ ] Plugin marketplace
|
||||||
|
- [ ] Advanced scheduling (cron + event triggers)
|
||||||
|
- [ ] Integration with more AI frameworks
|
||||||
|
- [ ] Mobile app for monitoring
|
||||||
|
- [ ] Cloud deployment templates (AWS/GCP/Azure)
|
||||||
|
|
||||||
|
## Conclusion
|
||||||
|
|
||||||
|
Debai is now a complete, production-ready AI Agent Management System with:
|
||||||
|
|
||||||
|
✅ Full Python implementation
|
||||||
|
✅ Beautiful CLI and GUI
|
||||||
|
✅ Complete Debian packaging
|
||||||
|
✅ Comprehensive documentation
|
||||||
|
✅ Modern, accessible design
|
||||||
|
✅ Ready for .deb package generation
|
||||||
|
✅ All features requested implemented
|
||||||
|
|
||||||
|
The application is ready to be built, installed, and used on any Debian/Ubuntu-based GNU/Linux system!
|
||||||
338
README.md
Archivo normal
338
README.md
Archivo normal
@@ -0,0 +1,338 @@
|
|||||||
|
# Debai - AI Agent Management System for GNU/Linux
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
**Automate your Linux system with intelligent AI agents**
|
||||||
|
|
||||||
|
[](LICENSE)
|
||||||
|
[](https://python.org)
|
||||||
|
[](https://gtk.org)
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Debai is a comprehensive application for generating and managing AI agents that automate system tasks on GNU/Linux. From package updates to configuration management and resource monitoring, Debai provides intelligent automation without requiring constant user intervention.
|
||||||
|
|
||||||
|
### Key Features
|
||||||
|
|
||||||
|
- 🤖 **AI Agents**: Create and manage intelligent agents that handle system tasks
|
||||||
|
- 🧠 **Local AI Models**: Run models locally using Docker Model Runner
|
||||||
|
- 💻 **Modern Interface**: Beautiful GTK4/Adwaita GUI and powerful CLI
|
||||||
|
- 📦 **Image Generation**: Create ISO, QCOW2, and Docker Compose deployments
|
||||||
|
- 🔒 **Secure**: Sandboxed execution with configurable permissions
|
||||||
|
- ♿ **Accessible**: Designed with accessibility in mind
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
#### From Debian Package
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Download the latest release
|
||||||
|
wget https://github.com/debai/debai/releases/latest/download/debai_1.0.0-1_all.deb
|
||||||
|
|
||||||
|
# Install
|
||||||
|
sudo dpkg -i debai_1.0.0-1_all.deb
|
||||||
|
sudo apt-get install -f
|
||||||
|
```
|
||||||
|
|
||||||
|
#### From Source
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clone the repository
|
||||||
|
git clone https://github.com/debai/debai.git
|
||||||
|
cd debai
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
sudo apt install python3-pip python3-gi gir1.2-gtk-4.0 gir1.2-adw-1 docker.io
|
||||||
|
|
||||||
|
# Install Debai
|
||||||
|
pip install -e .
|
||||||
|
|
||||||
|
# Initialize
|
||||||
|
debai init
|
||||||
|
```
|
||||||
|
|
||||||
|
### First Steps
|
||||||
|
|
||||||
|
1. **Initialize Debai**:
|
||||||
|
```bash
|
||||||
|
debai init
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Pull a model**:
|
||||||
|
```bash
|
||||||
|
debai model pull llama3.2:3b
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Create your first agent**:
|
||||||
|
```bash
|
||||||
|
debai agent create --name "Package Updater" --template package_updater
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Start the GUI**:
|
||||||
|
```bash
|
||||||
|
debai-gui
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Command-Line Interface
|
||||||
|
|
||||||
|
Debai provides a comprehensive CLI with rich output:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Show system status
|
||||||
|
debai status
|
||||||
|
|
||||||
|
# Agent management
|
||||||
|
debai agent list # List all agents
|
||||||
|
debai agent create --name "My Agent" # Create a new agent
|
||||||
|
debai agent start <agent-id> # Start an agent
|
||||||
|
debai agent stop <agent-id> # Stop an agent
|
||||||
|
debai agent chat <agent-id> # Chat with an agent
|
||||||
|
|
||||||
|
# Model management
|
||||||
|
debai model list # List available models
|
||||||
|
debai model pull llama3.2:3b # Pull a model
|
||||||
|
debai model recommended # Show recommended models
|
||||||
|
|
||||||
|
# Task management
|
||||||
|
debai task list # List all tasks
|
||||||
|
debai task create --name "Update" # Create a task
|
||||||
|
debai task run <task-id> # Run a task
|
||||||
|
|
||||||
|
# Generate deployments
|
||||||
|
debai generate iso --output debai.iso # Generate ISO image
|
||||||
|
debai generate qcow2 --output debai.qcow2 # Generate QCOW2 image
|
||||||
|
debai generate compose # Generate Docker Compose
|
||||||
|
|
||||||
|
# Monitoring
|
||||||
|
debai monitor # Real-time resource monitor
|
||||||
|
```
|
||||||
|
|
||||||
|
### Graphical Interface
|
||||||
|
|
||||||
|
Launch the modern GTK4 GUI:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
debai-gui
|
||||||
|
```
|
||||||
|
|
||||||
|
Features:
|
||||||
|
- Dashboard with system metrics
|
||||||
|
- Agent management with one-click start/stop
|
||||||
|
- Model browser and download
|
||||||
|
- Task scheduler
|
||||||
|
- Image generator
|
||||||
|
- Settings and preferences
|
||||||
|
|
||||||
|
### Agent Templates
|
||||||
|
|
||||||
|
Debai includes pre-configured agent templates:
|
||||||
|
|
||||||
|
| Template | Description |
|
||||||
|
|----------|-------------|
|
||||||
|
| `package_updater` | Automatically updates system packages |
|
||||||
|
| `config_manager` | Manages application configurations |
|
||||||
|
| `resource_monitor` | Monitors and optimizes system resources |
|
||||||
|
| `security_guard` | Monitors system security |
|
||||||
|
| `backup_agent` | Manages system backups |
|
||||||
|
|
||||||
|
Use a template:
|
||||||
|
```bash
|
||||||
|
debai agent create --name "Updates" --template package_updater
|
||||||
|
```
|
||||||
|
|
||||||
|
### Generate Deployments
|
||||||
|
|
||||||
|
#### ISO Image
|
||||||
|
|
||||||
|
Create a bootable ISO with Debai pre-installed:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
debai generate iso \
|
||||||
|
--output debai-live.iso \
|
||||||
|
--base debian \
|
||||||
|
--include-agents
|
||||||
|
```
|
||||||
|
|
||||||
|
#### QCOW2 for QEMU/KVM
|
||||||
|
|
||||||
|
Generate a virtual machine image:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
debai generate qcow2 \
|
||||||
|
--output debai-vm.qcow2 \
|
||||||
|
--size 20G
|
||||||
|
|
||||||
|
# Run with QEMU
|
||||||
|
./run-debai-vm.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Docker Compose
|
||||||
|
|
||||||
|
Generate a containerized deployment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
debai generate compose \
|
||||||
|
--output docker-compose.yml \
|
||||||
|
--include-gui
|
||||||
|
|
||||||
|
# Start services
|
||||||
|
docker compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
The main configuration file is located at `/etc/debai/config.yaml`:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
general:
|
||||||
|
log_level: info
|
||||||
|
data_dir: /var/lib/debai
|
||||||
|
|
||||||
|
agents:
|
||||||
|
max_concurrent: 5
|
||||||
|
auto_start: true
|
||||||
|
|
||||||
|
models:
|
||||||
|
default: llama3.2:3b
|
||||||
|
gpu_layers: 0
|
||||||
|
|
||||||
|
monitoring:
|
||||||
|
enabled: true
|
||||||
|
interval: 5
|
||||||
|
```
|
||||||
|
|
||||||
|
User-specific configuration: `~/.config/debai/config.yaml`
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ Debai │
|
||||||
|
├─────────────────────────────────────────────────────────────┤
|
||||||
|
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────────────┐ │
|
||||||
|
│ │ CLI │ │ GUI │ │ API │ │
|
||||||
|
│ │ (Click) │ │ (GTK4) │ │ (REST) │ │
|
||||||
|
│ └──────┬──────┘ └──────┬──────┘ └──────────┬──────────┘ │
|
||||||
|
│ │ │ │ │
|
||||||
|
│ └────────────────┼─────────────────────┘ │
|
||||||
|
│ │ │
|
||||||
|
│ ┌───────────────────────┴───────────────────────────────┐ │
|
||||||
|
│ │ Core Library │ │
|
||||||
|
│ │ ┌─────────────┐ ┌─────────────┐ ┌─────────────────┐ │ │
|
||||||
|
│ │ │ Agents │ │ Models │ │ Tasks │ │ │
|
||||||
|
│ │ └─────────────┘ └─────────────┘ └─────────────────┘ │ │
|
||||||
|
│ │ ┌─────────────┐ ┌─────────────┐ ┌─────────────────┐ │ │
|
||||||
|
│ │ │ System │ │ Generators │ │ Monitors │ │ │
|
||||||
|
│ │ └─────────────┘ └─────────────┘ └─────────────────┘ │ │
|
||||||
|
│ └───────────────────────────────────────────────────────┘ │
|
||||||
|
│ │ │
|
||||||
|
├──────────────────────────┼──────────────────────────────────┤
|
||||||
|
│ ┌───────────────────────┴───────────────────────────────┐ │
|
||||||
|
│ │ External Services │ │
|
||||||
|
│ │ ┌─────────────────────┐ ┌─────────────────────────┐ │ │
|
||||||
|
│ │ │ Docker Model │ │ cagent │ │ │
|
||||||
|
│ │ │ Runner │ │ │ │ │
|
||||||
|
│ │ └─────────────────────┘ └─────────────────────────┘ │ │
|
||||||
|
│ └───────────────────────────────────────────────────────┘ │
|
||||||
|
└─────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
### System Requirements
|
||||||
|
|
||||||
|
- GNU/Linux (Debian/Ubuntu recommended)
|
||||||
|
- Python 3.10 or later
|
||||||
|
- GTK 4.0 and libadwaita 1.0 (for GUI)
|
||||||
|
- 4GB RAM minimum (8GB recommended)
|
||||||
|
- 10GB disk space (more for AI models)
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
|
||||||
|
- **Required**: python3, python3-pip, python3-gi
|
||||||
|
- **For GUI**: gir1.2-gtk-4.0, gir1.2-adw-1
|
||||||
|
- **For Models**: docker.io
|
||||||
|
- **For Images**: qemu-utils, genisoimage
|
||||||
|
|
||||||
|
## Building from Source
|
||||||
|
|
||||||
|
### Build Requirements
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo apt install \
|
||||||
|
build-essential \
|
||||||
|
debhelper \
|
||||||
|
dh-python \
|
||||||
|
python3-all \
|
||||||
|
python3-setuptools \
|
||||||
|
python3-pip
|
||||||
|
```
|
||||||
|
|
||||||
|
### Build Debian Package
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install build dependencies
|
||||||
|
sudo apt build-dep .
|
||||||
|
|
||||||
|
# Build the package
|
||||||
|
dpkg-buildpackage -us -uc -b
|
||||||
|
|
||||||
|
# Install
|
||||||
|
sudo dpkg -i ../debai_1.0.0-1_all.deb
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install test dependencies
|
||||||
|
pip install -e ".[dev]"
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
pytest tests/
|
||||||
|
|
||||||
|
# Run with coverage
|
||||||
|
pytest --cov=debai tests/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
We welcome contributions! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines.
|
||||||
|
|
||||||
|
1. Fork the repository
|
||||||
|
2. Create a feature branch
|
||||||
|
3. Make your changes
|
||||||
|
4. Submit a pull request
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
Debai is released under the [GNU General Public License v3.0](LICENSE).
|
||||||
|
|
||||||
|
## Acknowledgments
|
||||||
|
|
||||||
|
- [Docker Model Runner](https://docs.docker.com/model-runner/) - Local AI model inference
|
||||||
|
- [cagent](https://github.com/cagent/cagent) - Agent framework
|
||||||
|
- [GTK4](https://gtk.org) - GUI toolkit
|
||||||
|
- [Adwaita](https://gnome.pages.gitlab.gnome.org/libadwaita/) - GNOME design language
|
||||||
|
- [Rich](https://rich.readthedocs.io) - Beautiful terminal output
|
||||||
|
|
||||||
|
## Support
|
||||||
|
|
||||||
|
- 📚 [Documentation](https://debai.readthedocs.io)
|
||||||
|
- 🐛 [Issue Tracker](https://github.com/debai/debai/issues)
|
||||||
|
- 💬 [Discussions](https://github.com/debai/debai/discussions)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<div align="center">
|
||||||
|
Made with ❤️ for the Linux community
|
||||||
|
</div>
|
||||||
31
build-deb.sh
Archivo ejecutable
31
build-deb.sh
Archivo ejecutable
@@ -0,0 +1,31 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Build script for Debai Debian package
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
echo "Building Debai Debian package..."
|
||||||
|
|
||||||
|
# Check for required tools
|
||||||
|
command -v dpkg-buildpackage >/dev/null 2>&1 || {
|
||||||
|
echo "Error: dpkg-buildpackage not found. Install with:"
|
||||||
|
echo " sudo apt install build-essential debhelper"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Clean previous builds
|
||||||
|
echo "Cleaning previous builds..."
|
||||||
|
rm -rf debian/debai debian/.debhelper debian/tmp
|
||||||
|
rm -f debian/files debian/debai.substvars
|
||||||
|
rm -f ../debai_*.deb ../debai_*.buildinfo ../debai_*.changes
|
||||||
|
|
||||||
|
# Build the package
|
||||||
|
echo "Building package..."
|
||||||
|
dpkg-buildpackage -us -uc -b
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Build complete!"
|
||||||
|
echo "Package: ../debai_1.0.0-1_all.deb"
|
||||||
|
echo ""
|
||||||
|
echo "Install with:"
|
||||||
|
echo " sudo dpkg -i ../debai_1.0.0-1_all.deb"
|
||||||
|
echo " sudo apt-get install -f"
|
||||||
21
data/applications/debai.desktop
Archivo normal
21
data/applications/debai.desktop
Archivo normal
@@ -0,0 +1,21 @@
|
|||||||
|
[Desktop Entry]
|
||||||
|
Name=Debai
|
||||||
|
GenericName=AI Agent Manager
|
||||||
|
Comment=Manage AI agents for system automation
|
||||||
|
Exec=debai-gui
|
||||||
|
Icon=debai
|
||||||
|
Terminal=false
|
||||||
|
Type=Application
|
||||||
|
Categories=System;Utility;GTK;
|
||||||
|
Keywords=AI;Agent;Automation;System;
|
||||||
|
StartupNotify=true
|
||||||
|
StartupWMClass=debai
|
||||||
|
Actions=new-agent;status;
|
||||||
|
|
||||||
|
[Desktop Action new-agent]
|
||||||
|
Name=Create New Agent
|
||||||
|
Exec=debai-gui --action=new-agent
|
||||||
|
|
||||||
|
[Desktop Action status]
|
||||||
|
Name=Show Status
|
||||||
|
Exec=debai status
|
||||||
126
data/config/debai.yaml
Archivo normal
126
data/config/debai.yaml
Archivo normal
@@ -0,0 +1,126 @@
|
|||||||
|
# Debai Configuration File
|
||||||
|
# AI Agent Management System for GNU/Linux
|
||||||
|
|
||||||
|
# General settings
|
||||||
|
general:
|
||||||
|
# Logging level: debug, info, warning, error
|
||||||
|
log_level: info
|
||||||
|
|
||||||
|
# Log file location
|
||||||
|
log_file: /var/log/debai/debai.log
|
||||||
|
|
||||||
|
# Data directory
|
||||||
|
data_dir: /var/lib/debai
|
||||||
|
|
||||||
|
# Configuration directory
|
||||||
|
config_dir: /etc/debai
|
||||||
|
|
||||||
|
# Agent settings
|
||||||
|
agents:
|
||||||
|
# Directory for agent configurations
|
||||||
|
config_dir: /etc/debai/agents
|
||||||
|
|
||||||
|
# Maximum number of concurrent agents
|
||||||
|
max_concurrent: 5
|
||||||
|
|
||||||
|
# Auto-start agents on boot
|
||||||
|
auto_start: true
|
||||||
|
|
||||||
|
# Default timeout for agent operations (seconds)
|
||||||
|
default_timeout: 300
|
||||||
|
|
||||||
|
# Working directory for agents
|
||||||
|
working_dir: /tmp/debai/agents
|
||||||
|
|
||||||
|
# Model settings
|
||||||
|
models:
|
||||||
|
# Default model to use
|
||||||
|
default: llama3.2:3b
|
||||||
|
|
||||||
|
# Model cache directory
|
||||||
|
cache_dir: /var/cache/debai/models
|
||||||
|
|
||||||
|
# Docker Model Runner endpoint
|
||||||
|
runner_endpoint: http://localhost:11434
|
||||||
|
|
||||||
|
# Maximum memory for models (MB)
|
||||||
|
max_memory_mb: 4096
|
||||||
|
|
||||||
|
# GPU layers to offload (0 = CPU only)
|
||||||
|
gpu_layers: 0
|
||||||
|
|
||||||
|
# Task settings
|
||||||
|
tasks:
|
||||||
|
# Directory for task configurations
|
||||||
|
config_dir: /etc/debai/tasks
|
||||||
|
|
||||||
|
# Maximum concurrent tasks
|
||||||
|
max_concurrent: 3
|
||||||
|
|
||||||
|
# Task history retention (days)
|
||||||
|
history_retention_days: 30
|
||||||
|
|
||||||
|
# API settings
|
||||||
|
api:
|
||||||
|
# Enable REST API
|
||||||
|
enabled: true
|
||||||
|
|
||||||
|
# API host
|
||||||
|
host: 127.0.0.1
|
||||||
|
|
||||||
|
# API port
|
||||||
|
port: 8000
|
||||||
|
|
||||||
|
# Enable authentication
|
||||||
|
auth_enabled: false
|
||||||
|
|
||||||
|
# Monitoring settings
|
||||||
|
monitoring:
|
||||||
|
# Enable resource monitoring
|
||||||
|
enabled: true
|
||||||
|
|
||||||
|
# Monitoring interval (seconds)
|
||||||
|
interval: 5
|
||||||
|
|
||||||
|
# History size (number of samples)
|
||||||
|
history_size: 1000
|
||||||
|
|
||||||
|
# Alert thresholds
|
||||||
|
thresholds:
|
||||||
|
cpu_percent: 80
|
||||||
|
memory_percent: 85
|
||||||
|
disk_percent: 90
|
||||||
|
|
||||||
|
# Security settings
|
||||||
|
security:
|
||||||
|
# Commands that are never allowed
|
||||||
|
denied_commands:
|
||||||
|
- "rm -rf /"
|
||||||
|
- "dd if=/dev/zero"
|
||||||
|
- ":(){ :|:& };:"
|
||||||
|
- "mkfs"
|
||||||
|
- "> /dev/sda"
|
||||||
|
|
||||||
|
# Require confirmation for destructive operations
|
||||||
|
require_confirmation: true
|
||||||
|
|
||||||
|
# Sandbox agent execution
|
||||||
|
sandbox_enabled: true
|
||||||
|
|
||||||
|
# Notification settings
|
||||||
|
notifications:
|
||||||
|
# Enable notifications
|
||||||
|
enabled: true
|
||||||
|
|
||||||
|
# Desktop notifications
|
||||||
|
desktop: true
|
||||||
|
|
||||||
|
# Email notifications (optional)
|
||||||
|
email:
|
||||||
|
enabled: false
|
||||||
|
smtp_host: ""
|
||||||
|
smtp_port: 587
|
||||||
|
smtp_user: ""
|
||||||
|
smtp_password: ""
|
||||||
|
from_address: ""
|
||||||
|
to_addresses: []
|
||||||
53
data/icons/hicolor/scalable/apps/debai.svg
Archivo normal
53
data/icons/hicolor/scalable/apps/debai.svg
Archivo normal
@@ -0,0 +1,53 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<svg width="128" height="128" viewBox="0 0 128 128" xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<defs>
|
||||||
|
<linearGradient id="bg-gradient" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||||
|
<stop offset="0%" style="stop-color:#3584e4"/>
|
||||||
|
<stop offset="100%" style="stop-color:#1c71d8"/>
|
||||||
|
</linearGradient>
|
||||||
|
<linearGradient id="accent-gradient" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||||
|
<stop offset="0%" style="stop-color:#57e389"/>
|
||||||
|
<stop offset="100%" style="stop-color:#33d17a"/>
|
||||||
|
</linearGradient>
|
||||||
|
</defs>
|
||||||
|
|
||||||
|
<!-- Background circle -->
|
||||||
|
<circle cx="64" cy="64" r="60" fill="url(#bg-gradient)"/>
|
||||||
|
|
||||||
|
<!-- Outer ring -->
|
||||||
|
<circle cx="64" cy="64" r="56" fill="none" stroke="#ffffff" stroke-width="2" opacity="0.3"/>
|
||||||
|
|
||||||
|
<!-- AI brain/circuit pattern -->
|
||||||
|
<g fill="none" stroke="#ffffff" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<!-- Central node -->
|
||||||
|
<circle cx="64" cy="64" r="12" fill="url(#accent-gradient)" stroke="none"/>
|
||||||
|
|
||||||
|
<!-- Connection lines -->
|
||||||
|
<line x1="64" y1="52" x2="64" y2="32"/>
|
||||||
|
<line x1="64" y1="76" x2="64" y2="96"/>
|
||||||
|
<line x1="52" y1="64" x2="32" y2="64"/>
|
||||||
|
<line x1="76" y1="64" x2="96" y2="64"/>
|
||||||
|
|
||||||
|
<!-- Diagonal connections -->
|
||||||
|
<line x1="55" y1="55" x2="40" y2="40"/>
|
||||||
|
<line x1="73" y1="55" x2="88" y2="40"/>
|
||||||
|
<line x1="55" y1="73" x2="40" y2="88"/>
|
||||||
|
<line x1="73" y1="73" x2="88" y2="88"/>
|
||||||
|
|
||||||
|
<!-- Outer nodes -->
|
||||||
|
<circle cx="64" cy="28" r="6" fill="#ffffff"/>
|
||||||
|
<circle cx="64" cy="100" r="6" fill="#ffffff"/>
|
||||||
|
<circle cx="28" cy="64" r="6" fill="#ffffff"/>
|
||||||
|
<circle cx="100" cy="64" r="6" fill="#ffffff"/>
|
||||||
|
|
||||||
|
<!-- Corner nodes -->
|
||||||
|
<circle cx="36" cy="36" r="5" fill="#ffffff"/>
|
||||||
|
<circle cx="92" cy="36" r="5" fill="#ffffff"/>
|
||||||
|
<circle cx="36" cy="92" r="5" fill="#ffffff"/>
|
||||||
|
<circle cx="92" cy="92" r="5" fill="#ffffff"/>
|
||||||
|
</g>
|
||||||
|
|
||||||
|
<!-- D letter stylized -->
|
||||||
|
<text x="64" y="72" font-family="sans-serif" font-size="24" font-weight="bold"
|
||||||
|
fill="#ffffff" text-anchor="middle">D</text>
|
||||||
|
</svg>
|
||||||
|
Después Anchura: | Altura: | Tamaño: 2.0 KiB |
39
data/systemd/debai.service
Archivo normal
39
data/systemd/debai.service
Archivo normal
@@ -0,0 +1,39 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Debai AI Agent Management System
|
||||||
|
Documentation=man:debai(1)
|
||||||
|
After=network.target docker.service
|
||||||
|
Wants=docker.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=notify
|
||||||
|
User=debai
|
||||||
|
Group=debai
|
||||||
|
ExecStart=/usr/bin/debai daemon
|
||||||
|
ExecReload=/bin/kill -HUP $MAINPID
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=10
|
||||||
|
TimeoutStartSec=60
|
||||||
|
TimeoutStopSec=30
|
||||||
|
|
||||||
|
# Security hardening
|
||||||
|
NoNewPrivileges=yes
|
||||||
|
ProtectSystem=strict
|
||||||
|
ProtectHome=yes
|
||||||
|
PrivateTmp=yes
|
||||||
|
PrivateDevices=yes
|
||||||
|
ProtectKernelTunables=yes
|
||||||
|
ProtectKernelModules=yes
|
||||||
|
ProtectControlGroups=yes
|
||||||
|
RestrictRealtime=yes
|
||||||
|
RestrictSUIDSGID=yes
|
||||||
|
|
||||||
|
# Allow network and Docker socket access
|
||||||
|
PrivateNetwork=no
|
||||||
|
ReadWritePaths=/var/lib/debai /var/log/debai /run/docker.sock
|
||||||
|
|
||||||
|
# Capabilities
|
||||||
|
CapabilityBoundingSet=
|
||||||
|
AmbientCapabilities=
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
14
debian/changelog
vendido
Archivo normal
14
debian/changelog
vendido
Archivo normal
@@ -0,0 +1,14 @@
|
|||||||
|
debai (1.0.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Initial release.
|
||||||
|
* Features:
|
||||||
|
- AI agent management with cagent integration
|
||||||
|
- Model management with Docker Model Runner
|
||||||
|
- Task automation and scheduling
|
||||||
|
- GTK4/Adwaita graphical user interface
|
||||||
|
- Command-line interface with rich output
|
||||||
|
- ISO, QCOW2, and Docker Compose generation
|
||||||
|
- System resource monitoring
|
||||||
|
- Debian packaging support
|
||||||
|
|
||||||
|
-- Debai Team <debai@example.com> Sat, 18 Jan 2026 12:00:00 +0000
|
||||||
1
debian/compat
vendido
Archivo normal
1
debian/compat
vendido
Archivo normal
@@ -0,0 +1 @@
|
|||||||
|
13
|
||||||
55
debian/control
vendido
Archivo normal
55
debian/control
vendido
Archivo normal
@@ -0,0 +1,55 @@
|
|||||||
|
Source: debai
|
||||||
|
Section: utils
|
||||||
|
Priority: optional
|
||||||
|
Maintainer: Debai Team <debai@example.com>
|
||||||
|
Build-Depends: debhelper-compat (= 13),
|
||||||
|
dh-python,
|
||||||
|
python3-all,
|
||||||
|
python3-setuptools,
|
||||||
|
python3-pip,
|
||||||
|
python3-venv
|
||||||
|
Standards-Version: 4.6.2
|
||||||
|
Homepage: https://github.com/debai/debai
|
||||||
|
Vcs-Git: https://github.com/debai/debai.git
|
||||||
|
Vcs-Browser: https://github.com/debai/debai
|
||||||
|
Rules-Requires-Root: no
|
||||||
|
|
||||||
|
Package: debai
|
||||||
|
Architecture: all
|
||||||
|
Depends: ${python3:Depends},
|
||||||
|
${misc:Depends},
|
||||||
|
python3 (>= 3.10),
|
||||||
|
python3-click,
|
||||||
|
python3-yaml,
|
||||||
|
python3-jinja2,
|
||||||
|
python3-pydantic,
|
||||||
|
python3-aiohttp,
|
||||||
|
python3-psutil,
|
||||||
|
python3-gi,
|
||||||
|
gir1.2-gtk-4.0,
|
||||||
|
gir1.2-adw-1
|
||||||
|
Recommends: docker.io,
|
||||||
|
qemu-utils,
|
||||||
|
genisoimage
|
||||||
|
Suggests: qemu-system-x86
|
||||||
|
Description: AI Agent Management System for GNU/Linux
|
||||||
|
Debai is a comprehensive application for generating and managing AI agents
|
||||||
|
that automate system tasks like package updates, application configuration,
|
||||||
|
and resource management.
|
||||||
|
.
|
||||||
|
Features:
|
||||||
|
- Local AI models via Docker Model Runner
|
||||||
|
- Local agents via cagent
|
||||||
|
- Interactive CLI and GTK GUI
|
||||||
|
- ISO, QCOW2, and Docker Compose generation
|
||||||
|
- Automated system management
|
||||||
|
|
||||||
|
Package: debai-doc
|
||||||
|
Architecture: all
|
||||||
|
Section: doc
|
||||||
|
Depends: ${misc:Depends}
|
||||||
|
Description: AI Agent Management System - documentation
|
||||||
|
Debai is a comprehensive application for generating and managing AI agents
|
||||||
|
that automate system tasks.
|
||||||
|
.
|
||||||
|
This package contains the documentation.
|
||||||
29
debian/copyright
vendido
Archivo normal
29
debian/copyright
vendido
Archivo normal
@@ -0,0 +1,29 @@
|
|||||||
|
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||||
|
Upstream-Name: debai
|
||||||
|
Upstream-Contact: Debai Team <debai@example.com>
|
||||||
|
Source: https://github.com/debai/debai
|
||||||
|
|
||||||
|
Files: *
|
||||||
|
Copyright: 2025-2026 Debai Team
|
||||||
|
License: GPL-3+
|
||||||
|
|
||||||
|
Files: debian/*
|
||||||
|
Copyright: 2025-2026 Debai Team
|
||||||
|
License: GPL-3+
|
||||||
|
|
||||||
|
License: GPL-3+
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
.
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
.
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
.
|
||||||
|
On Debian systems, the complete text of the GNU General Public
|
||||||
|
License version 3 can be found in "/usr/share/common-licenses/GPL-3".
|
||||||
3
debian/debai.dirs
vendido
Archivo normal
3
debian/debai.dirs
vendido
Archivo normal
@@ -0,0 +1,3 @@
|
|||||||
|
etc/debai
|
||||||
|
var/lib/debai
|
||||||
|
var/log/debai
|
||||||
1
debian/debai.manpages
vendido
Archivo normal
1
debian/debai.manpages
vendido
Archivo normal
@@ -0,0 +1 @@
|
|||||||
|
debian/tmp/man/debai.1 usr/share/man/man1/
|
||||||
33
debian/debai.postinst
vendido
Archivo normal
33
debian/debai.postinst
vendido
Archivo normal
@@ -0,0 +1,33 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
set -e
|
||||||
|
|
||||||
|
case "$1" in
|
||||||
|
configure)
|
||||||
|
# Create debai system user if not exists
|
||||||
|
if ! getent passwd debai > /dev/null; then
|
||||||
|
adduser --system --group --home /var/lib/debai \
|
||||||
|
--no-create-home --disabled-password \
|
||||||
|
--gecos "Debai AI Agent System" debai
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Add debai user to docker group
|
||||||
|
if getent group docker > /dev/null; then
|
||||||
|
usermod -aG docker debai || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Set permissions
|
||||||
|
chown -R debai:debai /var/lib/debai
|
||||||
|
chown -R debai:debai /var/log/debai
|
||||||
|
chmod 750 /var/lib/debai
|
||||||
|
chmod 750 /var/log/debai
|
||||||
|
|
||||||
|
# Initialize configuration if not exists
|
||||||
|
if [ ! -f /etc/debai/config.yaml ]; then
|
||||||
|
debai init --system 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
#DEBHELPER#
|
||||||
|
|
||||||
|
exit 0
|
||||||
33
debian/debai.postrm
vendido
Archivo normal
33
debian/debai.postrm
vendido
Archivo normal
@@ -0,0 +1,33 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
set -e
|
||||||
|
|
||||||
|
case "$1" in
|
||||||
|
purge)
|
||||||
|
# Remove debai user
|
||||||
|
if getent passwd debai > /dev/null; then
|
||||||
|
deluser --quiet --system debai || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Remove debai group
|
||||||
|
if getent group debai > /dev/null; then
|
||||||
|
delgroup --quiet --system debai || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Remove data directories
|
||||||
|
rm -rf /var/lib/debai
|
||||||
|
rm -rf /var/log/debai
|
||||||
|
rm -rf /etc/debai
|
||||||
|
;;
|
||||||
|
|
||||||
|
remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
|
||||||
|
;;
|
||||||
|
|
||||||
|
*)
|
||||||
|
echo "postrm called with unknown argument \`$1'" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
#DEBHELPER#
|
||||||
|
|
||||||
|
exit 0
|
||||||
40
debian/rules
vendido
Archivo ejecutable
40
debian/rules
vendido
Archivo ejecutable
@@ -0,0 +1,40 @@
|
|||||||
|
#!/usr/bin/make -f
|
||||||
|
|
||||||
|
export DH_VERBOSE = 1
|
||||||
|
export PYBUILD_NAME = debai
|
||||||
|
|
||||||
|
%:
|
||||||
|
dh $@ --with python3 --buildsystem=pybuild
|
||||||
|
|
||||||
|
override_dh_auto_build:
|
||||||
|
dh_auto_build
|
||||||
|
# Build man pages
|
||||||
|
mkdir -p debian/tmp/man
|
||||||
|
help2man --no-info --name="AI Agent Management System" \
|
||||||
|
--output=debian/tmp/man/debai.1 \
|
||||||
|
./src/debai/cli/main.py || true
|
||||||
|
|
||||||
|
override_dh_auto_install:
|
||||||
|
dh_auto_install
|
||||||
|
# Install systemd service
|
||||||
|
install -D -m 644 data/systemd/debai.service \
|
||||||
|
debian/debai/lib/systemd/system/debai.service
|
||||||
|
# Install desktop file
|
||||||
|
install -D -m 644 data/applications/debai.desktop \
|
||||||
|
debian/debai/usr/share/applications/debai.desktop
|
||||||
|
# Install icons
|
||||||
|
install -D -m 644 data/icons/hicolor/scalable/apps/debai.svg \
|
||||||
|
debian/debai/usr/share/icons/hicolor/scalable/apps/debai.svg
|
||||||
|
# Install default configuration
|
||||||
|
install -D -m 644 data/config/debai.yaml \
|
||||||
|
debian/debai/etc/debai/config.yaml
|
||||||
|
|
||||||
|
override_dh_installman:
|
||||||
|
dh_installman debian/tmp/man/*.1 || true
|
||||||
|
|
||||||
|
override_dh_installsystemd:
|
||||||
|
dh_installsystemd --name=debai
|
||||||
|
|
||||||
|
override_dh_auto_test:
|
||||||
|
# Skip tests during package build
|
||||||
|
true
|
||||||
1
debian/source/format
vendido
Archivo normal
1
debian/source/format
vendido
Archivo normal
@@ -0,0 +1 @@
|
|||||||
|
3.0 (native)
|
||||||
291
docs/INSTALLATION.md
Archivo normal
291
docs/INSTALLATION.md
Archivo normal
@@ -0,0 +1,291 @@
|
|||||||
|
# Installation Guide
|
||||||
|
|
||||||
|
This guide provides detailed instructions for installing Debai on your GNU/Linux system.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
### System Requirements
|
||||||
|
|
||||||
|
- **Operating System**: Debian 12 (Bookworm) or newer, Ubuntu 22.04 or newer, or compatible
|
||||||
|
- **Python**: 3.10 or later
|
||||||
|
- **RAM**: 4GB minimum, 8GB recommended
|
||||||
|
- **Disk**: 10GB free space (more for AI models)
|
||||||
|
- **Architecture**: x86_64 (amd64)
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
|
||||||
|
#### Required
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y \
|
||||||
|
python3 \
|
||||||
|
python3-pip \
|
||||||
|
python3-venv \
|
||||||
|
python3-gi \
|
||||||
|
gir1.2-gtk-4.0 \
|
||||||
|
gir1.2-adw-1
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Recommended
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo apt install -y \
|
||||||
|
docker.io \
|
||||||
|
qemu-utils \
|
||||||
|
genisoimage
|
||||||
|
```
|
||||||
|
|
||||||
|
## Installation Methods
|
||||||
|
|
||||||
|
### Method 1: Debian Package (Recommended)
|
||||||
|
|
||||||
|
Download and install the `.deb` package:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Download the latest release
|
||||||
|
wget https://github.com/debai/debai/releases/latest/download/debai_1.0.0-1_all.deb
|
||||||
|
|
||||||
|
# Install
|
||||||
|
sudo dpkg -i debai_1.0.0-1_all.deb
|
||||||
|
|
||||||
|
# Install missing dependencies
|
||||||
|
sudo apt-get install -f
|
||||||
|
```
|
||||||
|
|
||||||
|
### Method 2: From PyPI
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install from PyPI
|
||||||
|
pip install debai
|
||||||
|
|
||||||
|
# Or with GUI support
|
||||||
|
pip install debai[gui]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Method 3: From Source
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clone the repository
|
||||||
|
git clone https://github.com/debai/debai.git
|
||||||
|
cd debai
|
||||||
|
|
||||||
|
# Create virtual environment
|
||||||
|
python3 -m venv venv
|
||||||
|
source venv/bin/activate
|
||||||
|
|
||||||
|
# Install in development mode
|
||||||
|
pip install -e .
|
||||||
|
|
||||||
|
# Or with all extras
|
||||||
|
pip install -e ".[gui,dev,docs]"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Method 4: Build Debian Package
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clone the repository
|
||||||
|
git clone https://github.com/debai/debai.git
|
||||||
|
cd debai
|
||||||
|
|
||||||
|
# Install build dependencies
|
||||||
|
sudo apt install -y \
|
||||||
|
build-essential \
|
||||||
|
debhelper \
|
||||||
|
dh-python \
|
||||||
|
python3-all \
|
||||||
|
python3-setuptools
|
||||||
|
|
||||||
|
# Build the package
|
||||||
|
dpkg-buildpackage -us -uc -b
|
||||||
|
|
||||||
|
# Install
|
||||||
|
sudo dpkg -i ../debai_1.0.0-1_all.deb
|
||||||
|
sudo apt-get install -f
|
||||||
|
```
|
||||||
|
|
||||||
|
## Post-Installation
|
||||||
|
|
||||||
|
### Initialize Debai
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Basic initialization
|
||||||
|
debai init
|
||||||
|
|
||||||
|
# Full initialization (includes pulling recommended model)
|
||||||
|
debai init --full
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configure Docker (Required for Models)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Add your user to docker group
|
||||||
|
sudo usermod -aG docker $USER
|
||||||
|
|
||||||
|
# Restart Docker service
|
||||||
|
sudo systemctl restart docker
|
||||||
|
|
||||||
|
# You may need to log out and back in for group changes to take effect
|
||||||
|
```
|
||||||
|
|
||||||
|
### Verify Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Check version
|
||||||
|
debai --version
|
||||||
|
|
||||||
|
# Show status
|
||||||
|
debai status
|
||||||
|
|
||||||
|
# List available models
|
||||||
|
debai model list
|
||||||
|
```
|
||||||
|
|
||||||
|
### Enable System Service (Optional)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Enable Debai service to start on boot
|
||||||
|
sudo systemctl enable debai
|
||||||
|
|
||||||
|
# Start the service
|
||||||
|
sudo systemctl start debai
|
||||||
|
|
||||||
|
# Check status
|
||||||
|
sudo systemctl status debai
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### User Configuration
|
||||||
|
|
||||||
|
Create or edit `~/.config/debai/config.yaml`:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
general:
|
||||||
|
log_level: info
|
||||||
|
|
||||||
|
agents:
|
||||||
|
max_concurrent: 5
|
||||||
|
auto_start: true
|
||||||
|
|
||||||
|
models:
|
||||||
|
default: llama3.2:3b
|
||||||
|
```
|
||||||
|
|
||||||
|
### System Configuration
|
||||||
|
|
||||||
|
Edit `/etc/debai/config.yaml` for system-wide settings (requires root).
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Docker Permission Denied
|
||||||
|
|
||||||
|
If you get "Permission denied" errors with Docker:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo usermod -aG docker $USER
|
||||||
|
newgrp docker # Or log out and back in
|
||||||
|
```
|
||||||
|
|
||||||
|
### GTK/GUI Not Working
|
||||||
|
|
||||||
|
Install additional GTK dependencies:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo apt install -y \
|
||||||
|
gir1.2-gtk-4.0 \
|
||||||
|
gir1.2-adw-1 \
|
||||||
|
python3-gi \
|
||||||
|
python3-gi-cairo
|
||||||
|
```
|
||||||
|
|
||||||
|
### Models Not Pulling
|
||||||
|
|
||||||
|
Check Docker status:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Check if Docker is running
|
||||||
|
sudo systemctl status docker
|
||||||
|
|
||||||
|
# Check Docker Model Runner
|
||||||
|
docker ps | grep model
|
||||||
|
```
|
||||||
|
|
||||||
|
### Import Errors
|
||||||
|
|
||||||
|
Ensure all Python dependencies are installed:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
## Upgrading
|
||||||
|
|
||||||
|
### Debian Package
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Download new version
|
||||||
|
wget https://github.com/debai/debai/releases/latest/download/debai_1.0.0-1_all.deb
|
||||||
|
|
||||||
|
# Upgrade
|
||||||
|
sudo dpkg -i debai_1.0.0-1_all.deb
|
||||||
|
```
|
||||||
|
|
||||||
|
### pip
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install --upgrade debai
|
||||||
|
```
|
||||||
|
|
||||||
|
### From Source
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd debai
|
||||||
|
git pull
|
||||||
|
pip install -e .
|
||||||
|
```
|
||||||
|
|
||||||
|
## Uninstallation
|
||||||
|
|
||||||
|
### Debian Package
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Remove package
|
||||||
|
sudo apt remove debai
|
||||||
|
|
||||||
|
# Remove package and configuration
|
||||||
|
sudo apt purge debai
|
||||||
|
```
|
||||||
|
|
||||||
|
### pip
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip uninstall debai
|
||||||
|
```
|
||||||
|
|
||||||
|
### Clean Up Data
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Remove user data (optional)
|
||||||
|
rm -rf ~/.config/debai
|
||||||
|
rm -rf ~/.local/share/debai
|
||||||
|
|
||||||
|
# Remove system data (requires root)
|
||||||
|
sudo rm -rf /var/lib/debai
|
||||||
|
sudo rm -rf /etc/debai
|
||||||
|
```
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
After installation:
|
||||||
|
|
||||||
|
1. **Pull a Model**: `debai model pull llama3.2:3b`
|
||||||
|
2. **Create an Agent**: `debai agent create --template package_updater`
|
||||||
|
3. **Launch GUI**: `debai-gui`
|
||||||
|
4. Read the [User Guide](USER_GUIDE.md)
|
||||||
|
|
||||||
|
## Getting Help
|
||||||
|
|
||||||
|
- 📚 [Documentation](https://debai.readthedocs.io)
|
||||||
|
- 🐛 [Issue Tracker](https://github.com/debai/debai/issues)
|
||||||
|
- 💬 [Discussions](https://github.com/debai/debai/discussions)
|
||||||
172
docs/debai.1
Archivo normal
172
docs/debai.1
Archivo normal
@@ -0,0 +1,172 @@
|
|||||||
|
.\" Manpage for debai
|
||||||
|
.\" Contact debai@example.com for errors or typos.
|
||||||
|
.TH DEBAI 1 "January 2026" "Debai 1.0.0" "User Commands"
|
||||||
|
.SH NAME
|
||||||
|
debai \- AI Agent Management System for GNU/Linux
|
||||||
|
.SH SYNOPSIS
|
||||||
|
.B debai
|
||||||
|
[\fIOPTIONS\fR] \fICOMMAND\fR [\fIARGS\fR]...
|
||||||
|
.SH DESCRIPTION
|
||||||
|
.B debai
|
||||||
|
is a comprehensive application for generating and managing AI agents that automate system tasks like package updates, application configuration, and resource management.
|
||||||
|
.PP
|
||||||
|
Debai uses local AI models via Docker Model Runner and local agents via cagent. It provides both a command-line interface and a graphical user interface (GTK4/Adwaita).
|
||||||
|
.SH OPTIONS
|
||||||
|
.TP
|
||||||
|
.BR \-v ", " \-\-verbose
|
||||||
|
Enable verbose output with debug information.
|
||||||
|
.TP
|
||||||
|
.BR \-c ", " \-\-config " " \fIFILE\fR
|
||||||
|
Use specified configuration file instead of default.
|
||||||
|
.TP
|
||||||
|
.BR \-\-version
|
||||||
|
Show version information and exit.
|
||||||
|
.TP
|
||||||
|
.BR \-\-help
|
||||||
|
Show help message and exit.
|
||||||
|
.SH COMMANDS
|
||||||
|
.SS "General Commands"
|
||||||
|
.TP
|
||||||
|
.B status
|
||||||
|
Show system and Debai status, including dependencies and Docker information.
|
||||||
|
.TP
|
||||||
|
.B init [\-\-full]
|
||||||
|
Initialize Debai environment. Use \-\-full to also pull recommended models.
|
||||||
|
.TP
|
||||||
|
.B monitor [\-i INTERVAL]
|
||||||
|
Monitor system resources in real-time. Default interval is 2 seconds.
|
||||||
|
.SS "Agent Commands"
|
||||||
|
.TP
|
||||||
|
.B agent list [\-s STATUS]
|
||||||
|
List all agents. Filter by status: running, stopped, or all.
|
||||||
|
.TP
|
||||||
|
.B agent create \-n NAME [\-t TYPE] [\-m MODEL] [\-\-template NAME]
|
||||||
|
Create a new agent. Available types: system, package, config, resource, security, backup, network, custom.
|
||||||
|
.TP
|
||||||
|
.B agent start AGENT_ID
|
||||||
|
Start an agent.
|
||||||
|
.TP
|
||||||
|
.B agent stop AGENT_ID
|
||||||
|
Stop an agent.
|
||||||
|
.TP
|
||||||
|
.B agent delete AGENT_ID [\-f]
|
||||||
|
Delete an agent. Use \-f to skip confirmation.
|
||||||
|
.TP
|
||||||
|
.B agent chat AGENT_ID
|
||||||
|
Start an interactive chat session with an agent.
|
||||||
|
.TP
|
||||||
|
.B agent templates
|
||||||
|
List available agent templates.
|
||||||
|
.SS "Model Commands"
|
||||||
|
.TP
|
||||||
|
.B model list
|
||||||
|
List available models.
|
||||||
|
.TP
|
||||||
|
.B model pull MODEL_ID
|
||||||
|
Pull a model from Docker Model Runner.
|
||||||
|
.TP
|
||||||
|
.B model remove MODEL_ID [\-f]
|
||||||
|
Remove a model. Use \-f to skip confirmation.
|
||||||
|
.TP
|
||||||
|
.B model recommended
|
||||||
|
Show recommended models for different use cases.
|
||||||
|
.SS "Task Commands"
|
||||||
|
.TP
|
||||||
|
.B task list [\-s STATUS]
|
||||||
|
List all tasks. Filter by status: pending, running, completed, failed, or all.
|
||||||
|
.TP
|
||||||
|
.B task create \-n NAME \-c COMMAND [\-p PRIORITY] [\-\-template NAME]
|
||||||
|
Create a new task.
|
||||||
|
.TP
|
||||||
|
.B task run TASK_ID
|
||||||
|
Run a task immediately.
|
||||||
|
.TP
|
||||||
|
.B task templates
|
||||||
|
List available task templates.
|
||||||
|
.SS "Generate Commands"
|
||||||
|
.TP
|
||||||
|
.B generate iso [\-o OUTPUT] [\-\-base DISTRO] [\-\-include\-agents]
|
||||||
|
Generate a bootable ISO image with Debai pre-installed.
|
||||||
|
.TP
|
||||||
|
.B generate qcow2 [\-o OUTPUT] [\-\-size SIZE] [\-\-base DISTRO]
|
||||||
|
Generate a QCOW2 disk image for QEMU/KVM.
|
||||||
|
.TP
|
||||||
|
.B generate compose [\-o OUTPUT] [\-\-include\-gui]
|
||||||
|
Generate a Docker Compose configuration.
|
||||||
|
.SH EXAMPLES
|
||||||
|
.PP
|
||||||
|
Initialize Debai and pull a model:
|
||||||
|
.RS
|
||||||
|
.nf
|
||||||
|
$ debai init --full
|
||||||
|
.fi
|
||||||
|
.RE
|
||||||
|
.PP
|
||||||
|
Create an agent from template:
|
||||||
|
.RS
|
||||||
|
.nf
|
||||||
|
$ debai agent create --name "Updates" --template package_updater
|
||||||
|
.fi
|
||||||
|
.RE
|
||||||
|
.PP
|
||||||
|
Chat with an agent:
|
||||||
|
.RS
|
||||||
|
.nf
|
||||||
|
$ debai agent chat abc123
|
||||||
|
.fi
|
||||||
|
.RE
|
||||||
|
.PP
|
||||||
|
Generate an ISO image:
|
||||||
|
.RS
|
||||||
|
.nf
|
||||||
|
$ debai generate iso --output debai.iso --include-agents
|
||||||
|
.fi
|
||||||
|
.RE
|
||||||
|
.SH FILES
|
||||||
|
.TP
|
||||||
|
.I /etc/debai/config.yaml
|
||||||
|
System-wide configuration file.
|
||||||
|
.TP
|
||||||
|
.I ~/.config/debai/
|
||||||
|
User configuration directory.
|
||||||
|
.TP
|
||||||
|
.I /var/lib/debai/
|
||||||
|
Data directory for agents, models, and tasks.
|
||||||
|
.TP
|
||||||
|
.I /var/log/debai/
|
||||||
|
Log files directory.
|
||||||
|
.SH ENVIRONMENT
|
||||||
|
.TP
|
||||||
|
.B DEBAI_CONFIG_DIR
|
||||||
|
Override the configuration directory.
|
||||||
|
.TP
|
||||||
|
.B DEBAI_DATA_DIR
|
||||||
|
Override the data directory.
|
||||||
|
.TP
|
||||||
|
.B DEBAI_LOG_LEVEL
|
||||||
|
Set the logging level (debug, info, warning, error).
|
||||||
|
.SH EXIT STATUS
|
||||||
|
.TP
|
||||||
|
.B 0
|
||||||
|
Success.
|
||||||
|
.TP
|
||||||
|
.B 1
|
||||||
|
General error.
|
||||||
|
.TP
|
||||||
|
.B 2
|
||||||
|
Command line syntax error.
|
||||||
|
.SH SEE ALSO
|
||||||
|
.BR debai-gui (1),
|
||||||
|
.BR docker (1),
|
||||||
|
.BR qemu-img (1),
|
||||||
|
.BR systemctl (1)
|
||||||
|
.SH BUGS
|
||||||
|
Report bugs at: https://github.com/debai/debai/issues
|
||||||
|
.SH AUTHOR
|
||||||
|
Debai Team <debai@example.com>
|
||||||
|
.SH COPYRIGHT
|
||||||
|
Copyright \(co 2025-2026 Debai Team.
|
||||||
|
License GPLv3+: GNU GPL version 3 or later <https://gnu.org/licenses/gpl.html>.
|
||||||
|
.PP
|
||||||
|
This is free software: you are free to change and redistribute it.
|
||||||
|
There is NO WARRANTY, to the extent permitted by law.
|
||||||
109
pyproject.toml
Archivo normal
109
pyproject.toml
Archivo normal
@@ -0,0 +1,109 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=68.0", "wheel"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "debai"
|
||||||
|
version = "1.0.0"
|
||||||
|
description = "AI Agent Management System for GNU/Linux - Automate system tasks with local AI agents"
|
||||||
|
readme = "README.md"
|
||||||
|
license = {text = "GPL-3.0-or-later"}
|
||||||
|
authors = [
|
||||||
|
{name = "Debai Team", email = "debai@example.com"}
|
||||||
|
]
|
||||||
|
maintainers = [
|
||||||
|
{name = "Debai Team", email = "debai@example.com"}
|
||||||
|
]
|
||||||
|
keywords = [
|
||||||
|
"ai", "agents", "automation", "linux", "system-management",
|
||||||
|
"docker", "qemu", "devops", "cagent", "docker-model"
|
||||||
|
]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 4 - Beta",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Environment :: X11 Applications :: GTK",
|
||||||
|
"Intended Audience :: System Administrators",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"License :: OSI Approved :: GNU General Public License v3 or later (GPL v3+)",
|
||||||
|
"Operating System :: POSIX :: Linux",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Topic :: System :: Systems Administration",
|
||||||
|
"Topic :: Scientific/Engineering :: Artificial Intelligence",
|
||||||
|
]
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
dependencies = [
|
||||||
|
"click>=8.1.0",
|
||||||
|
"rich>=13.0.0",
|
||||||
|
"pyyaml>=6.0",
|
||||||
|
"jinja2>=3.1.0",
|
||||||
|
"pydantic>=2.0.0",
|
||||||
|
"aiohttp>=3.9.0",
|
||||||
|
"asyncio>=3.4.3",
|
||||||
|
"docker>=7.0.0",
|
||||||
|
"psutil>=5.9.0",
|
||||||
|
"pygobject>=3.44.0",
|
||||||
|
"tomli>=2.0.0;python_version<'3.11'",
|
||||||
|
"typing-extensions>=4.0.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
|
"pytest>=7.0.0",
|
||||||
|
"pytest-asyncio>=0.21.0",
|
||||||
|
"pytest-cov>=4.0.0",
|
||||||
|
"black>=23.0.0",
|
||||||
|
"isort>=5.12.0",
|
||||||
|
"mypy>=1.0.0",
|
||||||
|
"ruff>=0.1.0",
|
||||||
|
]
|
||||||
|
docs = [
|
||||||
|
"sphinx>=7.0.0",
|
||||||
|
"sphinx-rtd-theme>=1.3.0",
|
||||||
|
"myst-parser>=2.0.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
debai = "debai.cli:main"
|
||||||
|
debai-gui = "debai.gui:main"
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Homepage = "https://github.com/debai/debai"
|
||||||
|
Documentation = "https://debai.readthedocs.io"
|
||||||
|
Repository = "https://github.com/debai/debai.git"
|
||||||
|
Issues = "https://github.com/debai/debai/issues"
|
||||||
|
Changelog = "https://github.com/debai/debai/blob/main/CHANGELOG.md"
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
where = ["src"]
|
||||||
|
|
||||||
|
[tool.setuptools.package-data]
|
||||||
|
debai = [
|
||||||
|
"templates/**/*",
|
||||||
|
"resources/**/*",
|
||||||
|
"data/**/*",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.black]
|
||||||
|
line-length = 100
|
||||||
|
target-version = ['py310', 'py311', 'py312']
|
||||||
|
|
||||||
|
[tool.isort]
|
||||||
|
profile = "black"
|
||||||
|
line_length = 100
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 100
|
||||||
|
target-version = "py310"
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
python_version = "3.10"
|
||||||
|
warn_return_any = true
|
||||||
|
warn_unused_configs = true
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
asyncio_mode = "auto"
|
||||||
|
testpaths = ["tests"]
|
||||||
7
requirements.txt
Archivo normal
7
requirements.txt
Archivo normal
@@ -0,0 +1,7 @@
|
|||||||
|
click>=8.0
|
||||||
|
rich>=13.0
|
||||||
|
pyyaml>=6.0
|
||||||
|
pydantic>=2.0
|
||||||
|
aiohttp>=3.9
|
||||||
|
psutil>=5.9
|
||||||
|
jinja2>=3.1
|
||||||
36
src/debai/__init__.py
Archivo normal
36
src/debai/__init__.py
Archivo normal
@@ -0,0 +1,36 @@
|
|||||||
|
"""
|
||||||
|
Debai - AI Agent Management System for GNU/Linux
|
||||||
|
|
||||||
|
A comprehensive application for generating and managing AI agents that automate
|
||||||
|
system tasks like package updates, application configuration, and resource management.
|
||||||
|
|
||||||
|
Features:
|
||||||
|
- Local AI models via Docker Model
|
||||||
|
- Local agents via cagent
|
||||||
|
- Interactive CLI and GTK GUI
|
||||||
|
- ISO, QCOW2, and Docker Compose generation
|
||||||
|
- Debian packaging support
|
||||||
|
"""
|
||||||
|
|
||||||
|
__version__ = "1.0.0"
|
||||||
|
__author__ = "Debai Team"
|
||||||
|
__license__ = "GPL-3.0-or-later"
|
||||||
|
|
||||||
|
from debai.core.agent import Agent, AgentConfig, AgentManager
|
||||||
|
from debai.core.model import Model, ModelConfig, ModelManager
|
||||||
|
from debai.core.task import Task, TaskConfig, TaskManager
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"__version__",
|
||||||
|
"__author__",
|
||||||
|
"__license__",
|
||||||
|
"Agent",
|
||||||
|
"AgentConfig",
|
||||||
|
"AgentManager",
|
||||||
|
"Model",
|
||||||
|
"ModelConfig",
|
||||||
|
"ModelManager",
|
||||||
|
"Task",
|
||||||
|
"TaskConfig",
|
||||||
|
"TaskManager",
|
||||||
|
]
|
||||||
9
src/debai/cli/__init__.py
Archivo normal
9
src/debai/cli/__init__.py
Archivo normal
@@ -0,0 +1,9 @@
|
|||||||
|
"""
|
||||||
|
CLI module for Debai.
|
||||||
|
|
||||||
|
This module provides the command-line interface using Click.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from debai.cli.main import main, cli
|
||||||
|
|
||||||
|
__all__ = ["main", "cli"]
|
||||||
971
src/debai/cli/main.py
Archivo normal
971
src/debai/cli/main.py
Archivo normal
@@ -0,0 +1,971 @@
|
|||||||
|
"""
|
||||||
|
Main CLI entry point for Debai.
|
||||||
|
|
||||||
|
Provides a comprehensive command-line interface for managing AI agents,
|
||||||
|
models, tasks, and system generation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import click
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.logging import RichHandler
|
||||||
|
from rich.panel import Panel
|
||||||
|
from rich.table import Table
|
||||||
|
from rich.progress import Progress, SpinnerColumn, TextColumn
|
||||||
|
from rich.prompt import Prompt, Confirm
|
||||||
|
from rich.tree import Tree
|
||||||
|
from rich import box
|
||||||
|
|
||||||
|
from debai import __version__
|
||||||
|
from debai.core.agent import (
|
||||||
|
Agent, AgentConfig, AgentManager, AgentType, AgentCapability,
|
||||||
|
get_agent_template, list_agent_templates,
|
||||||
|
)
|
||||||
|
from debai.core.model import (
|
||||||
|
Model, ModelConfig, ModelManager,
|
||||||
|
get_recommended_model, list_recommended_models,
|
||||||
|
)
|
||||||
|
from debai.core.task import (
|
||||||
|
Task, TaskConfig, TaskManager, TaskType, TaskPriority,
|
||||||
|
get_task_template, list_task_templates,
|
||||||
|
)
|
||||||
|
from debai.core.system import SystemInfo, ResourceMonitor, check_dependencies, get_docker_status
|
||||||
|
|
||||||
|
# Set up console and logging
|
||||||
|
console = Console()
|
||||||
|
|
||||||
|
|
||||||
|
def setup_logging(verbose: bool = False) -> None:
|
||||||
|
"""Configure logging with rich handler."""
|
||||||
|
level = logging.DEBUG if verbose else logging.INFO
|
||||||
|
logging.basicConfig(
|
||||||
|
level=level,
|
||||||
|
format="%(message)s",
|
||||||
|
handlers=[RichHandler(console=console, show_time=False, show_path=False)],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def print_banner() -> None:
|
||||||
|
"""Print the Debai banner."""
|
||||||
|
banner = """
|
||||||
|
╔══════════════════════════════════════════════════════════════╗
|
||||||
|
║ ║
|
||||||
|
║ ██████╗ ███████╗██████╗ █████╗ ██╗ ║
|
||||||
|
║ ██╔══██╗██╔════╝██╔══██╗██╔══██╗██║ ║
|
||||||
|
║ ██║ ██║█████╗ ██████╔╝███████║██║ ║
|
||||||
|
║ ██║ ██║██╔══╝ ██╔══██╗██╔══██║██║ ║
|
||||||
|
║ ██████╔╝███████╗██████╔╝██║ ██║██║ ║
|
||||||
|
║ ╚═════╝ ╚══════╝╚═════╝ ╚═╝ ╚═╝╚═╝ ║
|
||||||
|
║ ║
|
||||||
|
║ AI Agent Management System for GNU/Linux ║
|
||||||
|
║ ║
|
||||||
|
╚══════════════════════════════════════════════════════════════╝
|
||||||
|
"""
|
||||||
|
console.print(banner, style="bold cyan")
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Main CLI Group
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
@click.option("-v", "--verbose", is_flag=True, help="Enable verbose output")
|
||||||
|
@click.option("-c", "--config", type=click.Path(), help="Configuration file path")
|
||||||
|
@click.version_option(version=__version__, prog_name="debai")
|
||||||
|
@click.pass_context
|
||||||
|
def cli(ctx: click.Context, verbose: bool, config: Optional[str]) -> None:
|
||||||
|
"""
|
||||||
|
Debai - AI Agent Management System for GNU/Linux
|
||||||
|
|
||||||
|
Manage AI agents that automate system tasks like package updates,
|
||||||
|
configuration management, and resource monitoring.
|
||||||
|
|
||||||
|
Use 'debai COMMAND --help' for more information on a specific command.
|
||||||
|
"""
|
||||||
|
ctx.ensure_object(dict)
|
||||||
|
ctx.obj["verbose"] = verbose
|
||||||
|
ctx.obj["config"] = config
|
||||||
|
setup_logging(verbose)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Status Command
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@cli.command()
|
||||||
|
@click.pass_context
|
||||||
|
def status(ctx: click.Context) -> None:
|
||||||
|
"""Show system and Debai status."""
|
||||||
|
print_banner()
|
||||||
|
|
||||||
|
# System information
|
||||||
|
console.print("\n[bold cyan]📊 System Information[/bold cyan]\n")
|
||||||
|
|
||||||
|
info = SystemInfo.get_summary()
|
||||||
|
|
||||||
|
table = Table(box=box.ROUNDED, show_header=False, padding=(0, 2))
|
||||||
|
table.add_column("Property", style="bold")
|
||||||
|
table.add_column("Value")
|
||||||
|
|
||||||
|
table.add_row("Hostname", info["hostname"])
|
||||||
|
table.add_row("OS", f"{info['os']['system']} {info['os']['release']}")
|
||||||
|
table.add_row("Distribution", f"{info['distro']['name']} {info['distro']['version']}")
|
||||||
|
table.add_row("CPU", info["cpu"]["model"][:50] if info["cpu"]["model"] else "Unknown")
|
||||||
|
table.add_row("Cores", f"{info['cpu']['cores_physical']} physical / {info['cpu']['cores_logical']} logical")
|
||||||
|
table.add_row("CPU Usage", f"{info['cpu']['usage_percent']:.1f}%")
|
||||||
|
table.add_row("Memory", f"{info['memory']['percent_used']:.1f}% used")
|
||||||
|
table.add_row("Uptime", info["uptime"])
|
||||||
|
table.add_row("Load", f"{info['load_average'][0]:.2f}, {info['load_average'][1]:.2f}, {info['load_average'][2]:.2f}")
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
# Dependencies
|
||||||
|
console.print("\n[bold cyan]🔧 Dependencies[/bold cyan]\n")
|
||||||
|
|
||||||
|
deps = check_dependencies()
|
||||||
|
dep_table = Table(box=box.ROUNDED, show_header=True, padding=(0, 2))
|
||||||
|
dep_table.add_column("Dependency")
|
||||||
|
dep_table.add_column("Status")
|
||||||
|
|
||||||
|
for dep, available in deps.items():
|
||||||
|
status_icon = "[green]✓ Available[/green]" if available else "[red]✗ Missing[/red]"
|
||||||
|
dep_table.add_row(dep, status_icon)
|
||||||
|
|
||||||
|
console.print(dep_table)
|
||||||
|
|
||||||
|
# Docker status
|
||||||
|
docker = get_docker_status()
|
||||||
|
if docker["installed"]:
|
||||||
|
console.print("\n[bold cyan]🐳 Docker Status[/bold cyan]\n")
|
||||||
|
docker_table = Table(box=box.ROUNDED, show_header=False, padding=(0, 2))
|
||||||
|
docker_table.add_column("Property", style="bold")
|
||||||
|
docker_table.add_column("Value")
|
||||||
|
|
||||||
|
docker_table.add_row("Version", docker["version"])
|
||||||
|
docker_table.add_row("Running", "[green]Yes[/green]" if docker["running"] else "[red]No[/red]")
|
||||||
|
docker_table.add_row("Containers", str(docker["containers"]))
|
||||||
|
docker_table.add_row("Images", str(docker["images"]))
|
||||||
|
|
||||||
|
console.print(docker_table)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Agent Commands
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@cli.group()
|
||||||
|
def agent() -> None:
|
||||||
|
"""Manage AI agents."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@agent.command("list")
|
||||||
|
@click.option("-s", "--status", type=click.Choice(["running", "stopped", "all"]), default="all")
|
||||||
|
@click.pass_context
|
||||||
|
def agent_list(ctx: click.Context, status: str) -> None:
|
||||||
|
"""List all agents."""
|
||||||
|
manager = AgentManager()
|
||||||
|
manager.load_agents()
|
||||||
|
|
||||||
|
agents = manager.list_agents()
|
||||||
|
|
||||||
|
if not agents:
|
||||||
|
console.print("[yellow]No agents found. Create one with 'debai agent create'[/yellow]")
|
||||||
|
return
|
||||||
|
|
||||||
|
table = Table(
|
||||||
|
title="[bold]AI Agents[/bold]",
|
||||||
|
box=box.ROUNDED,
|
||||||
|
show_header=True,
|
||||||
|
padding=(0, 1),
|
||||||
|
)
|
||||||
|
table.add_column("ID", style="cyan")
|
||||||
|
table.add_column("Name")
|
||||||
|
table.add_column("Type")
|
||||||
|
table.add_column("Status")
|
||||||
|
table.add_column("Model")
|
||||||
|
table.add_column("Interactive")
|
||||||
|
|
||||||
|
for agent in agents:
|
||||||
|
status_icon = {
|
||||||
|
"stopped": "[dim]● Stopped[/dim]",
|
||||||
|
"running": "[green]● Running[/green]",
|
||||||
|
"error": "[red]● Error[/red]",
|
||||||
|
"waiting": "[yellow]● Waiting[/yellow]",
|
||||||
|
}.get(agent.status.value, agent.status.value)
|
||||||
|
|
||||||
|
interactive = "✓" if agent.config.interactive else "✗"
|
||||||
|
|
||||||
|
table.add_row(
|
||||||
|
agent.id,
|
||||||
|
agent.name,
|
||||||
|
agent.config.agent_type,
|
||||||
|
status_icon,
|
||||||
|
agent.config.model_id,
|
||||||
|
interactive,
|
||||||
|
)
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
|
||||||
|
@agent.command("create")
|
||||||
|
@click.option("-n", "--name", prompt="Agent name", help="Name for the agent")
|
||||||
|
@click.option("-t", "--type", "agent_type",
|
||||||
|
type=click.Choice([t.value for t in AgentType]),
|
||||||
|
default="custom", help="Agent type")
|
||||||
|
@click.option("-m", "--model", default="llama3.2:3b", help="Model to use")
|
||||||
|
@click.option("--template", help="Use a predefined template")
|
||||||
|
@click.option("--interactive/--no-interactive", default=True, help="Allow user interaction")
|
||||||
|
@click.pass_context
|
||||||
|
def agent_create(
|
||||||
|
ctx: click.Context,
|
||||||
|
name: str,
|
||||||
|
agent_type: str,
|
||||||
|
model: str,
|
||||||
|
template: Optional[str],
|
||||||
|
interactive: bool,
|
||||||
|
) -> None:
|
||||||
|
"""Create a new agent."""
|
||||||
|
if template:
|
||||||
|
config = get_agent_template(template)
|
||||||
|
if not config:
|
||||||
|
console.print(f"[red]Template '{template}' not found[/red]")
|
||||||
|
console.print(f"Available templates: {', '.join(list_agent_templates())}")
|
||||||
|
return
|
||||||
|
config.name = name
|
||||||
|
config.model_id = model
|
||||||
|
else:
|
||||||
|
config = AgentConfig(
|
||||||
|
name=name,
|
||||||
|
agent_type=AgentType(agent_type),
|
||||||
|
model_id=model,
|
||||||
|
interactive=interactive,
|
||||||
|
)
|
||||||
|
|
||||||
|
manager = AgentManager()
|
||||||
|
|
||||||
|
async def create():
|
||||||
|
agent = await manager.create_agent(config)
|
||||||
|
return agent
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
console=console,
|
||||||
|
) as progress:
|
||||||
|
progress.add_task("Creating agent...", total=None)
|
||||||
|
agent = asyncio.run(create())
|
||||||
|
|
||||||
|
console.print(f"\n[green]✓ Agent created successfully![/green]")
|
||||||
|
console.print(f" ID: [cyan]{agent.id}[/cyan]")
|
||||||
|
console.print(f" Name: {agent.name}")
|
||||||
|
console.print(f" Type: {agent.config.agent_type}")
|
||||||
|
console.print(f" Model: {agent.config.model_id}")
|
||||||
|
console.print(f"\nStart with: [bold]debai agent start {agent.id}[/bold]")
|
||||||
|
|
||||||
|
|
||||||
|
@agent.command("start")
|
||||||
|
@click.argument("agent_id")
|
||||||
|
@click.pass_context
|
||||||
|
def agent_start(ctx: click.Context, agent_id: str) -> None:
|
||||||
|
"""Start an agent."""
|
||||||
|
manager = AgentManager()
|
||||||
|
manager.load_agents()
|
||||||
|
|
||||||
|
agent = manager.get_agent(agent_id)
|
||||||
|
if not agent:
|
||||||
|
console.print(f"[red]Agent '{agent_id}' not found[/red]")
|
||||||
|
return
|
||||||
|
|
||||||
|
async def start():
|
||||||
|
return await agent.start()
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
console=console,
|
||||||
|
) as progress:
|
||||||
|
progress.add_task(f"Starting agent '{agent.name}'...", total=None)
|
||||||
|
success = asyncio.run(start())
|
||||||
|
|
||||||
|
if success:
|
||||||
|
console.print(f"[green]✓ Agent '{agent.name}' started[/green]")
|
||||||
|
else:
|
||||||
|
console.print(f"[red]✗ Failed to start agent '{agent.name}'[/red]")
|
||||||
|
|
||||||
|
|
||||||
|
@agent.command("stop")
|
||||||
|
@click.argument("agent_id")
|
||||||
|
@click.pass_context
|
||||||
|
def agent_stop(ctx: click.Context, agent_id: str) -> None:
|
||||||
|
"""Stop an agent."""
|
||||||
|
manager = AgentManager()
|
||||||
|
manager.load_agents()
|
||||||
|
|
||||||
|
agent = manager.get_agent(agent_id)
|
||||||
|
if not agent:
|
||||||
|
console.print(f"[red]Agent '{agent_id}' not found[/red]")
|
||||||
|
return
|
||||||
|
|
||||||
|
async def stop():
|
||||||
|
return await agent.stop()
|
||||||
|
|
||||||
|
success = asyncio.run(stop())
|
||||||
|
|
||||||
|
if success:
|
||||||
|
console.print(f"[green]✓ Agent '{agent.name}' stopped[/green]")
|
||||||
|
else:
|
||||||
|
console.print(f"[red]✗ Failed to stop agent '{agent.name}'[/red]")
|
||||||
|
|
||||||
|
|
||||||
|
@agent.command("delete")
|
||||||
|
@click.argument("agent_id")
|
||||||
|
@click.option("--force", "-f", is_flag=True, help="Skip confirmation")
|
||||||
|
@click.pass_context
|
||||||
|
def agent_delete(ctx: click.Context, agent_id: str, force: bool) -> None:
|
||||||
|
"""Delete an agent."""
|
||||||
|
manager = AgentManager()
|
||||||
|
manager.load_agents()
|
||||||
|
|
||||||
|
agent = manager.get_agent(agent_id)
|
||||||
|
if not agent:
|
||||||
|
console.print(f"[red]Agent '{agent_id}' not found[/red]")
|
||||||
|
return
|
||||||
|
|
||||||
|
if not force:
|
||||||
|
if not Confirm.ask(f"Delete agent '{agent.name}'?"):
|
||||||
|
return
|
||||||
|
|
||||||
|
async def delete():
|
||||||
|
return await manager.delete_agent(agent_id)
|
||||||
|
|
||||||
|
success = asyncio.run(delete())
|
||||||
|
|
||||||
|
if success:
|
||||||
|
console.print(f"[green]✓ Agent '{agent.name}' deleted[/green]")
|
||||||
|
else:
|
||||||
|
console.print(f"[red]✗ Failed to delete agent[/red]")
|
||||||
|
|
||||||
|
|
||||||
|
@agent.command("chat")
|
||||||
|
@click.argument("agent_id")
|
||||||
|
@click.pass_context
|
||||||
|
def agent_chat(ctx: click.Context, agent_id: str) -> None:
|
||||||
|
"""Start an interactive chat session with an agent."""
|
||||||
|
manager = AgentManager()
|
||||||
|
manager.load_agents()
|
||||||
|
|
||||||
|
agent = manager.get_agent(agent_id)
|
||||||
|
if not agent:
|
||||||
|
console.print(f"[red]Agent '{agent_id}' not found[/red]")
|
||||||
|
return
|
||||||
|
|
||||||
|
console.print(Panel(
|
||||||
|
f"[bold]Chat with {agent.name}[/bold]\n\n"
|
||||||
|
f"Type your message and press Enter.\n"
|
||||||
|
f"Type 'exit' or 'quit' to end the session.",
|
||||||
|
title="Agent Chat",
|
||||||
|
border_style="cyan",
|
||||||
|
))
|
||||||
|
|
||||||
|
async def chat_session():
|
||||||
|
await agent.start()
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
user_input = Prompt.ask("\n[bold cyan]You[/bold cyan]")
|
||||||
|
|
||||||
|
if user_input.lower() in ("exit", "quit", "q"):
|
||||||
|
break
|
||||||
|
|
||||||
|
response = await agent.send_message(user_input)
|
||||||
|
if response:
|
||||||
|
console.print(f"\n[bold green]{agent.name}[/bold green]: {response.content}")
|
||||||
|
else:
|
||||||
|
console.print("[yellow]No response from agent[/yellow]")
|
||||||
|
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
break
|
||||||
|
|
||||||
|
await agent.stop()
|
||||||
|
|
||||||
|
asyncio.run(chat_session())
|
||||||
|
console.print("\n[dim]Chat session ended[/dim]")
|
||||||
|
|
||||||
|
|
||||||
|
@agent.command("templates")
|
||||||
|
def agent_templates() -> None:
|
||||||
|
"""List available agent templates."""
|
||||||
|
templates = list_agent_templates()
|
||||||
|
|
||||||
|
table = Table(
|
||||||
|
title="[bold]Agent Templates[/bold]",
|
||||||
|
box=box.ROUNDED,
|
||||||
|
show_header=True,
|
||||||
|
)
|
||||||
|
table.add_column("Template")
|
||||||
|
table.add_column("Description")
|
||||||
|
table.add_column("Type")
|
||||||
|
|
||||||
|
for name in templates:
|
||||||
|
config = get_agent_template(name)
|
||||||
|
if config:
|
||||||
|
table.add_row(name, config.description, config.agent_type)
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
console.print("\nUse with: [bold]debai agent create --template <name>[/bold]")
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Model Commands
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@cli.group()
|
||||||
|
def model() -> None:
|
||||||
|
"""Manage AI models."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@model.command("list")
|
||||||
|
@click.pass_context
|
||||||
|
def model_list(ctx: click.Context) -> None:
|
||||||
|
"""List available models."""
|
||||||
|
manager = ModelManager()
|
||||||
|
manager.load_models()
|
||||||
|
|
||||||
|
# Also try to discover from Docker Model
|
||||||
|
async def discover():
|
||||||
|
return await manager.discover_models()
|
||||||
|
|
||||||
|
discovered = asyncio.run(discover())
|
||||||
|
|
||||||
|
models = manager.list_models()
|
||||||
|
|
||||||
|
if not models and not discovered:
|
||||||
|
console.print("[yellow]No models found.[/yellow]")
|
||||||
|
console.print("Pull a model with: [bold]debai model pull <model-id>[/bold]")
|
||||||
|
return
|
||||||
|
|
||||||
|
table = Table(
|
||||||
|
title="[bold]AI Models[/bold]",
|
||||||
|
box=box.ROUNDED,
|
||||||
|
show_header=True,
|
||||||
|
)
|
||||||
|
table.add_column("ID", style="cyan")
|
||||||
|
table.add_column("Name")
|
||||||
|
table.add_column("Status")
|
||||||
|
table.add_column("Size")
|
||||||
|
|
||||||
|
for m in models:
|
||||||
|
status_icon = {
|
||||||
|
"ready": "[green]● Ready[/green]",
|
||||||
|
"loaded": "[green]● Loaded[/green]",
|
||||||
|
"pulling": "[yellow]● Pulling[/yellow]",
|
||||||
|
"not_pulled": "[dim]● Not Pulled[/dim]",
|
||||||
|
"error": "[red]● Error[/red]",
|
||||||
|
}.get(m.status.value, m.status.value)
|
||||||
|
|
||||||
|
size = f"{m.config.size_bytes / (1024**3):.1f} GB" if m.config.size_bytes else "-"
|
||||||
|
table.add_row(m.id, m.name, status_icon, size)
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
|
||||||
|
@model.command("pull")
|
||||||
|
@click.argument("model_id")
|
||||||
|
@click.pass_context
|
||||||
|
def model_pull(ctx: click.Context, model_id: str) -> None:
|
||||||
|
"""Pull a model from Docker Model Runner."""
|
||||||
|
manager = ModelManager()
|
||||||
|
|
||||||
|
config = ModelConfig(id=model_id, name=model_id)
|
||||||
|
|
||||||
|
async def pull():
|
||||||
|
m = await manager.add_model(config)
|
||||||
|
return await m.pull()
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
console=console,
|
||||||
|
) as progress:
|
||||||
|
progress.add_task(f"Pulling model '{model_id}'...", total=None)
|
||||||
|
success = asyncio.run(pull())
|
||||||
|
|
||||||
|
if success:
|
||||||
|
console.print(f"[green]✓ Model '{model_id}' pulled successfully[/green]")
|
||||||
|
else:
|
||||||
|
console.print(f"[red]✗ Failed to pull model '{model_id}'[/red]")
|
||||||
|
|
||||||
|
|
||||||
|
@model.command("remove")
|
||||||
|
@click.argument("model_id")
|
||||||
|
@click.option("--force", "-f", is_flag=True, help="Skip confirmation")
|
||||||
|
@click.pass_context
|
||||||
|
def model_remove(ctx: click.Context, model_id: str, force: bool) -> None:
|
||||||
|
"""Remove a model."""
|
||||||
|
if not force:
|
||||||
|
if not Confirm.ask(f"Remove model '{model_id}'?"):
|
||||||
|
return
|
||||||
|
|
||||||
|
manager = ModelManager()
|
||||||
|
manager.load_models()
|
||||||
|
|
||||||
|
async def remove():
|
||||||
|
return await manager.remove_model(model_id)
|
||||||
|
|
||||||
|
success = asyncio.run(remove())
|
||||||
|
|
||||||
|
if success:
|
||||||
|
console.print(f"[green]✓ Model '{model_id}' removed[/green]")
|
||||||
|
else:
|
||||||
|
console.print(f"[red]✗ Failed to remove model '{model_id}'[/red]")
|
||||||
|
|
||||||
|
|
||||||
|
@model.command("recommended")
|
||||||
|
def model_recommended() -> None:
|
||||||
|
"""Show recommended models for different use cases."""
|
||||||
|
table = Table(
|
||||||
|
title="[bold]Recommended Models[/bold]",
|
||||||
|
box=box.ROUNDED,
|
||||||
|
show_header=True,
|
||||||
|
)
|
||||||
|
table.add_column("Use Case")
|
||||||
|
table.add_column("Model")
|
||||||
|
table.add_column("Description")
|
||||||
|
table.add_column("Parameters")
|
||||||
|
|
||||||
|
for name in list_recommended_models():
|
||||||
|
config = get_recommended_model(name)
|
||||||
|
if config:
|
||||||
|
table.add_row(
|
||||||
|
name,
|
||||||
|
config.id,
|
||||||
|
config.description,
|
||||||
|
config.parameter_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
console.print("\nPull with: [bold]debai model pull <model-id>[/bold]")
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Task Commands
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@cli.group()
|
||||||
|
def task() -> None:
|
||||||
|
"""Manage automated tasks."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@task.command("list")
|
||||||
|
@click.option("-s", "--status", type=click.Choice(["pending", "running", "completed", "failed", "all"]), default="all")
|
||||||
|
@click.pass_context
|
||||||
|
def task_list(ctx: click.Context, status: str) -> None:
|
||||||
|
"""List all tasks."""
|
||||||
|
manager = TaskManager()
|
||||||
|
manager.load_tasks()
|
||||||
|
|
||||||
|
tasks = manager.list_tasks()
|
||||||
|
|
||||||
|
if not tasks:
|
||||||
|
console.print("[yellow]No tasks found. Create one with 'debai task create'[/yellow]")
|
||||||
|
return
|
||||||
|
|
||||||
|
table = Table(
|
||||||
|
title="[bold]Tasks[/bold]",
|
||||||
|
box=box.ROUNDED,
|
||||||
|
show_header=True,
|
||||||
|
)
|
||||||
|
table.add_column("ID", style="cyan")
|
||||||
|
table.add_column("Name")
|
||||||
|
table.add_column("Type")
|
||||||
|
table.add_column("Priority")
|
||||||
|
table.add_column("Status")
|
||||||
|
|
||||||
|
for t in tasks:
|
||||||
|
status_icon = {
|
||||||
|
"pending": "[dim]○ Pending[/dim]",
|
||||||
|
"scheduled": "[blue]◐ Scheduled[/blue]",
|
||||||
|
"running": "[yellow]● Running[/yellow]",
|
||||||
|
"completed": "[green]✓ Completed[/green]",
|
||||||
|
"failed": "[red]✗ Failed[/red]",
|
||||||
|
"cancelled": "[dim]✗ Cancelled[/dim]",
|
||||||
|
}.get(t.status.value, t.status.value)
|
||||||
|
|
||||||
|
priority_style = {
|
||||||
|
"low": "[dim]Low[/dim]",
|
||||||
|
"normal": "Normal",
|
||||||
|
"high": "[yellow]High[/yellow]",
|
||||||
|
"critical": "[red bold]Critical[/red bold]",
|
||||||
|
}.get(t.config.priority, t.config.priority)
|
||||||
|
|
||||||
|
table.add_row(
|
||||||
|
t.id,
|
||||||
|
t.name,
|
||||||
|
t.config.task_type,
|
||||||
|
priority_style,
|
||||||
|
status_icon,
|
||||||
|
)
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
|
||||||
|
@task.command("create")
|
||||||
|
@click.option("-n", "--name", prompt="Task name", help="Name for the task")
|
||||||
|
@click.option("-c", "--command", prompt="Command to run", help="Shell command to execute")
|
||||||
|
@click.option("-p", "--priority",
|
||||||
|
type=click.Choice(["low", "normal", "high", "critical"]),
|
||||||
|
default="normal", help="Task priority")
|
||||||
|
@click.option("--template", help="Use a predefined template")
|
||||||
|
@click.pass_context
|
||||||
|
def task_create(
|
||||||
|
ctx: click.Context,
|
||||||
|
name: str,
|
||||||
|
command: str,
|
||||||
|
priority: str,
|
||||||
|
template: Optional[str],
|
||||||
|
) -> None:
|
||||||
|
"""Create a new task."""
|
||||||
|
if template:
|
||||||
|
config = get_task_template(template)
|
||||||
|
if not config:
|
||||||
|
console.print(f"[red]Template '{template}' not found[/red]")
|
||||||
|
console.print(f"Available templates: {', '.join(list_task_templates())}")
|
||||||
|
return
|
||||||
|
config.name = name
|
||||||
|
else:
|
||||||
|
config = TaskConfig(
|
||||||
|
name=name,
|
||||||
|
command=command,
|
||||||
|
priority=TaskPriority(priority),
|
||||||
|
)
|
||||||
|
|
||||||
|
manager = TaskManager()
|
||||||
|
|
||||||
|
async def create():
|
||||||
|
return await manager.create_task(config)
|
||||||
|
|
||||||
|
task = asyncio.run(create())
|
||||||
|
|
||||||
|
console.print(f"\n[green]✓ Task created successfully![/green]")
|
||||||
|
console.print(f" ID: [cyan]{task.id}[/cyan]")
|
||||||
|
console.print(f" Name: {task.name}")
|
||||||
|
console.print(f"\nRun with: [bold]debai task run {task.id}[/bold]")
|
||||||
|
|
||||||
|
|
||||||
|
@task.command("run")
|
||||||
|
@click.argument("task_id")
|
||||||
|
@click.pass_context
|
||||||
|
def task_run(ctx: click.Context, task_id: str) -> None:
|
||||||
|
"""Run a task."""
|
||||||
|
manager = TaskManager()
|
||||||
|
manager.load_tasks()
|
||||||
|
|
||||||
|
t = manager.get_task(task_id)
|
||||||
|
if not t:
|
||||||
|
console.print(f"[red]Task '{task_id}' not found[/red]")
|
||||||
|
return
|
||||||
|
|
||||||
|
async def run():
|
||||||
|
return await t.execute()
|
||||||
|
|
||||||
|
console.print(f"[bold]Running task: {t.name}[/bold]\n")
|
||||||
|
|
||||||
|
result = asyncio.run(run())
|
||||||
|
|
||||||
|
if result.success:
|
||||||
|
console.print(f"[green]✓ Task completed successfully[/green]")
|
||||||
|
if result.stdout:
|
||||||
|
console.print(Panel(result.stdout, title="Output", border_style="green"))
|
||||||
|
else:
|
||||||
|
console.print(f"[red]✗ Task failed[/red]")
|
||||||
|
if result.stderr:
|
||||||
|
console.print(Panel(result.stderr, title="Error", border_style="red"))
|
||||||
|
|
||||||
|
console.print(f"\n[dim]Duration: {result.duration_seconds:.2f}s[/dim]")
|
||||||
|
|
||||||
|
|
||||||
|
@task.command("templates")
|
||||||
|
def task_templates() -> None:
|
||||||
|
"""List available task templates."""
|
||||||
|
templates = list_task_templates()
|
||||||
|
|
||||||
|
table = Table(
|
||||||
|
title="[bold]Task Templates[/bold]",
|
||||||
|
box=box.ROUNDED,
|
||||||
|
show_header=True,
|
||||||
|
)
|
||||||
|
table.add_column("Template")
|
||||||
|
table.add_column("Description")
|
||||||
|
table.add_column("Priority")
|
||||||
|
|
||||||
|
for name in templates:
|
||||||
|
config = get_task_template(name)
|
||||||
|
if config:
|
||||||
|
table.add_row(name, config.description, config.priority)
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
console.print("\nUse with: [bold]debai task create --template <name>[/bold]")
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Generate Commands
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@cli.group()
|
||||||
|
def generate() -> None:
|
||||||
|
"""Generate distribution images and configurations."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@generate.command("iso")
|
||||||
|
@click.option("-o", "--output", default="debai.iso", help="Output ISO file path")
|
||||||
|
@click.option("--base", default="debian", help="Base distribution")
|
||||||
|
@click.option("--include-agents", is_flag=True, help="Include configured agents")
|
||||||
|
@click.pass_context
|
||||||
|
def generate_iso(ctx: click.Context, output: str, base: str, include_agents: bool) -> None:
|
||||||
|
"""Generate a bootable ISO image."""
|
||||||
|
from debai.generators.iso import ISOGenerator
|
||||||
|
|
||||||
|
console.print(Panel(
|
||||||
|
f"[bold]Generating ISO Image[/bold]\n\n"
|
||||||
|
f"Output: {output}\n"
|
||||||
|
f"Base: {base}\n"
|
||||||
|
f"Include agents: {include_agents}",
|
||||||
|
title="ISO Generation",
|
||||||
|
border_style="cyan",
|
||||||
|
))
|
||||||
|
|
||||||
|
generator = ISOGenerator(
|
||||||
|
output_path=Path(output),
|
||||||
|
base_distro=base,
|
||||||
|
include_agents=include_agents,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def gen():
|
||||||
|
return await generator.generate()
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
console=console,
|
||||||
|
) as progress:
|
||||||
|
task = progress.add_task("Generating ISO...", total=None)
|
||||||
|
result = asyncio.run(gen())
|
||||||
|
|
||||||
|
if result["success"]:
|
||||||
|
console.print(f"\n[green]✓ ISO generated: {output}[/green]")
|
||||||
|
console.print(f" Size: {result['size_mb']:.1f} MB")
|
||||||
|
else:
|
||||||
|
console.print(f"\n[red]✗ Failed to generate ISO[/red]")
|
||||||
|
if result.get("error"):
|
||||||
|
console.print(f" Error: {result['error']}")
|
||||||
|
|
||||||
|
|
||||||
|
@generate.command("qcow2")
|
||||||
|
@click.option("-o", "--output", default="debai.qcow2", help="Output QCOW2 file path")
|
||||||
|
@click.option("--size", default="20G", help="Disk size")
|
||||||
|
@click.option("--base", default="debian", help="Base distribution")
|
||||||
|
@click.pass_context
|
||||||
|
def generate_qcow2(ctx: click.Context, output: str, size: str, base: str) -> None:
|
||||||
|
"""Generate a QCOW2 image for QEMU."""
|
||||||
|
from debai.generators.qcow2 import QCOW2Generator
|
||||||
|
|
||||||
|
console.print(Panel(
|
||||||
|
f"[bold]Generating QCOW2 Image[/bold]\n\n"
|
||||||
|
f"Output: {output}\n"
|
||||||
|
f"Size: {size}\n"
|
||||||
|
f"Base: {base}",
|
||||||
|
title="QCOW2 Generation",
|
||||||
|
border_style="cyan",
|
||||||
|
))
|
||||||
|
|
||||||
|
generator = QCOW2Generator(
|
||||||
|
output_path=Path(output),
|
||||||
|
disk_size=size,
|
||||||
|
base_distro=base,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def gen():
|
||||||
|
return await generator.generate()
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
console=console,
|
||||||
|
) as progress:
|
||||||
|
task = progress.add_task("Generating QCOW2...", total=None)
|
||||||
|
result = asyncio.run(gen())
|
||||||
|
|
||||||
|
if result["success"]:
|
||||||
|
console.print(f"\n[green]✓ QCOW2 generated: {output}[/green]")
|
||||||
|
console.print(f" Size: {result['size_mb']:.1f} MB")
|
||||||
|
else:
|
||||||
|
console.print(f"\n[red]✗ Failed to generate QCOW2[/red]")
|
||||||
|
if result.get("error"):
|
||||||
|
console.print(f" Error: {result['error']}")
|
||||||
|
|
||||||
|
|
||||||
|
@generate.command("compose")
|
||||||
|
@click.option("-o", "--output", default="docker-compose.yml", help="Output file path")
|
||||||
|
@click.option("--include-gui", is_flag=True, help="Include GUI service")
|
||||||
|
@click.pass_context
|
||||||
|
def generate_compose(ctx: click.Context, output: str, include_gui: bool) -> None:
|
||||||
|
"""Generate a Docker Compose configuration."""
|
||||||
|
from debai.generators.compose import ComposeGenerator
|
||||||
|
|
||||||
|
generator = ComposeGenerator(
|
||||||
|
output_path=Path(output),
|
||||||
|
include_gui=include_gui,
|
||||||
|
)
|
||||||
|
|
||||||
|
result = generator.generate()
|
||||||
|
|
||||||
|
if result["success"]:
|
||||||
|
console.print(f"[green]✓ Docker Compose generated: {output}[/green]")
|
||||||
|
console.print("\nStart with: [bold]docker compose up -d[/bold]")
|
||||||
|
else:
|
||||||
|
console.print(f"[red]✗ Failed to generate Docker Compose[/red]")
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Init Command
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@cli.command()
|
||||||
|
@click.option("--full", is_flag=True, help="Full initialization with model pull")
|
||||||
|
@click.pass_context
|
||||||
|
def init(ctx: click.Context, full: bool) -> None:
|
||||||
|
"""Initialize Debai environment."""
|
||||||
|
print_banner()
|
||||||
|
|
||||||
|
console.print("\n[bold cyan]🚀 Initializing Debai...[/bold cyan]\n")
|
||||||
|
|
||||||
|
# Create configuration directories
|
||||||
|
config_dir = Path.home() / ".config" / "debai"
|
||||||
|
dirs = [
|
||||||
|
config_dir,
|
||||||
|
config_dir / "agents",
|
||||||
|
config_dir / "models",
|
||||||
|
config_dir / "tasks",
|
||||||
|
Path.home() / ".local" / "share" / "debai",
|
||||||
|
]
|
||||||
|
|
||||||
|
for d in dirs:
|
||||||
|
d.mkdir(parents=True, exist_ok=True)
|
||||||
|
console.print(f" [green]✓[/green] Created {d}")
|
||||||
|
|
||||||
|
# Check dependencies
|
||||||
|
console.print("\n[bold]Checking dependencies...[/bold]\n")
|
||||||
|
deps = check_dependencies()
|
||||||
|
|
||||||
|
missing = [d for d, available in deps.items() if not available]
|
||||||
|
if missing:
|
||||||
|
console.print(f"[yellow]⚠ Missing dependencies: {', '.join(missing)}[/yellow]")
|
||||||
|
console.print("\nInstall with your package manager:")
|
||||||
|
console.print(" [dim]sudo apt install docker.io qemu-utils genisoimage[/dim]")
|
||||||
|
else:
|
||||||
|
console.print(" [green]✓[/green] All dependencies available")
|
||||||
|
|
||||||
|
if full:
|
||||||
|
# Pull recommended model
|
||||||
|
console.print("\n[bold]Pulling recommended model...[/bold]\n")
|
||||||
|
manager = ModelManager()
|
||||||
|
config = get_recommended_model("general")
|
||||||
|
if config:
|
||||||
|
async def pull():
|
||||||
|
m = await manager.add_model(config)
|
||||||
|
return await m.pull()
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
console=console,
|
||||||
|
) as progress:
|
||||||
|
progress.add_task(f"Pulling {config.id}...", total=None)
|
||||||
|
asyncio.run(pull())
|
||||||
|
|
||||||
|
console.print("\n[bold green]✓ Debai initialized successfully![/bold green]")
|
||||||
|
console.print("\nNext steps:")
|
||||||
|
console.print(" 1. Create an agent: [bold]debai agent create[/bold]")
|
||||||
|
console.print(" 2. Pull a model: [bold]debai model pull llama3.2:3b[/bold]")
|
||||||
|
console.print(" 3. Start the GUI: [bold]debai-gui[/bold]")
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Monitor Command
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@cli.command()
|
||||||
|
@click.option("-i", "--interval", default=2.0, help="Update interval in seconds")
|
||||||
|
@click.pass_context
|
||||||
|
def monitor(ctx: click.Context, interval: float) -> None:
|
||||||
|
"""Monitor system resources in real-time."""
|
||||||
|
from rich.live import Live
|
||||||
|
from rich.layout import Layout
|
||||||
|
|
||||||
|
console.print("[bold]Starting resource monitor...[/bold]")
|
||||||
|
console.print("[dim]Press Ctrl+C to exit[/dim]\n")
|
||||||
|
|
||||||
|
monitor = ResourceMonitor(interval_seconds=interval)
|
||||||
|
|
||||||
|
def create_display() -> Table:
|
||||||
|
snapshot = monitor.get_latest()
|
||||||
|
if not snapshot:
|
||||||
|
return Table()
|
||||||
|
|
||||||
|
table = Table(box=box.ROUNDED, show_header=False)
|
||||||
|
table.add_column("Metric", style="bold")
|
||||||
|
table.add_column("Value")
|
||||||
|
|
||||||
|
table.add_row("CPU", f"{snapshot['cpu_percent']:.1f}%")
|
||||||
|
table.add_row("Memory", f"{snapshot['memory_percent']:.1f}%")
|
||||||
|
table.add_row("Load (1m)", f"{snapshot['load_1min']:.2f}")
|
||||||
|
table.add_row("Load (5m)", f"{snapshot['load_5min']:.2f}")
|
||||||
|
table.add_row("Load (15m)", f"{snapshot['load_15min']:.2f}")
|
||||||
|
|
||||||
|
return table
|
||||||
|
|
||||||
|
async def run_monitor():
|
||||||
|
await monitor.start()
|
||||||
|
|
||||||
|
try:
|
||||||
|
with Live(create_display(), console=console, refresh_per_second=1) as live:
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(interval)
|
||||||
|
live.update(create_display())
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
pass
|
||||||
|
finally:
|
||||||
|
await monitor.stop()
|
||||||
|
|
||||||
|
asyncio.run(run_monitor())
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Entry Point
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
"""Main entry point."""
|
||||||
|
try:
|
||||||
|
cli()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
console.print("\n[yellow]Interrupted[/yellow]")
|
||||||
|
sys.exit(1)
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[red]Error: {e}[/red]")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
22
src/debai/core/__init__.py
Archivo normal
22
src/debai/core/__init__.py
Archivo normal
@@ -0,0 +1,22 @@
|
|||||||
|
"""
|
||||||
|
Core module for Debai - Contains the main business logic.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from debai.core.agent import Agent, AgentConfig, AgentManager
|
||||||
|
from debai.core.model import Model, ModelConfig, ModelManager
|
||||||
|
from debai.core.task import Task, TaskConfig, TaskManager
|
||||||
|
from debai.core.system import SystemInfo, ResourceMonitor
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"Agent",
|
||||||
|
"AgentConfig",
|
||||||
|
"AgentManager",
|
||||||
|
"Model",
|
||||||
|
"ModelConfig",
|
||||||
|
"ModelManager",
|
||||||
|
"Task",
|
||||||
|
"TaskConfig",
|
||||||
|
"TaskManager",
|
||||||
|
"SystemInfo",
|
||||||
|
"ResourceMonitor",
|
||||||
|
]
|
||||||
602
src/debai/core/agent.py
Archivo normal
602
src/debai/core/agent.py
Archivo normal
@@ -0,0 +1,602 @@
|
|||||||
|
"""
|
||||||
|
Agent management module for Debai.
|
||||||
|
|
||||||
|
This module provides classes and utilities for creating, managing, and
|
||||||
|
interacting with AI agents using the cagent framework.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import uuid
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Callable, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AgentStatus(str, Enum):
|
||||||
|
"""Agent status enumeration."""
|
||||||
|
STOPPED = "stopped"
|
||||||
|
STARTING = "starting"
|
||||||
|
RUNNING = "running"
|
||||||
|
PAUSED = "paused"
|
||||||
|
ERROR = "error"
|
||||||
|
WAITING = "waiting"
|
||||||
|
|
||||||
|
|
||||||
|
class AgentType(str, Enum):
|
||||||
|
"""Types of agents available."""
|
||||||
|
SYSTEM = "system" # System maintenance tasks
|
||||||
|
PACKAGE = "package" # Package management
|
||||||
|
CONFIG = "config" # Configuration management
|
||||||
|
RESOURCE = "resource" # Resource monitoring
|
||||||
|
SECURITY = "security" # Security tasks
|
||||||
|
BACKUP = "backup" # Backup management
|
||||||
|
NETWORK = "network" # Network configuration
|
||||||
|
CUSTOM = "custom" # User-defined agents
|
||||||
|
|
||||||
|
|
||||||
|
class AgentCapability(str, Enum):
|
||||||
|
"""Agent capabilities."""
|
||||||
|
READ_SYSTEM = "read_system"
|
||||||
|
WRITE_SYSTEM = "write_system"
|
||||||
|
EXECUTE_COMMANDS = "execute_commands"
|
||||||
|
NETWORK_ACCESS = "network_access"
|
||||||
|
FILE_ACCESS = "file_access"
|
||||||
|
PACKAGE_INSTALL = "package_install"
|
||||||
|
SERVICE_CONTROL = "service_control"
|
||||||
|
USER_INTERACTION = "user_interaction"
|
||||||
|
|
||||||
|
|
||||||
|
class AgentConfig(BaseModel):
|
||||||
|
"""Configuration for an AI agent."""
|
||||||
|
|
||||||
|
id: str = Field(default_factory=lambda: str(uuid.uuid4())[:8])
|
||||||
|
name: str = Field(..., min_length=1, max_length=64)
|
||||||
|
description: str = Field(default="")
|
||||||
|
agent_type: AgentType = Field(default=AgentType.CUSTOM)
|
||||||
|
model_id: str = Field(..., description="ID of the model to use")
|
||||||
|
capabilities: list[AgentCapability] = Field(default_factory=list)
|
||||||
|
|
||||||
|
# Execution settings
|
||||||
|
auto_start: bool = Field(default=False)
|
||||||
|
interactive: bool = Field(default=True)
|
||||||
|
max_retries: int = Field(default=3, ge=0, le=10)
|
||||||
|
timeout_seconds: int = Field(default=300, ge=10, le=3600)
|
||||||
|
|
||||||
|
# Resource limits
|
||||||
|
max_memory_mb: int = Field(default=512, ge=64, le=8192)
|
||||||
|
max_cpu_percent: float = Field(default=50.0, ge=1.0, le=100.0)
|
||||||
|
|
||||||
|
# Scheduling
|
||||||
|
schedule_cron: Optional[str] = Field(default=None)
|
||||||
|
run_on_boot: bool = Field(default=False)
|
||||||
|
|
||||||
|
# Environment
|
||||||
|
environment: dict[str, str] = Field(default_factory=dict)
|
||||||
|
working_directory: str = Field(default="/tmp/debai")
|
||||||
|
|
||||||
|
# Instructions
|
||||||
|
system_prompt: str = Field(
|
||||||
|
default="You are a helpful AI assistant managing a GNU/Linux system."
|
||||||
|
)
|
||||||
|
allowed_commands: list[str] = Field(default_factory=list)
|
||||||
|
denied_commands: list[str] = Field(
|
||||||
|
default_factory=lambda: ["rm -rf /", "dd if=/dev/zero", ":(){ :|:& };:"]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Metadata
|
||||||
|
created_at: datetime = Field(default_factory=datetime.now)
|
||||||
|
updated_at: datetime = Field(default_factory=datetime.now)
|
||||||
|
tags: list[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
use_enum_values = True
|
||||||
|
|
||||||
|
|
||||||
|
class AgentMessage(BaseModel):
|
||||||
|
"""Message exchanged with an agent."""
|
||||||
|
|
||||||
|
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||||
|
role: str = Field(..., pattern="^(user|assistant|system)$")
|
||||||
|
content: str
|
||||||
|
timestamp: datetime = Field(default_factory=datetime.now)
|
||||||
|
metadata: dict[str, Any] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class Agent:
|
||||||
|
"""
|
||||||
|
AI Agent that can perform system tasks autonomously.
|
||||||
|
|
||||||
|
This class wraps the cagent framework to provide a high-level interface
|
||||||
|
for creating and managing AI agents.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, config: AgentConfig):
|
||||||
|
self.config = config
|
||||||
|
self.status = AgentStatus.STOPPED
|
||||||
|
self.process: Optional[subprocess.Popen] = None
|
||||||
|
self.message_history: list[AgentMessage] = []
|
||||||
|
self._callbacks: dict[str, list[Callable]] = {
|
||||||
|
"on_start": [],
|
||||||
|
"on_stop": [],
|
||||||
|
"on_message": [],
|
||||||
|
"on_error": [],
|
||||||
|
"on_task_complete": [],
|
||||||
|
}
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def id(self) -> str:
|
||||||
|
return self.config.id
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str:
|
||||||
|
return self.config.name
|
||||||
|
|
||||||
|
def on(self, event: str, callback: Callable) -> None:
|
||||||
|
"""Register an event callback."""
|
||||||
|
if event in self._callbacks:
|
||||||
|
self._callbacks[event].append(callback)
|
||||||
|
|
||||||
|
def _emit(self, event: str, *args, **kwargs) -> None:
|
||||||
|
"""Emit an event to all registered callbacks."""
|
||||||
|
for callback in self._callbacks.get(event, []):
|
||||||
|
try:
|
||||||
|
callback(*args, **kwargs)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in callback for {event}: {e}")
|
||||||
|
|
||||||
|
async def start(self) -> bool:
|
||||||
|
"""Start the agent."""
|
||||||
|
async with self._lock:
|
||||||
|
if self.status == AgentStatus.RUNNING:
|
||||||
|
logger.warning(f"Agent {self.name} is already running")
|
||||||
|
return True
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.status = AgentStatus.STARTING
|
||||||
|
logger.info(f"Starting agent {self.name}...")
|
||||||
|
|
||||||
|
# Create working directory
|
||||||
|
work_dir = Path(self.config.working_directory)
|
||||||
|
work_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# Generate agent configuration for cagent
|
||||||
|
agent_config_path = work_dir / f"agent_{self.id}.yaml"
|
||||||
|
self._write_cagent_config(agent_config_path)
|
||||||
|
|
||||||
|
# Start cagent process
|
||||||
|
cmd = [
|
||||||
|
"cagent",
|
||||||
|
"--config", str(agent_config_path),
|
||||||
|
"--model", self.config.model_id,
|
||||||
|
"--interactive" if self.config.interactive else "--daemon",
|
||||||
|
]
|
||||||
|
|
||||||
|
self.process = subprocess.Popen(
|
||||||
|
cmd,
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
cwd=str(work_dir),
|
||||||
|
env={**os.environ, **self.config.environment},
|
||||||
|
)
|
||||||
|
|
||||||
|
self.status = AgentStatus.RUNNING
|
||||||
|
self._emit("on_start", self)
|
||||||
|
logger.info(f"Agent {self.name} started successfully")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.status = AgentStatus.ERROR
|
||||||
|
self._emit("on_error", self, e)
|
||||||
|
logger.error(f"Failed to start agent {self.name}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def stop(self) -> bool:
|
||||||
|
"""Stop the agent."""
|
||||||
|
async with self._lock:
|
||||||
|
if self.status == AgentStatus.STOPPED:
|
||||||
|
return True
|
||||||
|
|
||||||
|
try:
|
||||||
|
if self.process:
|
||||||
|
self.process.terminate()
|
||||||
|
try:
|
||||||
|
self.process.wait(timeout=10)
|
||||||
|
except subprocess.TimeoutExpired:
|
||||||
|
self.process.kill()
|
||||||
|
self.process.wait()
|
||||||
|
|
||||||
|
self.status = AgentStatus.STOPPED
|
||||||
|
self._emit("on_stop", self)
|
||||||
|
logger.info(f"Agent {self.name} stopped")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self._emit("on_error", self, e)
|
||||||
|
logger.error(f"Failed to stop agent {self.name}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def send_message(self, content: str) -> Optional[AgentMessage]:
|
||||||
|
"""Send a message to the agent and wait for response."""
|
||||||
|
if self.status != AgentStatus.RUNNING:
|
||||||
|
logger.error(f"Agent {self.name} is not running")
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Record user message
|
||||||
|
user_msg = AgentMessage(role="user", content=content)
|
||||||
|
self.message_history.append(user_msg)
|
||||||
|
|
||||||
|
# Send to agent process
|
||||||
|
if self.process and self.process.stdin:
|
||||||
|
self.process.stdin.write(f"{content}\n".encode())
|
||||||
|
self.process.stdin.flush()
|
||||||
|
|
||||||
|
# Read response
|
||||||
|
if self.process and self.process.stdout:
|
||||||
|
response = self.process.stdout.readline().decode().strip()
|
||||||
|
assistant_msg = AgentMessage(role="assistant", content=response)
|
||||||
|
self.message_history.append(assistant_msg)
|
||||||
|
self._emit("on_message", self, assistant_msg)
|
||||||
|
return assistant_msg
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error sending message to agent {self.name}: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def execute_task(self, task_description: str) -> dict[str, Any]:
|
||||||
|
"""Execute a task and return the result."""
|
||||||
|
result = {
|
||||||
|
"success": False,
|
||||||
|
"output": "",
|
||||||
|
"error": None,
|
||||||
|
"duration_seconds": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
start_time = datetime.now()
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = await self.send_message(
|
||||||
|
f"Execute the following task: {task_description}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if response:
|
||||||
|
result["success"] = True
|
||||||
|
result["output"] = response.content
|
||||||
|
self._emit("on_task_complete", self, result)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
result["error"] = str(e)
|
||||||
|
self._emit("on_error", self, e)
|
||||||
|
|
||||||
|
result["duration_seconds"] = (datetime.now() - start_time).total_seconds()
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _write_cagent_config(self, path: Path) -> None:
|
||||||
|
"""Write cagent configuration file."""
|
||||||
|
config = {
|
||||||
|
"agent": {
|
||||||
|
"name": self.config.name,
|
||||||
|
"description": self.config.description,
|
||||||
|
"type": self.config.agent_type,
|
||||||
|
},
|
||||||
|
"model": {
|
||||||
|
"id": self.config.model_id,
|
||||||
|
"provider": "docker-model",
|
||||||
|
},
|
||||||
|
"capabilities": list(self.config.capabilities),
|
||||||
|
"system_prompt": self.config.system_prompt,
|
||||||
|
"limits": {
|
||||||
|
"max_memory_mb": self.config.max_memory_mb,
|
||||||
|
"max_cpu_percent": self.config.max_cpu_percent,
|
||||||
|
"timeout_seconds": self.config.timeout_seconds,
|
||||||
|
},
|
||||||
|
"security": {
|
||||||
|
"allowed_commands": self.config.allowed_commands,
|
||||||
|
"denied_commands": self.config.denied_commands,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
with open(path, "w") as f:
|
||||||
|
yaml.dump(config, f, default_flow_style=False)
|
||||||
|
|
||||||
|
def to_dict(self) -> dict[str, Any]:
|
||||||
|
"""Convert agent to dictionary."""
|
||||||
|
return {
|
||||||
|
"id": self.id,
|
||||||
|
"name": self.name,
|
||||||
|
"status": self.status.value,
|
||||||
|
"config": self.config.model_dump(),
|
||||||
|
"message_count": len(self.message_history),
|
||||||
|
}
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"Agent(id={self.id}, name={self.name}, status={self.status.value})"
|
||||||
|
|
||||||
|
|
||||||
|
class AgentManager:
|
||||||
|
"""
|
||||||
|
Manager for multiple AI agents.
|
||||||
|
|
||||||
|
Provides functionality to create, start, stop, and manage multiple agents.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, config_dir: Optional[Path] = None):
|
||||||
|
self.config_dir = config_dir or Path.home() / ".config" / "debai" / "agents"
|
||||||
|
self.config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.agents: dict[str, Agent] = {}
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
|
||||||
|
async def create_agent(self, config: AgentConfig) -> Agent:
|
||||||
|
"""Create a new agent."""
|
||||||
|
async with self._lock:
|
||||||
|
if config.id in self.agents:
|
||||||
|
raise ValueError(f"Agent with id {config.id} already exists")
|
||||||
|
|
||||||
|
agent = Agent(config)
|
||||||
|
self.agents[config.id] = agent
|
||||||
|
|
||||||
|
# Save configuration
|
||||||
|
self._save_agent_config(agent)
|
||||||
|
|
||||||
|
logger.info(f"Created agent: {agent.name}")
|
||||||
|
return agent
|
||||||
|
|
||||||
|
def get_agent(self, agent_id: str) -> Optional[Agent]:
|
||||||
|
"""Get an agent by ID."""
|
||||||
|
return self.agents.get(agent_id)
|
||||||
|
|
||||||
|
def list_agents(
|
||||||
|
self,
|
||||||
|
status: Optional[AgentStatus] = None,
|
||||||
|
agent_type: Optional[AgentType] = None,
|
||||||
|
) -> list[Agent]:
|
||||||
|
"""List agents with optional filtering."""
|
||||||
|
agents = list(self.agents.values())
|
||||||
|
|
||||||
|
if status:
|
||||||
|
agents = [a for a in agents if a.status == status]
|
||||||
|
|
||||||
|
if agent_type:
|
||||||
|
agents = [a for a in agents if a.config.agent_type == agent_type]
|
||||||
|
|
||||||
|
return agents
|
||||||
|
|
||||||
|
async def start_agent(self, agent_id: str) -> bool:
|
||||||
|
"""Start an agent by ID."""
|
||||||
|
agent = self.get_agent(agent_id)
|
||||||
|
if agent:
|
||||||
|
return await agent.start()
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def stop_agent(self, agent_id: str) -> bool:
|
||||||
|
"""Stop an agent by ID."""
|
||||||
|
agent = self.get_agent(agent_id)
|
||||||
|
if agent:
|
||||||
|
return await agent.stop()
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def delete_agent(self, agent_id: str) -> bool:
|
||||||
|
"""Delete an agent."""
|
||||||
|
async with self._lock:
|
||||||
|
agent = self.agents.get(agent_id)
|
||||||
|
if not agent:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Stop if running
|
||||||
|
await agent.stop()
|
||||||
|
|
||||||
|
# Remove configuration
|
||||||
|
config_path = self.config_dir / f"{agent_id}.yaml"
|
||||||
|
if config_path.exists():
|
||||||
|
config_path.unlink()
|
||||||
|
|
||||||
|
del self.agents[agent_id]
|
||||||
|
logger.info(f"Deleted agent: {agent_id}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def start_all(self, auto_start_only: bool = True) -> dict[str, bool]:
|
||||||
|
"""Start all agents (optionally only auto-start ones)."""
|
||||||
|
results = {}
|
||||||
|
for agent_id, agent in self.agents.items():
|
||||||
|
if not auto_start_only or agent.config.auto_start:
|
||||||
|
results[agent_id] = await agent.start()
|
||||||
|
return results
|
||||||
|
|
||||||
|
async def stop_all(self) -> dict[str, bool]:
|
||||||
|
"""Stop all running agents."""
|
||||||
|
results = {}
|
||||||
|
for agent_id, agent in self.agents.items():
|
||||||
|
if agent.status == AgentStatus.RUNNING:
|
||||||
|
results[agent_id] = await agent.stop()
|
||||||
|
return results
|
||||||
|
|
||||||
|
def load_agents(self) -> int:
|
||||||
|
"""Load agents from configuration directory."""
|
||||||
|
count = 0
|
||||||
|
for config_file in self.config_dir.glob("*.yaml"):
|
||||||
|
try:
|
||||||
|
with open(config_file) as f:
|
||||||
|
data = yaml.safe_load(f)
|
||||||
|
|
||||||
|
config = AgentConfig(**data)
|
||||||
|
agent = Agent(config)
|
||||||
|
self.agents[config.id] = agent
|
||||||
|
count += 1
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to load agent from {config_file}: {e}")
|
||||||
|
|
||||||
|
logger.info(f"Loaded {count} agents from {self.config_dir}")
|
||||||
|
return count
|
||||||
|
|
||||||
|
def save_agents(self) -> int:
|
||||||
|
"""Save all agent configurations."""
|
||||||
|
count = 0
|
||||||
|
for agent in self.agents.values():
|
||||||
|
try:
|
||||||
|
self._save_agent_config(agent)
|
||||||
|
count += 1
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to save agent {agent.id}: {e}")
|
||||||
|
return count
|
||||||
|
|
||||||
|
def _save_agent_config(self, agent: Agent) -> None:
|
||||||
|
"""Save agent configuration to file."""
|
||||||
|
config_path = self.config_dir / f"{agent.id}.yaml"
|
||||||
|
with open(config_path, "w") as f:
|
||||||
|
yaml.dump(agent.config.model_dump(), f, default_flow_style=False)
|
||||||
|
|
||||||
|
def get_statistics(self) -> dict[str, Any]:
|
||||||
|
"""Get agent statistics."""
|
||||||
|
total = len(self.agents)
|
||||||
|
by_status = {}
|
||||||
|
by_type = {}
|
||||||
|
|
||||||
|
for agent in self.agents.values():
|
||||||
|
status = agent.status.value
|
||||||
|
agent_type = agent.config.agent_type
|
||||||
|
|
||||||
|
by_status[status] = by_status.get(status, 0) + 1
|
||||||
|
by_type[agent_type] = by_type.get(agent_type, 0) + 1
|
||||||
|
|
||||||
|
return {
|
||||||
|
"total": total,
|
||||||
|
"by_status": by_status,
|
||||||
|
"by_type": by_type,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Predefined agent templates
|
||||||
|
AGENT_TEMPLATES = {
|
||||||
|
"package_updater": AgentConfig(
|
||||||
|
name="Package Updater",
|
||||||
|
description="Automatically updates system packages",
|
||||||
|
agent_type=AgentType.PACKAGE,
|
||||||
|
model_id="llama3.2:3b",
|
||||||
|
capabilities=[
|
||||||
|
AgentCapability.READ_SYSTEM,
|
||||||
|
AgentCapability.EXECUTE_COMMANDS,
|
||||||
|
AgentCapability.PACKAGE_INSTALL,
|
||||||
|
],
|
||||||
|
system_prompt="""You are a package management agent for a GNU/Linux system.
|
||||||
|
Your role is to:
|
||||||
|
1. Check for available package updates
|
||||||
|
2. Review update changelogs for security implications
|
||||||
|
3. Apply updates safely during low-usage periods
|
||||||
|
4. Report any issues or conflicts
|
||||||
|
|
||||||
|
Always prioritize security updates and be cautious with major version upgrades.""",
|
||||||
|
allowed_commands=["apt", "apt-get", "dpkg", "snap", "flatpak"],
|
||||||
|
schedule_cron="0 3 * * *", # 3 AM daily
|
||||||
|
),
|
||||||
|
"config_manager": AgentConfig(
|
||||||
|
name="Configuration Manager",
|
||||||
|
description="Manages application configurations",
|
||||||
|
agent_type=AgentType.CONFIG,
|
||||||
|
model_id="llama3.2:3b",
|
||||||
|
capabilities=[
|
||||||
|
AgentCapability.READ_SYSTEM,
|
||||||
|
AgentCapability.FILE_ACCESS,
|
||||||
|
AgentCapability.USER_INTERACTION,
|
||||||
|
],
|
||||||
|
system_prompt="""You are a configuration management agent.
|
||||||
|
Your role is to:
|
||||||
|
1. Monitor configuration files for changes
|
||||||
|
2. Validate configurations against best practices
|
||||||
|
3. Suggest optimizations
|
||||||
|
4. Backup configurations before changes
|
||||||
|
5. Help users with configuration questions
|
||||||
|
|
||||||
|
Always create backups before modifying any configuration.""",
|
||||||
|
),
|
||||||
|
"resource_monitor": AgentConfig(
|
||||||
|
name="Resource Monitor",
|
||||||
|
description="Monitors and optimizes system resources",
|
||||||
|
agent_type=AgentType.RESOURCE,
|
||||||
|
model_id="llama3.2:3b",
|
||||||
|
capabilities=[
|
||||||
|
AgentCapability.READ_SYSTEM,
|
||||||
|
AgentCapability.EXECUTE_COMMANDS,
|
||||||
|
AgentCapability.SERVICE_CONTROL,
|
||||||
|
],
|
||||||
|
system_prompt="""You are a resource monitoring agent.
|
||||||
|
Your role is to:
|
||||||
|
1. Monitor CPU, memory, disk, and network usage
|
||||||
|
2. Identify resource-hungry processes
|
||||||
|
3. Suggest optimizations
|
||||||
|
4. Alert on critical thresholds
|
||||||
|
5. Perform automatic cleanup of temporary files
|
||||||
|
|
||||||
|
Never terminate critical system processes without explicit permission.""",
|
||||||
|
schedule_cron="*/15 * * * *", # Every 15 minutes
|
||||||
|
),
|
||||||
|
"security_guard": AgentConfig(
|
||||||
|
name="Security Guard",
|
||||||
|
description="Monitors system security",
|
||||||
|
agent_type=AgentType.SECURITY,
|
||||||
|
model_id="llama3.2:3b",
|
||||||
|
capabilities=[
|
||||||
|
AgentCapability.READ_SYSTEM,
|
||||||
|
AgentCapability.EXECUTE_COMMANDS,
|
||||||
|
AgentCapability.NETWORK_ACCESS,
|
||||||
|
],
|
||||||
|
system_prompt="""You are a security monitoring agent.
|
||||||
|
Your role is to:
|
||||||
|
1. Monitor system logs for suspicious activity
|
||||||
|
2. Check for unauthorized access attempts
|
||||||
|
3. Verify file integrity
|
||||||
|
4. Monitor open ports and network connections
|
||||||
|
5. Alert on security issues
|
||||||
|
|
||||||
|
Never expose sensitive information in logs or reports.""",
|
||||||
|
interactive=True,
|
||||||
|
),
|
||||||
|
"backup_agent": AgentConfig(
|
||||||
|
name="Backup Manager",
|
||||||
|
description="Manages system backups",
|
||||||
|
agent_type=AgentType.BACKUP,
|
||||||
|
model_id="llama3.2:3b",
|
||||||
|
capabilities=[
|
||||||
|
AgentCapability.READ_SYSTEM,
|
||||||
|
AgentCapability.FILE_ACCESS,
|
||||||
|
AgentCapability.EXECUTE_COMMANDS,
|
||||||
|
],
|
||||||
|
system_prompt="""You are a backup management agent.
|
||||||
|
Your role is to:
|
||||||
|
1. Create regular backups of important data
|
||||||
|
2. Verify backup integrity
|
||||||
|
3. Manage backup retention policies
|
||||||
|
4. Perform restore operations when needed
|
||||||
|
5. Monitor backup storage usage
|
||||||
|
|
||||||
|
Always verify backups after creation.""",
|
||||||
|
schedule_cron="0 2 * * *", # 2 AM daily
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_agent_template(template_name: str) -> Optional[AgentConfig]:
|
||||||
|
"""Get a predefined agent template."""
|
||||||
|
return AGENT_TEMPLATES.get(template_name)
|
||||||
|
|
||||||
|
|
||||||
|
def list_agent_templates() -> list[str]:
|
||||||
|
"""List available agent templates."""
|
||||||
|
return list(AGENT_TEMPLATES.keys())
|
||||||
481
src/debai/core/model.py
Archivo normal
481
src/debai/core/model.py
Archivo normal
@@ -0,0 +1,481 @@
|
|||||||
|
"""
|
||||||
|
Model management module for Debai.
|
||||||
|
|
||||||
|
This module provides classes and utilities for managing AI models
|
||||||
|
using Docker Model Runner.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ModelStatus(str, Enum):
|
||||||
|
"""Model status enumeration."""
|
||||||
|
NOT_PULLED = "not_pulled"
|
||||||
|
PULLING = "pulling"
|
||||||
|
READY = "ready"
|
||||||
|
LOADING = "loading"
|
||||||
|
LOADED = "loaded"
|
||||||
|
ERROR = "error"
|
||||||
|
|
||||||
|
|
||||||
|
class ModelProvider(str, Enum):
|
||||||
|
"""Model providers."""
|
||||||
|
DOCKER_MODEL = "docker-model"
|
||||||
|
OLLAMA = "ollama"
|
||||||
|
LOCAL = "local"
|
||||||
|
|
||||||
|
|
||||||
|
class ModelCapability(str, Enum):
|
||||||
|
"""Model capabilities."""
|
||||||
|
TEXT_GENERATION = "text_generation"
|
||||||
|
CODE_GENERATION = "code_generation"
|
||||||
|
CHAT = "chat"
|
||||||
|
EMBEDDING = "embedding"
|
||||||
|
VISION = "vision"
|
||||||
|
FUNCTION_CALLING = "function_calling"
|
||||||
|
|
||||||
|
|
||||||
|
class ModelConfig(BaseModel):
|
||||||
|
"""Configuration for an AI model."""
|
||||||
|
|
||||||
|
id: str = Field(..., min_length=1)
|
||||||
|
name: str = Field(..., min_length=1)
|
||||||
|
provider: ModelProvider = Field(default=ModelProvider.DOCKER_MODEL)
|
||||||
|
description: str = Field(default="")
|
||||||
|
|
||||||
|
# Model specifications
|
||||||
|
parameter_count: str = Field(default="") # e.g., "7B", "13B"
|
||||||
|
context_length: int = Field(default=4096, ge=512, le=131072)
|
||||||
|
quantization: str = Field(default="") # e.g., "Q4_K_M", "Q8_0"
|
||||||
|
|
||||||
|
# Capabilities
|
||||||
|
capabilities: list[ModelCapability] = Field(default_factory=list)
|
||||||
|
|
||||||
|
# Runtime settings
|
||||||
|
gpu_layers: int = Field(default=0, ge=0)
|
||||||
|
threads: int = Field(default=4, ge=1, le=64)
|
||||||
|
batch_size: int = Field(default=512, ge=1, le=4096)
|
||||||
|
|
||||||
|
# Generation defaults
|
||||||
|
temperature: float = Field(default=0.7, ge=0.0, le=2.0)
|
||||||
|
top_p: float = Field(default=0.9, ge=0.0, le=1.0)
|
||||||
|
top_k: int = Field(default=40, ge=0, le=100)
|
||||||
|
repeat_penalty: float = Field(default=1.1, ge=1.0, le=2.0)
|
||||||
|
|
||||||
|
# Metadata
|
||||||
|
size_bytes: int = Field(default=0, ge=0)
|
||||||
|
created_at: datetime = Field(default_factory=datetime.now)
|
||||||
|
tags: list[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
use_enum_values = True
|
||||||
|
|
||||||
|
|
||||||
|
class Model:
|
||||||
|
"""
|
||||||
|
AI Model wrapper for Docker Model Runner.
|
||||||
|
|
||||||
|
Provides a high-level interface for managing and using AI models.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, config: ModelConfig):
|
||||||
|
self.config = config
|
||||||
|
self.status = ModelStatus.NOT_PULLED
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def id(self) -> str:
|
||||||
|
return self.config.id
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str:
|
||||||
|
return self.config.name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_ready(self) -> bool:
|
||||||
|
return self.status in (ModelStatus.READY, ModelStatus.LOADED)
|
||||||
|
|
||||||
|
async def pull(self, progress_callback: Optional[callable] = None) -> bool:
|
||||||
|
"""Pull the model from Docker Model Runner."""
|
||||||
|
async with self._lock:
|
||||||
|
if self.status == ModelStatus.READY:
|
||||||
|
return True
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.status = ModelStatus.PULLING
|
||||||
|
logger.info(f"Pulling model {self.id}...")
|
||||||
|
|
||||||
|
# Use docker model pull
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
"docker", "model", "pull", self.id,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
stdout, stderr = await process.communicate()
|
||||||
|
|
||||||
|
if process.returncode == 0:
|
||||||
|
self.status = ModelStatus.READY
|
||||||
|
logger.info(f"Model {self.id} pulled successfully")
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
self.status = ModelStatus.ERROR
|
||||||
|
logger.error(f"Failed to pull model: {stderr.decode()}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.status = ModelStatus.ERROR
|
||||||
|
logger.error(f"Error pulling model {self.id}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def load(self) -> bool:
|
||||||
|
"""Load the model into memory."""
|
||||||
|
if self.status not in (ModelStatus.READY, ModelStatus.LOADED):
|
||||||
|
logger.error(f"Model {self.id} is not ready")
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.status = ModelStatus.LOADING
|
||||||
|
|
||||||
|
# Start docker model serve
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
"docker", "model", "serve", self.id,
|
||||||
|
"--threads", str(self.config.threads),
|
||||||
|
"--ctx-size", str(self.config.context_length),
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.status = ModelStatus.LOADED
|
||||||
|
logger.info(f"Model {self.id} loaded")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.status = ModelStatus.ERROR
|
||||||
|
logger.error(f"Error loading model {self.id}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def generate(
|
||||||
|
self,
|
||||||
|
prompt: str,
|
||||||
|
max_tokens: int = 512,
|
||||||
|
temperature: Optional[float] = None,
|
||||||
|
stop_sequences: Optional[list[str]] = None,
|
||||||
|
) -> str:
|
||||||
|
"""Generate text using the model."""
|
||||||
|
if not self.is_ready:
|
||||||
|
raise RuntimeError(f"Model {self.id} is not ready")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Build generation request
|
||||||
|
cmd = [
|
||||||
|
"docker", "model", "run", self.id,
|
||||||
|
"--prompt", prompt,
|
||||||
|
"--max-tokens", str(max_tokens),
|
||||||
|
"--temperature", str(temperature or self.config.temperature),
|
||||||
|
]
|
||||||
|
|
||||||
|
if stop_sequences:
|
||||||
|
for seq in stop_sequences:
|
||||||
|
cmd.extend(["--stop", seq])
|
||||||
|
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
stdout, stderr = await process.communicate()
|
||||||
|
|
||||||
|
if process.returncode == 0:
|
||||||
|
return stdout.decode().strip()
|
||||||
|
else:
|
||||||
|
raise RuntimeError(f"Generation failed: {stderr.decode()}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error generating text: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
async def chat(
|
||||||
|
self,
|
||||||
|
messages: list[dict[str, str]],
|
||||||
|
max_tokens: int = 512,
|
||||||
|
temperature: Optional[float] = None,
|
||||||
|
) -> str:
|
||||||
|
"""Chat with the model."""
|
||||||
|
if not self.is_ready:
|
||||||
|
raise RuntimeError(f"Model {self.id} is not ready")
|
||||||
|
|
||||||
|
# Format messages for chat
|
||||||
|
formatted = []
|
||||||
|
for msg in messages:
|
||||||
|
role = msg.get("role", "user")
|
||||||
|
content = msg.get("content", "")
|
||||||
|
formatted.append(f"{role}: {content}")
|
||||||
|
|
||||||
|
prompt = "\n".join(formatted) + "\nassistant:"
|
||||||
|
return await self.generate(prompt, max_tokens, temperature)
|
||||||
|
|
||||||
|
def to_dict(self) -> dict[str, Any]:
|
||||||
|
"""Convert model to dictionary."""
|
||||||
|
return {
|
||||||
|
"id": self.id,
|
||||||
|
"name": self.name,
|
||||||
|
"status": self.status.value,
|
||||||
|
"config": self.config.model_dump(),
|
||||||
|
}
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"Model(id={self.id}, status={self.status.value})"
|
||||||
|
|
||||||
|
|
||||||
|
class ModelManager:
|
||||||
|
"""
|
||||||
|
Manager for AI models.
|
||||||
|
|
||||||
|
Handles model discovery, pulling, loading, and lifecycle management.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, config_dir: Optional[Path] = None):
|
||||||
|
self.config_dir = config_dir or Path.home() / ".config" / "debai" / "models"
|
||||||
|
self.config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.models: dict[str, Model] = {}
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
|
||||||
|
async def discover_models(self) -> list[ModelConfig]:
|
||||||
|
"""Discover available models from Docker Model Runner."""
|
||||||
|
models = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
"docker", "model", "list", "--format", "json",
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
stdout, stderr = await process.communicate()
|
||||||
|
|
||||||
|
if process.returncode == 0:
|
||||||
|
data = json.loads(stdout.decode())
|
||||||
|
for item in data:
|
||||||
|
config = ModelConfig(
|
||||||
|
id=item.get("name", item.get("id", "")),
|
||||||
|
name=item.get("name", ""),
|
||||||
|
size_bytes=item.get("size", 0),
|
||||||
|
)
|
||||||
|
models.append(config)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error discovering models: {e}")
|
||||||
|
|
||||||
|
return models
|
||||||
|
|
||||||
|
async def add_model(self, config: ModelConfig) -> Model:
|
||||||
|
"""Add a model to the manager."""
|
||||||
|
async with self._lock:
|
||||||
|
if config.id in self.models:
|
||||||
|
return self.models[config.id]
|
||||||
|
|
||||||
|
model = Model(config)
|
||||||
|
self.models[config.id] = model
|
||||||
|
|
||||||
|
# Save configuration
|
||||||
|
self._save_model_config(model)
|
||||||
|
|
||||||
|
logger.info(f"Added model: {model.name}")
|
||||||
|
return model
|
||||||
|
|
||||||
|
def get_model(self, model_id: str) -> Optional[Model]:
|
||||||
|
"""Get a model by ID."""
|
||||||
|
return self.models.get(model_id)
|
||||||
|
|
||||||
|
def list_models(
|
||||||
|
self,
|
||||||
|
status: Optional[ModelStatus] = None,
|
||||||
|
capability: Optional[ModelCapability] = None,
|
||||||
|
) -> list[Model]:
|
||||||
|
"""List models with optional filtering."""
|
||||||
|
models = list(self.models.values())
|
||||||
|
|
||||||
|
if status:
|
||||||
|
models = [m for m in models if m.status == status]
|
||||||
|
|
||||||
|
if capability:
|
||||||
|
models = [
|
||||||
|
m for m in models
|
||||||
|
if capability in m.config.capabilities
|
||||||
|
]
|
||||||
|
|
||||||
|
return models
|
||||||
|
|
||||||
|
async def pull_model(
|
||||||
|
self,
|
||||||
|
model_id: str,
|
||||||
|
progress_callback: Optional[callable] = None,
|
||||||
|
) -> bool:
|
||||||
|
"""Pull a model by ID."""
|
||||||
|
model = self.get_model(model_id)
|
||||||
|
if model:
|
||||||
|
return await model.pull(progress_callback)
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def remove_model(self, model_id: str) -> bool:
|
||||||
|
"""Remove a model."""
|
||||||
|
async with self._lock:
|
||||||
|
model = self.models.get(model_id)
|
||||||
|
if not model:
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Remove from Docker Model Runner
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
"docker", "model", "rm", model_id,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
await process.communicate()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Error removing model from runner: {e}")
|
||||||
|
|
||||||
|
# Remove configuration
|
||||||
|
config_path = self.config_dir / f"{model_id.replace(':', '_')}.yaml"
|
||||||
|
if config_path.exists():
|
||||||
|
config_path.unlink()
|
||||||
|
|
||||||
|
del self.models[model_id]
|
||||||
|
logger.info(f"Removed model: {model_id}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def load_models(self) -> int:
|
||||||
|
"""Load models from configuration directory."""
|
||||||
|
count = 0
|
||||||
|
for config_file in self.config_dir.glob("*.yaml"):
|
||||||
|
try:
|
||||||
|
with open(config_file) as f:
|
||||||
|
data = yaml.safe_load(f)
|
||||||
|
|
||||||
|
config = ModelConfig(**data)
|
||||||
|
model = Model(config)
|
||||||
|
model.status = ModelStatus.READY # Assume ready if configured
|
||||||
|
self.models[config.id] = model
|
||||||
|
count += 1
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to load model from {config_file}: {e}")
|
||||||
|
|
||||||
|
logger.info(f"Loaded {count} models from {self.config_dir}")
|
||||||
|
return count
|
||||||
|
|
||||||
|
def save_models(self) -> int:
|
||||||
|
"""Save all model configurations."""
|
||||||
|
count = 0
|
||||||
|
for model in self.models.values():
|
||||||
|
try:
|
||||||
|
self._save_model_config(model)
|
||||||
|
count += 1
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to save model {model.id}: {e}")
|
||||||
|
return count
|
||||||
|
|
||||||
|
def _save_model_config(self, model: Model) -> None:
|
||||||
|
"""Save model configuration to file."""
|
||||||
|
config_path = self.config_dir / f"{model.id.replace(':', '_')}.yaml"
|
||||||
|
with open(config_path, "w") as f:
|
||||||
|
yaml.dump(model.config.model_dump(), f, default_flow_style=False)
|
||||||
|
|
||||||
|
def get_statistics(self) -> dict[str, Any]:
|
||||||
|
"""Get model statistics."""
|
||||||
|
total = len(self.models)
|
||||||
|
by_status = {}
|
||||||
|
total_size = 0
|
||||||
|
|
||||||
|
for model in self.models.values():
|
||||||
|
status = model.status.value
|
||||||
|
by_status[status] = by_status.get(status, 0) + 1
|
||||||
|
total_size += model.config.size_bytes
|
||||||
|
|
||||||
|
return {
|
||||||
|
"total": total,
|
||||||
|
"by_status": by_status,
|
||||||
|
"total_size_bytes": total_size,
|
||||||
|
"total_size_gb": round(total_size / (1024**3), 2),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Recommended models for different use cases
|
||||||
|
RECOMMENDED_MODELS = {
|
||||||
|
"general": ModelConfig(
|
||||||
|
id="llama3.2:3b",
|
||||||
|
name="Llama 3.2 3B",
|
||||||
|
description="Balanced model for general tasks",
|
||||||
|
parameter_count="3B",
|
||||||
|
context_length=8192,
|
||||||
|
capabilities=[
|
||||||
|
ModelCapability.TEXT_GENERATION,
|
||||||
|
ModelCapability.CHAT,
|
||||||
|
ModelCapability.CODE_GENERATION,
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"code": ModelConfig(
|
||||||
|
id="codellama:7b",
|
||||||
|
name="Code Llama 7B",
|
||||||
|
description="Specialized for code generation and analysis",
|
||||||
|
parameter_count="7B",
|
||||||
|
context_length=16384,
|
||||||
|
capabilities=[
|
||||||
|
ModelCapability.CODE_GENERATION,
|
||||||
|
ModelCapability.TEXT_GENERATION,
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"small": ModelConfig(
|
||||||
|
id="llama3.2:1b",
|
||||||
|
name="Llama 3.2 1B",
|
||||||
|
description="Lightweight model for simple tasks",
|
||||||
|
parameter_count="1B",
|
||||||
|
context_length=4096,
|
||||||
|
capabilities=[
|
||||||
|
ModelCapability.TEXT_GENERATION,
|
||||||
|
ModelCapability.CHAT,
|
||||||
|
],
|
||||||
|
),
|
||||||
|
"large": ModelConfig(
|
||||||
|
id="llama3.1:8b",
|
||||||
|
name="Llama 3.1 8B",
|
||||||
|
description="Larger model for complex tasks",
|
||||||
|
parameter_count="8B",
|
||||||
|
context_length=16384,
|
||||||
|
capabilities=[
|
||||||
|
ModelCapability.TEXT_GENERATION,
|
||||||
|
ModelCapability.CHAT,
|
||||||
|
ModelCapability.CODE_GENERATION,
|
||||||
|
ModelCapability.FUNCTION_CALLING,
|
||||||
|
],
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_recommended_model(use_case: str) -> Optional[ModelConfig]:
|
||||||
|
"""Get a recommended model for a specific use case."""
|
||||||
|
return RECOMMENDED_MODELS.get(use_case)
|
||||||
|
|
||||||
|
|
||||||
|
def list_recommended_models() -> list[str]:
|
||||||
|
"""List available recommended model configurations."""
|
||||||
|
return list(RECOMMENDED_MODELS.keys())
|
||||||
581
src/debai/core/system.py
Archivo normal
581
src/debai/core/system.py
Archivo normal
@@ -0,0 +1,581 @@
|
|||||||
|
"""
|
||||||
|
System information and resource monitoring module for Debai.
|
||||||
|
|
||||||
|
This module provides utilities for gathering system information and
|
||||||
|
monitoring resource usage.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import subprocess
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import psutil
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class CPUInfo:
|
||||||
|
"""CPU information."""
|
||||||
|
|
||||||
|
model: str
|
||||||
|
cores_physical: int
|
||||||
|
cores_logical: int
|
||||||
|
frequency_mhz: float
|
||||||
|
usage_percent: float
|
||||||
|
temperature: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class MemoryInfo:
|
||||||
|
"""Memory information."""
|
||||||
|
|
||||||
|
total_bytes: int
|
||||||
|
available_bytes: int
|
||||||
|
used_bytes: int
|
||||||
|
percent_used: float
|
||||||
|
swap_total_bytes: int
|
||||||
|
swap_used_bytes: int
|
||||||
|
swap_percent_used: float
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class DiskInfo:
|
||||||
|
"""Disk information for a partition."""
|
||||||
|
|
||||||
|
device: str
|
||||||
|
mountpoint: str
|
||||||
|
filesystem: str
|
||||||
|
total_bytes: int
|
||||||
|
used_bytes: int
|
||||||
|
free_bytes: int
|
||||||
|
percent_used: float
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class NetworkInfo:
|
||||||
|
"""Network interface information."""
|
||||||
|
|
||||||
|
interface: str
|
||||||
|
ip_address: str
|
||||||
|
mac_address: str
|
||||||
|
bytes_sent: int
|
||||||
|
bytes_recv: int
|
||||||
|
packets_sent: int
|
||||||
|
packets_recv: int
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ProcessInfo:
|
||||||
|
"""Process information."""
|
||||||
|
|
||||||
|
pid: int
|
||||||
|
name: str
|
||||||
|
username: str
|
||||||
|
cpu_percent: float
|
||||||
|
memory_percent: float
|
||||||
|
memory_bytes: int
|
||||||
|
status: str
|
||||||
|
created: datetime
|
||||||
|
|
||||||
|
|
||||||
|
class SystemInfo:
|
||||||
|
"""
|
||||||
|
Gathers system information.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_hostname() -> str:
|
||||||
|
"""Get the system hostname."""
|
||||||
|
return platform.node()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_os_info() -> dict[str, str]:
|
||||||
|
"""Get operating system information."""
|
||||||
|
return {
|
||||||
|
"system": platform.system(),
|
||||||
|
"release": platform.release(),
|
||||||
|
"version": platform.version(),
|
||||||
|
"machine": platform.machine(),
|
||||||
|
"processor": platform.processor(),
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_distro_info() -> dict[str, str]:
|
||||||
|
"""Get Linux distribution information."""
|
||||||
|
info = {
|
||||||
|
"name": "",
|
||||||
|
"version": "",
|
||||||
|
"codename": "",
|
||||||
|
"id": "",
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Try /etc/os-release first
|
||||||
|
os_release = Path("/etc/os-release")
|
||||||
|
if os_release.exists():
|
||||||
|
for line in os_release.read_text().splitlines():
|
||||||
|
if "=" in line:
|
||||||
|
key, value = line.split("=", 1)
|
||||||
|
value = value.strip('"')
|
||||||
|
if key == "NAME":
|
||||||
|
info["name"] = value
|
||||||
|
elif key == "VERSION_ID":
|
||||||
|
info["version"] = value
|
||||||
|
elif key == "VERSION_CODENAME":
|
||||||
|
info["codename"] = value
|
||||||
|
elif key == "ID":
|
||||||
|
info["id"] = value
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Error reading distro info: {e}")
|
||||||
|
|
||||||
|
return info
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_cpu_info() -> CPUInfo:
|
||||||
|
"""Get CPU information."""
|
||||||
|
freq = psutil.cpu_freq()
|
||||||
|
|
||||||
|
# Get CPU model
|
||||||
|
model = ""
|
||||||
|
try:
|
||||||
|
with open("/proc/cpuinfo") as f:
|
||||||
|
for line in f:
|
||||||
|
if line.startswith("model name"):
|
||||||
|
model = line.split(":")[1].strip()
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
model = platform.processor()
|
||||||
|
|
||||||
|
# Get temperature if available
|
||||||
|
temp = None
|
||||||
|
try:
|
||||||
|
temps = psutil.sensors_temperatures()
|
||||||
|
if temps:
|
||||||
|
for name, entries in temps.items():
|
||||||
|
if entries:
|
||||||
|
temp = entries[0].current
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return CPUInfo(
|
||||||
|
model=model,
|
||||||
|
cores_physical=psutil.cpu_count(logical=False) or 1,
|
||||||
|
cores_logical=psutil.cpu_count(logical=True) or 1,
|
||||||
|
frequency_mhz=freq.current if freq else 0,
|
||||||
|
usage_percent=psutil.cpu_percent(interval=0.1),
|
||||||
|
temperature=temp,
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_memory_info() -> MemoryInfo:
|
||||||
|
"""Get memory information."""
|
||||||
|
mem = psutil.virtual_memory()
|
||||||
|
swap = psutil.swap_memory()
|
||||||
|
|
||||||
|
return MemoryInfo(
|
||||||
|
total_bytes=mem.total,
|
||||||
|
available_bytes=mem.available,
|
||||||
|
used_bytes=mem.used,
|
||||||
|
percent_used=mem.percent,
|
||||||
|
swap_total_bytes=swap.total,
|
||||||
|
swap_used_bytes=swap.used,
|
||||||
|
swap_percent_used=swap.percent,
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_disk_info() -> list[DiskInfo]:
|
||||||
|
"""Get disk information for all partitions."""
|
||||||
|
disks = []
|
||||||
|
|
||||||
|
for partition in psutil.disk_partitions(all=False):
|
||||||
|
try:
|
||||||
|
usage = psutil.disk_usage(partition.mountpoint)
|
||||||
|
disks.append(DiskInfo(
|
||||||
|
device=partition.device,
|
||||||
|
mountpoint=partition.mountpoint,
|
||||||
|
filesystem=partition.fstype,
|
||||||
|
total_bytes=usage.total,
|
||||||
|
used_bytes=usage.used,
|
||||||
|
free_bytes=usage.free,
|
||||||
|
percent_used=usage.percent,
|
||||||
|
))
|
||||||
|
except (PermissionError, OSError):
|
||||||
|
continue
|
||||||
|
|
||||||
|
return disks
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_network_info() -> list[NetworkInfo]:
|
||||||
|
"""Get network interface information."""
|
||||||
|
interfaces = []
|
||||||
|
|
||||||
|
addrs = psutil.net_if_addrs()
|
||||||
|
stats = psutil.net_io_counters(pernic=True)
|
||||||
|
|
||||||
|
for name, addr_list in addrs.items():
|
||||||
|
if name == "lo":
|
||||||
|
continue
|
||||||
|
|
||||||
|
ip_addr = ""
|
||||||
|
mac_addr = ""
|
||||||
|
|
||||||
|
for addr in addr_list:
|
||||||
|
if addr.family.name == "AF_INET":
|
||||||
|
ip_addr = addr.address
|
||||||
|
elif addr.family.name == "AF_PACKET":
|
||||||
|
mac_addr = addr.address
|
||||||
|
|
||||||
|
io = stats.get(name)
|
||||||
|
|
||||||
|
interfaces.append(NetworkInfo(
|
||||||
|
interface=name,
|
||||||
|
ip_address=ip_addr,
|
||||||
|
mac_address=mac_addr,
|
||||||
|
bytes_sent=io.bytes_sent if io else 0,
|
||||||
|
bytes_recv=io.bytes_recv if io else 0,
|
||||||
|
packets_sent=io.packets_sent if io else 0,
|
||||||
|
packets_recv=io.packets_recv if io else 0,
|
||||||
|
))
|
||||||
|
|
||||||
|
return interfaces
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_uptime() -> float:
|
||||||
|
"""Get system uptime in seconds."""
|
||||||
|
return datetime.now().timestamp() - psutil.boot_time()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_uptime_string() -> str:
|
||||||
|
"""Get human-readable uptime string."""
|
||||||
|
uptime = SystemInfo.get_uptime()
|
||||||
|
|
||||||
|
days = int(uptime // 86400)
|
||||||
|
hours = int((uptime % 86400) // 3600)
|
||||||
|
minutes = int((uptime % 3600) // 60)
|
||||||
|
|
||||||
|
parts = []
|
||||||
|
if days > 0:
|
||||||
|
parts.append(f"{days}d")
|
||||||
|
if hours > 0:
|
||||||
|
parts.append(f"{hours}h")
|
||||||
|
parts.append(f"{minutes}m")
|
||||||
|
|
||||||
|
return " ".join(parts)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_load_average() -> tuple[float, float, float]:
|
||||||
|
"""Get system load average (1, 5, 15 minutes)."""
|
||||||
|
return os.getloadavg()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_logged_users() -> list[str]:
|
||||||
|
"""Get list of logged-in users."""
|
||||||
|
users = []
|
||||||
|
for user in psutil.users():
|
||||||
|
users.append(f"{user.name}@{user.terminal}")
|
||||||
|
return users
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_summary() -> dict[str, Any]:
|
||||||
|
"""Get a summary of system information."""
|
||||||
|
return {
|
||||||
|
"hostname": SystemInfo.get_hostname(),
|
||||||
|
"os": SystemInfo.get_os_info(),
|
||||||
|
"distro": SystemInfo.get_distro_info(),
|
||||||
|
"cpu": SystemInfo.get_cpu_info().__dict__,
|
||||||
|
"memory": SystemInfo.get_memory_info().__dict__,
|
||||||
|
"uptime": SystemInfo.get_uptime_string(),
|
||||||
|
"load_average": SystemInfo.get_load_average(),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceMonitor:
|
||||||
|
"""
|
||||||
|
Monitors system resources continuously.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
interval_seconds: float = 5.0,
|
||||||
|
history_size: int = 100,
|
||||||
|
):
|
||||||
|
self.interval = interval_seconds
|
||||||
|
self.history_size = history_size
|
||||||
|
self.history: list[dict[str, Any]] = []
|
||||||
|
self._running = False
|
||||||
|
self._task: Optional[asyncio.Task] = None
|
||||||
|
self._callbacks: list[callable] = []
|
||||||
|
|
||||||
|
# Thresholds
|
||||||
|
self.thresholds = {
|
||||||
|
"cpu_percent": 80.0,
|
||||||
|
"memory_percent": 85.0,
|
||||||
|
"disk_percent": 90.0,
|
||||||
|
"load_1min": psutil.cpu_count() or 1,
|
||||||
|
}
|
||||||
|
self._alerts: list[dict[str, Any]] = []
|
||||||
|
|
||||||
|
def on_update(self, callback: callable) -> None:
|
||||||
|
"""Register a callback for updates."""
|
||||||
|
self._callbacks.append(callback)
|
||||||
|
|
||||||
|
def set_threshold(self, metric: str, value: float) -> None:
|
||||||
|
"""Set a threshold for a metric."""
|
||||||
|
self.thresholds[metric] = value
|
||||||
|
|
||||||
|
async def start(self) -> None:
|
||||||
|
"""Start the resource monitor."""
|
||||||
|
if self._running:
|
||||||
|
return
|
||||||
|
|
||||||
|
self._running = True
|
||||||
|
self._task = asyncio.create_task(self._monitor_loop())
|
||||||
|
logger.info("Resource monitor started")
|
||||||
|
|
||||||
|
async def stop(self) -> None:
|
||||||
|
"""Stop the resource monitor."""
|
||||||
|
self._running = False
|
||||||
|
if self._task:
|
||||||
|
self._task.cancel()
|
||||||
|
try:
|
||||||
|
await self._task
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
pass
|
||||||
|
logger.info("Resource monitor stopped")
|
||||||
|
|
||||||
|
async def _monitor_loop(self) -> None:
|
||||||
|
"""Main monitoring loop."""
|
||||||
|
while self._running:
|
||||||
|
try:
|
||||||
|
snapshot = self._take_snapshot()
|
||||||
|
self._add_to_history(snapshot)
|
||||||
|
self._check_thresholds(snapshot)
|
||||||
|
|
||||||
|
# Notify callbacks
|
||||||
|
for callback in self._callbacks:
|
||||||
|
try:
|
||||||
|
callback(snapshot)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in monitor callback: {e}")
|
||||||
|
|
||||||
|
await asyncio.sleep(self.interval)
|
||||||
|
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in monitor loop: {e}")
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
|
||||||
|
def _take_snapshot(self) -> dict[str, Any]:
|
||||||
|
"""Take a resource snapshot."""
|
||||||
|
cpu = psutil.cpu_percent(interval=0.1)
|
||||||
|
mem = psutil.virtual_memory()
|
||||||
|
load = os.getloadavg()
|
||||||
|
|
||||||
|
# Get top processes by CPU
|
||||||
|
top_cpu = []
|
||||||
|
for proc in sorted(
|
||||||
|
psutil.process_iter(['pid', 'name', 'cpu_percent', 'memory_percent']),
|
||||||
|
key=lambda p: p.info.get('cpu_percent', 0) or 0,
|
||||||
|
reverse=True,
|
||||||
|
)[:5]:
|
||||||
|
try:
|
||||||
|
info = proc.info
|
||||||
|
top_cpu.append({
|
||||||
|
"pid": info['pid'],
|
||||||
|
"name": info['name'],
|
||||||
|
"cpu_percent": info['cpu_percent'],
|
||||||
|
"memory_percent": info['memory_percent'],
|
||||||
|
})
|
||||||
|
except (psutil.NoSuchProcess, psutil.AccessDenied):
|
||||||
|
continue
|
||||||
|
|
||||||
|
return {
|
||||||
|
"timestamp": datetime.now().isoformat(),
|
||||||
|
"cpu_percent": cpu,
|
||||||
|
"memory_percent": mem.percent,
|
||||||
|
"memory_available_mb": mem.available / (1024 * 1024),
|
||||||
|
"load_1min": load[0],
|
||||||
|
"load_5min": load[1],
|
||||||
|
"load_15min": load[2],
|
||||||
|
"top_processes": top_cpu,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _add_to_history(self, snapshot: dict[str, Any]) -> None:
|
||||||
|
"""Add a snapshot to history."""
|
||||||
|
self.history.append(snapshot)
|
||||||
|
if len(self.history) > self.history_size:
|
||||||
|
self.history.pop(0)
|
||||||
|
|
||||||
|
def _check_thresholds(self, snapshot: dict[str, Any]) -> None:
|
||||||
|
"""Check thresholds and generate alerts."""
|
||||||
|
alerts = []
|
||||||
|
|
||||||
|
if snapshot["cpu_percent"] > self.thresholds["cpu_percent"]:
|
||||||
|
alerts.append({
|
||||||
|
"type": "cpu",
|
||||||
|
"message": f"CPU usage is {snapshot['cpu_percent']:.1f}%",
|
||||||
|
"value": snapshot["cpu_percent"],
|
||||||
|
"threshold": self.thresholds["cpu_percent"],
|
||||||
|
})
|
||||||
|
|
||||||
|
if snapshot["memory_percent"] > self.thresholds["memory_percent"]:
|
||||||
|
alerts.append({
|
||||||
|
"type": "memory",
|
||||||
|
"message": f"Memory usage is {snapshot['memory_percent']:.1f}%",
|
||||||
|
"value": snapshot["memory_percent"],
|
||||||
|
"threshold": self.thresholds["memory_percent"],
|
||||||
|
})
|
||||||
|
|
||||||
|
if snapshot["load_1min"] > self.thresholds["load_1min"]:
|
||||||
|
alerts.append({
|
||||||
|
"type": "load",
|
||||||
|
"message": f"Load average is {snapshot['load_1min']:.2f}",
|
||||||
|
"value": snapshot["load_1min"],
|
||||||
|
"threshold": self.thresholds["load_1min"],
|
||||||
|
})
|
||||||
|
|
||||||
|
if alerts:
|
||||||
|
for alert in alerts:
|
||||||
|
alert["timestamp"] = snapshot["timestamp"]
|
||||||
|
self._alerts.extend(alerts)
|
||||||
|
|
||||||
|
# Keep only recent alerts
|
||||||
|
if len(self._alerts) > 100:
|
||||||
|
self._alerts = self._alerts[-100:]
|
||||||
|
|
||||||
|
def get_latest(self) -> Optional[dict[str, Any]]:
|
||||||
|
"""Get the latest snapshot."""
|
||||||
|
return self.history[-1] if self.history else None
|
||||||
|
|
||||||
|
def get_history(self, count: int = 0) -> list[dict[str, Any]]:
|
||||||
|
"""Get recent history."""
|
||||||
|
if count <= 0:
|
||||||
|
return list(self.history)
|
||||||
|
return list(self.history[-count:])
|
||||||
|
|
||||||
|
def get_alerts(self, count: int = 10) -> list[dict[str, Any]]:
|
||||||
|
"""Get recent alerts."""
|
||||||
|
return list(self._alerts[-count:])
|
||||||
|
|
||||||
|
def get_average(self, metric: str, minutes: int = 5) -> Optional[float]:
|
||||||
|
"""Get average value of a metric over a time period."""
|
||||||
|
if not self.history:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Calculate how many samples to use
|
||||||
|
samples_per_minute = 60 / self.interval
|
||||||
|
samples = int(samples_per_minute * minutes)
|
||||||
|
|
||||||
|
recent = self.history[-samples:] if samples < len(self.history) else self.history
|
||||||
|
values = [s.get(metric, 0) for s in recent if metric in s]
|
||||||
|
|
||||||
|
return sum(values) / len(values) if values else None
|
||||||
|
|
||||||
|
def get_statistics(self) -> dict[str, Any]:
|
||||||
|
"""Get monitoring statistics."""
|
||||||
|
if not self.history:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
cpu_values = [s["cpu_percent"] for s in self.history]
|
||||||
|
mem_values = [s["memory_percent"] for s in self.history]
|
||||||
|
|
||||||
|
return {
|
||||||
|
"samples": len(self.history),
|
||||||
|
"interval_seconds": self.interval,
|
||||||
|
"cpu": {
|
||||||
|
"current": cpu_values[-1],
|
||||||
|
"average": sum(cpu_values) / len(cpu_values),
|
||||||
|
"max": max(cpu_values),
|
||||||
|
"min": min(cpu_values),
|
||||||
|
},
|
||||||
|
"memory": {
|
||||||
|
"current": mem_values[-1],
|
||||||
|
"average": sum(mem_values) / len(mem_values),
|
||||||
|
"max": max(mem_values),
|
||||||
|
"min": min(mem_values),
|
||||||
|
},
|
||||||
|
"alerts_count": len(self._alerts),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def format_bytes(size: int) -> str:
|
||||||
|
"""Format bytes to human-readable string."""
|
||||||
|
for unit in ["B", "KB", "MB", "GB", "TB"]:
|
||||||
|
if abs(size) < 1024.0:
|
||||||
|
return f"{size:.1f} {unit}"
|
||||||
|
size /= 1024.0
|
||||||
|
return f"{size:.1f} PB"
|
||||||
|
|
||||||
|
|
||||||
|
def get_docker_status() -> dict[str, Any]:
|
||||||
|
"""Get Docker status."""
|
||||||
|
result = {
|
||||||
|
"installed": False,
|
||||||
|
"running": False,
|
||||||
|
"version": "",
|
||||||
|
"containers": 0,
|
||||||
|
"images": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Check if docker is installed
|
||||||
|
version = subprocess.run(
|
||||||
|
["docker", "--version"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
)
|
||||||
|
if version.returncode == 0:
|
||||||
|
result["installed"] = True
|
||||||
|
result["version"] = version.stdout.strip()
|
||||||
|
|
||||||
|
# Check if docker is running
|
||||||
|
info = subprocess.run(
|
||||||
|
["docker", "info", "--format", "{{json .}}"],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
)
|
||||||
|
if info.returncode == 0:
|
||||||
|
result["running"] = True
|
||||||
|
import json
|
||||||
|
data = json.loads(info.stdout)
|
||||||
|
result["containers"] = data.get("Containers", 0)
|
||||||
|
result["images"] = data.get("Images", 0)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(f"Error checking Docker status: {e}")
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def check_dependencies() -> dict[str, bool]:
|
||||||
|
"""Check if required dependencies are available."""
|
||||||
|
deps = {
|
||||||
|
"docker": False,
|
||||||
|
"docker-model": False,
|
||||||
|
"cagent": False,
|
||||||
|
"qemu-img": False,
|
||||||
|
"genisoimage": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
for dep in deps:
|
||||||
|
try:
|
||||||
|
result = subprocess.run(
|
||||||
|
["which", dep],
|
||||||
|
capture_output=True,
|
||||||
|
)
|
||||||
|
deps[dep] = result.returncode == 0
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return deps
|
||||||
571
src/debai/core/task.py
Archivo normal
571
src/debai/core/task.py
Archivo normal
@@ -0,0 +1,571 @@
|
|||||||
|
"""
|
||||||
|
Task management module for Debai.
|
||||||
|
|
||||||
|
This module provides classes and utilities for creating and managing
|
||||||
|
automated tasks that agents can execute.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Callable, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class TaskStatus(str, Enum):
|
||||||
|
"""Task status enumeration."""
|
||||||
|
PENDING = "pending"
|
||||||
|
SCHEDULED = "scheduled"
|
||||||
|
RUNNING = "running"
|
||||||
|
COMPLETED = "completed"
|
||||||
|
FAILED = "failed"
|
||||||
|
CANCELLED = "cancelled"
|
||||||
|
PAUSED = "paused"
|
||||||
|
|
||||||
|
|
||||||
|
class TaskPriority(str, Enum):
|
||||||
|
"""Task priority levels."""
|
||||||
|
LOW = "low"
|
||||||
|
NORMAL = "normal"
|
||||||
|
HIGH = "high"
|
||||||
|
CRITICAL = "critical"
|
||||||
|
|
||||||
|
|
||||||
|
class TaskType(str, Enum):
|
||||||
|
"""Types of tasks."""
|
||||||
|
COMMAND = "command" # Execute shell command
|
||||||
|
SCRIPT = "script" # Run a script file
|
||||||
|
AGENT = "agent" # Delegate to agent
|
||||||
|
WORKFLOW = "workflow" # Multi-step workflow
|
||||||
|
SCHEDULED = "scheduled" # Cron-scheduled task
|
||||||
|
EVENT = "event" # Event-triggered task
|
||||||
|
|
||||||
|
|
||||||
|
class TaskConfig(BaseModel):
|
||||||
|
"""Configuration for a task."""
|
||||||
|
|
||||||
|
id: str = Field(default_factory=lambda: str(uuid.uuid4())[:8])
|
||||||
|
name: str = Field(..., min_length=1, max_length=128)
|
||||||
|
description: str = Field(default="")
|
||||||
|
task_type: TaskType = Field(default=TaskType.COMMAND)
|
||||||
|
priority: TaskPriority = Field(default=TaskPriority.NORMAL)
|
||||||
|
|
||||||
|
# Execution
|
||||||
|
command: str = Field(default="")
|
||||||
|
script_path: str = Field(default="")
|
||||||
|
agent_id: Optional[str] = Field(default=None)
|
||||||
|
working_directory: str = Field(default="/tmp")
|
||||||
|
environment: dict[str, str] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
# Scheduling
|
||||||
|
schedule_cron: Optional[str] = Field(default=None)
|
||||||
|
schedule_at: Optional[datetime] = Field(default=None)
|
||||||
|
repeat_count: int = Field(default=0) # 0 = infinite
|
||||||
|
repeat_interval_minutes: int = Field(default=0)
|
||||||
|
|
||||||
|
# Limits
|
||||||
|
timeout_seconds: int = Field(default=300, ge=1, le=86400)
|
||||||
|
max_retries: int = Field(default=3, ge=0, le=10)
|
||||||
|
retry_delay_seconds: int = Field(default=30, ge=1, le=3600)
|
||||||
|
|
||||||
|
# Dependencies
|
||||||
|
depends_on: list[str] = Field(default_factory=list)
|
||||||
|
blocks: list[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
# Notifications
|
||||||
|
notify_on_complete: bool = Field(default=False)
|
||||||
|
notify_on_failure: bool = Field(default=True)
|
||||||
|
notification_email: Optional[str] = Field(default=None)
|
||||||
|
|
||||||
|
# Metadata
|
||||||
|
created_at: datetime = Field(default_factory=datetime.now)
|
||||||
|
updated_at: datetime = Field(default_factory=datetime.now)
|
||||||
|
tags: list[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
use_enum_values = True
|
||||||
|
|
||||||
|
|
||||||
|
class TaskResult(BaseModel):
|
||||||
|
"""Result of a task execution."""
|
||||||
|
|
||||||
|
task_id: str
|
||||||
|
success: bool
|
||||||
|
exit_code: int = Field(default=0)
|
||||||
|
stdout: str = Field(default="")
|
||||||
|
stderr: str = Field(default="")
|
||||||
|
started_at: datetime
|
||||||
|
completed_at: datetime
|
||||||
|
duration_seconds: float
|
||||||
|
retry_count: int = Field(default=0)
|
||||||
|
error_message: Optional[str] = Field(default=None)
|
||||||
|
metadata: dict[str, Any] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class Task:
|
||||||
|
"""
|
||||||
|
A task that can be executed by the system or delegated to an agent.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, config: TaskConfig):
|
||||||
|
self.config = config
|
||||||
|
self.status = TaskStatus.PENDING
|
||||||
|
self.current_retry = 0
|
||||||
|
self.last_result: Optional[TaskResult] = None
|
||||||
|
self.history: list[TaskResult] = []
|
||||||
|
self._callbacks: dict[str, list[Callable]] = {
|
||||||
|
"on_start": [],
|
||||||
|
"on_complete": [],
|
||||||
|
"on_failure": [],
|
||||||
|
"on_retry": [],
|
||||||
|
}
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
self._process: Optional[asyncio.subprocess.Process] = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def id(self) -> str:
|
||||||
|
return self.config.id
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str:
|
||||||
|
return self.config.name
|
||||||
|
|
||||||
|
def on(self, event: str, callback: Callable) -> None:
|
||||||
|
"""Register an event callback."""
|
||||||
|
if event in self._callbacks:
|
||||||
|
self._callbacks[event].append(callback)
|
||||||
|
|
||||||
|
def _emit(self, event: str, *args, **kwargs) -> None:
|
||||||
|
"""Emit an event to all registered callbacks."""
|
||||||
|
for callback in self._callbacks.get(event, []):
|
||||||
|
try:
|
||||||
|
callback(*args, **kwargs)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in callback for {event}: {e}")
|
||||||
|
|
||||||
|
async def execute(self) -> TaskResult:
|
||||||
|
"""Execute the task."""
|
||||||
|
async with self._lock:
|
||||||
|
started_at = datetime.now()
|
||||||
|
self.status = TaskStatus.RUNNING
|
||||||
|
self._emit("on_start", self)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if self.config.task_type == TaskType.COMMAND:
|
||||||
|
result = await self._execute_command()
|
||||||
|
elif self.config.task_type == TaskType.SCRIPT:
|
||||||
|
result = await self._execute_script()
|
||||||
|
elif self.config.task_type == TaskType.AGENT:
|
||||||
|
result = await self._execute_agent()
|
||||||
|
else:
|
||||||
|
result = await self._execute_command()
|
||||||
|
|
||||||
|
completed_at = datetime.now()
|
||||||
|
|
||||||
|
task_result = TaskResult(
|
||||||
|
task_id=self.id,
|
||||||
|
success=result["success"],
|
||||||
|
exit_code=result.get("exit_code", 0),
|
||||||
|
stdout=result.get("stdout", ""),
|
||||||
|
stderr=result.get("stderr", ""),
|
||||||
|
started_at=started_at,
|
||||||
|
completed_at=completed_at,
|
||||||
|
duration_seconds=(completed_at - started_at).total_seconds(),
|
||||||
|
retry_count=self.current_retry,
|
||||||
|
)
|
||||||
|
|
||||||
|
if task_result.success:
|
||||||
|
self.status = TaskStatus.COMPLETED
|
||||||
|
self._emit("on_complete", self, task_result)
|
||||||
|
else:
|
||||||
|
if self.current_retry < self.config.max_retries:
|
||||||
|
self.current_retry += 1
|
||||||
|
self._emit("on_retry", self, self.current_retry)
|
||||||
|
await asyncio.sleep(self.config.retry_delay_seconds)
|
||||||
|
return await self.execute()
|
||||||
|
else:
|
||||||
|
self.status = TaskStatus.FAILED
|
||||||
|
self._emit("on_failure", self, task_result)
|
||||||
|
|
||||||
|
self.last_result = task_result
|
||||||
|
self.history.append(task_result)
|
||||||
|
return task_result
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
completed_at = datetime.now()
|
||||||
|
task_result = TaskResult(
|
||||||
|
task_id=self.id,
|
||||||
|
success=False,
|
||||||
|
exit_code=-1,
|
||||||
|
started_at=started_at,
|
||||||
|
completed_at=completed_at,
|
||||||
|
duration_seconds=(completed_at - started_at).total_seconds(),
|
||||||
|
error_message=str(e),
|
||||||
|
)
|
||||||
|
self.status = TaskStatus.FAILED
|
||||||
|
self._emit("on_failure", self, task_result)
|
||||||
|
self.last_result = task_result
|
||||||
|
self.history.append(task_result)
|
||||||
|
return task_result
|
||||||
|
|
||||||
|
async def _execute_command(self) -> dict[str, Any]:
|
||||||
|
"""Execute a shell command."""
|
||||||
|
try:
|
||||||
|
self._process = await asyncio.create_subprocess_shell(
|
||||||
|
self.config.command,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
cwd=self.config.working_directory,
|
||||||
|
env={**dict(Path("/etc/environment").read_text().split("=")) if Path("/etc/environment").exists() else {}, **self.config.environment},
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
stdout, stderr = await asyncio.wait_for(
|
||||||
|
self._process.communicate(),
|
||||||
|
timeout=self.config.timeout_seconds,
|
||||||
|
)
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
self._process.kill()
|
||||||
|
await self._process.wait()
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"exit_code": -1,
|
||||||
|
"stderr": "Task timed out",
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": self._process.returncode == 0,
|
||||||
|
"exit_code": self._process.returncode,
|
||||||
|
"stdout": stdout.decode() if stdout else "",
|
||||||
|
"stderr": stderr.decode() if stderr else "",
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"exit_code": -1,
|
||||||
|
"stderr": str(e),
|
||||||
|
}
|
||||||
|
|
||||||
|
async def _execute_script(self) -> dict[str, Any]:
|
||||||
|
"""Execute a script file."""
|
||||||
|
script_path = Path(self.config.script_path)
|
||||||
|
if not script_path.exists():
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"exit_code": -1,
|
||||||
|
"stderr": f"Script not found: {script_path}",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Determine interpreter
|
||||||
|
with open(script_path) as f:
|
||||||
|
first_line = f.readline()
|
||||||
|
|
||||||
|
if first_line.startswith("#!"):
|
||||||
|
interpreter = first_line[2:].strip()
|
||||||
|
command = f"{interpreter} {script_path}"
|
||||||
|
else:
|
||||||
|
command = f"bash {script_path}"
|
||||||
|
|
||||||
|
self.config.command = command
|
||||||
|
return await self._execute_command()
|
||||||
|
|
||||||
|
async def _execute_agent(self) -> dict[str, Any]:
|
||||||
|
"""Execute task via an agent."""
|
||||||
|
# This would integrate with the AgentManager
|
||||||
|
# For now, return a placeholder
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"exit_code": 0,
|
||||||
|
"stdout": f"Agent {self.config.agent_id} executed task",
|
||||||
|
}
|
||||||
|
|
||||||
|
async def cancel(self) -> bool:
|
||||||
|
"""Cancel the task."""
|
||||||
|
if self.status != TaskStatus.RUNNING:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if self._process:
|
||||||
|
self._process.terminate()
|
||||||
|
try:
|
||||||
|
await asyncio.wait_for(self._process.wait(), timeout=10)
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
self._process.kill()
|
||||||
|
|
||||||
|
self.status = TaskStatus.CANCELLED
|
||||||
|
return True
|
||||||
|
|
||||||
|
def to_dict(self) -> dict[str, Any]:
|
||||||
|
"""Convert task to dictionary."""
|
||||||
|
return {
|
||||||
|
"id": self.id,
|
||||||
|
"name": self.name,
|
||||||
|
"status": self.status.value,
|
||||||
|
"config": self.config.model_dump(),
|
||||||
|
"current_retry": self.current_retry,
|
||||||
|
"last_result": self.last_result.model_dump() if self.last_result else None,
|
||||||
|
"history_count": len(self.history),
|
||||||
|
}
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"Task(id={self.id}, name={self.name}, status={self.status.value})"
|
||||||
|
|
||||||
|
|
||||||
|
class TaskManager:
|
||||||
|
"""
|
||||||
|
Manager for tasks.
|
||||||
|
|
||||||
|
Handles task scheduling, execution, and lifecycle management.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, config_dir: Optional[Path] = None):
|
||||||
|
self.config_dir = config_dir or Path.home() / ".config" / "debai" / "tasks"
|
||||||
|
self.config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.tasks: dict[str, Task] = {}
|
||||||
|
self.queue: asyncio.Queue = asyncio.Queue()
|
||||||
|
self._running = False
|
||||||
|
self._worker_task: Optional[asyncio.Task] = None
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
|
||||||
|
async def create_task(self, config: TaskConfig) -> Task:
|
||||||
|
"""Create a new task."""
|
||||||
|
async with self._lock:
|
||||||
|
if config.id in self.tasks:
|
||||||
|
raise ValueError(f"Task with id {config.id} already exists")
|
||||||
|
|
||||||
|
task = Task(config)
|
||||||
|
self.tasks[config.id] = task
|
||||||
|
|
||||||
|
# Save configuration
|
||||||
|
self._save_task_config(task)
|
||||||
|
|
||||||
|
logger.info(f"Created task: {task.name}")
|
||||||
|
return task
|
||||||
|
|
||||||
|
def get_task(self, task_id: str) -> Optional[Task]:
|
||||||
|
"""Get a task by ID."""
|
||||||
|
return self.tasks.get(task_id)
|
||||||
|
|
||||||
|
def list_tasks(
|
||||||
|
self,
|
||||||
|
status: Optional[TaskStatus] = None,
|
||||||
|
task_type: Optional[TaskType] = None,
|
||||||
|
priority: Optional[TaskPriority] = None,
|
||||||
|
) -> list[Task]:
|
||||||
|
"""List tasks with optional filtering."""
|
||||||
|
tasks = list(self.tasks.values())
|
||||||
|
|
||||||
|
if status:
|
||||||
|
tasks = [t for t in tasks if t.status == status]
|
||||||
|
|
||||||
|
if task_type:
|
||||||
|
tasks = [t for t in tasks if t.config.task_type == task_type]
|
||||||
|
|
||||||
|
if priority:
|
||||||
|
tasks = [t for t in tasks if t.config.priority == priority]
|
||||||
|
|
||||||
|
return tasks
|
||||||
|
|
||||||
|
async def run_task(self, task_id: str) -> Optional[TaskResult]:
|
||||||
|
"""Run a task immediately."""
|
||||||
|
task = self.get_task(task_id)
|
||||||
|
if task:
|
||||||
|
return await task.execute()
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def queue_task(self, task_id: str) -> bool:
|
||||||
|
"""Add a task to the execution queue."""
|
||||||
|
task = self.get_task(task_id)
|
||||||
|
if task and task.status == TaskStatus.PENDING:
|
||||||
|
task.status = TaskStatus.SCHEDULED
|
||||||
|
await self.queue.put(task)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def cancel_task(self, task_id: str) -> bool:
|
||||||
|
"""Cancel a task."""
|
||||||
|
task = self.get_task(task_id)
|
||||||
|
if task:
|
||||||
|
return await task.cancel()
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def delete_task(self, task_id: str) -> bool:
|
||||||
|
"""Delete a task."""
|
||||||
|
async with self._lock:
|
||||||
|
task = self.tasks.get(task_id)
|
||||||
|
if not task:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Cancel if running
|
||||||
|
await task.cancel()
|
||||||
|
|
||||||
|
# Remove configuration
|
||||||
|
config_path = self.config_dir / f"{task_id}.yaml"
|
||||||
|
if config_path.exists():
|
||||||
|
config_path.unlink()
|
||||||
|
|
||||||
|
del self.tasks[task_id]
|
||||||
|
logger.info(f"Deleted task: {task_id}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def start_worker(self) -> None:
|
||||||
|
"""Start the task queue worker."""
|
||||||
|
if self._running:
|
||||||
|
return
|
||||||
|
|
||||||
|
self._running = True
|
||||||
|
self._worker_task = asyncio.create_task(self._worker_loop())
|
||||||
|
logger.info("Task worker started")
|
||||||
|
|
||||||
|
async def stop_worker(self) -> None:
|
||||||
|
"""Stop the task queue worker."""
|
||||||
|
self._running = False
|
||||||
|
if self._worker_task:
|
||||||
|
self._worker_task.cancel()
|
||||||
|
try:
|
||||||
|
await self._worker_task
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
pass
|
||||||
|
logger.info("Task worker stopped")
|
||||||
|
|
||||||
|
async def _worker_loop(self) -> None:
|
||||||
|
"""Worker loop that processes queued tasks."""
|
||||||
|
while self._running:
|
||||||
|
try:
|
||||||
|
task = await asyncio.wait_for(self.queue.get(), timeout=1.0)
|
||||||
|
await task.execute()
|
||||||
|
self.queue.task_done()
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
continue
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in worker loop: {e}")
|
||||||
|
|
||||||
|
def load_tasks(self) -> int:
|
||||||
|
"""Load tasks from configuration directory."""
|
||||||
|
count = 0
|
||||||
|
for config_file in self.config_dir.glob("*.yaml"):
|
||||||
|
try:
|
||||||
|
with open(config_file) as f:
|
||||||
|
data = yaml.safe_load(f)
|
||||||
|
|
||||||
|
config = TaskConfig(**data)
|
||||||
|
task = Task(config)
|
||||||
|
self.tasks[config.id] = task
|
||||||
|
count += 1
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to load task from {config_file}: {e}")
|
||||||
|
|
||||||
|
logger.info(f"Loaded {count} tasks from {self.config_dir}")
|
||||||
|
return count
|
||||||
|
|
||||||
|
def save_tasks(self) -> int:
|
||||||
|
"""Save all task configurations."""
|
||||||
|
count = 0
|
||||||
|
for task in self.tasks.values():
|
||||||
|
try:
|
||||||
|
self._save_task_config(task)
|
||||||
|
count += 1
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to save task {task.id}: {e}")
|
||||||
|
return count
|
||||||
|
|
||||||
|
def _save_task_config(self, task: Task) -> None:
|
||||||
|
"""Save task configuration to file."""
|
||||||
|
config_path = self.config_dir / f"{task.id}.yaml"
|
||||||
|
with open(config_path, "w") as f:
|
||||||
|
yaml.dump(task.config.model_dump(), f, default_flow_style=False)
|
||||||
|
|
||||||
|
def get_statistics(self) -> dict[str, Any]:
|
||||||
|
"""Get task statistics."""
|
||||||
|
total = len(self.tasks)
|
||||||
|
by_status = {}
|
||||||
|
by_type = {}
|
||||||
|
by_priority = {}
|
||||||
|
|
||||||
|
for task in self.tasks.values():
|
||||||
|
status = task.status.value
|
||||||
|
task_type = task.config.task_type
|
||||||
|
priority = task.config.priority
|
||||||
|
|
||||||
|
by_status[status] = by_status.get(status, 0) + 1
|
||||||
|
by_type[task_type] = by_type.get(task_type, 0) + 1
|
||||||
|
by_priority[priority] = by_priority.get(priority, 0) + 1
|
||||||
|
|
||||||
|
return {
|
||||||
|
"total": total,
|
||||||
|
"by_status": by_status,
|
||||||
|
"by_type": by_type,
|
||||||
|
"by_priority": by_priority,
|
||||||
|
"queue_size": self.queue.qsize(),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Predefined task templates
|
||||||
|
TASK_TEMPLATES = {
|
||||||
|
"update_packages": TaskConfig(
|
||||||
|
name="Update System Packages",
|
||||||
|
description="Update all system packages to latest versions",
|
||||||
|
task_type=TaskType.COMMAND,
|
||||||
|
command="apt update && apt upgrade -y",
|
||||||
|
priority=TaskPriority.NORMAL,
|
||||||
|
timeout_seconds=1800,
|
||||||
|
),
|
||||||
|
"cleanup_temp": TaskConfig(
|
||||||
|
name="Cleanup Temporary Files",
|
||||||
|
description="Remove temporary files older than 7 days",
|
||||||
|
task_type=TaskType.COMMAND,
|
||||||
|
command="find /tmp -type f -mtime +7 -delete",
|
||||||
|
priority=TaskPriority.LOW,
|
||||||
|
),
|
||||||
|
"check_disk": TaskConfig(
|
||||||
|
name="Check Disk Usage",
|
||||||
|
description="Check disk usage and report if above 80%",
|
||||||
|
task_type=TaskType.COMMAND,
|
||||||
|
command="df -h | awk 'NR>1 && int($5)>80 {print $0}'",
|
||||||
|
priority=TaskPriority.HIGH,
|
||||||
|
),
|
||||||
|
"security_updates": TaskConfig(
|
||||||
|
name="Security Updates",
|
||||||
|
description="Install security updates only",
|
||||||
|
task_type=TaskType.COMMAND,
|
||||||
|
command="apt update && apt upgrade -y --only-upgrade $(apt list --upgradable 2>/dev/null | grep -i security | cut -d'/' -f1)",
|
||||||
|
priority=TaskPriority.CRITICAL,
|
||||||
|
timeout_seconds=1800,
|
||||||
|
),
|
||||||
|
"system_health": TaskConfig(
|
||||||
|
name="System Health Check",
|
||||||
|
description="Comprehensive system health check",
|
||||||
|
task_type=TaskType.COMMAND,
|
||||||
|
command="echo '=== System Health ===' && uptime && echo && echo '=== Memory ===' && free -h && echo && echo '=== Disk ===' && df -h && echo && echo '=== Load ===' && cat /proc/loadavg",
|
||||||
|
priority=TaskPriority.NORMAL,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_task_template(template_name: str) -> Optional[TaskConfig]:
|
||||||
|
"""Get a predefined task template."""
|
||||||
|
template = TASK_TEMPLATES.get(template_name)
|
||||||
|
if template:
|
||||||
|
# Return a copy with a new ID
|
||||||
|
data = template.model_dump()
|
||||||
|
data["id"] = str(uuid.uuid4())[:8]
|
||||||
|
return TaskConfig(**data)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def list_task_templates() -> list[str]:
|
||||||
|
"""List available task templates."""
|
||||||
|
return list(TASK_TEMPLATES.keys())
|
||||||
15
src/debai/generators/__init__.py
Archivo normal
15
src/debai/generators/__init__.py
Archivo normal
@@ -0,0 +1,15 @@
|
|||||||
|
"""
|
||||||
|
Generators module for Debai.
|
||||||
|
|
||||||
|
This module provides utilities for generating distribution images and configurations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from debai.generators.iso import ISOGenerator
|
||||||
|
from debai.generators.qcow2 import QCOW2Generator
|
||||||
|
from debai.generators.compose import ComposeGenerator
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ISOGenerator",
|
||||||
|
"QCOW2Generator",
|
||||||
|
"ComposeGenerator",
|
||||||
|
]
|
||||||
456
src/debai/generators/compose.py
Archivo normal
456
src/debai/generators/compose.py
Archivo normal
@@ -0,0 +1,456 @@
|
|||||||
|
"""
|
||||||
|
Docker Compose configuration generator for Debai.
|
||||||
|
|
||||||
|
Generates Docker Compose configurations for running Debai in containers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ComposeGenerator:
|
||||||
|
"""
|
||||||
|
Generates Docker Compose configurations for Debai.
|
||||||
|
|
||||||
|
Creates production-ready compose files with all necessary services.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
output_path: Path,
|
||||||
|
include_gui: bool = True,
|
||||||
|
include_monitoring: bool = True,
|
||||||
|
include_models: bool = True,
|
||||||
|
model_ids: Optional[list[str]] = None,
|
||||||
|
):
|
||||||
|
self.output_path = output_path
|
||||||
|
self.include_gui = include_gui
|
||||||
|
self.include_monitoring = include_monitoring
|
||||||
|
self.include_models = include_models
|
||||||
|
self.model_ids = model_ids or ["llama3.2:3b"]
|
||||||
|
|
||||||
|
def generate(self) -> dict[str, Any]:
|
||||||
|
"""Generate the Docker Compose configuration."""
|
||||||
|
result = {
|
||||||
|
"success": False,
|
||||||
|
"output_path": str(self.output_path),
|
||||||
|
"services": [],
|
||||||
|
"error": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
compose = self._build_compose()
|
||||||
|
|
||||||
|
# Write the compose file
|
||||||
|
with open(self.output_path, "w") as f:
|
||||||
|
yaml.dump(compose, f, default_flow_style=False, sort_keys=False)
|
||||||
|
|
||||||
|
result["success"] = True
|
||||||
|
result["services"] = list(compose.get("services", {}).keys())
|
||||||
|
|
||||||
|
# Also create .env file
|
||||||
|
self._create_env_file()
|
||||||
|
|
||||||
|
# Create helper scripts
|
||||||
|
self._create_helper_scripts()
|
||||||
|
|
||||||
|
logger.info(f"Docker Compose configuration generated: {self.output_path}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Compose generation failed: {e}")
|
||||||
|
result["error"] = str(e)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _build_compose(self) -> dict[str, Any]:
|
||||||
|
"""Build the compose configuration dictionary."""
|
||||||
|
compose = {
|
||||||
|
"version": "3.8",
|
||||||
|
"name": "debai",
|
||||||
|
"services": {},
|
||||||
|
"volumes": {},
|
||||||
|
"networks": {
|
||||||
|
"debai-network": {
|
||||||
|
"driver": "bridge",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
# Core Debai service
|
||||||
|
compose["services"]["debai-core"] = {
|
||||||
|
"image": "debai/core:latest",
|
||||||
|
"build": {
|
||||||
|
"context": ".",
|
||||||
|
"dockerfile": "Dockerfile",
|
||||||
|
},
|
||||||
|
"container_name": "debai-core",
|
||||||
|
"restart": "unless-stopped",
|
||||||
|
"environment": [
|
||||||
|
"DEBAI_CONFIG_DIR=/etc/debai",
|
||||||
|
"DEBAI_DATA_DIR=/var/lib/debai",
|
||||||
|
"DEBAI_LOG_LEVEL=${DEBAI_LOG_LEVEL:-info}",
|
||||||
|
],
|
||||||
|
"volumes": [
|
||||||
|
"debai-config:/etc/debai",
|
||||||
|
"debai-data:/var/lib/debai",
|
||||||
|
"/var/run/docker.sock:/var/run/docker.sock:ro",
|
||||||
|
],
|
||||||
|
"networks": ["debai-network"],
|
||||||
|
"healthcheck": {
|
||||||
|
"test": ["CMD", "debai", "status"],
|
||||||
|
"interval": "30s",
|
||||||
|
"timeout": "10s",
|
||||||
|
"retries": 3,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
compose["volumes"]["debai-config"] = {}
|
||||||
|
compose["volumes"]["debai-data"] = {}
|
||||||
|
|
||||||
|
# Model service (Docker Model Runner)
|
||||||
|
if self.include_models:
|
||||||
|
compose["services"]["debai-models"] = {
|
||||||
|
"image": "aimodel/runner:latest",
|
||||||
|
"container_name": "debai-models",
|
||||||
|
"restart": "unless-stopped",
|
||||||
|
"environment": [
|
||||||
|
"MODEL_CACHE_DIR=/models",
|
||||||
|
],
|
||||||
|
"volumes": [
|
||||||
|
"debai-models:/models",
|
||||||
|
],
|
||||||
|
"networks": ["debai-network"],
|
||||||
|
"ports": [
|
||||||
|
"11434:11434",
|
||||||
|
],
|
||||||
|
"deploy": {
|
||||||
|
"resources": {
|
||||||
|
"limits": {
|
||||||
|
"memory": "8G",
|
||||||
|
},
|
||||||
|
"reservations": {
|
||||||
|
"memory": "4G",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
compose["volumes"]["debai-models"] = {}
|
||||||
|
|
||||||
|
# Agent service (cagent)
|
||||||
|
compose["services"]["debai-agents"] = {
|
||||||
|
"image": "debai/agents:latest",
|
||||||
|
"build": {
|
||||||
|
"context": ".",
|
||||||
|
"dockerfile": "Dockerfile.agents",
|
||||||
|
},
|
||||||
|
"container_name": "debai-agents",
|
||||||
|
"restart": "unless-stopped",
|
||||||
|
"depends_on": ["debai-core"],
|
||||||
|
"environment": [
|
||||||
|
"DEBAI_CORE_URL=http://debai-core:8000",
|
||||||
|
"DEBAI_MODEL_URL=http://debai-models:11434",
|
||||||
|
],
|
||||||
|
"volumes": [
|
||||||
|
"debai-config:/etc/debai:ro",
|
||||||
|
"debai-agents:/var/lib/debai/agents",
|
||||||
|
],
|
||||||
|
"networks": ["debai-network"],
|
||||||
|
}
|
||||||
|
compose["volumes"]["debai-agents"] = {}
|
||||||
|
|
||||||
|
# API service
|
||||||
|
compose["services"]["debai-api"] = {
|
||||||
|
"image": "debai/api:latest",
|
||||||
|
"build": {
|
||||||
|
"context": ".",
|
||||||
|
"dockerfile": "Dockerfile.api",
|
||||||
|
},
|
||||||
|
"container_name": "debai-api",
|
||||||
|
"restart": "unless-stopped",
|
||||||
|
"depends_on": ["debai-core", "debai-agents"],
|
||||||
|
"environment": [
|
||||||
|
"DEBAI_API_PORT=8000",
|
||||||
|
"DEBAI_API_HOST=0.0.0.0",
|
||||||
|
],
|
||||||
|
"ports": [
|
||||||
|
"8000:8000",
|
||||||
|
],
|
||||||
|
"networks": ["debai-network"],
|
||||||
|
"healthcheck": {
|
||||||
|
"test": ["CMD", "curl", "-f", "http://localhost:8000/health"],
|
||||||
|
"interval": "30s",
|
||||||
|
"timeout": "10s",
|
||||||
|
"retries": 3,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
# GUI service
|
||||||
|
if self.include_gui:
|
||||||
|
compose["services"]["debai-gui"] = {
|
||||||
|
"image": "debai/gui:latest",
|
||||||
|
"build": {
|
||||||
|
"context": ".",
|
||||||
|
"dockerfile": "Dockerfile.gui",
|
||||||
|
},
|
||||||
|
"container_name": "debai-gui",
|
||||||
|
"restart": "unless-stopped",
|
||||||
|
"depends_on": ["debai-api"],
|
||||||
|
"environment": [
|
||||||
|
"DEBAI_API_URL=http://debai-api:8000",
|
||||||
|
],
|
||||||
|
"ports": [
|
||||||
|
"8080:8080",
|
||||||
|
],
|
||||||
|
"networks": ["debai-network"],
|
||||||
|
}
|
||||||
|
|
||||||
|
# Monitoring services
|
||||||
|
if self.include_monitoring:
|
||||||
|
# Prometheus for metrics
|
||||||
|
compose["services"]["prometheus"] = {
|
||||||
|
"image": "prom/prometheus:latest",
|
||||||
|
"container_name": "debai-prometheus",
|
||||||
|
"restart": "unless-stopped",
|
||||||
|
"volumes": [
|
||||||
|
"./prometheus.yml:/etc/prometheus/prometheus.yml:ro",
|
||||||
|
"prometheus-data:/prometheus",
|
||||||
|
],
|
||||||
|
"ports": [
|
||||||
|
"9090:9090",
|
||||||
|
],
|
||||||
|
"networks": ["debai-network"],
|
||||||
|
"command": [
|
||||||
|
"--config.file=/etc/prometheus/prometheus.yml",
|
||||||
|
"--storage.tsdb.path=/prometheus",
|
||||||
|
],
|
||||||
|
}
|
||||||
|
compose["volumes"]["prometheus-data"] = {}
|
||||||
|
|
||||||
|
# Grafana for dashboards
|
||||||
|
compose["services"]["grafana"] = {
|
||||||
|
"image": "grafana/grafana:latest",
|
||||||
|
"container_name": "debai-grafana",
|
||||||
|
"restart": "unless-stopped",
|
||||||
|
"depends_on": ["prometheus"],
|
||||||
|
"environment": [
|
||||||
|
"GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_PASSWORD:-debai}",
|
||||||
|
"GF_USERS_ALLOW_SIGN_UP=false",
|
||||||
|
],
|
||||||
|
"volumes": [
|
||||||
|
"grafana-data:/var/lib/grafana",
|
||||||
|
"./grafana/provisioning:/etc/grafana/provisioning:ro",
|
||||||
|
],
|
||||||
|
"ports": [
|
||||||
|
"3000:3000",
|
||||||
|
],
|
||||||
|
"networks": ["debai-network"],
|
||||||
|
}
|
||||||
|
compose["volumes"]["grafana-data"] = {}
|
||||||
|
|
||||||
|
return compose
|
||||||
|
|
||||||
|
def _create_env_file(self) -> None:
|
||||||
|
"""Create the .env file with configuration."""
|
||||||
|
env_content = """# Debai Docker Environment Configuration
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
DEBAI_LOG_LEVEL=info
|
||||||
|
|
||||||
|
# API Configuration
|
||||||
|
DEBAI_API_PORT=8000
|
||||||
|
|
||||||
|
# Model Configuration
|
||||||
|
DEBAI_DEFAULT_MODEL=llama3.2:3b
|
||||||
|
|
||||||
|
# Monitoring
|
||||||
|
GRAFANA_PASSWORD=debai
|
||||||
|
|
||||||
|
# Security
|
||||||
|
# Generate a secure key for production:
|
||||||
|
# python3 -c "import secrets; print(secrets.token_hex(32))"
|
||||||
|
DEBAI_SECRET_KEY=change-me-in-production
|
||||||
|
|
||||||
|
# GPU Support (uncomment for NVIDIA GPU)
|
||||||
|
# NVIDIA_VISIBLE_DEVICES=all
|
||||||
|
"""
|
||||||
|
|
||||||
|
env_path = self.output_path.parent / ".env"
|
||||||
|
env_path.write_text(env_content)
|
||||||
|
logger.info(f"Created .env file: {env_path}")
|
||||||
|
|
||||||
|
def _create_helper_scripts(self) -> None:
|
||||||
|
"""Create helper scripts for Docker Compose management."""
|
||||||
|
output_dir = self.output_path.parent
|
||||||
|
|
||||||
|
# Start script
|
||||||
|
start_script = """#!/bin/bash
|
||||||
|
# Start Debai services
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
echo "Starting Debai services..."
|
||||||
|
|
||||||
|
# Build images if needed
|
||||||
|
docker compose build
|
||||||
|
|
||||||
|
# Start services
|
||||||
|
docker compose up -d
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Debai is starting up..."
|
||||||
|
echo ""
|
||||||
|
echo "Services:"
|
||||||
|
echo " - API: http://localhost:8000"
|
||||||
|
echo " - GUI: http://localhost:8080"
|
||||||
|
echo " - Grafana: http://localhost:3000 (admin/debai)"
|
||||||
|
echo " - Prometheus: http://localhost:9090"
|
||||||
|
echo ""
|
||||||
|
echo "View logs: docker compose logs -f"
|
||||||
|
echo "Stop: ./stop.sh"
|
||||||
|
"""
|
||||||
|
|
||||||
|
start_path = output_dir / "start.sh"
|
||||||
|
start_path.write_text(start_script)
|
||||||
|
start_path.chmod(0o755)
|
||||||
|
|
||||||
|
# Stop script
|
||||||
|
stop_script = """#!/bin/bash
|
||||||
|
# Stop Debai services
|
||||||
|
|
||||||
|
echo "Stopping Debai services..."
|
||||||
|
docker compose down
|
||||||
|
|
||||||
|
echo "Services stopped."
|
||||||
|
"""
|
||||||
|
|
||||||
|
stop_path = output_dir / "stop.sh"
|
||||||
|
stop_path.write_text(stop_script)
|
||||||
|
stop_path.chmod(0o755)
|
||||||
|
|
||||||
|
# Logs script
|
||||||
|
logs_script = """#!/bin/bash
|
||||||
|
# View Debai service logs
|
||||||
|
|
||||||
|
SERVICE=${1:-}
|
||||||
|
|
||||||
|
if [ -z "$SERVICE" ]; then
|
||||||
|
docker compose logs -f
|
||||||
|
else
|
||||||
|
docker compose logs -f "$SERVICE"
|
||||||
|
fi
|
||||||
|
"""
|
||||||
|
|
||||||
|
logs_path = output_dir / "logs.sh"
|
||||||
|
logs_path.write_text(logs_script)
|
||||||
|
logs_path.chmod(0o755)
|
||||||
|
|
||||||
|
# Create Dockerfile for core service
|
||||||
|
dockerfile = """FROM python:3.11-slim
|
||||||
|
|
||||||
|
LABEL maintainer="Debai Team"
|
||||||
|
LABEL description="Debai AI Agent System"
|
||||||
|
|
||||||
|
# Install system dependencies
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \\
|
||||||
|
curl \\
|
||||||
|
git \\
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Create debai user
|
||||||
|
RUN useradd -m -s /bin/bash debai
|
||||||
|
|
||||||
|
# Install Debai
|
||||||
|
RUN pip install --no-cache-dir debai
|
||||||
|
|
||||||
|
# Create directories
|
||||||
|
RUN mkdir -p /etc/debai /var/lib/debai /var/log/debai \\
|
||||||
|
&& chown -R debai:debai /etc/debai /var/lib/debai /var/log/debai
|
||||||
|
|
||||||
|
# Switch to debai user
|
||||||
|
USER debai
|
||||||
|
WORKDIR /home/debai
|
||||||
|
|
||||||
|
# Initialize Debai
|
||||||
|
RUN debai init
|
||||||
|
|
||||||
|
# Expose API port
|
||||||
|
EXPOSE 8000
|
||||||
|
|
||||||
|
# Health check
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --retries=3 \\
|
||||||
|
CMD curl -f http://localhost:8000/health || exit 1
|
||||||
|
|
||||||
|
# Start Debai
|
||||||
|
CMD ["debai", "serve", "--host", "0.0.0.0", "--port", "8000"]
|
||||||
|
"""
|
||||||
|
|
||||||
|
dockerfile_path = output_dir / "Dockerfile"
|
||||||
|
dockerfile_path.write_text(dockerfile)
|
||||||
|
|
||||||
|
# Create Prometheus configuration
|
||||||
|
prometheus_config = """global:
|
||||||
|
scrape_interval: 15s
|
||||||
|
evaluation_interval: 15s
|
||||||
|
|
||||||
|
scrape_configs:
|
||||||
|
- job_name: 'debai-api'
|
||||||
|
static_configs:
|
||||||
|
- targets: ['debai-api:8000']
|
||||||
|
metrics_path: '/metrics'
|
||||||
|
|
||||||
|
- job_name: 'debai-core'
|
||||||
|
static_configs:
|
||||||
|
- targets: ['debai-core:8000']
|
||||||
|
metrics_path: '/metrics'
|
||||||
|
|
||||||
|
- job_name: 'prometheus'
|
||||||
|
static_configs:
|
||||||
|
- targets: ['localhost:9090']
|
||||||
|
"""
|
||||||
|
|
||||||
|
prometheus_path = output_dir / "prometheus.yml"
|
||||||
|
prometheus_path.write_text(prometheus_config)
|
||||||
|
|
||||||
|
logger.info("Created helper scripts and configurations")
|
||||||
|
|
||||||
|
|
||||||
|
# Quick compose templates for common scenarios
|
||||||
|
COMPOSE_TEMPLATES = {
|
||||||
|
"minimal": {
|
||||||
|
"description": "Minimal setup with core and one model",
|
||||||
|
"include_gui": False,
|
||||||
|
"include_monitoring": False,
|
||||||
|
"include_models": True,
|
||||||
|
"model_ids": ["llama3.2:1b"],
|
||||||
|
},
|
||||||
|
"standard": {
|
||||||
|
"description": "Standard setup with GUI and monitoring",
|
||||||
|
"include_gui": True,
|
||||||
|
"include_monitoring": True,
|
||||||
|
"include_models": True,
|
||||||
|
"model_ids": ["llama3.2:3b"],
|
||||||
|
},
|
||||||
|
"production": {
|
||||||
|
"description": "Production setup with all features",
|
||||||
|
"include_gui": True,
|
||||||
|
"include_monitoring": True,
|
||||||
|
"include_models": True,
|
||||||
|
"model_ids": ["llama3.2:3b", "codellama:7b"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_compose_template(name: str) -> Optional[dict[str, Any]]:
|
||||||
|
"""Get a compose template by name."""
|
||||||
|
return COMPOSE_TEMPLATES.get(name)
|
||||||
|
|
||||||
|
|
||||||
|
def list_compose_templates() -> list[str]:
|
||||||
|
"""List available compose templates."""
|
||||||
|
return list(COMPOSE_TEMPLATES.keys())
|
||||||
363
src/debai/generators/iso.py
Archivo normal
363
src/debai/generators/iso.py
Archivo normal
@@ -0,0 +1,363 @@
|
|||||||
|
"""
|
||||||
|
ISO image generator for Debai.
|
||||||
|
|
||||||
|
Generates bootable ISO images with Debai pre-installed and configured.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from jinja2 import Template
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ISOGenerator:
|
||||||
|
"""
|
||||||
|
Generates bootable ISO images with Debai.
|
||||||
|
|
||||||
|
Uses debootstrap and genisoimage to create custom Debian-based ISOs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
output_path: Path,
|
||||||
|
base_distro: str = "debian",
|
||||||
|
release: str = "bookworm",
|
||||||
|
arch: str = "amd64",
|
||||||
|
include_agents: bool = True,
|
||||||
|
include_gui: bool = True,
|
||||||
|
):
|
||||||
|
self.output_path = output_path
|
||||||
|
self.base_distro = base_distro
|
||||||
|
self.release = release
|
||||||
|
self.arch = arch
|
||||||
|
self.include_agents = include_agents
|
||||||
|
self.include_gui = include_gui
|
||||||
|
self.work_dir: Optional[Path] = None
|
||||||
|
|
||||||
|
async def generate(self) -> dict[str, Any]:
|
||||||
|
"""Generate the ISO image."""
|
||||||
|
result = {
|
||||||
|
"success": False,
|
||||||
|
"output_path": str(self.output_path),
|
||||||
|
"size_mb": 0,
|
||||||
|
"error": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Create temporary working directory
|
||||||
|
self.work_dir = Path(tempfile.mkdtemp(prefix="debai_iso_"))
|
||||||
|
logger.info(f"Working directory: {self.work_dir}")
|
||||||
|
|
||||||
|
# Create directory structure
|
||||||
|
iso_root = self.work_dir / "iso_root"
|
||||||
|
iso_root.mkdir(parents=True)
|
||||||
|
|
||||||
|
# Create boot configuration
|
||||||
|
await self._create_boot_config(iso_root)
|
||||||
|
|
||||||
|
# Create filesystem
|
||||||
|
await self._create_filesystem(iso_root)
|
||||||
|
|
||||||
|
# Add Debai files
|
||||||
|
await self._add_debai_files(iso_root)
|
||||||
|
|
||||||
|
# Generate ISO
|
||||||
|
await self._generate_iso(iso_root)
|
||||||
|
|
||||||
|
# Get size
|
||||||
|
if self.output_path.exists():
|
||||||
|
result["size_mb"] = self.output_path.stat().st_size / (1024 * 1024)
|
||||||
|
result["success"] = True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"ISO generation failed: {e}")
|
||||||
|
result["error"] = str(e)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Cleanup
|
||||||
|
if self.work_dir and self.work_dir.exists():
|
||||||
|
shutil.rmtree(self.work_dir, ignore_errors=True)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
async def _create_boot_config(self, iso_root: Path) -> None:
|
||||||
|
"""Create boot configuration files."""
|
||||||
|
boot_dir = iso_root / "boot" / "grub"
|
||||||
|
boot_dir.mkdir(parents=True)
|
||||||
|
|
||||||
|
# GRUB configuration
|
||||||
|
grub_cfg = """
|
||||||
|
set timeout=10
|
||||||
|
set default=0
|
||||||
|
|
||||||
|
menuentry "Debai - AI Agent System" {
|
||||||
|
linux /boot/vmlinuz root=/dev/sda1 ro quiet splash
|
||||||
|
initrd /boot/initrd.img
|
||||||
|
}
|
||||||
|
|
||||||
|
menuentry "Debai - AI Agent System (Safe Mode)" {
|
||||||
|
linux /boot/vmlinuz root=/dev/sda1 ro single
|
||||||
|
initrd /boot/initrd.img
|
||||||
|
}
|
||||||
|
|
||||||
|
menuentry "Debai - Installation Mode" {
|
||||||
|
linux /boot/vmlinuz root=/dev/sda1 ro install
|
||||||
|
initrd /boot/initrd.img
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
(boot_dir / "grub.cfg").write_text(grub_cfg)
|
||||||
|
|
||||||
|
# Isolinux for legacy BIOS
|
||||||
|
isolinux_dir = iso_root / "isolinux"
|
||||||
|
isolinux_dir.mkdir(parents=True)
|
||||||
|
|
||||||
|
isolinux_cfg = """
|
||||||
|
DEFAULT debai
|
||||||
|
TIMEOUT 100
|
||||||
|
PROMPT 1
|
||||||
|
|
||||||
|
LABEL debai
|
||||||
|
MENU LABEL Debai - AI Agent System
|
||||||
|
KERNEL /boot/vmlinuz
|
||||||
|
APPEND initrd=/boot/initrd.img root=/dev/sda1 ro quiet splash
|
||||||
|
|
||||||
|
LABEL debai-safe
|
||||||
|
MENU LABEL Debai - Safe Mode
|
||||||
|
KERNEL /boot/vmlinuz
|
||||||
|
APPEND initrd=/boot/initrd.img root=/dev/sda1 ro single
|
||||||
|
|
||||||
|
LABEL install
|
||||||
|
MENU LABEL Debai - Installation
|
||||||
|
KERNEL /boot/vmlinuz
|
||||||
|
APPEND initrd=/boot/initrd.img root=/dev/sda1 ro install
|
||||||
|
"""
|
||||||
|
(isolinux_dir / "isolinux.cfg").write_text(isolinux_cfg)
|
||||||
|
|
||||||
|
async def _create_filesystem(self, iso_root: Path) -> None:
|
||||||
|
"""Create the root filesystem structure."""
|
||||||
|
# Create standard directories
|
||||||
|
dirs = [
|
||||||
|
"etc/debai",
|
||||||
|
"usr/bin",
|
||||||
|
"usr/lib/debai",
|
||||||
|
"usr/share/debai",
|
||||||
|
"var/lib/debai",
|
||||||
|
"var/log/debai",
|
||||||
|
]
|
||||||
|
|
||||||
|
for d in dirs:
|
||||||
|
(iso_root / d).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
async def _add_debai_files(self, iso_root: Path) -> None:
|
||||||
|
"""Add Debai application files."""
|
||||||
|
# Create installation script
|
||||||
|
install_script = """#!/bin/bash
|
||||||
|
# Debai Installation Script
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
echo "Installing Debai - AI Agent System..."
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y python3 python3-pip python3-venv docker.io qemu-utils
|
||||||
|
|
||||||
|
# Install Debai
|
||||||
|
pip3 install debai
|
||||||
|
|
||||||
|
# Configure Docker
|
||||||
|
systemctl enable docker
|
||||||
|
systemctl start docker
|
||||||
|
|
||||||
|
# Create user configuration
|
||||||
|
mkdir -p ~/.config/debai/{agents,models,tasks}
|
||||||
|
|
||||||
|
# Install Docker Model Runner
|
||||||
|
docker pull aimodel/runner:latest
|
||||||
|
|
||||||
|
echo "Debai installation complete!"
|
||||||
|
echo "Run 'debai init' to complete setup."
|
||||||
|
"""
|
||||||
|
|
||||||
|
script_path = iso_root / "usr" / "bin" / "debai-install"
|
||||||
|
script_path.write_text(install_script)
|
||||||
|
script_path.chmod(0o755)
|
||||||
|
|
||||||
|
# Create systemd service
|
||||||
|
service = """[Unit]
|
||||||
|
Description=Debai AI Agent Service
|
||||||
|
After=network.target docker.service
|
||||||
|
Requires=docker.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=root
|
||||||
|
ExecStart=/usr/bin/debai-daemon
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=10
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
"""
|
||||||
|
|
||||||
|
systemd_dir = iso_root / "etc" / "systemd" / "system"
|
||||||
|
systemd_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
(systemd_dir / "debai.service").write_text(service)
|
||||||
|
|
||||||
|
# Create default configuration
|
||||||
|
config = {
|
||||||
|
"debai": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"auto_start_agents": True,
|
||||||
|
"log_level": "info",
|
||||||
|
"models": {
|
||||||
|
"default": "llama3.2:3b",
|
||||||
|
"cache_dir": "/var/cache/debai/models",
|
||||||
|
},
|
||||||
|
"agents": {
|
||||||
|
"config_dir": "/etc/debai/agents",
|
||||||
|
"max_concurrent": 5,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config_path = iso_root / "etc" / "debai" / "config.yaml"
|
||||||
|
config_path.write_text(yaml.dump(config, default_flow_style=False))
|
||||||
|
|
||||||
|
# Add agents if requested
|
||||||
|
if self.include_agents:
|
||||||
|
await self._add_default_agents(iso_root)
|
||||||
|
|
||||||
|
async def _add_default_agents(self, iso_root: Path) -> None:
|
||||||
|
"""Add default agent configurations."""
|
||||||
|
from debai.core.agent import AGENT_TEMPLATES
|
||||||
|
|
||||||
|
agents_dir = iso_root / "etc" / "debai" / "agents"
|
||||||
|
agents_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
for name, config in AGENT_TEMPLATES.items():
|
||||||
|
config_path = agents_dir / f"{name}.yaml"
|
||||||
|
config_path.write_text(yaml.dump(config.model_dump(), default_flow_style=False))
|
||||||
|
|
||||||
|
async def _generate_iso(self, iso_root: Path) -> None:
|
||||||
|
"""Generate the final ISO image."""
|
||||||
|
# Check for genisoimage or mkisofs
|
||||||
|
iso_cmd = None
|
||||||
|
for cmd in ["genisoimage", "mkisofs", "xorriso"]:
|
||||||
|
result = subprocess.run(["which", cmd], capture_output=True)
|
||||||
|
if result.returncode == 0:
|
||||||
|
iso_cmd = cmd
|
||||||
|
break
|
||||||
|
|
||||||
|
if not iso_cmd:
|
||||||
|
# Create a simple tar archive instead
|
||||||
|
logger.warning("ISO tools not found, creating tar archive")
|
||||||
|
tar_path = self.output_path.with_suffix(".tar.gz")
|
||||||
|
subprocess.run(
|
||||||
|
["tar", "-czf", str(tar_path), "-C", str(iso_root), "."],
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
# Rename to .iso for consistency
|
||||||
|
shutil.move(tar_path, self.output_path)
|
||||||
|
return
|
||||||
|
|
||||||
|
if iso_cmd == "xorriso":
|
||||||
|
cmd = [
|
||||||
|
"xorriso",
|
||||||
|
"-as", "mkisofs",
|
||||||
|
"-o", str(self.output_path),
|
||||||
|
"-V", "DEBAI",
|
||||||
|
"-J", "-R",
|
||||||
|
str(iso_root),
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
cmd = [
|
||||||
|
iso_cmd,
|
||||||
|
"-o", str(self.output_path),
|
||||||
|
"-V", "DEBAI",
|
||||||
|
"-J", "-R",
|
||||||
|
"-b", "isolinux/isolinux.bin",
|
||||||
|
"-c", "isolinux/boot.cat",
|
||||||
|
"-no-emul-boot",
|
||||||
|
"-boot-load-size", "4",
|
||||||
|
"-boot-info-table",
|
||||||
|
str(iso_root),
|
||||||
|
]
|
||||||
|
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
stdout, stderr = await process.communicate()
|
||||||
|
|
||||||
|
if process.returncode != 0:
|
||||||
|
raise RuntimeError(f"ISO generation failed: {stderr.decode()}")
|
||||||
|
|
||||||
|
logger.info(f"ISO generated: {self.output_path}")
|
||||||
|
|
||||||
|
|
||||||
|
# Template for preseed file (automated Debian installation)
|
||||||
|
PRESEED_TEMPLATE = """
|
||||||
|
# Debai Preseed Configuration
|
||||||
|
# Automated installation for AI Agent System
|
||||||
|
|
||||||
|
# Locale
|
||||||
|
d-i debian-installer/locale string en_US.UTF-8
|
||||||
|
d-i keyboard-configuration/xkb-keymap select us
|
||||||
|
|
||||||
|
# Network
|
||||||
|
d-i netcfg/choose_interface select auto
|
||||||
|
d-i netcfg/get_hostname string debai
|
||||||
|
d-i netcfg/get_domain string local
|
||||||
|
|
||||||
|
# Mirror
|
||||||
|
d-i mirror/country string manual
|
||||||
|
d-i mirror/http/hostname string deb.debian.org
|
||||||
|
d-i mirror/http/directory string /debian
|
||||||
|
d-i mirror/http/proxy string
|
||||||
|
|
||||||
|
# Account
|
||||||
|
d-i passwd/root-login boolean true
|
||||||
|
d-i passwd/root-password password debai
|
||||||
|
d-i passwd/root-password-again password debai
|
||||||
|
d-i passwd/user-fullname string Debai User
|
||||||
|
d-i passwd/username string debai
|
||||||
|
d-i passwd/user-password password debai
|
||||||
|
d-i passwd/user-password-again password debai
|
||||||
|
|
||||||
|
# Partitioning
|
||||||
|
d-i partman-auto/method string regular
|
||||||
|
d-i partman-auto/choose_recipe select atomic
|
||||||
|
d-i partman/confirm boolean true
|
||||||
|
d-i partman/confirm_nooverwrite boolean true
|
||||||
|
|
||||||
|
# Packages
|
||||||
|
tasksel tasksel/first multiselect standard
|
||||||
|
d-i pkgsel/include string python3 python3-pip docker.io openssh-server
|
||||||
|
|
||||||
|
# GRUB
|
||||||
|
d-i grub-installer/only_debian boolean true
|
||||||
|
d-i grub-installer/bootdev string default
|
||||||
|
|
||||||
|
# Finish
|
||||||
|
d-i finish-install/reboot_in_progress note
|
||||||
|
|
||||||
|
# Post-installation
|
||||||
|
d-i preseed/late_command string \\
|
||||||
|
in-target pip3 install debai; \\
|
||||||
|
in-target systemctl enable docker; \\
|
||||||
|
in-target debai init
|
||||||
|
"""
|
||||||
393
src/debai/generators/qcow2.py
Archivo normal
393
src/debai/generators/qcow2.py
Archivo normal
@@ -0,0 +1,393 @@
|
|||||||
|
"""
|
||||||
|
QCOW2 image generator for Debai.
|
||||||
|
|
||||||
|
Generates QCOW2 disk images for use with QEMU/KVM.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class QCOW2Generator:
|
||||||
|
"""
|
||||||
|
Generates QCOW2 disk images with Debai pre-installed.
|
||||||
|
|
||||||
|
Creates virtual machine disk images that can be used with QEMU/KVM.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
output_path: Path,
|
||||||
|
disk_size: str = "20G",
|
||||||
|
base_distro: str = "debian",
|
||||||
|
release: str = "bookworm",
|
||||||
|
arch: str = "amd64",
|
||||||
|
memory_mb: int = 2048,
|
||||||
|
cpus: int = 2,
|
||||||
|
):
|
||||||
|
self.output_path = output_path
|
||||||
|
self.disk_size = disk_size
|
||||||
|
self.base_distro = base_distro
|
||||||
|
self.release = release
|
||||||
|
self.arch = arch
|
||||||
|
self.memory_mb = memory_mb
|
||||||
|
self.cpus = cpus
|
||||||
|
self.work_dir: Optional[Path] = None
|
||||||
|
|
||||||
|
async def generate(self) -> dict[str, Any]:
|
||||||
|
"""Generate the QCOW2 image."""
|
||||||
|
result = {
|
||||||
|
"success": False,
|
||||||
|
"output_path": str(self.output_path),
|
||||||
|
"size_mb": 0,
|
||||||
|
"error": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Check for qemu-img
|
||||||
|
if not shutil.which("qemu-img"):
|
||||||
|
raise RuntimeError("qemu-img not found. Install qemu-utils package.")
|
||||||
|
|
||||||
|
# Create temporary working directory
|
||||||
|
self.work_dir = Path(tempfile.mkdtemp(prefix="debai_qcow2_"))
|
||||||
|
logger.info(f"Working directory: {self.work_dir}")
|
||||||
|
|
||||||
|
# Create the QCOW2 image
|
||||||
|
await self._create_qcow2()
|
||||||
|
|
||||||
|
# Create cloud-init configuration
|
||||||
|
await self._create_cloud_init()
|
||||||
|
|
||||||
|
# Get size
|
||||||
|
if self.output_path.exists():
|
||||||
|
result["size_mb"] = self.output_path.stat().st_size / (1024 * 1024)
|
||||||
|
result["success"] = True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"QCOW2 generation failed: {e}")
|
||||||
|
result["error"] = str(e)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Cleanup
|
||||||
|
if self.work_dir and self.work_dir.exists():
|
||||||
|
shutil.rmtree(self.work_dir, ignore_errors=True)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
async def _create_qcow2(self) -> None:
|
||||||
|
"""Create the QCOW2 disk image."""
|
||||||
|
# Create empty QCOW2 image
|
||||||
|
cmd = [
|
||||||
|
"qemu-img", "create",
|
||||||
|
"-f", "qcow2",
|
||||||
|
str(self.output_path),
|
||||||
|
self.disk_size,
|
||||||
|
]
|
||||||
|
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
stdout, stderr = await process.communicate()
|
||||||
|
|
||||||
|
if process.returncode != 0:
|
||||||
|
raise RuntimeError(f"Failed to create QCOW2: {stderr.decode()}")
|
||||||
|
|
||||||
|
logger.info(f"Created QCOW2 image: {self.output_path}")
|
||||||
|
|
||||||
|
async def _create_cloud_init(self) -> None:
|
||||||
|
"""Create cloud-init configuration for the image."""
|
||||||
|
cloud_init_dir = self.output_path.parent / "cloud-init"
|
||||||
|
cloud_init_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# User data
|
||||||
|
user_data = {
|
||||||
|
"#cloud-config": None,
|
||||||
|
"hostname": "debai",
|
||||||
|
"manage_etc_hosts": True,
|
||||||
|
"users": [
|
||||||
|
{
|
||||||
|
"name": "debai",
|
||||||
|
"sudo": "ALL=(ALL) NOPASSWD:ALL",
|
||||||
|
"groups": ["docker", "sudo"],
|
||||||
|
"shell": "/bin/bash",
|
||||||
|
"lock_passwd": False,
|
||||||
|
"passwd": "$6$rounds=4096$debai$Qs8qLMmPMvpZq0nP9P.WQm1C5K.s1hQ5P0Z3CgK.0xOjv6Zl6JwZ9vX5Y7U2a8nT4K6M3W1Q0X5Y7U2a8nT4K6", # debai
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"package_update": True,
|
||||||
|
"package_upgrade": True,
|
||||||
|
"packages": [
|
||||||
|
"python3",
|
||||||
|
"python3-pip",
|
||||||
|
"python3-venv",
|
||||||
|
"docker.io",
|
||||||
|
"qemu-guest-agent",
|
||||||
|
"curl",
|
||||||
|
"git",
|
||||||
|
],
|
||||||
|
"runcmd": [
|
||||||
|
"systemctl enable docker",
|
||||||
|
"systemctl start docker",
|
||||||
|
"pip3 install debai",
|
||||||
|
"debai init",
|
||||||
|
"systemctl enable debai",
|
||||||
|
],
|
||||||
|
"final_message": "Debai AI Agent System is ready!",
|
||||||
|
}
|
||||||
|
|
||||||
|
user_data_path = cloud_init_dir / "user-data"
|
||||||
|
with open(user_data_path, "w") as f:
|
||||||
|
f.write("#cloud-config\n")
|
||||||
|
yaml.dump({k: v for k, v in user_data.items() if k != "#cloud-config"},
|
||||||
|
f, default_flow_style=False)
|
||||||
|
|
||||||
|
# Meta data
|
||||||
|
meta_data = {
|
||||||
|
"instance-id": "debai-001",
|
||||||
|
"local-hostname": "debai",
|
||||||
|
}
|
||||||
|
|
||||||
|
meta_data_path = cloud_init_dir / "meta-data"
|
||||||
|
with open(meta_data_path, "w") as f:
|
||||||
|
yaml.dump(meta_data, f, default_flow_style=False)
|
||||||
|
|
||||||
|
# Network config
|
||||||
|
network_config = {
|
||||||
|
"version": 2,
|
||||||
|
"ethernets": {
|
||||||
|
"ens3": {
|
||||||
|
"dhcp4": True,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
network_config_path = cloud_init_dir / "network-config"
|
||||||
|
with open(network_config_path, "w") as f:
|
||||||
|
yaml.dump(network_config, f, default_flow_style=False)
|
||||||
|
|
||||||
|
# Create cloud-init ISO
|
||||||
|
cloud_init_iso = self.output_path.parent / "cloud-init.iso"
|
||||||
|
|
||||||
|
# Try to create ISO with genisoimage or cloud-localds
|
||||||
|
if shutil.which("cloud-localds"):
|
||||||
|
cmd = [
|
||||||
|
"cloud-localds",
|
||||||
|
str(cloud_init_iso),
|
||||||
|
str(user_data_path),
|
||||||
|
str(meta_data_path),
|
||||||
|
]
|
||||||
|
elif shutil.which("genisoimage"):
|
||||||
|
cmd = [
|
||||||
|
"genisoimage",
|
||||||
|
"-output", str(cloud_init_iso),
|
||||||
|
"-volid", "cidata",
|
||||||
|
"-joliet", "-rock",
|
||||||
|
str(cloud_init_dir),
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
logger.warning("No ISO tool found, skipping cloud-init ISO creation")
|
||||||
|
return
|
||||||
|
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
|
||||||
|
stdout, stderr = await process.communicate()
|
||||||
|
|
||||||
|
if process.returncode == 0:
|
||||||
|
logger.info(f"Created cloud-init ISO: {cloud_init_iso}")
|
||||||
|
else:
|
||||||
|
logger.warning(f"Failed to create cloud-init ISO: {stderr.decode()}")
|
||||||
|
|
||||||
|
def generate_run_script(self) -> str:
|
||||||
|
"""Generate a script to run the QCOW2 image with QEMU."""
|
||||||
|
script = f"""#!/bin/bash
|
||||||
|
# Run Debai VM with QEMU
|
||||||
|
|
||||||
|
QCOW2_IMAGE="{self.output_path}"
|
||||||
|
CLOUD_INIT_ISO="{self.output_path.parent / 'cloud-init.iso'}"
|
||||||
|
MEMORY="{self.memory_mb}"
|
||||||
|
CPUS="{self.cpus}"
|
||||||
|
|
||||||
|
# Check if cloud-init ISO exists
|
||||||
|
CLOUD_INIT_OPTS=""
|
||||||
|
if [ -f "$CLOUD_INIT_ISO" ]; then
|
||||||
|
CLOUD_INIT_OPTS="-cdrom $CLOUD_INIT_ISO"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run QEMU
|
||||||
|
qemu-system-x86_64 \\
|
||||||
|
-enable-kvm \\
|
||||||
|
-m $MEMORY \\
|
||||||
|
-smp $CPUS \\
|
||||||
|
-drive file=$QCOW2_IMAGE,format=qcow2 \\
|
||||||
|
$CLOUD_INIT_OPTS \\
|
||||||
|
-netdev user,id=net0,hostfwd=tcp::2222-:22,hostfwd=tcp::8080-:8080 \\
|
||||||
|
-device virtio-net-pci,netdev=net0 \\
|
||||||
|
-display gtk \\
|
||||||
|
-boot d
|
||||||
|
|
||||||
|
# To access:
|
||||||
|
# SSH: ssh -p 2222 debai@localhost
|
||||||
|
# Web UI: http://localhost:8080
|
||||||
|
"""
|
||||||
|
|
||||||
|
script_path = self.output_path.parent / "run-debai-vm.sh"
|
||||||
|
script_path.write_text(script)
|
||||||
|
script_path.chmod(0o755)
|
||||||
|
|
||||||
|
return str(script_path)
|
||||||
|
|
||||||
|
|
||||||
|
class VMManager:
|
||||||
|
"""
|
||||||
|
Manages QEMU virtual machines running Debai.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, vm_dir: Optional[Path] = None):
|
||||||
|
self.vm_dir = vm_dir or Path.home() / ".local" / "share" / "debai" / "vms"
|
||||||
|
self.vm_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
self.running_vms: dict[str, subprocess.Popen] = {}
|
||||||
|
|
||||||
|
async def create_vm(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
disk_size: str = "20G",
|
||||||
|
memory_mb: int = 2048,
|
||||||
|
cpus: int = 2,
|
||||||
|
) -> dict[str, Any]:
|
||||||
|
"""Create a new VM."""
|
||||||
|
vm_path = self.vm_dir / name
|
||||||
|
vm_path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
qcow2_path = vm_path / f"{name}.qcow2"
|
||||||
|
|
||||||
|
generator = QCOW2Generator(
|
||||||
|
output_path=qcow2_path,
|
||||||
|
disk_size=disk_size,
|
||||||
|
memory_mb=memory_mb,
|
||||||
|
cpus=cpus,
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await generator.generate()
|
||||||
|
|
||||||
|
if result["success"]:
|
||||||
|
# Generate run script
|
||||||
|
generator.generate_run_script()
|
||||||
|
|
||||||
|
# Save VM configuration
|
||||||
|
config = {
|
||||||
|
"name": name,
|
||||||
|
"disk_size": disk_size,
|
||||||
|
"memory_mb": memory_mb,
|
||||||
|
"cpus": cpus,
|
||||||
|
"qcow2_path": str(qcow2_path),
|
||||||
|
}
|
||||||
|
|
||||||
|
config_path = vm_path / "config.yaml"
|
||||||
|
with open(config_path, "w") as f:
|
||||||
|
yaml.dump(config, f, default_flow_style=False)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def list_vms(self) -> list[dict[str, Any]]:
|
||||||
|
"""List all VMs."""
|
||||||
|
vms = []
|
||||||
|
|
||||||
|
for vm_dir in self.vm_dir.iterdir():
|
||||||
|
if vm_dir.is_dir():
|
||||||
|
config_path = vm_dir / "config.yaml"
|
||||||
|
if config_path.exists():
|
||||||
|
with open(config_path) as f:
|
||||||
|
config = yaml.safe_load(f)
|
||||||
|
config["running"] = config["name"] in self.running_vms
|
||||||
|
vms.append(config)
|
||||||
|
|
||||||
|
return vms
|
||||||
|
|
||||||
|
async def start_vm(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
headless: bool = False,
|
||||||
|
) -> bool:
|
||||||
|
"""Start a VM."""
|
||||||
|
vm_path = self.vm_dir / name
|
||||||
|
config_path = vm_path / "config.yaml"
|
||||||
|
|
||||||
|
if not config_path.exists():
|
||||||
|
logger.error(f"VM {name} not found")
|
||||||
|
return False
|
||||||
|
|
||||||
|
with open(config_path) as f:
|
||||||
|
config = yaml.safe_load(f)
|
||||||
|
|
||||||
|
qcow2_path = config["qcow2_path"]
|
||||||
|
cloud_init_iso = vm_path / "cloud-init.iso"
|
||||||
|
|
||||||
|
cmd = [
|
||||||
|
"qemu-system-x86_64",
|
||||||
|
"-enable-kvm",
|
||||||
|
"-m", str(config["memory_mb"]),
|
||||||
|
"-smp", str(config["cpus"]),
|
||||||
|
"-drive", f"file={qcow2_path},format=qcow2",
|
||||||
|
"-netdev", "user,id=net0,hostfwd=tcp::2222-:22,hostfwd=tcp::8080-:8080",
|
||||||
|
"-device", "virtio-net-pci,netdev=net0",
|
||||||
|
]
|
||||||
|
|
||||||
|
if cloud_init_iso.exists():
|
||||||
|
cmd.extend(["-cdrom", str(cloud_init_iso)])
|
||||||
|
|
||||||
|
if headless:
|
||||||
|
cmd.extend(["-display", "none", "-daemonize"])
|
||||||
|
else:
|
||||||
|
cmd.extend(["-display", "gtk"])
|
||||||
|
|
||||||
|
process = subprocess.Popen(cmd)
|
||||||
|
self.running_vms[name] = process
|
||||||
|
|
||||||
|
logger.info(f"Started VM {name}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def stop_vm(self, name: str) -> bool:
|
||||||
|
"""Stop a VM."""
|
||||||
|
if name not in self.running_vms:
|
||||||
|
return False
|
||||||
|
|
||||||
|
process = self.running_vms[name]
|
||||||
|
process.terminate()
|
||||||
|
process.wait(timeout=30)
|
||||||
|
|
||||||
|
del self.running_vms[name]
|
||||||
|
logger.info(f"Stopped VM {name}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def delete_vm(self, name: str) -> bool:
|
||||||
|
"""Delete a VM."""
|
||||||
|
# Stop if running
|
||||||
|
if name in self.running_vms:
|
||||||
|
self.stop_vm(name)
|
||||||
|
|
||||||
|
vm_path = self.vm_dir / name
|
||||||
|
if vm_path.exists():
|
||||||
|
shutil.rmtree(vm_path)
|
||||||
|
logger.info(f"Deleted VM {name}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
10
src/debai/gui/__init__.py
Archivo normal
10
src/debai/gui/__init__.py
Archivo normal
@@ -0,0 +1,10 @@
|
|||||||
|
"""
|
||||||
|
GUI module for Debai.
|
||||||
|
|
||||||
|
This module provides the graphical user interface using GTK4.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from debai.gui.main import main
|
||||||
|
from debai.gui.app import DebaiApplication
|
||||||
|
|
||||||
|
__all__ = ["main", "DebaiApplication"]
|
||||||
1145
src/debai/gui/app.py
Archivo normal
1145
src/debai/gui/app.py
Archivo normal
La diferencia del archivo ha sido suprimido porque es demasiado grande
Cargar Diff
30
src/debai/gui/main.py
Archivo normal
30
src/debai/gui/main.py
Archivo normal
@@ -0,0 +1,30 @@
|
|||||||
|
"""
|
||||||
|
Main entry point for the Debai GTK GUI.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import gi
|
||||||
|
|
||||||
|
gi.require_version("Gtk", "4.0")
|
||||||
|
gi.require_version("Adw", "1")
|
||||||
|
|
||||||
|
from gi.repository import Gio
|
||||||
|
|
||||||
|
from debai.gui.app import DebaiApplication
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=logging.INFO,
|
||||||
|
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
|
||||||
|
"""Main entry point for the GUI application."""
|
||||||
|
app = DebaiApplication()
|
||||||
|
return app.run(sys.argv)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
||||||
Referencia en una nueva incidencia
Block a user