225 lines
5.7 KiB
Bash
Executable File
225 lines
5.7 KiB
Bash
Executable File
#!/bin/bash
|
|
|
|
# aPersona Setup Script
|
|
# This script helps you set up the aPersona AI assistant locally
|
|
|
|
# Fail fast: abort on any error (-e), on use of unset variables (-u),
# and when any stage of a pipeline fails (pipefail) — important below,
# where downloads are piped into other commands.
set -euo pipefail

echo "🤖 Welcome to aPersona Setup!"
echo "=========================================="
|
|
|
|
# ANSI color codes for status output. Stored as literal escape sequences
# (single-quoted) and expanded at print time. readonly: these are constants
# and must not be clobbered later in the script.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color (reset)
|
|
|
|
# Function to print colored output
|
|
# Print an informational message ($1) prefixed with a blue [INFO] tag.
# printf with %b expands the escape sequences stored in the color vars,
# while %s keeps the message literal — unlike `echo -e`, which would also
# interpret backslash escapes inside the message itself.
print_status() {
    printf '%b[INFO]%b %s\n' "$BLUE" "$NC" "$1"
}
|
|
|
|
# Print a success message ($1) prefixed with a green [SUCCESS] tag.
# %b expands only the color escape sequences; the message stays literal
# (fixes `echo -e` interpreting backslashes inside the message).
print_success() {
    printf '%b[SUCCESS]%b %s\n' "$GREEN" "$NC" "$1"
}
|
|
|
|
# Print a warning ($1) with a yellow [WARNING] tag to stderr.
# Diagnostics go to stderr so they don't pollute captured stdout;
# %b is confined to the color codes, keeping the message literal.
print_warning() {
    printf '%b[WARNING]%b %s\n' "$YELLOW" "$NC" "$1" >&2
}
|
|
|
|
# Print an error message ($1) with a red [ERROR] tag to stderr.
# Errors belong on stderr (the original wrote them to stdout);
# %b is confined to the color codes, keeping the message literal.
print_error() {
    printf '%b[ERROR]%b %s\n' "$RED" "$NC" "$1" >&2
}
|
|
|
|
# Check if Python 3.11+ is installed
|
|
# Verify that Python 3.11+ is on PATH; exit 1 with an error otherwise.
# Reads nothing; writes status lines via the print_* helpers.
check_python() {
    print_status "Checking Python installation..."
    if command -v python3 >/dev/null 2>&1; then
        local python_version major_version minor_version
        # "Python 3.12.1" -> "3.12.1"
        python_version=$(python3 --version | cut -d' ' -f2)
        # Split the version with parameter expansion instead of spawning
        # `echo | cut` subshells; also avoids unquoted word-splitting.
        major_version=${python_version%%.*}
        minor_version=${python_version#*.}
        minor_version=${minor_version%%.*}

        if [ "$major_version" -eq 3 ] && [ "$minor_version" -ge 11 ]; then
            print_success "Python $python_version found"
        else
            print_error "Python 3.11+ required. Found Python $python_version"
            exit 1
        fi
    else
        print_error "Python 3 not found. Please install Python 3.11+"
        exit 1
    fi
}
|
|
|
|
# Check if Node.js 18+ is installed
|
|
# Verify that Node.js 18+ is on PATH; exit 1 with an error otherwise.
check_node() {
    print_status "Checking Node.js installation..."
    if command -v node >/dev/null 2>&1; then
        local node_version major_version
        # "v18.19.0" -> "18.19.0"
        node_version=$(node --version | cut -d'v' -f2)
        # Major version via parameter expansion — no extra subshell,
        # no unquoted expansion.
        major_version=${node_version%%.*}

        if [ "$major_version" -ge 18 ]; then
            print_success "Node.js $node_version found"
        else
            print_error "Node.js 18+ required. Found Node.js $node_version"
            exit 1
        fi
    else
        print_error "Node.js not found. Please install Node.js 18+"
        exit 1
    fi
}
|
|
|
|
# Check if Ollama is installed
|
|
# Check for Ollama; probe its local API if installed, install it if not.
check_ollama() {
    print_status "Checking Ollama installation..."
    if command -v ollama >/dev/null 2>&1; then
        print_success "Ollama found"

        # Probe the local API to see whether the server is actually up.
        if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
            print_success "Ollama service is running"
        else
            print_warning "Ollama service is not running. Please start it with: ollama serve"
        fi
    else
        print_warning "Ollama not found. Installing Ollama..."
        # NOTE(security): this still executes a remote script; download to a
        # temp file first so (a) a failed download is detected — with a bare
        # `curl | sh` the pipeline's status is sh's, masking curl errors —
        # and (b) the script could be inspected before running.
        local installer
        installer=$(mktemp)
        if curl -fsSL https://ollama.ai/install.sh -o "$installer"; then
            sh "$installer"
            rm -f -- "$installer"
            print_success "Ollama installed. Please start it with: ollama serve"
        else
            rm -f -- "$installer"
            print_error "Failed to download the Ollama installer"
            exit 1
        fi
    fi
}
|
|
|
|
# Setup Python backend
|
|
# Set up the Python backend: create the venv on first run, activate it,
# and install dependencies from requirements.txt. Leaves the venv
# activated and the cwd restored to the project root.
setup_backend() {
    print_status "Setting up Python backend..."

    # Explicit guard: under `set -e` a bare failed cd would abort with a
    # cryptic message.
    cd backend || { print_error "backend directory not found"; exit 1; }

    # Create virtual environment only if it doesn't exist yet.
    if [ ! -d "venv" ]; then
        print_status "Creating Python virtual environment..."
        python3 -m venv venv
        print_success "Virtual environment created"
    fi

    # Activate virtual environment (script not visible to linters).
    # shellcheck disable=SC1091
    source venv/bin/activate

    print_status "Installing Python dependencies..."
    # `python -m pip` — pip cannot always upgrade itself when invoked as
    # the `pip` executable.
    python -m pip install --upgrade pip
    pip install -r requirements.txt

    print_success "Backend dependencies installed"

    cd ..
}
|
|
|
|
# Setup React frontend
|
|
# Set up the React frontend: install npm dependencies.
# Restores the cwd to the project root before returning.
setup_frontend() {
    print_status "Setting up React frontend..."

    # Explicit guard: under `set -e` a bare failed cd would abort with a
    # cryptic message.
    cd frontend || { print_error "frontend directory not found"; exit 1; }

    print_status "Installing Node.js dependencies..."
    npm install

    print_success "Frontend dependencies installed"

    cd ..
}
|
|
|
|
# Create necessary directories
|
|
# Create the on-disk layout used by the app: uploads, processed files,
# vector store, and the embeddings cache (all under ./data).
create_directories() {
    print_status "Creating data directories..."

    local subdir
    for subdir in uploads processed vectors embeddings_cache; do
        mkdir -p "data/$subdir"
    done

    print_success "Data directories created"
}
|
|
|
|
# Install Ollama models
|
|
# Pull the LLM and embedding models through Ollama. When the ollama CLI
# is absent, warn and return so the caller can finish the rest of setup.
install_models() {
    print_status "Installing AI models..."

    # Guard clause: bail out early when ollama isn't on PATH.
    if ! command -v ollama >/dev/null 2>&1; then
        print_warning "Ollama not available. Please install models manually after setting up Ollama"
        return
    fi

    print_status "Downloading Mistral model (this may take a while)..."
    ollama pull mistral

    print_status "Downloading embedding model..."
    ollama pull nomic-embed-text

    print_success "AI models installed"
}
|
|
|
|
# Create environment file
|
|
# Write backend/.env with default configuration, unless it already exists
# (existing files are never overwritten).
create_env() {
    print_status "Creating environment configuration..."

    if [ ! -f "backend/.env" ]; then
        # Generate the secret up front. Fall back to /dev/urandom when
        # openssl is absent — in the original heredoc, a missing openssl
        # silently produced an empty (predictable) key suffix, because a
        # failed command substitution inside a heredoc doesn't fail `cat`.
        local secret
        secret=$(openssl rand -hex 32 2>/dev/null) ||
            secret=$(od -vAn -N32 -tx1 /dev/urandom | tr -d ' \n')
        cat > backend/.env << EOF
# aPersona Environment Configuration

# Security
SECRET_KEY=your-secret-key-change-in-production-$secret

# Database
DATABASE_URL=sqlite:///./apersona.db

# AI Services
OLLAMA_BASE_URL=http://localhost:11434
DEFAULT_LLM_MODEL=mistral
EMBEDDING_MODEL=all-MiniLM-L6-v2

# Development
DEBUG=true
EOF
        print_success "Environment file created"
    else
        print_warning "Environment file already exists"
    fi
}
|
|
|
|
# Main setup function
|
|
# Top-level orchestrator: runs the environment checks, then sets up each
# component in dependency order, and finally prints the post-install steps.
main() {
    echo "Starting aPersona setup process..."
    echo ""

    # System checks — under `set -e`, each of these aborts the script
    # (exit 1) if a required tool is missing or too old.
    check_python
    check_node
    check_ollama

    echo ""

    # Setup components — directories and .env are created first so the
    # backend/frontend installs and model downloads can rely on them.
    create_directories
    create_env
    setup_backend
    setup_frontend
    install_models

    echo ""
    echo "=========================================="
    print_success "aPersona setup completed successfully!"
    echo ""
    echo "📋 Next steps:"
    echo "  1. Start Ollama service: ollama serve"
    echo "  2. Start the backend: cd backend && source venv/bin/activate && uvicorn app.main:app --reload"
    echo "  3. Start the frontend: cd frontend && npm run dev"
    echo "  4. Open http://localhost:3000 in your browser"
    echo ""
    echo "💡 For more information, check the README.md file"
    echo "🔒 Your data stays completely local and private!"
}
|
|
|
|
# Run main, forwarding any script arguments (main currently ignores them,
# but "$@" keeps the entry point future-proof).
main "$@"