# GPU-mode startup script: configures CUDA/cuDNN, sanity-checks the GPU
# stack, then launches the LightRAG server.
import os
import subprocess
import sys
def setup_gpu_environment():
|
|
"""Setup GPU environment for PaddleOCR"""
|
|
print("=== SETTING UP GPU ENVIRONMENT ===")
|
|
|
|
# Set environment variables
|
|
os.environ['CUDA_PATH'] = r'C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.9'
|
|
os.environ['CUDA_HOME'] = os.environ['CUDA_PATH']
|
|
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
|
|
|
|
# Add CUDA and cuDNN to PATH
|
|
cuda_bin = os.path.join(os.environ['CUDA_PATH'], 'bin')
|
|
current_path = os.environ.get('PATH', '')
|
|
if cuda_bin not in current_path:
|
|
os.environ['PATH'] = cuda_bin + ';' + current_path
|
|
|
|
print(f"✓ CUDA_PATH: {os.environ['CUDA_PATH']}")
|
|
print(f"✓ CUDA_VISIBLE_DEVICES: {os.environ['CUDA_VISIBLE_DEVICES']}")
|
|
print(f"✓ Added to PATH: {cuda_bin}")
|
|
|
|
def test_gpu_setup():
    """Verify the GPU stack (cuDNN + PaddlePaddle CUDA) before serving.

    Returns:
        bool: True when a cuDNN DLL loads and PaddlePaddle reports CUDA
        support; False otherwise (the failure reason is printed).
    """
    print("\n=== TESTING GPU SETUP ===")

    try:
        # Load cuDNN directly to catch a missing/incompatible DLL early.
        # Search CUDA_PATH\bin for any cuDNN major version (cudnn64_8.dll,
        # cudnn64_9.dll, ...) instead of hard-coding v8 — CUDA 12.x is
        # commonly paired with cuDNN 9, where the old name does not exist.
        import ctypes
        import glob
        cuda_path = os.environ.get(
            'CUDA_PATH',
            r'C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.9')
        cudnn_dir = os.path.join(cuda_path, 'bin')
        candidates = sorted(glob.glob(os.path.join(cudnn_dir, 'cudnn64_*.dll')))
        if not candidates:
            raise FileNotFoundError(f"no cudnn64_*.dll found in {cudnn_dir}")
        # Prefer the highest-versioned DLL present.
        cudnn = ctypes.WinDLL(candidates[-1])
        print("✓ cuDNN loaded successfully")
    except Exception as e:
        print(f"✗ cuDNN loading failed: {e}")
        return False

    try:
        # Confirm a CUDA-enabled PaddlePaddle build and select the GPU.
        import paddle
        print(f"✓ PaddlePaddle version: {paddle.__version__}")
        print(f"✓ GPU available: {paddle.is_compiled_with_cuda()}")

        if paddle.is_compiled_with_cuda():
            paddle.device.set_device('gpu')
            print("✓ PaddlePaddle GPU device set successfully")
            return True
        else:
            print("✗ PaddlePaddle not compiled with CUDA")
            return False
    except Exception as e:
        print(f"✗ PaddlePaddle error: {e}")
        return False
def start_server():
    """Launch the LightRAG server with uvicorn (blocks until it exits)."""
    print("\n=== STARTING LIGHTRAG SERVER ===")
    print("Server will be available at: http://localhost:3015")
    print("Press Ctrl+C to stop the server")

    # Run uvicorn under the current interpreter with auto-reload on.
    command = [
        sys.executable, '-m', 'uvicorn', 'main:app',
        '--host', '0.0.0.0', '--port', '3015', '--reload',
    ]
    try:
        subprocess.run(command, check=True)
    except KeyboardInterrupt:
        # Normal shutdown path when the operator hits Ctrl+C.
        print("\nServer stopped by user")
    except Exception as e:
        print(f"Server error: {e}")
if __name__ == "__main__":
    print("GPU MODE SERVER STARTUP")
    print("=" * 50)

    # Export CUDA-related environment variables before any GPU probing.
    setup_gpu_environment()

    # Only start serving once the GPU stack checks out; otherwise print
    # troubleshooting guidance and wait so a console window stays open.
    if not test_gpu_setup():
        print("\n✗ GPU setup failed, cannot start server")
        print("\nTROUBLESHOOTING:")
        for step in (
            "1. Check CUDA 12.9 installation",
            "2. Verify cuDNN is installed in CUDA_PATH\\bin",
            "3. Ensure PaddlePaddle GPU version is installed",
            "4. Restart system after CUDA/cuDNN installation",
        ):
            print(step)
        input("\nPress Enter to exit...")
    else:
        print("\n✓ GPU setup successful, starting server...")
        start_server()