Initial commit
Some checks reported errors
Updates/open-webui-ollama-docker-deploy/pipeline/head: Something is wrong with the build of this commit
This commit is contained in:
commit 655ba84834
Jenkinsfile (vendored, Normal file, 87 lines)
@@ -0,0 +1,87 @@
pipeline {
    agent {
        node { label 'Wrangler1' }
    }
    environment {
        DOCKER_HOST = 'unix:///var/run/docker.sock'
        OPENAI_TOKEN = credentials('OpenAI-Token')
        OLLAMA = 'ollama/ollama'
        OPENWEBUI = 'ghcr.io/open-webui/open-webui:latest'
        TIKA = 'apache/tika:latest'
        CHROMADB = 'chromadb/chroma:latest'
        CHROMAUI = 'ghcr.io/logspace-ai/chroma-ui:latest'
    }
    stages {
        stage('Check NVIDIA Driver Version') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        sh 'nvidia-smi --query-gpu=driver_version --format=csv,noheader'
                    }
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (Before Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version before deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version before deploy:'
                        sh 'docker exec -i openwebui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
        stage('Deploy with Docker Compose') {
            steps {
                script {
                    withCredentials([usernamePassword(credentialsId: 'gitea-oauth-open-webui-client-id-secret', passwordVariable: 'CLIENT_SECRET', usernameVariable: 'CLIENT_ID'), string(credentialsId: 'OpenAI-API-Token', variable: 'OPEN_AI_TOKEN'), usernamePassword(credentialsId: 'ldap-bind-auth-user', passwordVariable: 'AD_BIND_PASSWORD', usernameVariable: 'AD_BIND_USER')]) {
                        sh """
                            COMPOSE_PROJECT_NAME=openwebui \
                            CHROMAUI=${CHROMAUI} \
                            CHROMADB=${CHROMADB} \
                            TIKA=${TIKA} \
                            OLLAMA=${OLLAMA} \
                            OPENWEBUI=${OPENWEBUI} \

                            # Generate a fresh random Postgres password for this deployment
                            export PG_PASS=\$(openssl rand -base64 48 | tr -dc 'A-Za-z0-9' | head -c32)
                            docker compose pull
                            COMPOSE_PROJECT_NAME=openwebui \
                            CHROMADB=${CHROMADB} \
                            CHROMAUI=${CHROMAUI} \
                            TIKA=${TIKA} \
                            OLLAMA=${OLLAMA} \
                            OPENWEBUI=${OPENWEBUI} \
                            docker compose up -d --force-recreate

                            sleep 5
                            # Rotate the database user's password to match the newly generated PG_PASS
                            echo "ALTER USER openwebui_user WITH PASSWORD '\$PG_PASS';" | docker exec -i openwebui_postgres_db psql -U openwebui_user -d openwebui_db
                        """
                    }
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (After Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version after deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version after deploy:'
                        sh 'docker exec -i openwebui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
    }
    post {
        always {
            echo 'Pipeline finished.'
        }
    }
}
README.md (Normal file, 141 lines)
@@ -0,0 +1,141 @@
# Ollama and Open-WebUI Docker Setup

This repository provides the configuration files needed to deploy two Docker services, Ollama and Open-WebUI, using Docker Compose and Jenkins for automated management. The services run on the `shark-wrangler` node and use NVIDIA GPU acceleration for Ollama.

## Prerequisites

To get started, ensure you have the following installed and configured (a quick verification sketch follows this list):

- Docker and Docker Compose
- NVIDIA GPU drivers (compatible with the Ollama container)
- Jenkins with access to the `shark-wrangler` node
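A quick host-side check (a minimal sketch; the NVIDIA query is the same one the Jenkins pipeline runs):

```sh
# Confirm Docker and the Compose plugin are installed
docker --version
docker compose version

# Confirm the NVIDIA driver is visible (same query the pipeline uses)
nvidia-smi --query-gpu=driver_version --format=csv,noheader
```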
## Services Overview

### Ollama

- **Image**: `ollama/ollama:latest`
- **Container Name**: `ollama`
- **Ports**: `11434:11434`
- **Environment Variables**:
  - `OLLAMA_LLM_LIBRARY=cuda_v12` for GPU acceleration.
- **Volumes**: Mounts data at `/root/.ollama` to persist Ollama's state.
- **Networks**: Connected to `shared_net`.
### Open-WebUI

- **Image**: `ghcr.io/open-webui/open-webui:dev`
- **Container Name**: `open-webui`
- **Ports**: `3000:8080`
- **Environment Variables**:
  - **OAuth Configuration**: Enables login and OAuth features.
  - **OpenAI API Key**: Configured to enable integrations.
  - **OLLAMA_BASE_URL**: Points to the running Ollama instance for interoperability.
- **Volumes**: Mounts data at `/app/backend/data` to persist WebUI state.
- **Networks**: Connected to `shared_net`.

### Docker Compose Configuration

The Docker Compose configuration deploys the following (a quick status check is sketched after this list):

- `ollama` and `open-webui` services with defined volumes and ports.
- `shared_net` network for container communication.
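To confirm the stack came up as described, standard Docker Compose commands can be used (a minimal sketch; the project name `openwebui` matches the `COMPOSE_PROJECT_NAME` set in the Jenkinsfile, and the port mapping comes from `docker-compose.yaml`):

```sh
# List the services in the compose project and their state
docker compose -p openwebui ps

# Inspect the shared network (Compose prefixes the network name with the project name)
docker network inspect openwebui_shared_net

# Open-WebUI is published on host port 3000 (container port 8080)
curl -I http://localhost:3000
```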
## Jenkinsfile for CI/CD

The `Jenkinsfile` automates the deployment of Ollama and Open-WebUI services using Docker Compose:

### Pipeline Stages

1. **Check NVIDIA Driver Version**:
   - Ensures NVIDIA drivers are available and compatible.

2. **Check Ollama and Open-WebUI Versions (Before Deploy)**:
   - Retrieves the current version of the Ollama and Open-WebUI containers.

3. **Deploy with Docker Compose**:
   - Pulls the latest images and recreates the containers using Docker Compose.

4. **Check Ollama and Open-WebUI Versions (After Deploy)**:
   - Ensures that the services are running and updated to the latest version after deployment.

The relevant Jenkinsfile snippet:

```groovy
pipeline {
    agent {
        node { label 'shark-wrangler' }
    }
    environment {
        DOCKER_HOST = 'unix:///var/run/docker.sock'
    }
    stages {
        stage('Check NVIDIA Driver Version') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        sh 'nvidia-smi --query-gpu=driver_version --format=csv,noheader'
                    }
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (Before Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version before deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version before deploy:'
                        sh 'docker exec -i open-webui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
        stage('Deploy with Docker Compose') {
            steps {
                script {
                    sh '''
                        docker pull ollama/ollama
                        docker pull ghcr.io/open-webui/open-webui:main
                        docker compose up -d --force-recreate
                    '''
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (After Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version after deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version after deploy:'
                        sh 'docker exec -i open-webui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
    }
    post {
        always {
            echo 'Pipeline finished.'
        }
    }
}
```
## Usage

To deploy the services, simply use Docker Compose:

```sh
docker compose up -d
```

For automated deployments, you can use Jenkins with the provided `Jenkinsfile` to ensure the latest versions are deployed and tested.
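The same version checks the pipeline performs can also be run by hand after a deploy (a minimal sketch; the container names `ollama` and `openwebui` are the ones the Jenkinsfile targets and may differ from the names defined in your `docker-compose.yaml`):

```sh
# Mirror the pipeline's post-deploy checks
docker exec -i ollama ollama -v || echo "Ollama check failed"
docker exec -i openwebui jq -r .version /app/package.json || echo "Open-WebUI check failed"
```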
## Notes

- Ensure all environment variables are correctly set, particularly the OAuth credentials and OpenAI API key (a sketch of the expected variables follows this list).
- Update Docker images regularly to maintain security and access new features.
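In the Jenkins pipeline these values come from the credentials bindings in the `Jenkinsfile`; for a manual run outside Jenkins, exports along the following lines would need to be in place before `docker compose up` (the variable names match those interpolated in `docker-compose.yaml`; all values shown are placeholders):

```sh
# Placeholders only; in the pipeline these come from Jenkins credentials
export OPEN_AI_TOKEN="sk-..."                             # OPENAI_API_KEY for Open-WebUI
export CLIENT_ID="your-oauth-client-id"                   # OAUTH_CLIENT_ID
export CLIENT_SECRET="your-oauth-client-secret"           # OAUTH_CLIENT_SECRET
export AD_BIND_USER="CN=bind,CN=Users,DC=example,DC=com"  # LDAP_APP_DN
export AD_BIND_PASSWORD="bind-password"                   # LDAP_APP_PASSWORD
export PG_PASS="a-strong-postgres-password"               # Postgres and DATABASE_URL password
```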
## License

This project is open-source and licensed under the [MIT License](LICENSE).
docker-compose.yaml (Normal file, 172 lines)
@@ -0,0 +1,172 @@
services:
  ollama1: # For GPU 0
    image: ${OLLAMA:-ollama/ollama:latest}
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              capabilities: [gpu]
              device_ids: ['0']
    volumes:
      - ollama-small:/root/.ollama
    ports:
      - 11434:11434
    restart: always
    container_name: ollamasmall
    environment:
      OLLAMA_KEEP_ALIVE: -1
    networks:
      - shared_net

  ollama2: # For GPU 1
    image: ${OLLAMA:-ollama/ollama:latest}
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              capabilities: [gpu]
              device_ids: ['1']
    volumes:
      - ollama:/root/.ollama
    restart: always
    container_name: ollamalarge
    networks:
      - shared_net

  open_webui:
    container_name: openwebui
    image: ${OPENWEBUI:-ghcr.io/open-webui/open-webui:main}
    ports:
      - "3000:8080"
    volumes:
      - data:/app/backend/data
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              capabilities: [gpu]
              count: all
    restart: always
    networks:
      - shared_net
    environment:
      # General
      WEBUI_URL: https://ai.hackedyour.info
      ADMIN_EMAIL: adamoutler@gmail.com
      ENABLE_CHANNELS: True
      USE_CUDA_DOCKER: True
      ENABLE_REALTIME_CHAT_SAVE: False
      ENV: dev

      # Database
      DATABASE_URL: postgresql://openwebui_user:${PG_PASS}@openwebui_postgres_db:5432/openwebui_db

      # Ollama
      OLLAMA_BASE_URL: http://ollama1:11434
      OLLAMA_BASE_URLS: http://ollama1:11434;http://ollama2:11434

      # OpenAI
      OPENAI_API_KEY: ${OPEN_AI_TOKEN}

      # RAG
      RAG_TEXT_SPLITTER: character
      CHUNK_SIZE: 1000
      CHUNK_OVERLAP: 100
      VECTOR_DB: chroma
      CHROMA_HTTP_HOST: chromadb
      CHROMA_HTTP_PORT: 8000
      ENABLE_RAG_LOCAL_WEB_FETCH: True

      # TIKA
      CONTENT_EXTRACTION_ENGINE: tika
      TIKA_SERVER_URL: http://tika:9998

      # LDAP
      ENABLE_LOGIN_FORM: true
      ENABLE_SIGNUP: false
      ENABLE_LDAP: true
      LDAP_APP_DN: ${AD_BIND_USER}
      LDAP_APP_PASSWORD: ${AD_BIND_PASSWORD}
      LDAP_ATTRIBUTE_FOR_USERNAME: sAMAccountName
      LDAP_ATTRIBUTE_FOR_MAIL: mail
      LDAP_SEARCH_BASE: CN=Users,DC=activedirectory,DC=adamoutler,DC=com
      LDAP_SEARCH_FILTERS: (&(memberOf=CN=AI,OU=Groups,dc=activedirectory,dc=adamoutler,dc=com))
      LDAP_SERVER_HOST: 192.168.1.100
      LDAP_SERVER_LABEL: FluffysNet
      LDAP_SERVER_PORT: 389
      LDAP_USE_TLS: false

      # OAUTH
      ENABLE_OAUTH_SIGNUP: false
      OAUTH_PROVIDER_NAME: git.adamoutler.com permissions
      OPENID_PROVIDER_URL: https://git.adamoutler.com/.well-known/openid-configuration
      OPENID_REDIRECT_URI: https://ai.hackedyour.info/oauth/oidc/callback
      OAUTH_CLIENT_ID: ${CLIENT_ID}
      OAUTH_CLIENT_SECRET: ${CLIENT_SECRET}
      OAUTH_USERNAME_CLAIM: name
      OAUTH_MERGE_ACCOUNTS_BY_EMAIL: true

      # Redis Support
      ENABLE_WEBSOCKET_SUPPORT: True
      WEBSOCKET_MANAGER: redis # This is the default, no need to set explicitly unless changing
      WEBSOCKET_REDIS_URL: redis://redis:6379/0
      REDIS_URL: redis://redis:6379/0 # Also set for app state, using the same instance

  tika:
    image: ${TIKA:-apache/tika:latest}
    ports:
      - "9998:9998"
    restart: always
    container_name: tika
    networks:
      - shared_net

  chromadb:
    container_name: chromadb
    image: ${CHROMADB:-chromadb/chroma:latest}
    restart: always
    volumes:
      - chromadb:/chroma # Persist database storage
    environment:
      IS_PERSISTENT: TRUE # Enable persistent storage
    networks:
      - shared_net

  redis:
    image: redis:alpine
    container_name: redis
    restart: always
    volumes:
      - redis_data:/data
    networks:
      - shared_net

  openwebui_postgres_db: # Renamed service key (lowercase)
    image: postgres:latest
    container_name: openwebui_postgres_db # Keep container name consistent
    restart: always
    environment:
      POSTGRES_DB: openwebui_db
      POSTGRES_USER: openwebui_user
      POSTGRES_PASSWORD: ${PG_PASS}
    volumes:
      - postgres_data:/var/lib/postgresql/data
    networks:
      - shared_net

volumes:
  ollama:
  ollama-small:
  data:
  chromadb:
  redis_data:
  postgres_data:

networks:
  shared_net: