Reset history to latest state

All checks were successful: Updates/open-webui-ollama-docker-deploy/pipeline/head (This commit looks good)
Commit f42ce511dc

Jenkinsfile (new file, 67 lines)
pipeline {
    agent {
        node { label 'Wrangler1' }
    }
    environment {
        DOCKER_HOST = 'unix:///var/run/docker.sock'
        OPENAI_TOKEN = credentials('OpenAI-Token')
        OLLAMA = 'ollama/ollama'
        OPENWEBUI = 'ghcr.io/open-webui/open-webui:dev'
        TIKA = 'apache/tika:latest'
        CHROMADB = 'chromadb/chroma:latest'
    }
    stages {
        stage('Check NVIDIA Driver Version') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        sh 'nvidia-smi --query-gpu=driver_version --format=csv,noheader'
                    }
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (Before Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version before deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version before deploy:'
                        sh 'docker exec -i openwebui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
        stage('Deploy with Docker Compose') {
            steps {
                script {
                    withCredentials([usernamePassword(credentialsId: 'gitea-oauth-open-webui-client-id-secret', passwordVariable: 'CLIENT_SECRET', usernameVariable: 'CLIENT_ID'), string(credentialsId: 'OpenAI-API-Token', variable: 'OPEN_AI_TOKEN'), usernamePassword(credentialsId: 'ldap-bind-auth-user', passwordVariable: 'AD_BIND_PASSWORD', usernameVariable: 'AD_BIND_USER')]) {
                        sh """
                            COMPOSE_PROJECT_NAME=openwebui CHROMADB=${CHROMADB} TIKA=${TIKA} OLLAMA=${OLLAMA} OPENWEBUI=${OPENWEBUI} docker compose pull
                            COMPOSE_PROJECT_NAME=openwebui CHROMADB=${CHROMADB} TIKA=${TIKA} OLLAMA=${OLLAMA} OPENWEBUI=${OPENWEBUI} docker compose up -d --force-recreate
                        """
                    }
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (After Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version after deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version after deploy:'
                        sh 'docker exec -i openwebui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
    }
    post {
        always {
            echo 'Pipeline finished.'
        }
    }
}

README.md (new file, 196 lines)

# Ollama and Open-WebUI Docker Setup

This repository provides the configuration files needed to deploy two Docker services, Ollama and Open-WebUI, using Docker Compose, with Jenkins handling automated management. The services run on the `shark-wrangler` node and use NVIDIA GPU acceleration for Ollama.

## Prerequisites

To get started, ensure you have the following installed and configured (a quick verification sketch follows this list):

- Docker and Docker Compose
- NVIDIA GPU drivers (compatible with the Ollama container)
- Jenkins with access to the `shark-wrangler` node
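
A quick way to verify these prerequisites on the target node (a minimal sketch; the `nvidia-smi` query is the same one the Jenkins pipeline runs):

```sh
# Confirm Docker and the Compose plugin are available
docker --version
docker compose version

# Confirm the NVIDIA driver is visible (same query the pipeline's first stage runs)
nvidia-smi --query-gpu=driver_version --format=csv,noheader
```
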
## Services Overview

### Ollama

- **Image**: `ollama/ollama:latest`
- **Container Name**: `ollama`
- **Ports**: `11434:11434`
- **Environment Variables**:
  - `OLLAMA_LLM_LIBRARY=cuda_v12` for GPU acceleration.
- **Volumes**: Mounts data at `/root/.ollama` to persist Ollama's state.
- **Networks**: Connected to `shared_net`.
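
Once the stack is running, the Ollama container can be spot-checked from the host. A minimal sketch; the `docker exec` line mirrors the version check in the Jenkinsfile, and `localhost:11434` assumes you are on the host that publishes the port:

```sh
# Version reported inside the container (same check the pipeline uses)
docker exec -i ollama ollama -v

# Query the HTTP API through the published port
curl -s http://localhost:11434/api/version
```
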
### Open-WebUI

- **Image**: `ghcr.io/open-webui/open-webui:dev`
- **Container Name**: `open-webui`
- **Ports**: `3000:8080`
- **Environment Variables**:
  - **OAuth Configuration**: Enables login and OAuth features.
  - **OpenAI API Key**: Configured to enable integrations.
  - **OLLAMA_BASE_URL**: Points to the running Ollama instance for interoperability.
- **Volumes**: Mounts data at `/app/backend/data` to persist the WebUI state.
- **Networks**: Connected to `shared_net`.
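
A similar spot check works for Open-WebUI. A minimal sketch, assuming the container name `open-webui` used in this README and the published port `3000`:

```sh
# Confirm the UI answers on the published port
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:3000

# Read the version from the container's package.json (same check the pipeline uses)
docker exec -i open-webui jq -r .version /app/package.json
```
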
### Docker Compose Configuration

The Docker Compose configuration deploys the following:

- `ollama` and `open-webui` services with defined volumes and ports.
- `shared_net` network for container communication.

The relevant section in `docker-compose.yaml`:

```yaml
# docker-compose.yaml
services:
  ollama:
    image: ollama/ollama:latest
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              capabilities: [gpu]
              count: all
    ports:
      - "11434:11434"
    environment:
      - OLLAMA_LLM_LIBRARY=cuda_v12
    volumes:
      - ollama:/root/.ollama
    restart: always
    container_name: ollama
    networks:
      - shared_net

  open_webui:
    image: ghcr.io/open-webui/open-webui:dev
    ports:
      - "3000:8080"
    volumes:
      - open-webui:/app/backend/data
    restart: always
    container_name: open-webui
    networks:
      - shared_net
    environment:
      - ENV=dev
      - ENABLE_LOGIN_FORM=true
      - ENABLE_OAUTH_SIGNUP=true
      - OAUTH_USERNAME_CLAIM=name
      - OAUTH_MERGE_ACCOUNTS_BY_EMAIL=true
      - OAUTH_PROVIDER_NAME=git.adamoutler.com permissions
      - WEBUI_URL=https://ai.hackedyour.info
      - OPENID_PROVIDER_URL=https://git.adamoutler.com/.well-known/openid-configuration
      - OPENID_REDIRECT_URI=https://ai.hackedyour.info/oauth/oidc/callback
      - OLLAMA_BASE_URL=http://ollama:11434
      - OPENAI_API_KEY=${OPEN_AI_TOKEN}
      - OAUTH_CLIENT_ID=${CLIENT_ID}
      - OAUTH_CLIENT_SECRET=${CLIENT_SECRET}

volumes:
  ollama:
  open-webui:

networks:
  shared_net:
```
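
The committed `docker-compose.yaml` parameterizes the image references (for example `${OLLAMA:-ollama/ollama:latest}`), and the Jenkinsfile sets those variables before invoking Compose. A minimal sketch of doing the same by hand, using the image values from the Jenkinsfile's environment block:

```sh
# Pin the images before running Compose (values shown are the Jenkinsfile defaults)
export OLLAMA=ollama/ollama
export OPENWEBUI=ghcr.io/open-webui/open-webui:dev
export TIKA=apache/tika:latest
export CHROMADB=chromadb/chroma:latest

COMPOSE_PROJECT_NAME=openwebui docker compose pull
COMPOSE_PROJECT_NAME=openwebui docker compose up -d --force-recreate
```
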
## Jenkinsfile for CI/CD

The `Jenkinsfile` automates the deployment of the Ollama and Open-WebUI services using Docker Compose.

### Pipeline Stages

1. **Check NVIDIA Driver Version**:
   - Ensures NVIDIA drivers are available and compatible.

2. **Check Ollama and Open-WebUI Versions (Before Deploy)**:
   - Retrieves the current versions of the Ollama and Open-WebUI containers.

3. **Deploy with Docker Compose**:
   - Pulls the latest images and recreates the containers using Docker Compose.

4. **Check Ollama and Open-WebUI Versions (After Deploy)**:
   - Confirms that the services are running and were updated to the latest versions after deployment.

The relevant Jenkinsfile snippet:
```groovy
pipeline {
    agent {
        node { label 'shark-wrangler' }
    }
    environment {
        DOCKER_HOST = 'unix:///var/run/docker.sock'
    }
    stages {
        stage('Check NVIDIA Driver Version') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        sh 'nvidia-smi --query-gpu=driver_version --format=csv,noheader'
                    }
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (Before Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version before deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version before deploy:'
                        sh 'docker exec -i open-webui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
        stage('Deploy with Docker Compose') {
            steps {
                script {
                    sh '''
                        docker pull ollama/ollama
                        docker pull ghcr.io/open-webui/open-webui:main
                        docker compose up -d --force-recreate
                    '''
                }
            }
        }
        stage('Check Ollama and Open-WebUI Versions (After Deploy)') {
            steps {
                script {
                    catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
                        echo 'Checking Ollama version after deploy:'
                        sh 'docker exec -i ollama ollama -v || echo "Ollama check failed"'

                        echo 'Checking Open-WebUI version after deploy:'
                        sh 'docker exec -i open-webui jq -r .version /app/package.json || echo "Open-WebUI check failed"'
                    }
                }
            }
        }
    }
    post {
        always {
            echo 'Pipeline finished.'
        }
    }
}
```

## Usage

To deploy the services, use Docker Compose:

```sh
docker compose up -d
```
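
To confirm the containers came up and to follow their output, the standard Compose commands work from the directory containing `docker-compose.yaml`:

```sh
# List the services and their state
docker compose ps

# Tail the logs of a single service (service name as defined in docker-compose.yaml)
docker compose logs -f open_webui
```
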
For automated deployments, you can use Jenkins with the provided `Jenkinsfile` to ensure the latest versions are deployed and tested.

## Notes

- Ensure all environment variables are set correctly, particularly the OAuth credentials and the OpenAI API key (a sketch of the required variables follows below).
- Update the Docker images regularly to pick up security fixes and new features.
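
When running Compose outside Jenkins, the secrets that the pipeline injects via `withCredentials` must be provided by the shell instead. A minimal sketch; every value below is a placeholder:

```sh
# Variables consumed by docker-compose.yaml (placeholder values; supply your own)
export OPEN_AI_TOKEN='sk-...'                                   # OpenAI API key
export CLIENT_ID='your-oauth-client-id'                         # OAuth client ID from the Gitea provider
export CLIENT_SECRET='your-oauth-client-secret'                 # OAuth client secret
export AD_BIND_USER='CN=bind-user,CN=Users,DC=example,DC=com'   # LDAP bind DN
export AD_BIND_PASSWORD='bind-password'                         # LDAP bind password

docker compose up -d
```
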

## License

This project is open-source and licensed under the [MIT License](LICENSE).

docker-compose.yaml (new file, 107 lines)

services:
  ollama:
    image: ${OLLAMA:-ollama/ollama:latest}
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              capabilities: [gpu]
              count: all
    ports:
      - "11434:11434"
    volumes:
      - ollama:/root/.ollama
    restart: always
    container_name: ollama
    networks:
      - shared_net

  open_webui:
    container_name: openwebui
    image: ${OPENWEBUI:-ghcr.io/open-webui/open-webui:main}
    ports:
      - "3000:8080"
    volumes:
      - data:/app/backend/data
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              capabilities: [gpu]
              count: all
    restart: always
    networks:
      - shared_net
    environment:
      VECTOR_DB: chroma
      CHROMA_HTTP_HOST: chromadb
      CHROMA_HTTP_PORT: 8000

      ENABLE_CHANNELS: True
      USE_CUDA_DOCKER: True
      ENABLE_REALTIME_CHAT_SAVE: False
      CONTENT_EXTRACTION_ENGINE: tika
      TIKA_SERVER_URL: http://tika:9998
      ENV: dev

      ENABLE_LOGIN_FORM: true
      ENABLE_SIGNUP: false
      ENABLE_LDAP: true
      LDAP_APP_DN: ${AD_BIND_USER}
      LDAP_APP_PASSWORD: ${AD_BIND_PASSWORD}
      LDAP_ATTRIBUTE_FOR_USERNAME: sAMAccountName
      LDAP_ATTRIBUTE_FOR_MAIL: mail
      LDAP_SEARCH_BASE: CN=Users,DC=activedirectory,DC=adamoutler,DC=com
      LDAP_SEARCH_FILTERS: (&(memberOf=CN=AI,OU=Groups,dc=activedirectory,dc=adamoutler,dc=com) (objectClass=Person) (sAMAccountName=%s))
      LDAP_SERVER_HOST: 192.168.1.100
      LDAP_SERVER_LABEL: FluffysNet
      LDAP_SERVER_PORT: 389
      LDAP_USE_TLS: false

      RAG_TEXT_SPLITTER: character
      CHUNK_SIZE: 2000
      CHUNK_OVERLAP: 200
      OAUTH_USERNAME_CLAIM: name
      OAUTH_MERGE_ACCOUNTS_BY_EMAIL: true

      ENABLE_OAUTH_SIGNUP: false
      OAUTH_PROVIDER_NAME: git.adamoutler.com permissions
      WEBUI_URL: https://ai.hackedyour.info
      OPENID_PROVIDER_URL: https://git.adamoutler.com/.well-known/openid-configuration
      OPENID_REDIRECT_URI: https://ai.hackedyour.info/oauth/oidc/callback
      OLLAMA_BASE_URL: http://ollama:11434
      OPENAI_API_KEY: ${OPEN_AI_TOKEN}
      OAUTH_CLIENT_ID: ${CLIENT_ID}
      OAUTH_CLIENT_SECRET: ${CLIENT_SECRET}

  tika:
    image: ${TIKA:-apache/tika:latest}
    ports:
      - "9998:9998"
    restart: always
    container_name: tika
    networks:
      - shared_net

  chromadb:
    container_name: chromadb
    image: ${CHROMADB:-chromadb/chroma:latest}
    restart: always
    volumes:
      - chromadb:/chroma  # Persist database storage
    environment:
      IS_PERSISTENT: TRUE  # Enable persistent storage
    networks:
      - shared_net

volumes:
  ollama:
  data:
  chromadb:

networks:
  shared_net: