Update apikey variable, increase token len
All checks were successful
Updates/AI-Frontend/pipeline/head This commit looks good
Updates/AIDGAF-server/pipeline/head This commit looks good

Adam Outler 2024-11-16 02:47:17 +00:00
parent 0923f41428
commit 1e2c77227b
2 changed files with 3 additions and 5 deletions

Jenkinsfile

@@ -39,10 +39,8 @@ pipeline {
         }
         stage('export docker container') {
             steps {
-                sh '#!/bin/sh \n' +
-                    'set +e; docker stop aidgaf-server||echo machine stopped; docker rm aidgaf-server||echo machine does not exist; set -e'
                 withCredentials([
-                    string(credentialsId: 'ai-hacked-your-info-key', variable: 'OPENWEBUI-APIKEY'),
+                    string(credentialsId: 'ai-hacked-your-info-key', variable: 'OPENWEBUIAPIKEY'),
                     string(credentialsId: 'PapaHashingSecret', variable: 'PAPA_HASH'),
                     string(credentialsId: 'PapaAsyncUrl', variable: 'ASYNC_URL'),
                     sshUserPrivateKey(credentialsId: 'dockeruserOn192.168.1.115', keyFileVariable: 'sshkey')]) {
@@ -50,7 +48,7 @@ pipeline {
                     'mkdir -p ~/.ssh; cp "$sshkey" ~/.ssh/id_rsa'
                 sh '#!/bin/sh \n' +
                     /* groovylint-disable-next-line GStringExpressionWithinString */
-                    'docker run --name=aidgaf-server -eSERVERPORT=8087 -eHOSTNAME=0.0.0.0 -eHASHKEY="${PAPA_HASH}" -eAPIKEY="${OPENWEBUI-APIKEY}" -eASYNC_METHOD="PATCH" -eASYNC_URL="${ASYNC_URL}" -p8087:8087 -d --restart=always aidgaf'
+                    'docker run --name=aidgaf-server -eSERVERPORT=8087 -eHOSTNAME=0.0.0.0 -eHASHKEY="${PAPA_HASH}" -eAPIKEY="${OPENWEBUIAPIKEY}" -eASYNC_METHOD="PATCH" -eASYNC_URL="${ASYNC_URL}" -p8087:8087 -d --restart=always aidgaf'
                 }
             }
         }
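
The credential rename above is likely motivated by shell parameter expansion: the docker run command is a single-quoted Groovy string, so the shell (not Groovy) expands ${OPENWEBUI-APIKEY}, and POSIX sh parses that as "${parameter-default}", i.e. the value of OPENWEBUI, or the literal word APIKEY if OPENWEBUI is unset. The hyphenated credential binding is therefore never read. A minimal sketch in Python, assuming only POSIX sh semantics (the variable names come from the diff):

import subprocess

# Demonstrates why "${OPENWEBUI-APIKEY}" cannot reference a variable whose
# name contains a hyphen: sh treats the hyphen as the start of a default
# value, so the expansion ignores the exported OPENWEBUI-APIKEY entry.
result = subprocess.run(
    ["/bin/sh", "-c", 'echo "key=${OPENWEBUI-APIKEY}"'],
    env={"OPENWEBUI-APIKEY": "real-secret"},  # present in the environment, invisible to ${...}
    capture_output=True,
    text=True,
)
print(result.stdout.strip())  # prints "key=APIKEY", not "key=real-secret"

Renaming the binding to OPENWEBUIAPIKEY makes it a valid sh identifier, so ${OPENWEBUIAPIKEY} expands to the injected secret as intended.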

(second changed file: Python configuration, filename not shown in this view)

@@ -33,7 +33,7 @@ PROMPTS = [
 ]
 """ The maximum number of tokens to use in a single OpenAI request. """
-OPEN_AI_MAX_TOKENS = 500
+OPEN_AI_MAX_TOKENS = 1000
 """ The model to use for OpenAI. """
 OPEN_AI_COMPLETION_MODEL = "granite3-dense:2b"
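
The second change doubles the completion budget for the granite3-dense:2b model from 500 to 1000 tokens. A minimal sketch of how these constants are typically consumed against an OpenAI-compatible endpoint; the base URL, request path, and the APIKEY environment lookup are illustrative assumptions, not code from this repository:

import os

import requests

OPEN_AI_MAX_TOKENS = 1000
OPEN_AI_COMPLETION_MODEL = "granite3-dense:2b"

def complete(prompt: str, base_url: str = "http://localhost:3000") -> str:
    """Send one chat-completion request to an OpenAI-compatible server."""
    response = requests.post(
        f"{base_url}/api/chat/completions",  # hypothetical path; depends on the deployment
        headers={"Authorization": f"Bearer {os.environ['APIKEY']}"},  # APIKEY is injected by the docker run step above
        json={
            "model": OPEN_AI_COMPLETION_MODEL,
            "max_tokens": OPEN_AI_MAX_TOKENS,  # raised from 500 to 1000 in this commit
            "messages": [{"role": "user", "content": prompt}],
        },
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"]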