Compare commits: 66b99c5d1b ... main (22 commits)
Commits (SHA1):
1e2c77227b
0923f41428
78ce7ab02a
33f89916de
d3872e4a62
fc00639e72
b5805c658f
3444bc50ad
1a91cbc8cc
a649fa87a4
ce68767640
8c99b40b2a
f1c7b195f3
b326642664
127892b5f2
a4c63a7177
a6e8e1a54f
f142ad562e
086f3b6ede
18fb9c4679
7ea946de88
94f3c26b86
.vscode/settings.json (vendored, 3 changes)
@@ -1,6 +1,7 @@
 {
     "secrets.enabledFolders": [
-        "aidgaf"
+        "aidgaf",
+        "default"
     ],
     "files.associations": {
         "[Jj]enkinsfile*": "groovy"
Dockerfile (2 changes)

@@ -15,7 +15,7 @@
 
 FROM alpine:latest
 RUN apk add python3 py3-pip \
-    && pip3 install openai\
+    && pip3 install openai requests\
     && mkdir /app
 COPY src/aidgaf /app/aidgaf
 EXPOSE 8087
Jenkinsfile (vendored, 10 changes)
@@ -4,7 +4,7 @@ pipeline {
     agent {
         docker {
             alwaysPull true
-            image 'alpine:3.14'
+            image 'docker.io/alpine:3.14'
             label 'Wrangler1'
             args '-u root'
         }
@@ -33,16 +33,14 @@ pipeline {
             steps {
                 withCredentials([ sshUserPrivateKey(credentialsId: 'dockeruserOn192.168.1.115', keyFileVariable: 'sshkey', usernameVariable: 'user')]) {
                     sh '#!/bin/sh \n' +
-                        'set +e; docker stop aidgaf-server||echo machine stopped; docker rm aidgaf-server||echo machine does not exist; set -e'
+                        'set +e; docker logs aidgaf-server; docker stop aidgaf-server||echo machine stopped; docker rm aidgaf-server||echo machine does not exist; set -e'
                 }
             }
         }
         stage('export docker container') {
             steps {
-                sh '#!/bin/sh \n' +
-                    'set +e; docker stop aidgaf-server||echo machine stopped; docker rm aidgaf-server||echo machine does not exist; set -e'
                 withCredentials([
-                    string(credentialsId: 'OpenAI-API-Token', variable: 'OPEN_AI_TOKEN'),
+                    string(credentialsId: 'ai-hacked-your-info-key', variable: 'OPENWEBUIAPIKEY'),
                     string(credentialsId: 'PapaHashingSecret', variable: 'PAPA_HASH'),
                     string(credentialsId: 'PapaAsyncUrl', variable: 'ASYNC_URL'),
                     sshUserPrivateKey(credentialsId: 'dockeruserOn192.168.1.115', keyFileVariable: 'sshkey')]) {
@@ -50,7 +48,7 @@ pipeline {
                         'mkdir -p ~/.ssh; cp "$sshkey" ~/.ssh/id_rsa'
                     sh '#!/bin/sh \n' +
                         /* groovylint-disable-next-line GStringExpressionWithinString */
-                        'docker run --name=aidgaf-server -eSERVERPORT=8087 -eHOSTNAME=0.0.0.0 -eHASHKEY="${PAPA_HASH}" -eAPIKEY="${OPEN_AI_TOKEN}" -eASYNC_METHOD="PATCH" -eASYNC_URL="${ASYNC_URL}" -p8087:8087 -d --restart=always aidgaf'
+                        'docker run --name=aidgaf-server -eSERVERPORT=8087 -eHOSTNAME=0.0.0.0 -eHASHKEY="${PAPA_HASH}" -eAPIKEY="${OPENWEBUIAPIKEY}" -eASYNC_METHOD="PATCH" -eASYNC_URL="${ASYNC_URL}" -p8087:8087 -d --restart=always aidgaf'
                 }
             }
         }
README.md (Normal file → Executable file, 13 changes)
@@ -23,25 +23,28 @@ git clone https://git.adamoutler.com/aoutler/aidgaf-server
 The server accepts a message in the following format:
 
 ``` json
-{"service":"papa","message":{"command":"aidgaf","data":{"username":"AdamOutler"},"timestamp":1676231329}}
+{"message":{"command":"aidgaf","data":{"username":"AdamOutler"},"timestamp":1676231329}}
 ```
 ## Built With
 * [Docker](https://www.docker.com/)
 * [Visual Studio Code](https://code.visualstudio.com/)
 * [OpenAI](https://openai.com/)
 * [Python](https://www.python.org/)
 
+* [Automated builds by Jenkins](https://jenkins.adamoutler.com/blue/organizations/jenkins/Update%20IDGAF%20Server/activity)
 ## Contributing
 If you would like to contribute to this project, please fork the repository and submit a pull request.
 
 ## License
 Copyright 2023 Adam Outler
 
-Licensed under the I Dont Give A F License, Version 1.0 (the "License");
+Licensed under the I Dont Give A F License, Version 1.1 (the "License");
 you may not use this file except in compliance with the License.
 
-3. Send an email to idgaf@hackedyour.info if you find this helpful.
-Note: If you're wondering where number 1 and 2 are, IDGAF.
+rule 0. Not for use in California.
+
+rule 3. Send an email to idgaf@hackedyour.info if you find this helpful.
+
+Note: If you're wondering where numbers 1 and 2 are, IDGAF.
 
 ## Acknowledgments
 * This README was generated using OpenAI's language model, ChatGPT.
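To try the documented message format against a running server, here is a minimal client sketch. Only the port is documented (EXPOSE 8087 in the Dockerfile and SERVERPORT in settings); the host, request path, and absence of extra headers are assumptions for illustration.

```python
import requests

# Hypothetical target: only port 8087 is documented; host and path are assumptions.
url = "http://localhost:8087/"
payload = {"message": {"command": "aidgaf",
                       "data": {"username": "AdamOutler"},
                       "timestamp": 1676231329}}

response = requests.post(url, json=payload, timeout=30)
print(response.status_code, response.text)
```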
@@ -8,11 +8,11 @@ import requests
 import settings
 from const import OPENAI_TIMEOUT
 
-URL = "https://api.openai.com/v1/completions"
+URL = "https://ai.hackedyour.info/api/chat/completions"
 """ The URL for the OpenAI API. """
 
 DATA = {"model": settings.OPEN_AI_COMPLETION_MODEL,
-        "prompt": settings.PROMPTS[0],
+        "messages": [{"role":"system", "content":"You are AIDGAF server. You tell about how much people dont give a fuck"},{"role":"user", "content": settings.PROMPTS[0] }],
         "temperature": settings.TEMPERATURE,
         "max_tokens": settings.OPEN_AI_MAX_TOKENS
         }
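For orientation, a minimal sketch of the round trip this change implies: a chat-style payload posted to the new URL, with the reply read from choices[0].message.content instead of choices[0].text. The URL, model, temperature, max_tokens, and messages shape come from the diff; the bearer-token header, the hard-coded stand-in values, and the 30-second timeout are assumptions.

```python
import requests

# Stand-in values; the real ones come from settings.py and the APIKEY environment variable.
URL = "https://ai.hackedyour.info/api/chat/completions"
API_KEY = "sk-example"  # assumption: a bearer token issued by the Open WebUI instance
DATA = {
    "model": "granite3-dense:2b",
    "messages": [
        {"role": "system", "content": "You are AIDGAF server."},
        {"role": "user", "content": "Say \"AdamOutler does not give a fuck\" as a haiku."},
    ],
    "temperature": 0.8,
    "max_tokens": 1000,
}

response = requests.post(URL, json=DATA,
                         headers={"Authorization": f"Bearer {API_KEY}"},
                         timeout=30)
# Chat-style responses put the text under choices[0].message.content rather than choices[0].text.
print(response.json()["choices"][0]["message"]["content"].strip())
```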
@@ -49,9 +49,11 @@ def parse_idgaf_request(idgaf_command) -> [int, dict]:
     the_data = get_prompt(idgaf_command)
     response = get_gpt_response(the_data)
     try:
-        response_text = response.json()['choices'][0]['text'].strip()
+        response_text = response.json()['choices'][0]['message']["content"].strip()
+
     except KeyError:
         response_text = response.text
+    print(response_text)
     obj = get_response_base_object(response_text)
     return [response.status_code, obj]
 
@@ -76,13 +78,20 @@ def get_prompt(command) -> dict:
     Returns:
         A dictionary containing the data to send to OpenAI.
     """
+    replyTo=command['message']['data'].get('replyTo',"")
+    replyText=command['message']['data'].get('replyText',"")
+    inputText=command['message']['data'].get('inputText',"")
     my_prompt = random.choice(settings.PROMPTS)
     my_prompt = my_prompt.replace(
         "USERNAME", command['message']['data']['username'])
+    if replyTo:
+        my_prompt=replyTo +"said \""+replyText+".\"\n In response, "+my_prompt
+    if inputText:
+        my_prompt="With the following in mind: "+ command['message']['data']['username'] +" doesn't care about \""+inputText+"\".\n\n"+my_prompt
-    print("Prompt selected: "+my_prompt)
+    print(my_prompt)
     the_data = DATA
-    the_data["prompt"] = my_prompt
+    the_data["messages"][-1]["content"] = my_prompt
     return the_data
 
 
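To make the new replyTo/replyText/inputText handling concrete, here is a small self-contained mirror of the prompt assembly shown above. The helper name build_prompt, the single stand-in template, and the sample field values are illustrative only; the real templates live in settings.PROMPTS.

```python
import random

# Stand-in for settings.PROMPTS; the real templates are defined in the settings hunk below.
PROMPTS = ["Say \"USERNAME does not give a fuck\" using 4 separate Haikus, "
           "and be sure to mention they are haikus before or after."]

def build_prompt(command: dict) -> str:
    """Mirrors the prompt assembly in get_prompt(), for illustration only."""
    data = command['message']['data']
    reply_to = data.get('replyTo', "")
    reply_text = data.get('replyText', "")
    input_text = data.get('inputText', "")
    prompt = random.choice(PROMPTS).replace("USERNAME", data['username'])
    if reply_to:
        prompt = reply_to + "said \"" + reply_text + ".\"\n In response, " + prompt
    if input_text:
        prompt = ("With the following in mind: " + data['username']
                  + " doesn't care about \"" + input_text + "\".\n\n" + prompt)
    return prompt

print(build_prompt({"message": {"data": {"username": "AdamOutler",
                                         "replyTo": "SomeUser ",
                                         "replyText": "the build is red",
                                         "inputText": "broken builds"}}}))
```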
@@ -91,9 +100,7 @@ if __name__ == "__main__":
     INPUT = '''{"service":"papa","message":
     {"command":"aidgaf","data":{"username":"AdamOutler"},
     "timestamp":1675725191},
-    "hash":"1bc73914478835d03f9ebdfb46328321d2bb656647e28
-76d6f162cc1860607fcfca8d825c48e390a6a254ee0835c8a4fe5f
-9a25795a3a0880ae5a23e9c132cf2"}'''
+    "hash":"1bc73914478835d03f9ebdfb46328321d2bb656647e2876d6f162cc1860607fcfca8d825c48e390a6a254ee0835c8a4fe5f9a25795a3a0880ae5a23e9c132cf2"}'''
     test_command = json.loads(INPUT)
     [code, result] = parse_idgaf_request(test_command)
     print(result)
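The test INPUT above carries a hash field, and the settings hunk below notes that HASHKEY is the shared secret for an HMAC of the message. As a hedged sketch of how a client might compute such a value, assuming HMAC-SHA512 over the JSON-encoded message (the 128-hex-character digest length matches the hash shown, but the exact byte layout the server verifies is not shown in this diff):

```python
import hashlib
import hmac
import json
import os

# Assumption: the same shared secret the server reads from the HASHKEY environment variable.
secret = os.getenv("HASHKEY", "").encode("utf-8")

message = {"command": "aidgaf", "data": {"username": "AdamOutler"}, "timestamp": 1675725191}

# Assumption: HMAC-SHA512 over the JSON-encoded message; the exact canonical form is not
# documented in this diff.
digest = hmac.new(secret, json.dumps(message).encode("utf-8"), hashlib.sha512).hexdigest()
print(digest)
```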
@@ -26,20 +26,20 @@ SERVERPORT: int = 8087
 """ The prompts used for OpenAI. When the server receives a request, it will
 randomly select one of these prompts to use."""
 PROMPTS = [
-    "Say \"USERNAME does not give a fuck\" as a haiku and mention that it is a haiku.",
-    "Say \"USERNAME does not give a fuck\" in a Dr Suess poem.",
-    "Tell me a funny, impossible, story about USERNAME. Make USERNAME seem relatable at the end. Make up an outrageous situation where the moral of the story is: \"USERNAME does not give a fuck\" to this very day."
+    "Say \"USERNAME does not give a fuck\" using 4 separate Haikus, and be sure to mention they are haikus before or after.",
+    "Say \"USERNAME does not give a fuck\" within a 10 line Dr Suess poem." #,
+    "Tell me a funny, impossible, story about USERNAME. Make USERNAME seem relatable at the end. Make up an outrageous situation where the moral of the story is: \"USERNAME does not give a fuck\" to this very day.",
+    "Say \"USERNAME is completely apethetic and does not give a fuck\" in a verbose manner, using your most colorful words and one metaphor."
 ]
 
 """ The maximum number of tokens to use in a single OpenAI request. """
-OPEN_AI_MAX_TOKENS = 500
+OPEN_AI_MAX_TOKENS = 1000
 
 """ The model to use for OpenAI. """
-OPEN_AI_COMPLETION_MODEL = "text-davinci-003"
+OPEN_AI_COMPLETION_MODEL = "granite3-dense:2b"
 
 """ The temperature to use for OpenAI. 0-2, 0 is basicall repeating the prompt, 2 is more random. """
-TEMPERATURE = 0.7
+TEMPERATURE = 0.8
 
 """ The hash key for the server. Leave this blank if you don't want to use it. """
 HASHKEY = bytes(os.getenv('HASHKEY') or "",UTF8) # shared secret for hmac of message
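One detail in the PROMPTS hunk worth flagging: the new second entry ends with `#,`, which comments out the separating comma. Python concatenates adjacent string literals inside a list, so the Dr Suess entry and the story entry effectively become a single combined prompt. A minimal demonstration of that behavior, with placeholder strings:

```python
PROMPTS = [
    "first prompt",
    "second prompt"  #,
    "third prompt",
]
print(len(PROMPTS))   # 2: the second and third literals are concatenated
print(PROMPTS[1])     # second promptthird prompt
```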