Compare commits
2 commits: 3182959210 ... b513b38398

Author | SHA1 | Date
---|---|---
 | b513b38398 |
 | 87153f4194 |
@@ -13,7 +13,10 @@
         "extensions": [
             "pomdtr.secrets",
             "ms-python.python",
-            "donjayamanne.python-extension-pack"
+            "donjayamanne.python-extension-pack",
+            "ivory-lab.jenkinsfile-support",
+            "njpwerner.autodocstring",
+            "KevinRose.vsc-python-indent"
         ]
     }
 }
Jenkinsfile (vendored, new file, 69 lines)
@@ -0,0 +1,69 @@
+#! /bin/groovy
+pipeline {
+    agent {
+        docker {
+            alwaysPull true
+            image 'alpine:3.14'
+            label 'Wrangler1'
+            args '-u root'
+        }
+    }
+
+
+
+
+
+    stages {
+        stage('Prepare Environment'){
+            steps{
+                sh "#!/bin/sh \n" +
+                    'id; apk add docker openrc git'
+            }
+        }
+        stage('Obtain Source'){
+            steps {
+                git branch: 'main', url: 'https://git.adamoutler.com/aoutler/aidgaf-server.git'
+            }
+        }
+        stage('Build in docker') {
+            steps {
+                // Get some code from a Git repository
+                sh "#!/bin/sh \n" +
+                    'docker build -t aidgaf .'
+            }
+        }
+        stage ("setup credentials"){
+            steps{
+                withCredentials([ sshUserPrivateKey(credentialsId: 'dockeruserOn192.168.1.115', keyFileVariable: 'sshkey', usernameVariable: 'user')]) {
+                    sh "#!/bin/sh \n" +
+                        'set +e; docker stop aidgaf-server||echo machine stopped; docker rm aidgaf-server||echo machine does not exist; set -e'
+                }
+
+            }
+        }
+        stage ('export docker container'){
+            steps {
+
+                sh "#!/bin/sh \n" +
+                    'set +e; docker stop aidgaf-server||echo machine stopped; docker rm aidgaf-server||echo machine does not exist; set -e'
+                withCredentials([
+                    string(credentialsId: 'OpenAI-API-Token', variable: 'OPEN_AI_TOKEN'),
+                    string(credentialsId: 'PapaHashingSecret', variable: 'PAPA_HASH'),
+                    sshUserPrivateKey(credentialsId: 'dockeruserOn192.168.1.115', keyFileVariable: 'sshkey', usernameVariable: 'user')
+                ]) {
+                    sh "#!/bin/sh \n" +
+                        'mkdir -p ~/.ssh; cp "${sshkey}" ~/.ssh/id_rsa'
+                    sh "#!/bin/sh \n" +
+                        'docker run --name=aidgaf-server -eSERVERPORT=8087 -eHOSTNAME=0.0.0.0 -eHASHKEY="${PAPA_HASH}" -eAPIKEY="${OPEN_AI_TOKEN}" -p8087:8087 -d --restart=always aidgaf'
+                }
+            }
+        }
+
+
+        // To run Maven on a Windows agent, use
+        // bat "mvn -Dmaven.test.failure.ignore=true clean package"
+
+
+
+    }
+}
@@ -1,3 +1,7 @@
+""" aidgaf-server. """
+
+""" The name of the package"""
 name = 'aidgaf'
 
+""" The version of the package"""
 version='0.1.0'
@@ -1,3 +1,5 @@
+""" aidgaf-server entry point. """
 import server
 
+""" The main entry point for the aidgaf-server package."""
 server.main()
@@ -1 +1,4 @@
+"""Constants for the aidgaf-server package."""
+
+""" The encoding to use for strings. """
 UTF8="utf-8"
@@ -1,6 +1,18 @@
 import hashlib
 
 def calculate_hash(value:bytes, secret:bytes)->str:
+    """ This function calculates the hash of a value and a secret.
+    It is used to verify that the message is from the server. The hash
+    is calculated using the SHA512 algorithm. The hash is returned as a
+    hex string. This is not a secure hash, but it is good enough for
+    this application.
+
+    parameters:
+    value: The value to hash.
+    secret: The secret to hash with the value.
+    returns:
+    The hash of the value and secret. This is a hex string.
+    """
     m = hashlib.sha512()
     m.update(b"".join([value,secret]))
     return m.hexdigest()
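A minimal usage sketch of the new calculate_hash helper (the message and secret below are placeholders, not values from this diff). The same SHA512-over-(value + secret) digest is what the security module later compares against the request's "hash" field.

```python
# Illustrative only: exercising calculate_hash locally with placeholder inputs.
import hash_calculator

message = b'{"command":"aidgaf","data":{"username":"AdamOutler"},"timestamp":1675725191}'
secret = b"example-shared-secret"  # placeholder for the shared HASHKEY bytes

digest = hash_calculator.calculate_hash(message, secret)
print(digest)  # 128-character SHA512 hex string
```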
@@ -1,3 +1,6 @@
+""" This file contains the logic for the IDGAF server. It can be used to make a stand-alone
+IDGAF server, or it can be used as a module in a larger application.
+"""
 import json
 import random
 import security
@@ -5,26 +8,29 @@ import re
 import requests
 from time import time
 from datetime import datetime
-import openai
 import settings
 
-openai.organization = "org-hNNV1yHjZp7T3pn5pdZWaKLm"
-# print(openai.Model.list())
+""" The URL for the OpenAI API. """
 URL = "https://api.openai.com/v1/completions"
 
+""" The data to send to OpenAI. """
 DATA = {"model": settings.OPEN_AI_COMPLETION_MODEL,
         "prompt": settings.PROMPTS[0],
         "temperature": settings.TEMPERATURE,
         "max_tokens": settings.OPEN_AI_MAX_TOKENS
         }
 
+""" The headers to send to OpenAI. """
 request_headers = {"Authorization": "Bearer " +
                    settings.APIKEY, "Content-Type": "application/json"}
 
-def get_response_base_object(text):
+def get_response_base_object(text:str) -> dict:
+    """ This is used to create the response object for the server.
+    Parameters:
+    text: The text to return to the client.
+    Returns:
+    A dictionary containing the response object.
+    """
     resultObject = {}
     resultObject["message"] = {}
     resultObject["service"] = "AIDGAF Server"
@@ -33,9 +39,16 @@ def get_response_base_object(text):
     resultObject["timestamp"] = datetime.utcnow().timestamp()
     return resultObject
 
-def parse_idgaf_request(command):
+
+def parse_idgaf_request(command)->[int, dict]:
+    """ This function handles the IDGAF command. It will return a response object.
+    Parameters:
+    command: The command object received from the client.
+    Returns:
+    A tuple containing the status code and the response object.
+    """
     the_data = get_prompt(command)
-    response=get_gpt_response(the_data)
+    response = get_gpt_response(the_data)
     try:
         response_text = response.json()['choices'][0]['text'].strip()
     except (KeyError):
@@ -43,12 +56,23 @@ def parse_idgaf_request(command):
     obj = get_response_base_object(response_text)
     return [response.status_code, obj]
 
-def get_gpt_response(data):
+def get_gpt_response(data) -> requests.Response:
+    """ This function communicates with OpenAI and returns a response object.
+    Parameters:
+    data: The data to send to OpenAI.
+    Returns:
+    The response object from OpenAI.
+    """
     gpt_response = requests.post(URL, json=data, headers=request_headers)
     return gpt_response
 
-def get_prompt(command):
+def get_prompt(command)->dict:
+    """ Selects a prompt from the PROMPTS list and replaces the USERNAME placeholder with the username.
+    Parameters:
+    command: The command object received from the client.
+    Returns:
+    A dictionary containing the data to send to OpenAI.
+    """
     my_prompt = random.choice(settings.PROMPTS)
     my_prompt = my_prompt.replace(
         "USERNAME", command['message']['data']['username'])
@@ -59,35 +83,9 @@ def get_prompt(command):
     return the_data
 
 
-def get_prompt(command):
-    my_prompt = random.choice(settings.PROMPTS)
-    my_prompt = my_prompt.replace(
-        "USERNAME", command['message']['data']['username'])
-
-    print("Prompt selected: "+my_prompt)
-    the_data = DATA
-    the_data["prompt"] = my_prompt
-    return the_data
-
-value = '{"service":"papa","message":{"command":"aidgaf","data":{"username":"AdamOutler"},"timestamp":1675725191},"hash":"1bc73914478835d03f9ebdfb46328321d2bb656647e2876d6f162cc1860607fcfca8d825c48e390a6a254ee0835c8a4fe5f9a25795a3a0880ae5a23e9c132cf2"}'
 if __name__ == "__main__":
+    """ This function is for testing the IDGAF capabilities without a server. """
+    value = '{"service":"papa","message":{"command":"aidgaf","data":{"username":"AdamOutler"},"timestamp":1675725191},"hash":"1bc73914478835d03f9ebdfb46328321d2bb656647e2876d6f162cc1860607fcfca8d825c48e390a6a254ee0835c8a4fe5f9a25795a3a0880ae5a23e9c132cf2"}'
     command = json.loads(value)
     [code, result] = parse_idgaf_request(command)
     print(result)
-
-
-# curl https://api.openai.com/v1/completions \
-# -H "Content-Type: application/json" \
-# -H "Authorization: Bearer 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" \
-# -d '{"model": "text-curie-001",
-# "prompt": "Say \"Adam does not give a fuck\" in a thoughtful and clever prose consisting of one to five paragraphs.",
-# "temperature":1, "max_tokens": 500}'
-# |jq -r .choices[0].text
-# curl -X PATCH 127.0.0.1:8087 -d '{"message":{"command":"aidgaf","data":{"username":"AdamOutler"}}}'
-# 2,500,000 tokens = $5
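The removed curl comments above documented the raw completions call; the flow that get_gpt_response and parse_idgaf_request now implement can be sketched stand-alone as follows (the API key is a placeholder, and the model, temperature, and token values are the defaults from settings.py in this diff):

```python
# Illustrative stand-alone version of the request described by DATA/request_headers.
import requests

headers = {"Authorization": "Bearer sk-placeholder",  # placeholder OpenAI key
           "Content-Type": "application/json"}
data = {"model": "text-davinci-003",
        "prompt": "Say \"USERNAME does not give a fuck\" as a haiku and mention that it is a haiku.",
        "temperature": 0.7,
        "max_tokens": 500}

response = requests.post("https://api.openai.com/v1/completions", json=data, headers=headers)
# The server extracts the generated text the same way:
print(response.json()['choices'][0]['text'].strip())
```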
@@ -1,12 +1,18 @@
+""" Security functions for the server. """
 import re
 import time
 import hash_calculator
 
 import settings
 from const import UTF8
 
-def perform_hash_checks(str_request):
+def perform_hash_checks(str_request)->bool:
+    """ Performs a hash check on the message, and verifies the timestamp is valid.
+    If either check fails, return False. Otherwise, return True.
+
+    Parameters:
+    str_request: The request body, as a string.
+    Returns:
+    True if the message is valid, False otherwise."""
     hash = get_message_hash(str_request)
 
     if hash not in str_request:
@@ -17,9 +23,14 @@ def perform_hash_checks(str_request):
         return False
     return True
 
 
 def get_message_hash(json_command) -> str:
-    """Get the object named "message", and run a hash over it. """
+    """ Get the object named "message", and run a hash over it. The hash is calculated
+    using the SHA512 algorithm. The hash is returned as a hex string.
+    Parameters:
+    json_command: The JSON command to hash.
+    Returns:
+    The hash of the message, as a string. """
+
     strip1 = re.sub(".*\"message\":", "", json_command, 1)
     if ("\"hash\":" in strip1):
         strip2 = re.sub(',\"hash\":.*', '', strip1)
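The rest of get_message_hash falls outside this hunk, but given the regex stripping above and calculate_hash from earlier in the diff, a client would presumably sign the JSON text of the "message" object with the shared secret. A hedged sketch, with a placeholder secret and timestamp; note the serialized text hashed here must match the request body byte for byte:

```python
# Illustrative client-side signing to satisfy perform_hash_checks; not part of this commit.
import json
import time
import hashlib

secret = b"example-shared-secret"  # placeholder for the server's HASHKEY bytes
message = {"command": "aidgaf",
           "data": {"username": "AdamOutler"},
           "timestamp": int(time.time())}

# Serialize without spaces so the text hashed here is exactly the text sent in the request.
message_json = json.dumps(message, separators=(",", ":"))
digest = hashlib.sha512(message_json.encode("utf-8") + secret).hexdigest()

payload = '{"service":"papa","message":' + message_json + ',"hash":"' + digest + '"}'
print(payload)
```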
@@ -1,4 +1,5 @@
+""" aidgaf-server - A simple server to handle requests from the aidgaf client.
+This server will communicate with OpenAI to generate responses to the client."""
 import json
 import random
 
@@ -12,14 +13,17 @@ default_request_body = b'{"message":{"command":"aidgaf","data":{"username":"Adam
 
 
 class IDGAFServer(BaseHTTPRequestHandler):
+    """ This class handles the requests from the client. """
     def do_GET(self):
+        """ This function handles GET requests. """
        self.send_response(418)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        self.wfile.write(bytes("", UTF8))
 
     def do_PATCH(self):
+        """ This function handles PATCH requests. """
        body = self.get_body().decode(UTF8)
        if not perform_sanity_checks(body):
            self.send_response(403)
@@ -28,6 +32,9 @@ class IDGAFServer(BaseHTTPRequestHandler):
         self.do_request_handling(command)
 
     def do_request_handling(self, command):
+        """ This function handles the request.
+        Parameters:
+        command: The command object received from the client. """
         print(command)
         if command['message']['command'] == 'aidgaf':
             [responseCode, json_response] = idgaf.parse_idgaf_request(command)
@@ -35,7 +42,11 @@ class IDGAFServer(BaseHTTPRequestHandler):
         response_body = json.dumps(json_response)
         self.handle_response(responseCode, response_body)
 
-    def get_body(self):
+    def get_body(self)->bytes:
+        """ This function returns the body of the request.
+        Returns:
+        The body of the request.
+        """
         header_length = self.headers.get('Content-Length')
         request_body = default_request_body
         if header_length != None and self.headers.get('Content-Length') != None:
@@ -47,13 +58,23 @@ class IDGAFServer(BaseHTTPRequestHandler):
 
 
     def handle_response(self, code, body):
+        """ This function handles the response to the client.
+        Parameters:
+        code: The HTTP response code.
+        body: The body of the response. """
         self.send_response(code)
         self.send_header("Content-type", "text/html")
         self.end_headers()
         print("sending:"+body)
         self.wfile.write(bytes(body, UTF8))
 
-def perform_sanity_checks(str_request):
+def perform_sanity_checks(str_request)->bool:
+    """ Performs a hash check on the message, and verifies the timestamp is valid.
+    If either check fails, return False. Otherwise, return True.
+    Parameters:
+    str_request: The request body as a string.
+    Returns:
+    True if the message is valid, False otherwise. """
     if settings.HASHKEY is not None:
         hash = security.get_message_hash(str_request)
         if hash not in str_request:
@@ -68,6 +89,7 @@ def perform_sanity_checks(str_request):
 
 
 def main():
+    """ This function starts the server. """
     webServer = HTTPServer(
         (settings.HOSTNAME, settings.SERVERPORT), IDGAFServer)
     print("Server started http://%s:%s" %
@@ -83,4 +105,5 @@ def main():
 
 
 if __name__ == "__main__":
+    """ This function starts the main method. """
     main()
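For reference, a Python equivalent of the curl PATCH call that was removed from the comments in idgaf.py; the host and port match that removed example, and this assumes the body passes perform_sanity_checks (or that hash checking is not being enforced):

```python
# Illustrative client call against a locally running aidgaf-server; not part of this commit.
import requests

payload = '{"message":{"command":"aidgaf","data":{"username":"AdamOutler"}}}'
response = requests.patch("http://127.0.0.1:8087", data=payload)

print(response.status_code)  # 403 if the sanity checks reject the body
print(response.text)         # otherwise the JSON response from "AIDGAF Server"
```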
@@ -1,25 +1,40 @@
+"""this is the settings file for the server. It contains the settings for the server.
+"""
 import os
 from const import UTF8
-# The hostname used by this app
+
+""" The hostname of the server. """
 HOSTNAME: str = os.getenv('HOSTNAME') # localhost or some name
-# The port to broadcast the server
+
+""" The port to broadcast the server """
 # 8087 or the port you want to run on. pass in with docker -e command.
 SERVERPORT: int = 8087
-# The API key for OpenAI
+
+""" The API key for OpenAI"""
 APIKEY: str = os.getenv('APIKEY') # secret key from OpenAPI website
 if APIKEY is None:
     raise Exception("APIKEY Environmental Variable must be set")
-# The hash key
-HASHKEY = bytes(os.getenv('HASHKEY'),UTF8) # shared secret for hmac of message
 
-# The prompts used for OpenAI.
+""" The prompts used for OpenAI. When the server receives a request, it will
+randomly select one of these prompts to use."""
 PROMPTS = [
     "Say \"USERNAME does not give a fuck\" as a haiku and mention that it is a haiku.",
     "Say \"USERNAME does not give a fuck\" in a Dr Suess poem.",
     "Tell me a story about how \"USERNAME does not give a fuck\" using an outrageous situation where someone should care but they do not and thats fine.",
     "Say \"USERNAME is completely apethetic and does not give a fuck\" in a verbose manner, using your most colorful words and one metaphor."
 ]
 
+""" The maximum number of tokens to use in a single OpenAI request. """
 OPEN_AI_MAX_TOKENS = 500
 
+""" The model to use for OpenAI. """
 OPEN_AI_COMPLETION_MODEL = "text-davinci-003"
 
+""" The temperature to use for OpenAI. 0-2, 0 is basicall repeating the prompt, 2 is more random. """
+TEMPERATURE = 0.7
+
+""" The hash key for the server. Leave this blank if you don't want to use it. """
+HASHKEY = bytes(os.getenv('HASHKEY'),
+                UTF8) # shared secret for hmac of message
+
+""" The maximum age of a message in seconds. Only used if HASHKEY is set."""
 MAX_MESSAGE_AGE = 600
-TEMPERATURE = 0.8
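Every value above is resolved from the environment at import time (the Jenkinsfile supplies them through docker run -e flags), so a hedged sketch of exercising the module outside Docker might look like this; all values are placeholders:

```python
# Illustrative local setup; settings.py reads these variables when it is imported.
import os

os.environ["HOSTNAME"] = "0.0.0.0"       # placeholder bind address
os.environ["APIKEY"] = "sk-placeholder"  # required, otherwise settings raises an Exception
os.environ["HASHKEY"] = "shared-secret"  # required here, since bytes(None, ...) would fail

import settings
print(settings.HOSTNAME, settings.SERVERPORT, settings.OPEN_AI_COMPLETION_MODEL)
```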
@@ -1,10 +1,11 @@
+""" aidgaf-server setup.py """
 from setuptools import setup, find_namespace_packages
 
 setup(
     name='aidgaf',
-    version='1',
-    description='',
-    long_description='',
+    version='0.1.0',
+    description='OpenAI GPT Implementation of IDGAF.',
+    long_description='Tells the user how much they don\'t GAF, using the power of OpenAI\'s GPT-3 API.',
     author='Adam Outler',
     author_email='adamoutler@gmail.com',
     license='IDGAF License',