Compare commits
6 commits
| Author | SHA1 | Date |
| ------ | ---------- | ---- |
|        | 2032483e64 |      |
|        | 4789b9355d |      |
|        | 78bb591bd3 |      |
|        | d09a57062c |      |
|        | c14e81fd86 |      |
|        | 8d8cd8950f |      |
.gitignore (vendored, 1 change)

@@ -1,5 +1,6 @@
 # Sensitive files
 config.py
+*.png
 
 # ---> Python
 # Byte-compiled / optimized / DLL files
@@ -27,6 +27,7 @@ You can manipulate the main abstractions of the OpenAI model:
 | --------------------------------------- | ----------------- | --------------- | --------------- | -------------------------------------------------------- |
 | `cancel_model_response.py` | `response_id` | | | Cancel a Response running in the background |
 | `create_conversation.py` | | | | Create a new Conversation |
+| `create_image.py` | `model_id` | `prompt` | | Create an image based on the prompt description |
 | `create_model_response.py` | `conversation_id` | `model_id` | `input_message` | Create a Response within a Conversation |
 | `create_text_item.py` | `conversation_id` | `input_message` | | Create a user input or instruction within a Conversation |
 | `create_vector_store.py` | | | | Create a Vector Store |
@@ -4,6 +4,8 @@
 
 | Version | Supported          |
 | ------- | ------------------ |
+| 0.5.x   | :heavy_check_mark: |
+| 0.4.x   | :heavy_check_mark: |
 | 0.3.x   | :heavy_check_mark: |
 | 0.2.x   | :heavy_check_mark: |
 | 0.1.x   | :heavy_check_mark: |
@@ -1,4 +1,4 @@
 base_url = "https://api.openai.com/v1/"
 secret_key = "YOUR-OPENAI-SECRET-KEY-HERE"
-vector_store = "YOUR-VECTOR-STORE-ID-HERE"
+vector_store = ["YOUR-VECTOR-STORE-IDs-HERE","SEPARATED-BY-COMMAS"]
 
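This change widens `vector_store` from a single ID string to a list, so the scripts can search several Vector Stores at once. A minimal sketch of a filled-in config under the new shape, assuming two hypothetical store IDs:

```python
# Hypothetical filled-in config.py under the new list-valued shape;
# the secret key and Vector Store IDs below are placeholders only.
base_url = "https://api.openai.com/v1/"
secret_key = "sk-your-secret-key"
vector_store = ["vs_111aaa", "vs_222bbb"]  # one entry per Vector Store to search
```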
create_image.py (new file, 33 lines)

@@ -0,0 +1,33 @@
+from config import *
+import requests
+import json
+import sys
+import base64
+from PIL import Image
+from io import BytesIO
+from datetime import datetime
+
+def create_image(model,prompt):
+    url = base_url+"images/generations"
+
+    payload = json.dumps({
+        "prompt": prompt,
+        "model": model
+    })
+    headers = {
+        'Content-Type': 'application/json',
+        'Authorization': 'Bearer '+secret_key,
+    }
+    date_time_string = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
+
+    response = requests.request("POST", url, headers=headers, data=payload)
+    decoded_bytes = base64.b64decode(response.json()["data"][0]["b64_json"])
+    byte_stream = BytesIO(decoded_bytes)
+    image = Image.open(byte_stream)
+    image.save("./output_images/output-"+date_time_string+".png", "PNG")
+
+    print("Image ./output_images/output-"+date_time_string+".png has been generated.")
+
+
+if __name__ == '__main__':
+    create_image(str(sys.argv[1]),str(sys.argv[2]))
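Per the README row added above, the script takes the model ID as its first argument and the prompt as its second. A hedged usage sketch, calling the helper from Python: the model name is an assumption, not something this compare pins down, and the `output_images/` directory (added below) must exist.

```python
# Usage sketch only: "gpt-image-1" is an assumed model name; substitute your own.
from create_image import create_image

create_image("gpt-image-1", "a watercolor fox in a snowy forest")
# Decodes the b64_json payload returned by POST {base_url}images/generations
# and saves ./output_images/output-<timestamp>.png
```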
@@ -10,7 +10,7 @@ def create_model_response(conversation,model,message):
         "model": model,
         "tools": [
             {"type": "web_search"},
-            {"type": "file_search", "vector_store_ids" :[vector_store]}
+            {"type": "file_search", "vector_store_ids" :vector_store}
         ],
         "input": message,
         "conversation": conversation

@@ -23,5 +23,7 @@ def create_model_response(conversation,model,message):
     response = requests.request("POST", url, headers=headers, data=payload)
 
     print(response.text)
+    print(response.json()["output"][-1]["content"][0]["text"])
+
 if __name__ == '__main__':
     create_model_response(str(sys.argv[1]),str(sys.argv[2]),str(sys.argv[3]))
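With `vector_store` now a list, the `file_search` tool is handed every configured store ID directly (previously the single ID was wrapped in its own brackets). A sketch of the request body this function now assembles, with all IDs and values hypothetical:

```python
# Illustrative payload only; the model name, conversation ID, and store IDs are made up.
import json

vector_store = ["vs_111aaa", "vs_222bbb"]   # from config.py, now a list

payload = json.dumps({
    "model": "gpt-4.1",
    "tools": [
        {"type": "web_search"},
        {"type": "file_search", "vector_store_ids": vector_store},  # whole list, no extra brackets
    ],
    "input": "Summarise the uploaded reports.",
    "conversation": "conv_123",
})
print(payload)
```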
output_images/__init__.py (new empty file)
@@ -13,6 +13,7 @@ def retrieve_item(conversation,message):
     response = requests.request("GET", url, headers=headers, data=payload)
 
     print(response.text)
+    print(response.json()["content"][0]["text"])
 
 
 if __name__ == '__main__':
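The added print assumes the returned item JSON keeps its text under `content[0].text`. A hedged sketch of that shape, illustrative rather than the exact API schema:

```python
# Illustrative item payload; field values are made up.
item = {
    "id": "msg_123",
    "content": [
        {"type": "input_text", "text": "What is in the quarterly report?"},
    ],
}
print(item["content"][0]["text"])   # -> What is in the quarterly report?
```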
@@ -13,6 +13,7 @@ def retrieve_model_response(response):
     response = requests.request("GET", url, headers=headers, data=payload)
 
     print(response.text)
+    print(response.json()["output"][-1]["content"][0]["text"])
 
 if __name__ == '__main__':
     retrieve_model_response(str(sys.argv[1]))
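Here, as in the `create_model_response.py` hunk above, the new print walks `output[-1].content[0].text`, i.e. the text of the last output item. A hedged sketch of the Response shape that indexing assumes; the field values are illustrative only:

```python
# Illustrative Response JSON; only the nesting matters for the indexing above.
response_json = {
    "id": "resp_123",
    "output": [
        {"type": "web_search_call", "status": "completed"},
        {"type": "message",
         "content": [{"type": "output_text", "text": "Here is the summary..."}]},
    ],
}
print(response_json["output"][-1]["content"][0]["text"])   # -> Here is the summary...
```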