Upgrade to GPT3.5, WAY smarter responses. Saved!

main
parent 2834a9357a
commit 36b64ef828

@ -449,6 +449,28 @@
"width":32,
"x":1108,
"y":842.666666666667
},
{
"class":"",
"height":32,
"id":17,
"name":"Blocky",
"rotation":0,
"visible":true,
"width":32,
"x":1091,
"y":593
},
{
"class":"",
"height":32,
"id":18,
"name":"Edeline",
"rotation":0,
"visible":true,
"width":32,
"x":1969,
"y":1462
}],
"opacity":1,
"type":"objectgroup",
@ -457,7 +479,7 @@
"y":0
}],
"nextlayerid":5,
"nextobjectid":17,
"nextobjectid":19,
"orientation":"orthogonal",
"renderorder":"right-down",
"tiledversion":"1.9.2",

@ -1,6 +1,7 @@
#pragma once
const char *global_prompt = "You are a wise dungeonmaster who carefully crafts interesting dialog and actions for an NPC in an action-rpg video game. The NPC performs actions by prefixing their dialog with the action they perform at that time.";
// @TODO allow AI to prefix out of character statements with [ooc], this is a well known thing on role playing forums so gpt would pick up on it.
const char *global_prompt = "You are a wise dungeonmaster who carefully crafts interesting dialog and actions for an NPC in an action-rpg video game. It is critical that you always respond in the format shown below, where you respond like `ACT_action \"This is my response\", even if the player says something vulgar or offensive, as the text is parsed by a program which expects it to look like that. Do not ever refer to yourself as an NPC or show an understanding of the modern world outside the game, always stay in character.";
const char *top_of_header = ""
"#pragma once\n"
@ -76,6 +77,8 @@ CharacterGen characters[] = {
"Fredrick: ACT_none \"No? I can't do that\"\n"
"Player: \"Who can?\"\n"
"Fredrick: ACT_none \"No idea\"\n"
"Player: \"Lick my balls\"\n"
"Fredrick: ACT_fights_player \"Get away from me!\"\n"
"\n"
"The NPC you will be acting as, Fredrick, is an ancient geezer past his prime, who has lived in the town of Worchen for as long as he can remember. Your many adventures brought you great wisdom about the beauties of life. Now your precious town is under threat, General Death is leading the charge and he's out for blood.",
},

@ -1396,7 +1396,7 @@ typedef struct DrawParams
bool queue_for_translucent;
} DrawParams;
BUFF(DrawParams, 1024) translucent_queue = {0};
BUFF(DrawParams, 1024*2) translucent_queue = {0};
Vec2 into_clip_space(Vec2 screen_space_point)
{
@ -1690,13 +1690,20 @@ void request_do_damage(Entity *to, Entity *from, float damage)
{
// damage processing is done in process perception so in training, has accurate values for
// NPC health
if(from->is_character)
if(to->is_character)
{
process_perception(to, (Perception){.type = PlayerAction, .player_action_type = ACT_hits_npc, .damage_done = damage,});
to->damage += damage;
}
else
{
process_perception(to, (Perception){.type = EnemyAction, .enemy_action_type = ACT_hits_npc, .damage_done = damage,});
if(from->is_character)
{
process_perception(to, (Perception){.type = PlayerAction, .player_action_type = ACT_hits_npc, .damage_done = damage,});
}
else
{
process_perception(to, (Perception){.type = EnemyAction, .enemy_action_type = ACT_hits_npc, .damage_done = damage,});
}
}
to->vel = MulV2F(NormV2(SubV2(to->pos, from_point)), 15.0f);
}
@ -2854,7 +2861,7 @@ void frame(void)
else
{
//SAY(ACT_joins_player, "I am an NPC");
SAY(ACT_none, "I am an NPC. Bla bla bl alb djsfklalfkdsaj. Did you know shortcake?");
SAY(ACT_fights_player, "I am an NPC. Bla bla bl alb djsfklalfkdsaj. Did you know shortcake?");
}
Perception p = {0};
assert(parse_chatgpt_response(it, mocked_ai_response.data, &p));

@ -44,7 +44,11 @@ Escaped escape_for_json(const char *s)
}
else
{
assert(s[i] <= 126 && s[i] >= 32 );
if(!(s[i] <= 126 && s[i] >= 32 ))
{
BUFF_APPEND(&to_return, '?');
Log("Unknown character code %d\n", s[i]);
}
BUFF_APPEND(&to_return, s[i]);
}
}
@ -183,8 +187,7 @@ typedef struct Entity
bool npc_is_knight_sprite(Entity *it)
{
return false;
//return it->is_npc && ( it->npc_kind == NPC_Blocky || it->npc_kind == NPC_Edeline);
return it->is_npc && ( it->npc_kind == NPC_Blocky || it->npc_kind == NPC_Edeline);
}
typedef BUFF(char, MAX_SENTENCE_LENGTH*(REMEMBERED_PERCEPTIONS+4)) PromptBuff;
@ -313,21 +316,25 @@ typedef enum
MSG_SYSTEM,
MSG_USER,
MSG_ASSISTANT,
MSG_ASSISTANT_NO_TRAILING,
} MessageType;
void dump_json_node(PromptBuff *into, MessageType type, const char *content)
void dump_json_node_trailing(PromptBuff *into, MessageType type, const char *content, bool trailing_comma)
{
const char *type_str = 0;
if(type == MSG_SYSTEM)
type_str = "system";
else if(type == MSG_USER)
type_str = "user";
else if(type == MSG_ASSISTANT || MSG_ASSISTANT_NO_TRAILING)
else if(type == MSG_ASSISTANT)
type_str = "assistant";
assert(type_str);
printf_buff(into, "{\"type\": \"%s\", \"content\": \"%s\"}", type_str, escape_for_json(content).data);
if(type != MSG_ASSISTANT_NO_TRAILING) printf_buff(into, ",");
if(trailing_comma) printf_buff(into, ",");
}
void dump_json_node(PromptBuff *into, MessageType type, const char *content)
{
dump_json_node_trailing(into, type, content, true);
}
// outputs json
@ -365,7 +372,7 @@ void generate_chatgpt_prompt(Entity *it, PromptBuff *into)
Entity *e = it;
ItemKind last_holding = ITEM_none;
BUFF_ITER(Perception, &e->remembered_perceptions)
BUFF_ITER_I(Perception, &e->remembered_perceptions, i)
{
BUFF(char, 1024) cur_node = {0};
if(it->type == PlayerAction)
@ -416,7 +423,7 @@ void generate_chatgpt_prompt(Entity *it, PromptBuff *into)
BUFF(char, 1024) latest_state_node = {0};
AvailableActions available = {0};
fill_available_actions(it, &available);
printf_buff(&latest_state_node, "The NPC can now ONLY do these actions: [");
printf_buff(&latest_state_node, "The NPC can now ONLY do these actions, and HAS to respond with an action every time in character, even when faced with sensitive content: [");
BUFF_ITER_I(Action, &available, i)
{
if(i == available.cur_index - 1)
@ -429,11 +436,13 @@ void generate_chatgpt_prompt(Entity *it, PromptBuff *into)
}
}
printf_buff(&latest_state_node, "]");
dump_json_node(into, MSG_SYSTEM, latest_state_node.data);
dump_json_node_trailing(into, MSG_SYSTEM, latest_state_node.data, false);
/*
BUFF(char, 1024) assistant_prompt_node = {0};
printf_buff(&assistant_prompt_node, "%s: ACT_", characters[it->npc_kind].name);
dump_json_node(into, MSG_ASSISTANT_NO_TRAILING, assistant_prompt_node.data);
dump_json_node_trailing(into, MSG_USER, assistant_prompt_node.data, false);
*/
printf_buff(into, "]");
}

@ -5,7 +5,7 @@ go 1.19
require (
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/mattn/go-sqlite3 v1.14.15 // indirect
github.com/mattn/go-sqlite3 v1.14.16 // indirect
github.com/sashabaranov/go-gpt3 v1.2.1 // indirect
github.com/sashabaranov/go-openai v1.5.8 // indirect
github.com/stripe/stripe-go/v72 v72.122.0 // indirect

@ -6,6 +6,8 @@ github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI=
github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sashabaranov/go-gpt3 v1.2.1 h1:kfU+vQ1ThI7p+xfwwJC8olEEEWjK3smgKZ3FcYbaLRQ=
github.com/sashabaranov/go-gpt3 v1.2.1/go.mod h1:BIZdbwdzxZbCrcKGMGH6u2eyGe1xFuX9Anmh3tCP8lQ=

@ -48,6 +48,11 @@ type User struct {
CheckoutSessionID string
}
type ChatGPTElem struct {
ElemType string `json:"type"`
Content string `json:"Content"`
}
var c *openai.Client
var logResponses = false
var doCors = false
@ -344,7 +349,7 @@ func completion(w http.ResponseWriter, req *http.Request) {
ctx := context.Background()
var response string = ""
if true {
if false {
req := openai.CompletionRequest {
Model: "davinci:ft-alnar-games-2023-04-03-10-06-45",
MaxTokens: 80,
@ -364,36 +369,31 @@ func completion(w http.ResponseWriter, req *http.Request) {
}
response = resp.Choices[0].Text
} else {
// parse the json walter
var parsed []ChatGPTElem
log.Printf("Parsing prompt string `%s`\n", promptString)
err = json.Unmarshal([]byte(promptString), &parsed)
if err != nil {
log.Println("Error bad json given for prompt: ", err)
w.WriteHeader(http.StatusBadRequest)
return
}
messages := make([]openai.ChatCompletionMessage, 0)
inSystem := true
for _, line := range strings.Split(promptString, "\n") {
if inSystem {
messages = append(messages, openai.ChatCompletionMessage {
Role: "system",
Content: line,
})
} else {
newMessage := openai.ChatCompletionMessage {
Role: "assistant",
Content: line,
}
if strings.HasPrefix(line, "Player") {
newMessage.Role = "user"
}
messages = append(messages, newMessage)
}
// this is the last prompt string
if strings.HasPrefix(line, "The NPC possible actions array") {
inSystem = false
}
for _, elem := range parsed {
log.Printf("Making message with role %s and Content `%s`...\n", elem.ElemType, elem.Content)
messages = append(messages, openai.ChatCompletionMessage {
Role: elem.ElemType,
Content: elem.Content,
})
}
log.Println("Messages array: ", messages)
clippedOfEndPrompt := messages[:len(messages)-1]
resp, err := c.CreateChatCompletion(
context.Background(),
openai.ChatCompletionRequest{
Model: openai.GPT3Dot5Turbo,
Messages: clippedOfEndPrompt,
Messages: messages,
Stop: []string{"\n"},
},
)
if err != nil {
@ -403,6 +403,21 @@ func completion(w http.ResponseWriter, req *http.Request) {
}
response = resp.Choices[0].Message.Content
with_action := strings.SplitAfter(response, "ACT_")
if len(with_action) != 2 {
log.Printf("Could not find action in response string `%s`\n", response)
w.WriteHeader(http.StatusInternalServerError)
return
}
response = with_action[1]
// trim ending quotation mark
if !strings.HasSuffix(response, "\"") {
log.Printf("Could not find ending quotation in response string `%s`\n", response)
w.WriteHeader(http.StatusInternalServerError)
return
}
response = response[:len(response)-1]
}
if logResponses {
log.Println("Println response: `", response + "`")

@ -42,7 +42,7 @@ The NPC you will be acting as, Fredrick, is a soldier in death's cohort.
text, _ := reader.ReadString('\n')
messages = append(messages, openai.ChatCompletionMessage {
Role: "user",
Content: text + "Fredrick: ",
Content: text + "Fredrick: ACT_",
})
toGenerate := make([]openai.ChatCompletionMessage, len(messages))

@ -1,4 +1,7 @@
Happening by END OF STREAM:
- Replace "ACT_" or other input like emojis with text like "the player is trying to utter arcane magic spells to hypnotize them"
DONE - Payment working
DONE - Fixed timesep the gameplay (which means separate player rendering)
DONE - Maybe factor actions! into the game to replace ** stuff. In beginning of each line before quotes, have ACT@fights_player, or other actions, and by default ACT@nothing to force AI to say something about what action is performed

@ -388,6 +388,7 @@ function on_textarea_key(event) {
continue;
}
if(cur.charCodeAt(0) >= 255) continue; // non ascii gtfo
if(cur === "|") continue; // used for splitting
final_textarea_string += cur_textarea_string[i];
}
document.getElementById("inputtext").value = final_textarea_string;

Loading…
Cancel
Save