27 changes: 27 additions & 0 deletions chatgpt/model.go
@@ -20,6 +20,7 @@ type ChatGPTResponse struct {
Qualifications []string `json:"qualifications"`
TechStack []string `json:"tech_stack"`
Level string `json:"level"`
UserRespSummary []string `json:"user_response_summary"`
}

type OpenAIClient struct {
@@ -157,10 +158,36 @@ Here is the input data:
%s`, jdSummary)
}

func BuildResponseSummary(question, response string) string {
return fmt.Sprintf(`Extract the **key technical points** from the following backend interview answer.

Break the response into a list of concise, self-contained statements. Each item should:
- Represent a distinct technical idea, method, or decision
- Be understandable without the original question
- Focus only on what the user **actually said**, not what they should have said
- Exclude filler, vague claims, or generalities
- Be written in the past tense

Output only valid JSON in this format:
{
"user_response_summary":[
"First technical point...",
"Second technical point...",
...
]}

Interview question:
"%s"

User’s answer:
"%s"`, question, response)
}

type AIClient interface {
GetChatGPTResponse(prompt string) (*ChatGPTResponse, error)
GetChatGPTResponseConversation(conversationHistory []map[string]string) (*ChatGPTResponse, error)
GetChatGPT35Response(prompt string) (*ChatGPTResponse, error)
ExtractJDInput(jd string) (*JDParsedOutput, error)
ExtractJDSummary(jdInput *JDParsedOutput) (string, error)
ExtractResponseSummary(question, response string) (*ChatGPTResponse, error)
}
10 changes: 10 additions & 0 deletions chatgpt/service.go
@@ -269,3 +269,13 @@ func (c *OpenAIClient) ExtractJDSummary(jdInput *JDParsedOutput) (string, error)

return jdSummary, nil
}

func (c *OpenAIClient) ExtractResponseSummary(question, response string) (*ChatGPTResponse, error) {
systemPrompt := BuildResponseSummary(question, response)
summarizedResponse, err := c.GetChatGPT35Response(systemPrompt)
if err != nil {
return nil, err
}

return summarizedResponse, nil
}
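
For context, a minimal usage sketch of the new summarizer (not part of this diff; it assumes some value implementing chatgpt.AIClient plus the standard fmt and log packages, and relies on the extracted points landing in ChatGPTResponse.UserRespSummary per the JSON format defined in BuildResponseSummary):

// Hypothetical usage, not part of this PR.
func printResponsePoints(client chatgpt.AIClient, question, answer string) {
	summary, err := client.ExtractResponseSummary(question, answer)
	if err != nil {
		log.Printf("ExtractResponseSummary failed: %v", err)
		return
	}
	// Each entry is one concise, past-tense technical point from the answer.
	for _, point := range summary.UserRespSummary {
		fmt.Println(point)
	}
}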
32 changes: 28 additions & 4 deletions conversation/helpers.go
@@ -3,20 +3,23 @@ package conversation
import (
"encoding/json"
"errors"
"fmt"
"log"
"sort"
"strings"
"time"

"github.com/michaelboegner/interviewer/chatgpt"
"github.com/michaelboegner/interviewer/interview"
)

func GetChatGPTResponses(conversation *Conversation, openAI chatgpt.AIClient, interviewRepo interview.InterviewRepo) (*chatgpt.ChatGPTResponse, string, error) {
conversationHistory, err := GetConversationHistory(conversation, interviewRepo)
func GetChatGPTResponses(conversation *Conversation, openAI chatgpt.AIClient, interviewRepo interview.InterviewRepo, conversationContext []string) (*chatgpt.ChatGPTResponse, string, error) {
conversationHistory, err := GetConversationHistory(conversation, interviewRepo, conversationContext)
if err != nil {
log.Printf("GetConversationHistory failed: %v", err)
return nil, "", err
}

chatGPTResponse, err := openAI.GetChatGPTResponseConversation(conversationHistory)
if err != nil {
log.Printf("getNextQuestion failed: %v", err)
@@ -31,7 +34,7 @@ func GetChatGPTResponses(conversation *Conversation, openAI chatgpt.AIClient, in
return chatGPTResponse, chatGPTResponseString, nil
}

func GetConversationHistory(conversation *Conversation, interviewRepo interview.InterviewRepo) ([]map[string]string, error) {
func GetConversationHistory(conversation *Conversation, interviewRepo interview.InterviewRepo, conversationContext []string) ([]map[string]string, error) {
var arrayOfTopics []string
var currentTopic string
chatGPTConversationArray := make([]map[string]string, 0)
@@ -65,6 +68,7 @@ func GetConversationHistory(conversation *Conversation, interviewRepo interview.
questionNumbersSorted = append(questionNumbersSorted, questionNumber)
}
sort.Ints(questionNumbersSorted)
lastQuestionNumber := questionNumbersSorted[len(questionNumbersSorted)-1]
for _, questionNumber := range questionNumbersSorted {
question := topic.Questions[questionNumber]
for i, message := range question.Messages {
@@ -78,13 +82,33 @@ func GetConversationHistory(conversation *Conversation, interviewRepo interview.
if message.Author == "interviewer" {
role = "assistant"
}

content := message.Content
isFinalInjectionTarget := questionNumber == lastQuestionNumber &&
message.Author == "user"
// DEBUG
fmt.Printf("isFinalInjectionTarget: %v\n", isFinalInjectionTarget)
fmt.Printf("conversationContext: %v\n", conversationContext)
if isFinalInjectionTarget && len(conversationContext) > 0 {
formattedContext := strings.Join(conversationContext, "\n")
content = fmt.Sprintf("Relevant prior user context:\n%s\n\n--- BEGIN USER'S ACTUAL RESPONSE ---\n%s", formattedContext, content)
}

chatGPTConversationArray = append(chatGPTConversationArray, map[string]string{
"role": role,
"content": message.Content,
"content": content,
})
}
}

fmt.Println("------ DEBUG: Formatted Conversation History ------")
for i, msg := range chatGPTConversationArray {
fmt.Printf("\n--- Message %d ---\n", i+1)
fmt.Printf("Role : %s\n", msg["role"])
fmt.Printf("Content:\n%s\n", msg["content"])
}
fmt.Println("------ END DEBUG ------")

return chatGPTConversationArray, nil
}

2 changes: 1 addition & 1 deletion conversation/model.go
@@ -72,7 +72,7 @@ type ConversationRepo interface {
CreateQuestion(conversation *Conversation, prompt string) (int, error)
AddQuestion(question *Question) (int, error)
GetQuestions(Conversation *Conversation) ([]*Question, error)
CreateMessages(conversation *Conversation, messages []Message) error
CreateMessages(conversation *Conversation, messages []Message) (int, error)
AddMessage(conversationID, topic_id, questionNumber int, message Message) (int, error)
GetMessages(conversationID, topic_id, questionNumber int) ([]Message, error)
}
8 changes: 4 additions & 4 deletions conversation/repository.go
@@ -210,7 +210,7 @@ func (repo *Repository) GetQuestions(conversation *Conversation) ([]*Question, e
return questions, nil
}

func (repo *Repository) CreateMessages(conversation *Conversation, messages []Message) error {
func (repo *Repository) CreateMessages(conversation *Conversation, messages []Message) (int, error) {
var id int
for _, message := range messages {
query := `
@@ -229,14 +229,14 @@ ).Scan(&id)
).Scan(&id)

if err == sql.ErrNoRows {
return err
return 0, err
} else if err != nil {
log.Printf("Error querying conversation: %v\n", err)
return err
return 0, err
}
}

return nil
return id, nil
}

func (repo *Repository) AddMessage(conversationID, topic_id, questionNumber int, message Message) (int, error) {
46 changes: 42 additions & 4 deletions conversation/service.go
@@ -1,10 +1,13 @@
package conversation

import (
"context"
"errors"
"log"
"time"

"github.com/michaelboegner/interviewer/chatgpt"
"github.com/michaelboegner/interviewer/embedding"
"github.com/michaelboegner/interviewer/interview"
)

@@ -30,9 +33,11 @@ func CreateEmptyConversation(repo ConversationRepo, interviewID int, subTopic st
}

func CreateConversation(
ctx context.Context,
repo ConversationRepo,
interviewRepo interview.InterviewRepo,
openAI chatgpt.AIClient,
embeddingService embedding.Service,
conversation *Conversation,
interviewID int,
prompt,
@@ -61,13 +66,29 @@
topic.Questions[questionNumber] = NewQuestion(conversationID, topicID, questionNumber, firstQuestion, messages)
conversation.Topics[topicID] = topic

err = repo.CreateMessages(conversation, messages)
messageID, err := repo.CreateMessages(conversation, messages)
if err != nil {
log.Printf("repo.CreateMessages failed: %v", err)
return nil, err
}

chatGPTResponse, chatGPTResponseString, err := GetChatGPTResponses(conversation, openAI, interviewRepo)
embedInput := embedding.EmbedInput{
InterviewID: interviewID,
ConversationID: conversationID,
TopicID: topicID,
QuestionNumber: questionNumber,
MessageID: messageID,
Question: firstQuestion,
UserResponse: message,
CreatedAt: time.Now().UTC(),
}

conversationContext, err := embeddingService.ProcessAndRetrieve(ctx, embedInput)
if err != nil {
log.Printf("embeddingService.ProcessAndRetrieve failed: %v", err)
}

chatGPTResponse, chatGPTResponseString, err := GetChatGPTResponses(conversation, openAI, interviewRepo, conversationContext)
if err != nil {
log.Printf("getChatGPTResponses failed: %v", err)
return nil, err
@@ -108,9 +129,11 @@ func CreateConversation(
}

func AppendConversation(
ctx context.Context,
repo ConversationRepo,
interviewRepo interview.InterviewRepo,
openAI chatgpt.AIClient,
embeddingService embedding.Service,
interviewID,
userID int,
conversation *Conversation,
@@ -125,13 +148,28 @@
}

messageUser := NewMessage(conversationID, topicID, questionNumber, User, message)
_, err := repo.AddMessage(conversationID, topicID, questionNumber, messageUser)
messageID, err := repo.AddMessage(conversationID, topicID, questionNumber, messageUser)
if err != nil {
return nil, err
}
conversation.Topics[topicID].Questions[questionNumber].Messages = append(conversation.Topics[topicID].Questions[questionNumber].Messages, messageUser)

chatGPTResponse, chatGPTResponseString, err := GetChatGPTResponses(conversation, openAI, interviewRepo)
embedInput := embedding.EmbedInput{
InterviewID: interviewID,
ConversationID: conversationID,
TopicID: topicID,
QuestionNumber: questionNumber,
MessageID: messageID,
Question: conversation.Topics[topicID].Questions[questionNumber].Prompt,
UserResponse: message,
CreatedAt: time.Now().UTC(),
}

conversationContext, err := embeddingService.ProcessAndRetrieve(ctx, embedInput)
if err != nil {
log.Printf("embeddingService.ProcessAndRetrieve failed: %v", err)
}
chatGPTResponse, chatGPTResponseString, err := GetChatGPTResponses(conversation, openAI, interviewRepo, conversationContext)
if err != nil {
log.Printf("getChatGPTResponses failed: %v", err)
return nil, err
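
The embedding.Service used in CreateConversation and AppendConversation is not shown in this diff. The following is only an inferred sketch of its shape, reconstructed from the call sites (the EmbedInput literal fields and the ProcessAndRetrieve signature), not the actual implementation:

// Inferred sketch only: the real embedding package is not part of this diff.
package embedding

import (
	"context"
	"time"
)

// EmbedInput mirrors the fields populated in conversation/service.go.
type EmbedInput struct {
	InterviewID    int
	ConversationID int
	TopicID        int
	QuestionNumber int
	MessageID      int
	Question       string
	UserResponse   string
	CreatedAt      time.Time
}

type Service interface {
	// ProcessAndRetrieve presumably embeds and stores the user's response, then
	// returns summaries of related prior answers for injection into the prompt.
	ProcessAndRetrieve(ctx context.Context, input EmbedInput) ([]string, error)
}
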
7 changes: 7 additions & 0 deletions database/migrations/000010_create_conversation_embeddings.down.sql
@@ -0,0 +1,7 @@
DROP INDEX IF EXISTS conversation_embeddings_lookup_idx;
DROP INDEX IF EXISTS convo_embeddings_by_question_idx;
DROP INDEX IF EXISTS conversation_embeddings_embedding_idx;

DROP TABLE IF EXISTS conversation_embeddings;

DROP EXTENSION IF EXISTS vector;
25 changes: 25 additions & 0 deletions database/migrations/000010_create_conversation_embeddings.up.sql
@@ -0,0 +1,25 @@
CREATE EXTENSION IF NOT EXISTS vector;

CREATE TABLE conversation_embeddings (
id SERIAL PRIMARY KEY,
interview_id INT NOT NULL,
conversation_id INT NOT NULL,
topic_id INT NOT NULL,
question_number INT NOT NULL,
message_id INT NOT NULL,
summary TEXT NOT NULL,
embedding VECTOR(384) NOT NULL,
created_at TIMESTAMP DEFAULT now()
);

CREATE INDEX conversation_embeddings_embedding_idx
ON conversation_embeddings USING ivfflat (embedding vector_cosine_ops)
WITH (lists = 100);

CREATE INDEX convo_embeddings_by_question_idx
ON conversation_embeddings (interview_id, topic_id, question_number);

CREATE INDEX conversation_embeddings_lookup_idx
ON conversation_embeddings (interview_id, message_id);

ANALYZE conversation_embeddings;
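
A hypothetical retrieval query against this table, assuming database/sql with a Postgres driver and pgvector's <=> cosine-distance operator (the one accelerated by the ivfflat index above). This is a sketch, not code from the PR:

// Hypothetical sketch, not part of this diff.
package embedding

import (
	"database/sql"
	"strconv"
	"strings"
)

// nearestSummaries returns the stored summaries most similar to the given
// embedding for one interview, ordered by cosine distance.
func nearestSummaries(db *sql.DB, interviewID int, embedding []float32, k int) ([]string, error) {
	// pgvector accepts a bracketed literal such as '[0.1,0.2,0.3]'.
	parts := make([]string, len(embedding))
	for i, v := range embedding {
		parts[i] = strconv.FormatFloat(float64(v), 'f', -1, 32)
	}
	vec := "[" + strings.Join(parts, ",") + "]"

	rows, err := db.Query(`
		SELECT summary
		FROM conversation_embeddings
		WHERE interview_id = $1
		ORDER BY embedding <=> $2::vector
		LIMIT $3`, interviewID, vec, k)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var summaries []string
	for rows.Next() {
		var s string
		if err := rows.Scan(&s); err != nil {
			return nil, err
		}
		summaries = append(summaries, s)
	}
	return summaries, rows.Err()
}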
57 changes: 57 additions & 0 deletions embedding/embedder.go
@@ -0,0 +1,57 @@
package embedding

import (
"bytes"
"context"
"encoding/json"
"errors"
"net/http"
"os"
"time"
)

type HTTPEmbedder struct {
Endpoint string
Timeout time.Duration
}

func NewHTTPEmbedder() (*HTTPEmbedder, error) {
endpoint := os.Getenv("EMBEDDING_URL")
if endpoint == "" {
return nil, errors.New("env not set for EMBEDDING_URL")
}

return &HTTPEmbedder{
Endpoint: endpoint,
Timeout: 10 * time.Second,
}, nil
}

func (e *HTTPEmbedder) EmbedText(ctx context.Context, input string) ([]float32, error) {
body, err := json.Marshal(map[string]string{"text": input})
if err != nil {
return nil, err
}

req, err := http.NewRequestWithContext(ctx, "POST", e.Endpoint, bytes.NewBuffer(body))
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")

client := &http.Client{Timeout: e.Timeout}
resp, err := client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()

var result struct {
Embedding []float32 `json:"embedding"`
}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return nil, err
}

return result.Embedding, nil
}
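
A brief caller sketch (assumption: EMBEDDING_URL points at a service that answers {"text": ...} requests with {"embedding": [...]}, which is what EmbedText decodes; the 384 dimensions match the VECTOR(384) column in the migration):

// Hypothetical caller, not part of this diff.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/michaelboegner/interviewer/embedding"
)

func main() {
	embedder, err := embedding.NewHTTPEmbedder() // requires EMBEDDING_URL to be set
	if err != nil {
		log.Fatal(err)
	}
	vec, err := embedder.EmbedText(context.Background(), "Sharded the queue by user ID to scale notifications")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("embedding has %d dimensions\n", len(vec)) // the migration expects 384
}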