-
Notifications
You must be signed in to change notification settings - Fork 10
/
Copy pathytt_web.go
139 lines (126 loc) · 3.36 KB
/
ytt_web.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
package main
import (
"fmt"
"net/http"
"strings"
)
// handleYTT streams a YouTube video transcription to the client over
// Server-Sent Events (SSE).
//
// Query parameters:
//   - url:   the YouTube video URL to transcribe (required)
//   - model: the LLM model identifier (required; see getProviderForModel)
//
// The handler validates that credentials for the model's provider are
// present in the config, then streams transcript chunks as SSE "data:"
// events, emits "event: done" when transcription completes, and
// "event: error" on failure.
func handleYTT(w http.ResponseWriter, r *http.Request) {
	url := r.URL.Query().Get("url")
	if url == "" {
		http.Error(w, "URL parameter is required", http.StatusBadRequest)
		return
	}
	modelStr := r.URL.Query().Get("model")
	if modelStr == "" {
		http.Error(w, "Model parameter is required", http.StatusBadRequest)
		return
	}
	model := LLMModel(modelStr)
	config, err := ReadConfig()
	if err != nil {
		http.Error(w, "Failed to read config", http.StatusInternalServerError)
		return
	}
	provider := getProviderForModel(model)
	if provider == "" {
		http.Error(w, "Unsupported model", http.StatusBadRequest)
		return
	}
	// Validate that the credentials the chosen provider needs are configured.
	switch provider {
	case OpenAI:
		if config.OpenAIAPIKey == "" {
			http.Error(w, fmt.Sprintf("OpenAI API key required for model %s", model), http.StatusBadRequest)
			return
		}
	case Claude:
		if config.AnthropicAPIKey == "" {
			http.Error(w, fmt.Sprintf("Anthropic API key required for model %s", model), http.StatusBadRequest)
			return
		}
	case Groq:
		if config.GroqAPIKey == "" {
			http.Error(w, fmt.Sprintf("Groq API key required for model %s", model), http.StatusBadRequest)
			return
		}
	case Bedrock:
		if config.AWSRegion == "" || config.AWSAccessKeyID == "" || config.AWSSecretAccessKey == "" {
			http.Error(w, fmt.Sprintf("AWS credentials required for model %s. Run 'podscript configure' to set them up", model), http.StatusBadRequest)
			return
		}
	case Gemini:
		if config.GeminiAPIKey == "" {
			http.Error(w, fmt.Sprintf("Gemini API key required for model %s", model), http.StatusBadRequest)
			return
		}
	default:
		// Defensive: getProviderForModel returned a provider this switch
		// does not know about.
		http.Error(w, "Unsupported model", http.StatusBadRequest)
		return
	}
	client, err := NewLLMClient(provider, *config)
	if err != nil {
		http.Error(w, "Failed to initialize LLM client", http.StatusInternalServerError)
		return
	}
	// Set up SSE headers.
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
		return
	}
	transcriber := NewYouTubeTranscriber(client, model)
	err = transcriber.Transcribe(r.Context(), url, func(text string, done bool) error {
		// An SSE data payload may span multiple lines: every line must be
		// prefixed with "data: " and the event is terminated by a blank line.
		// Rewriting each interior newline as "\ndata: " produces exactly that
		// framing in a single write.
		if _, err := fmt.Fprintf(w, "data: %s\n\n", strings.ReplaceAll(text, "\n", "\ndata: ")); err != nil {
			return err
		}
		if done {
			if _, err := fmt.Fprint(w, "event: done\ndata: \n\n"); err != nil {
				return err
			}
		}
		flusher.Flush()
		return nil
	})
	if err != nil {
		// Log the error server-side.
		fmt.Printf("[ERROR] Transcription failed: %v\n", err)
		// Newlines inside the error text would break SSE framing (and could
		// inject spurious events), so flatten them before sending.
		fmt.Fprintf(w, "event: error\ndata: %s\n\n", strings.ReplaceAll(err.Error(), "\n", " "))
		flusher.Flush()
	}
}
// getProviderForModel maps a model identifier to the LLM provider that
// serves it. Unknown models yield the empty provider value.
func getProviderForModel(model LLMModel) LLMProvider {
	modelToProvider := map[LLMModel]LLMProvider{
		GPT4o:                 OpenAI,
		GPT4oMini:             OpenAI,
		Claude37Sonnet:        Claude,
		Claude35Haiku:         Claude,
		Llama3370b:            Groq,
		Llama318b:             Groq,
		Gemini2Flash:          Gemini,
		BedrockClaude37Sonnet: Bedrock,
		BedrockClaude35Haiku:  Bedrock,
	}
	// A missing key returns the zero value, i.e. the empty provider.
	return modelToProvider[model]
}