Class: Prompt
Class Method Summary collapse
-
.file_finetune ⇒ Object
Not implemented; only scaffolding.
-
.stream_prompt(input, conversation = '', temp = Config.load_temperature()) ⇒ Object
Streams the response, printing tokens as they arrive.
- .whisper_transcribe(file_path, interactive = false) ⇒ Object
- .whisper_translate(file_path, interactive = false) ⇒ Object
Methods included from Config
load_context_length, load_key, load_temperature, save_context_length, save_key, save_temperature, set_config
Methods included from Files
config_path, context_file_path, context_path, file_path, root
Class Method Details
.file_finetune ⇒ Object
Not implemented; only scaffolding.
39 40 41 42 43 44 45 46 |
# File 'lib/prompt.rb', line 39

# Not implemented; only scaffolding for the fine-tune file API.
# Currently uploads a hard-coded test file and returns immediately;
# everything after the early +return+ is unreachable reference code
# listing the endpoints a full implementation will need.
#
# @return [Object] the raw response of the file upload call
def self.file_finetune()
  # TODO: parameterize the file path and purpose once implemented.
  return client.files.upload(parameters: { file: "./test.json", purpose: "fine-tune" })

  # --- unreachable scaffolding below (kept deliberately as notes) ---
  client.files.list # fixed typo: was `client.files.lisr`
  client.files.retrieve(id: "file-123")
  client.files.content(id: "file-123")
  client.files.delete(id: "file-123")
end
.stream_prompt(input, conversation = '', temp = Config.load_temperature()) ⇒ Object
Streams the response, printing tokens as they arrive.
9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 |
# File 'lib/prompt.rb', line 9

# Streams a chat completion, printing each content delta as it arrives
# and accumulating the full response.
#
# @param input [String] the user's question for this turn
# @param conversation [String] prior conversation text; the question is
#   appended to it and the whole string is sent as a single user message
# @param temp [Float, nil] sampling temperature; defaults to the
#   configured value, falling back to 0.7 when none is configured
# @return [Hash] context hash with "input" and "response" string keys
def self.stream_prompt(input, conversation = '', temp = Config.load_temperature())
  # Config.load_temperature may return nil when unset.
  temp = 0.7 if temp.nil?

  if conversation.length == 0
    conversation += input
  else
    conversation += "\n My question: #{input}"
  end

  response = ''
  client.chat(
    parameters: {
      model: "gpt-3.5-turbo",
      messages: [{ role: "user", content: conversation}],
      temperature: temp,
      stream: proc do |chunk, _bytesize|
        # Dig out the delta once per chunk instead of three times.
        delta = chunk.dig("choices", 0, "delta", "content")
        unless delta.nil?
          response += delta
          print delta
        end
      end
    }
  )

  context = {
    "input" => input,
    "response" => response,
  }
  return context
end
.whisper_transcribe(file_path, interactive = false) ⇒ Object
80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 |
# File 'lib/prompt.rb', line 80

# Transcribes an audio file via the Whisper API.
#
# @param file_path [String, nil] path to an audio file; must end with
#   .mp3, .wav, .m4a, .webm, .mpeg or .mpga
# @param interactive [Boolean] when false, validation errors hard-exit
#   the process; when true, the method logs and returns nil instead
# @return [String, nil] the transcribed text, or nil on validation failure
def self.whisper_transcribe(file_path, interactive = false)
  # Guard: missing path or unsupported extension.
  # Message aligned with whisper_translate ("No file given" alone was
  # misleading when the extension was merely wrong).
  if file_path.nil? || !file_path.end_with?(*['.mp3', '.wav', '.m4a', '.webm', '.mpeg', '.mpga'])
    Logging.log("No file given or wrong file type")
    exit unless interactive
    return
  end

  # Guard: Whisper rejects uploads above 25MB; size computed in MiB.
  size = File.size(file_path).to_f / 2**20
  if size > 24
    warning("The file is above the maximum size of 25MB")
    exit unless interactive
    return
  end

  # Open the file for the upload and always close it afterwards
  # (the previous version leaked the handle).
  file = File.open(file_path, "rb")
  begin
    response = client.audio.transcribe(
      parameters: {
        model: "whisper-1",
        file: file,
      })
  ensure
    file.close
  end

  if response["text"].nil? || response["text"].empty?
    Logging.log("No text found")
    exit unless interactive
  end
  # NOTE(review): in interactive mode an empty transcript still falls
  # through to here, returning the (empty/nil) text — original behavior.
  return response["text"]
rescue Errno::ENOENT => e
  # File vanished between the extension check and File.size/open.
  Logging.log(e)
end
.whisper_translate(file_path, interactive = false) ⇒ Object
48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 |
# File 'lib/prompt.rb', line 48

# Translates an audio file to English text via the Whisper API.
#
# @param file_path [String, nil] path to an audio file; must end with
#   .mp3, .wav, .m4a, .webm, .mpeg or .mpga
# @param interactive [Boolean] when false, validation errors hard-exit
#   the process; when true, the method logs and returns nil instead
# @return [String, nil] the translated text, or nil on validation failure
def self.whisper_translate(file_path, interactive = false)
  # Guard: missing path or unsupported extension.
  if file_path.nil? || !file_path.end_with?(*['.mp3', '.wav', '.m4a', '.webm', '.mpeg', '.mpga'])
    Logging.log("No file given or wrong file type")
    exit unless interactive
    return
  end

  # Guard: Whisper rejects uploads above 25MB; size computed in MiB.
  size = File.size(file_path).to_f / 2**20
  if size > 24
    warning("The file is above the maximum size of 25MB")
    exit unless interactive
    return
  end

  # Open the file for the upload and always close it afterwards
  # (the previous version leaked the handle).
  file = File.open(file_path, "rb")
  begin
    response = client.audio.translate(
      parameters: {
        model: "whisper-1",
        file: file,
      })
  ensure
    file.close
  end

  if response["text"].nil? || response["text"].empty?
    Logging.log("No text found")
    exit unless interactive
  end
  # NOTE(review): in interactive mode an empty translation still falls
  # through to here, returning the (empty/nil) text — original behavior.
  return response["text"]
rescue Errno::ENOENT => e
  # File vanished between the extension check and File.size/open.
  Logging.log(e)
end