Class: Prompt
- Inherits:
-
Object
- Object
- Prompt
- Defined in:
- lib/prompt.rb
Class Method Summary collapse
-
.file_finetune ⇒ Object
Not implemented; scaffolding only.
-
.stream_prompt(input, conversation = '', temp = load_temperature()) ⇒ Object
Streams the response, printing each chunk as it arrives.
- .whisper_transcribe(file_path, interactive = false) ⇒ Object
- .whisper_translate(file_path, interactive = false) ⇒ Object
Methods included from Logging
Methods included from Config
load_context_length, load_env, load_key, load_temperature, save_context_length, save_key, save_temperature, set_config, set_key
Methods included from Files
config_path, context_file_path, context_path, file_path, root
Class Method Details
.file_finetune ⇒ Object
Not implemented; scaffolding only
40 41 42 43 44 45 46 47 |
# File 'lib/prompt.rb', line 40

# Scaffolding for the OpenAI fine-tune file API — not implemented yet.
# Only the upload of a hard-coded test file is live; it returns immediately
# with the upload response.
#
# @return [Object] the response from the files upload endpoint
def self.file_finetune
  return client.files.upload(parameters: { file: "./test.json", purpose: "fine-tune" })
  # The statements below were unreachable dead code after the `return`
  # (and `files.lisr` was a typo for `files.list`); kept as commented
  # scaffolding for the remaining file-API calls:
  #   client.files.list
  #   client.files.retrieve(id: "file-123")
  #   client.files.content(id: "file-123")
  #   client.files.delete(id: "file-123")
end
.stream_prompt(input, conversation = '', temp = load_temperature()) ⇒ Object
Streams the response, VERY NICE
8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 |
# File 'lib/prompt.rb', line 8

# Streams a chat completion, printing each delta chunk as it arrives.
#
# @param input [String] the user's question
# @param conversation [String] prior conversation to prepend (optional)
# @param temp [Float, nil] sampling temperature; defaults to the configured value
# @return [Hash, nil] { "input" => input, "response" => full streamed text },
#   or nil when no API client is available
def self.stream_prompt(input, conversation = '', temp = load_temperature())
  # Fall back to a sane default when no temperature has been configured.
  if temp.nil?
    temp = 0.7
  end

  if conversation.length == 0
    conversation += input
  else
    conversation += "\n My question: #{input}"
  end

  response = ''
  unless client.nil?
    client.chat(
      parameters: {
        model: "gpt-3.5-turbo",
        messages: [{ role: "user", content: conversation }],
        temperature: temp,
        stream: proc do |chunk, _bytesize|
          # Dig the delta out once per chunk (the original repeated the
          # same dig three times).
          delta = chunk.dig("choices", 0, "delta", "content")
          response += delta unless delta.nil?
          print delta
        end
      }
    )
    context = {
      "input" => input,
      "response" => response,
    }
    return context
  end
end
.whisper_transcribe(file_path, interactive = false) ⇒ Object
83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 |
# File 'lib/prompt.rb', line 83

# Transcribes the given audio file via the Whisper API.
#
# @param file_path [String, nil] path to an audio file
#   (.mp3/.wav/.m4a/.webm/.mpeg/.mpga)
# @param interactive [Boolean] when false, hard-exit on validation failures
# @return [String, nil] the transcribed text, or nil when validation fails
#   or no client is configured
def self.whisper_transcribe(file_path, interactive = false)
  # Guard: missing path or unsupported extension. Message now matches
  # whisper_translate — this check also rejects wrong file types, so the
  # old "No file given" text was misleading.
  if file_path.nil? || !file_path.end_with?('.mp3', '.wav', '.m4a', '.webm', '.mpeg', '.mpga')
    log("No file given or wrong file type")
    exit unless interactive
    return
  end

  # Guard: the Whisper endpoint caps uploads at 25MB.
  size = File.size(file_path).to_f / 2**20
  if size > 24
    warning("The file is above the maximum size of 25MB")
    exit unless interactive
    return
  end

  return if client.nil?

  response = client.audio.transcribe(
    parameters: {
      model: "whisper-1",
      file: File.open(file_path, "rb"),
    })
  if response["text"].nil? || response["text"].empty?
    log("No text found")
    exit unless interactive
  end
  response["text"]
rescue Errno::ENOENT => e
  # File.size raises when the path does not exist.
  log(e)
end
.whisper_translate(file_path, interactive = false) ⇒ Object
49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 |
# File 'lib/prompt.rb', line 49

# Translates the given audio file into English text via the Whisper API.
#
# @param file_path [String, nil] path to an audio file
#   (.mp3/.wav/.m4a/.webm/.mpeg/.mpga)
# @param interactive [Boolean] when false, hard-exit on validation failures
# @return [String, nil] the translated text, or nil when validation fails
#   or no client is configured
def self.whisper_translate(file_path, interactive = false)
  supported = ['.mp3', '.wav', '.m4a', '.webm', '.mpeg', '.mpga']

  # Guard: missing path or unsupported extension.
  if file_path.nil? || !file_path.end_with?(*supported)
    log("No file given or wrong file type")
    exit unless interactive
    return
  end

  # Guard: the Whisper endpoint caps uploads at 25MB.
  megabytes = File.size(file_path).to_f / 2**20
  if megabytes > 24
    warning("The file is above the maximum size of 25MB")
    exit unless interactive
    return
  end

  return if client.nil?

  response = client.audio.translate(
    parameters: {
      model: "whisper-1",
      file: File.open(file_path, "rb"),
    })
  if response["text"].nil? || response["text"].empty?
    log("No text found")
    exit unless interactive
  end
  response["text"]
rescue Errno::ENOENT => e
  # File.size raises when the path does not exist.
  log(e)
end