Class: OCI::GenerativeAiInference::Models::CohereChatResponse

Inherits:
  BaseChatResponse
    • Object
Defined in:
lib/oci/generative_ai_inference/models/cohere_chat_response.rb

Overview

The response to the chat conversation.

Constant Summary

FINISH_REASON_ENUM =
[
  FINISH_REASON_COMPLETE = 'COMPLETE'.freeze,
  FINISH_REASON_ERROR_TOXIC = 'ERROR_TOXIC'.freeze,
  FINISH_REASON_ERROR_LIMIT = 'ERROR_LIMIT'.freeze,
  FINISH_REASON_ERROR = 'ERROR'.freeze,
  FINISH_REASON_USER_CANCEL = 'USER_CANCEL'.freeze,
  FINISH_REASON_MAX_TOKENS = 'MAX_TOKENS'.freeze,
  FINISH_REASON_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
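
A caller can compare #finish_reason against these constants to decide how to handle a reply. A minimal sketch, assuming a CohereChatResponse instance named response has already been obtained from a chat call:

model = OCI::GenerativeAiInference::Models::CohereChatResponse

case response.finish_reason
when model::FINISH_REASON_COMPLETE
  puts response.text
when model::FINISH_REASON_MAX_TOKENS
  # The model stopped at the token limit, so response.text may be truncated.
  puts "Truncated reply: #{response.text}"
when model::FINISH_REASON_ERROR, model::FINISH_REASON_ERROR_TOXIC, model::FINISH_REASON_ERROR_LIMIT
  warn "Generation failed: #{response.error_message}"
end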

Constants inherited from BaseChatResponse

BaseChatResponse::API_FORMAT_ENUM

Instance Attribute Summary

Attributes inherited from BaseChatResponse

#api_format

Class Method Summary

Instance Method Summary

Methods inherited from BaseChatResponse

get_subtype

Constructor Details

#initialize(attributes = {}) ⇒ CohereChatResponse

Initializes the object

Parameters:

  • attributes (Hash) (defaults to: {})

    Model attributes in the form of hash

Options Hash (attributes):

  • :text (String) The value to assign to the #text property
  • :chat_history (Array<OCI::GenerativeAiInference::Models::CohereMessage>) The value to assign to the #chat_history property
  • :citations (Array<OCI::GenerativeAiInference::Models::Citation>) The value to assign to the #citations property
  • :is_search_required (BOOLEAN) The value to assign to the #is_search_required property
  • :finish_reason (String) The value to assign to the #finish_reason property
  • :error_message (String) The value to assign to the #error_message property
  • :search_queries (Array<OCI::GenerativeAiInference::Models::SearchQuery>) The value to assign to the #search_queries property
  • :documents (Array<Object>) The value to assign to the #documents property
  • :tool_calls (Array<OCI::GenerativeAiInference::Models::CohereToolCall>) The value to assign to the #tool_calls property
  • :prompt (String) The value to assign to the #prompt property
  • :usage (OCI::GenerativeAiInference::Models::Usage) The value to assign to the #usage property

# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 138

def initialize(attributes = {})
  return unless attributes.is_a?(Hash)

  attributes['apiFormat'] = 'COHERE'

  super(attributes)

  # convert string to symbol for hash key
  attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

  self.text = attributes[:'text'] if attributes[:'text']

  self.chat_history = attributes[:'chatHistory'] if attributes[:'chatHistory']

  raise 'You cannot provide both :chatHistory and :chat_history' if attributes.key?(:'chatHistory') && attributes.key?(:'chat_history')

  self.chat_history = attributes[:'chat_history'] if attributes[:'chat_history']

  self.citations = attributes[:'citations'] if attributes[:'citations']

  self.is_search_required = attributes[:'isSearchRequired'] unless attributes[:'isSearchRequired'].nil?
  self.is_search_required = false if is_search_required.nil? && !attributes.key?(:'isSearchRequired') # rubocop:disable Style/StringLiterals

  raise 'You cannot provide both :isSearchRequired and :is_search_required' if attributes.key?(:'isSearchRequired') && attributes.key?(:'is_search_required')

  self.is_search_required = attributes[:'is_search_required'] unless attributes[:'is_search_required'].nil?
  self.is_search_required = false if is_search_required.nil? && !attributes.key?(:'isSearchRequired') && !attributes.key?(:'is_search_required') # rubocop:disable Style/StringLiterals

  self.finish_reason = attributes[:'finishReason'] if attributes[:'finishReason']

  raise 'You cannot provide both :finishReason and :finish_reason' if attributes.key?(:'finishReason') && attributes.key?(:'finish_reason')

  self.finish_reason = attributes[:'finish_reason'] if attributes[:'finish_reason']

  self.error_message = attributes[:'errorMessage'] if attributes[:'errorMessage']

  raise 'You cannot provide both :errorMessage and :error_message' if attributes.key?(:'errorMessage') && attributes.key?(:'error_message')

  self.error_message = attributes[:'error_message'] if attributes[:'error_message']

  self.search_queries = attributes[:'searchQueries'] if attributes[:'searchQueries']

  raise 'You cannot provide both :searchQueries and :search_queries' if attributes.key?(:'searchQueries') && attributes.key?(:'search_queries')

  self.search_queries = attributes[:'search_queries'] if attributes[:'search_queries']

  self.documents = attributes[:'documents'] if attributes[:'documents']

  self.tool_calls = attributes[:'toolCalls'] if attributes[:'toolCalls']

  raise 'You cannot provide both :toolCalls and :tool_calls' if attributes.key?(:'toolCalls') && attributes.key?(:'tool_calls')

  self.tool_calls = attributes[:'tool_calls'] if attributes[:'tool_calls']

  self.prompt = attributes[:'prompt'] if attributes[:'prompt']

  self.usage = attributes[:'usage'] if attributes[:'usage']
end
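
As the source above shows, the constructor accepts either camelCase or snake_case keys for each field (providing both raises an error) and always forces apiFormat to 'COHERE'. A minimal construction sketch with illustrative values:

response = OCI::GenerativeAiInference::Models::CohereChatResponse.new(
  text: 'Emperor penguins are the tallest penguin species.',
  finish_reason: OCI::GenerativeAiInference::Models::CohereChatResponse::FINISH_REASON_COMPLETE,
  is_search_required: false
)

response.api_format    # => 'COHERE' (set by the constructor)
response.finish_reason # => 'COMPLETE'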

Instance Attribute Details

#chat_history ⇒ Array<OCI::GenerativeAiInference::Models::CohereMessage>

The list of previous messages between the user and the model. The chat history gives the model context for responding to the user's inputs.



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 29

def chat_history
  @chat_history
end

#citations ⇒ Array<OCI::GenerativeAiInference::Models::Citation>

Inline citations for the generated response.



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 33

def citations
  @citations
end

#documents ⇒ Array<Object>

The documents that the model can refer to when generating a response. Each document is a JSON string that represents the fields and values of the document.

Example: '[ { "id": "doc_0", "snippet": "Emperor penguins are the tallest.", "title": "Tall penguins" }, { "id": "doc_1", "snippet": "Emperor penguins only live in Antarctica.", "title": "Penguin habitats" } ]'

Returns:

  • (Array<Object>)


# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 68

def documents
  @documents
end

#error_message ⇒ String

If an error occurs during the streaming scenario, the errorMessage parameter contains details of the error.

Returns:

  • (String)


# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 45

def error_message
  @error_message
end

#finish_reason ⇒ String

[Required] Why the generation stopped.

Returns:

  • (String)


# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 41

def finish_reason
  @finish_reason
end

#is_search_required ⇒ BOOLEAN

If set to true, a search for documents is required.

Returns:

  • (BOOLEAN)


# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 37

def is_search_required
  @is_search_required
end

#prompt ⇒ String

The full prompt that was sent to the model when isEcho is set to true in the request.

Returns:

  • (String)


# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 76

def prompt
  @prompt
end

#search_queries ⇒ Array<OCI::GenerativeAiInference::Models::SearchQuery>

The generated search queries.



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 49

def search_queries
  @search_queries
end

#text ⇒ String

[Required] Contents of the response that the model generates.

Returns:

  • (String)


# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 25

def text
  @text
end

#tool_calls ⇒ Array<OCI::GenerativeAiInference::Models::CohereToolCall>

A list of tool calls generated by the model.



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 72

def tool_calls
  @tool_calls
end

#usage ⇒ OCI::GenerativeAiInference::Models::Usage



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 79

def usage
  @usage
end

Class Method Details

.attribute_map ⇒ Object

Attribute mapping from ruby-style variable name to JSON key.



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 82

def self.attribute_map
  {
    # rubocop:disable Style/SymbolLiteral
    'api_format': :'apiFormat',
    'text': :'text',
    'chat_history': :'chatHistory',
    'citations': :'citations',
    'is_search_required': :'isSearchRequired',
    'finish_reason': :'finishReason',
    'error_message': :'errorMessage',
    'search_queries': :'searchQueries',
    'documents': :'documents',
    'tool_calls': :'toolCalls',
    'prompt': :'prompt',
    'usage': :'usage'
    # rubocop:enable Style/SymbolLiteral
  }
end
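
The map is consumed by build_from_hash and to_hash below to translate between Ruby attribute names and the wire format. A quick illustrative lookup:

OCI::GenerativeAiInference::Models::CohereChatResponse.attribute_map[:finish_reason]
# => :finishReason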

.swagger_types ⇒ Object

Attribute type mapping.



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 102

def self.swagger_types
  {
    # rubocop:disable Style/SymbolLiteral
    'api_format': :'String',
    'text': :'String',
    'chat_history': :'Array<OCI::GenerativeAiInference::Models::CohereMessage>',
    'citations': :'Array<OCI::GenerativeAiInference::Models::Citation>',
    'is_search_required': :'BOOLEAN',
    'finish_reason': :'String',
    'error_message': :'String',
    'search_queries': :'Array<OCI::GenerativeAiInference::Models::SearchQuery>',
    'documents': :'Array<Object>',
    'tool_calls': :'Array<OCI::GenerativeAiInference::Models::CohereToolCall>',
    'prompt': :'String',
    'usage': :'OCI::GenerativeAiInference::Models::Usage'
    # rubocop:enable Style/SymbolLiteral
  }
end

Instance Method Details

#==(other) ⇒ Object

Checks equality by comparing each attribute.

Parameters:

  • other (Object)

    the other object to be compared



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 217

def ==(other)
  return true if equal?(other)

  self.class == other.class &&
    api_format == other.api_format &&
    text == other.text &&
    chat_history == other.chat_history &&
    citations == other.citations &&
    is_search_required == other.is_search_required &&
    finish_reason == other.finish_reason &&
    error_message == other.error_message &&
    search_queries == other.search_queries &&
    documents == other.documents &&
    tool_calls == other.tool_calls &&
    prompt == other.prompt &&
    usage == other.usage
end

#build_from_hash(attributes) ⇒ Object

Builds the object from hash

Parameters:

  • attributes (Hash)

    Model attributes in the form of hash

Returns:

  • (Object)

    Returns the model itself



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 258

def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)

  self.class.swagger_types.each_pair do |key, type|
    if type =~ /^Array<(.*)>/i
      # check to ensure the input is an array, given that the attribute
      # is documented as an array but the input may not be
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        public_method("#{key}=").call(
          attributes[self.class.attribute_map[key]]
            .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
        )
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      public_method("#{key}=").call(
        OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
      )
    end
    # otherwise the data was not found in the attributes hash; that is fine because the attribute can be optional
  end

  self
end
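
Since the lookups above go through attribute_map, build_from_hash expects the JSON-style (camelCase) keys rather than the snake_case ones. A small deserialization sketch with illustrative values:

response = OCI::GenerativeAiInference::Models::CohereChatResponse.new
response.build_from_hash(
  text: 'Hello from the model.',
  finishReason: 'COMPLETE',
  isSearchRequired: false
)

response.finish_reason      # => 'COMPLETE'
response.is_search_required # => false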

#eql?(other) ⇒ Boolean

Parameters:

  • other (Object)

    the other object to be compared

Returns:

  • (Boolean)

See Also:

  • `==` method


# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 238

def eql?(other)
  self == other
end

#hash ⇒ Fixnum

Calculates hash code according to all attributes.

Returns:

  • (Fixnum)

    Hash code



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 247

def hash
  [api_format, text, chat_history, citations, is_search_required, finish_reason, error_message, search_queries, documents, tool_calls, prompt, usage].hash
end

#to_hash ⇒ Hash

Returns the object in the form of hash

Returns:

  • (Hash)

    Returns the object in the form of hash



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 291

def to_hash
  hash = {}
  self.class.attribute_map.each_pair do |attr, param|
    value = public_method(attr).call
    next if value.nil? && !instance_variable_defined?("@#{attr}")

    hash[param] = _to_hash(value)
  end
  hash
end
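
Because the keys come from attribute_map, the resulting hash uses the JSON (camelCase) names. Continuing the build_from_hash sketch above, the output would look roughly like:

response.to_hash
# => { apiFormat: 'COHERE', text: 'Hello from the model.',
#      finishReason: 'COMPLETE', isSearchRequired: false }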

#to_s ⇒ String

Returns the string representation of the object

Returns:

  • (String)

    String representation of the object



# File 'lib/oci/generative_ai_inference/models/cohere_chat_response.rb', line 285

def to_s
  to_hash.to_s
end