Skip to content

Commit

Permalink
Update README
Browse files Browse the repository at this point in the history
  • Loading branch information
jamesrochabrun committed Dec 19, 2023
1 parent 2ad34c3 commit d88010f
Showing 1 changed file with 66 additions and 1 deletion.
67 changes: 66 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -322,9 +322,12 @@ public struct ChatCompletionParameters: Encodable {
/// Modify the likelihood of specified tokens appearing in the completion.
/// Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. Defaults to null.
public var logitBias: [Int: Double]?
/// Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message. This option is currently not available on the gpt-4-vision-preview model. Defaults to false.
public var logprobs: Bool?
/// An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. logprobs must be set to true if this parameter is used.
public var topLogprobs: Int?
/// The maximum number of [tokens](https://platform.openai.com/tokenizer) to generate in the chat completion.
/// The total length of input tokens and generated tokens is limited by the model's context length. Example [Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) for counting tokens.
/// Defaults to inf
public var maxTokens: Int?
/// How many chat completion choices to generate for each input message. Defaults to 1.
public var n: Int?
Expand Down Expand Up @@ -781,6 +784,8 @@ public struct ChatCompletionObject: Decodable {
public let index: Int
/// A chat completion message generated by the model.
public let message: ChatMessage
/// Log probability information for the choice.
public let logprobs: LogProb?

public struct ChatMessage: Decodable {

Expand All @@ -801,6 +806,36 @@ public struct ChatCompletionObject: Decodable {
let type: String
}
}

/// Log probability information for a chat completion choice.
/// Returned when the request sets `logprobs: true`.
public struct LogProb: Decodable {
   /// A list of message content tokens with log probability information.
   // `public` so consumers of this public struct can actually read the field;
   // the implicit `internal` default would hide it outside the module.
   public let content: [TokenDetail]
}

/// Log probability details for a single output token.
public struct TokenDetail: Decodable {
   /// The token.
   public let token: String
   /// The log probability of this token.
   public let logprob: Double
   /// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
   public let bytes: [Int]?
   /// List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.
   public let topLogprobs: [TopLogProb]

   // Maps the API's snake_case `top_logprobs` key; the remaining keys match as-is.
   enum CodingKeys: String, CodingKey {
      case token, logprob, bytes
      case topLogprobs = "top_logprobs"
   }

   /// One of the most likely alternative tokens at a given position, with its log probability.
   public struct TopLogProb: Decodable {
      /// The token.
      public let token: String
      /// The log probability of this token.
      public let logprob: Double
      /// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
      public let bytes: [Int]?
   }
}
}

public struct ChatUsage: Decodable {
Expand Down Expand Up @@ -866,6 +901,36 @@ public struct ChatCompletionChunkObject: Decodable {
public let role: String?
}

/// Log probability information for a streamed chat completion choice.
/// Returned when the request sets `logprobs: true`.
public struct LogProb: Decodable {
   /// A list of message content tokens with log probability information.
   // `public` so consumers of this public struct can actually read the field;
   // the implicit `internal` default would hide it outside the module.
   public let content: [TokenDetail]
}

/// Log probability details for a single output token in a streamed chunk.
public struct TokenDetail: Decodable {
   /// The token.
   public let token: String
   /// The log probability of this token.
   public let logprob: Double
   /// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
   public let bytes: [Int]?
   /// List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.
   public let topLogprobs: [TopLogProb]

   // Maps the API's snake_case `top_logprobs` key; the remaining keys match as-is.
   enum CodingKeys: String, CodingKey {
      case token, logprob, bytes
      case topLogprobs = "top_logprobs"
   }

   /// One of the most likely alternative tokens at a given position, with its log probability.
   public struct TopLogProb: Decodable {
      /// The token.
      public let token: String
      /// The log probability of this token.
      public let logprob: Double
      /// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
      public let bytes: [Int]?
   }
}

/// Provided by the Vision API.
public struct FinishDetails: Decodable {
let type: String
Expand Down

0 comments on commit d88010f

Please sign in to comment.