[OpenAI] Support Top Logprob in Chat Completions #29432

Closed · wants to merge 5 commits
1 change: 1 addition & 0 deletions sdk/openai/openai/CHANGELOG.md
@@ -5,6 +5,7 @@
### Features Added

- Create a new `OpenAIError` model to represent the error model returned.
- Add support for `logprobs` and `topLogprobs` in `GetChatCompletionsOptions` to enable returning log probabilities for chat completions.

### Bugs Fixed

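For orientation (not part of the diff): a minimal sketch of how the new `logprobs`/`topLogprobs` options might be passed to `getChatCompletions`, mirroring the test added later in this PR. The endpoint, key, and deployment name are placeholders.

```ts
import { OpenAIClient, AzureKeyCredential } from "@azure/openai";

// Placeholder endpoint, key, and deployment name; substitute your own resource values.
const client = new OpenAIClient(
  "https://<your-resource>.openai.azure.com/",
  new AzureKeyCredential("<api-key>"),
);

export async function main(): Promise<void> {
  const result = await client.getChatCompletions(
    "<chat-deployment>",
    [{ role: "user", content: "What's the weather like in Boston?" }],
    {
      logprobs: true, // return a log probability for each output token
      topLogprobs: 3, // also return the 3 most likely alternatives at each position
    },
  );
  console.log(result.choices[0].message?.content);
}

main().catch(console.error);
```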
2 changes: 1 addition & 1 deletion sdk/openai/openai/assets.json
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "js",
"TagPrefix": "js/openai/openai",
"Tag": "js/openai/openai_4baca95ece"
"Tag": "js/openai/openai_395ad83731"
}
2 changes: 2 additions & 0 deletions sdk/openai/openai/review/openai-models.api.md
@@ -661,6 +661,7 @@ export interface GetChatCompletionsOptions extends OperationOptions {
functionCall?: FunctionCallPreset | FunctionName;
functions?: FunctionDefinition[];
logitBias?: Record<string, number>;
logprobs?: boolean;
maxTokens?: number;
n?: number;
presencePenalty?: number;
@@ -670,6 +671,7 @@ export interface GetChatCompletionsOptions extends OperationOptions {
temperature?: number;
toolChoice?: ChatCompletionsNamedToolSelectionUnion;
tools?: ChatCompletionsToolDefinitionUnion[];
topLogprobs?: number;
topP?: number;
user?: string;
}
2 changes: 2 additions & 0 deletions sdk/openai/openai/review/openai.api.md
@@ -626,6 +626,7 @@ export interface GetChatCompletionsOptions extends OperationOptions {
functionCall?: FunctionCallPreset | FunctionName;
functions?: FunctionDefinition[];
logitBias?: Record<string, number>;
logprobs?: boolean;
maxTokens?: number;
n?: number;
presencePenalty?: number;
@@ -635,6 +636,7 @@ export interface GetChatCompletionsOptions extends OperationOptions {
temperature?: number;
toolChoice?: ChatCompletionsNamedToolSelectionUnion;
tools?: ChatCompletionsToolDefinitionUnion[];
topLogprobs?: number;
topP?: number;
user?: string;
}
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/audioTranscription.ts
@@ -33,5 +33,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/audioTranslation.ts
@@ -33,5 +33,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/bringYourOwnData.ts
@@ -68,5 +68,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
3 changes: 1 addition & 2 deletions sdk/openai/openai/samples-dev/chatCompletions.ts
@@ -37,6 +37,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});

2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/completions.ts
@@ -34,5 +34,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/getEmbeddings.ts
@@ -37,5 +37,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/getImages.ts
@@ -39,5 +39,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/openAi.ts
@@ -34,5 +34,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/streamChatCompletions.ts
@@ -43,5 +43,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
@@ -64,5 +64,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/streamCompletions.ts
@@ -36,5 +36,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
2 changes: 1 addition & 1 deletion sdk/openai/openai/samples-dev/toolCall.ts
@@ -62,5 +62,5 @@ export async function main() {
}

main().catch((err) => {
parseOpenAIError(err)
parseOpenAIError(err);
});
10 changes: 10 additions & 0 deletions sdk/openai/openai/src/models/options.ts
@@ -233,4 +233,14 @@ export interface GetChatCompletionsOptions extends OperationOptions {
* This additional specification is only compatible with Azure OpenAI.
*/
azureExtensionOptions?: AzureExtensionsOptions;
/**
* Whether to return log probabilities of the output tokens. If true, the log probabilities of
* each output token are returned in the `content` of `message`.
*/
logprobs?: boolean;
/**
* An integer between 0 and 5 specifying the number of most likely tokens to return at each token position,
* each with an associated log probability. `logprobs` must be set to `true` if this parameter is used.
*/
topLogprobs?: number;
Comment on lines +240 to +245

Member:

I wonder, do we need two knobs to control this feature? Can't we just make do with `topLogprobs`? Perhaps it is more of a question for the architects.

Contributor Author:

This is a pattern in the OpenAI API, so it seems most languages are surfacing it. In .NET, they rename `logprobs` to `EnableLogProbabilities` and `topLogprobs` to `LogProbabilitiesPerToken`.

}
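Not part of the diff: a sketch of how a caller might read the returned probabilities once a request like the one above succeeds. The field names (`choice.logprobs?.content`, `token`, `logprob`, `topLogprobs`) are taken from the assertion helpers added in this PR, so treat the exact response shape as an assumption until the final API review lands.

```ts
import type { ChatCompletions } from "@azure/openai";

// Prints each generated token with its probability and the requested alternatives.
function printTokenProbabilities(result: ChatCompletions): void {
  for (const choice of result.choices) {
    for (const tokenResult of choice.logprobs?.content ?? []) {
      // `logprob` is a natural-log probability; Math.exp maps it back into [0, 1].
      console.log(`${tokenResult.token}: p=${Math.exp(tokenResult.logprob).toFixed(4)}`);
      for (const alternative of tokenResult.topLogprobs ?? []) {
        console.log(`  alternative ${alternative.token}: logprob=${alternative.logprob}`);
      }
    }
  }
}
```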
29 changes: 29 additions & 0 deletions sdk/openai/openai/test/public/completions.spec.ts
@@ -465,6 +465,35 @@ describe("OpenAI", function () {
authMethod,
);
});

it("returns log prob information", async function () {
updateWithSucceeded(
await withDeployments(
getSucceeded(
authMethod,
deployments,
models,
chatCompletionDeployments,
chatCompletionModels,
),
(deploymentName) =>
client.getChatCompletions(
deploymentName,
[{ role: "user", content: "What's the weather like in Boston?" }],
{
logprobs: true,
topLogprobs: 3,
},
),
(result) => {
assertChatCompletions(result);
},
),
chatCompletionDeployments,
chatCompletionModels,
authMethod,
);
});
});

describe("streamChatCompletions", function () {
19 changes: 19 additions & 0 deletions sdk/openai/openai/test/public/utils/asserts.ts
@@ -21,6 +21,8 @@ import {
ChatCompletionsToolCallUnion,
ChatFinishDetails,
ChatResponseMessage,
ChatTokenLogProbabilityInfo,
ChatTokenLogProbabilityResult,
Choice,
Completions,
CompletionsLogProbabilityModel,
@@ -277,12 +279,29 @@ function assertChatChoice(choice: ChatChoice, options: ChatCompletionTestOptions
assert.isUndefined(choice.delta);
}
assert.isNumber(choice.index);
ifDefined(choice.logprobs?.content, (content) =>
assertNonEmptyArray(content, assertLogprobResult),
);
ifDefined(choice.contentFilterResults, assertContentFilterResultsForChoice);
ifDefined(choice.finishReason, assert.isString);
ifDefined(choice.finishDetails, assertChatFinishDetails);
ifDefined(choice.enhancements, assertAzureChatEnhancements);
}

function assertLogprobResult(logprobResult: ChatTokenLogProbabilityResult): void {
assert.isNumber(logprobResult.logprob);
assert.isString(logprobResult.token);
ifDefined(logprobResult.bytes, (bytes) => assertNonEmptyArray(bytes, assert.isNumber));
ifDefined(logprobResult.topLogprobs, (topLogprobs) =>
assertNonEmptyArray(topLogprobs, assertTokenLogProbInfo),
);
}

function assertTokenLogProbInfo(tokenLogprobInfo: ChatTokenLogProbabilityInfo): void {
ifDefined(tokenLogprobInfo.bytes, (bytes) => assertNonEmptyArray(bytes, assert.isNumber));
assert.isNumber(tokenLogprobInfo.logprob);
assert.isString(tokenLogprobInfo.token);
}
function assertUsage(usage: CompletionsUsage | undefined): void {
assert.isDefined(usage);
const castUsage = usage as CompletionsUsage;
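For reference, the per-token shape implied by `assertLogprobResult` and `assertTokenLogProbInfo` above. This is a reconstruction from the assertions, not the SDK's own declarations, so names and nullability may differ slightly in the generated models.

```ts
// Reconstructed from the assertion helpers; illustrative only.
interface TokenLogProbabilityInfoSketch {
  token: string;          // the token text
  logprob: number;        // natural-log probability assigned to the token
  bytes: number[] | null; // UTF-8 byte values of the token, when available
}

interface TokenLogProbabilityResultSketch extends TokenLogProbabilityInfoSketch {
  // Present when `topLogprobs` was requested: the most likely tokens at this position.
  topLogprobs: TokenLogProbabilityInfoSketch[] | null;
}
```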