From e2db04a02b60bdf26a9590916f446250742cef8c Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 26 Jun 2024 17:21:58 +0000
Subject: [PATCH] feat(api): OpenAPI spec update via Stainless API (#101)

---
 .stats.yml                                       |  2 +-
 src/resources/audio/transcriptions.ts            |  3 ++-
 src/resources/chat/completions.ts                | 15 +++++++++++++++
 tests/api-resources/audio/transcriptions.test.ts |  2 +-
 tests/api-resources/chat/completions.test.ts     |  1 +
 5 files changed, 20 insertions(+), 3 deletions(-)

diff --git a/.stats.yml b/.stats.yml
index 86a0c79..8681549 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
 configured_endpoints: 7
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/groqcloud%2Fgroqcloud-d9cb66d87629b96a931ce71a189b1482b6ef980aec670e03ffaf75efde6f306a.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/groqcloud%2Fgroqcloud-f2c71038b5acfa3945dfe9b31211e46e0d3ebc4cba100ce91b04475737aba50f.yml
diff --git a/src/resources/audio/transcriptions.ts b/src/resources/audio/transcriptions.ts
index 72ab990..6705f3b 100644
--- a/src/resources/audio/transcriptions.ts
+++ b/src/resources/audio/transcriptions.ts
@@ -45,6 +45,7 @@ export interface TranscriptionCreateParams {
    * improve accuracy and latency.
    */
   language?:
+    | (string & {})
     | 'en'
     | 'zh'
     | 'de'
@@ -142,7 +143,7 @@ export interface TranscriptionCreateParams {
     | 'ln'
     | 'ha'
     | 'ba'
-    | 'jw'
+    | 'jv'
     | 'su'
     | 'yue';
 
diff --git a/src/resources/chat/completions.ts b/src/resources/chat/completions.ts
index 685cf5e..26e6012 100644
--- a/src/resources/chat/completions.ts
+++ b/src/resources/chat/completions.ts
@@ -801,6 +801,11 @@ export interface ChatCompletionCreateParamsBase {
    */
   stream?: boolean | null;
 
+  /**
+   * Options for streaming response. Only set this when you set `stream: true`.
+   */
+  stream_options?: CompletionCreateParams.StreamOptions | null;
+
   /**
    * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
    * make the output more random, while lower values like 0.2 will make it more
@@ -895,6 +900,16 @@ export namespace CompletionCreateParams {
      */
     type?: 'text' | 'json_object';
   }
+
+  /**
+   * Options for streaming response. Only set this when you set `stream: true`.
+   */
+  export interface StreamOptions {
+    /**
+     * This field is unused
+     */
+    include_usage?: boolean;
+  }
 }
 
 export interface ChatCompletionCreateParamsNonStreaming extends ChatCompletionCreateParamsBase {
diff --git a/tests/api-resources/audio/transcriptions.test.ts b/tests/api-resources/audio/transcriptions.test.ts
index 50ce6df..aa27c59 100644
--- a/tests/api-resources/audio/transcriptions.test.ts
+++ b/tests/api-resources/audio/transcriptions.test.ts
@@ -27,7 +27,7 @@ describe('resource transcriptions', () => {
     const response = await groq.audio.transcriptions.create({
       file: await toFile(Buffer.from('# my file contents'), 'README.md'),
       model: 'whisper-large-v3',
-      language: 'en',
+      language: 'string',
       prompt: 'string',
       response_format: 'json',
       temperature: 0,
diff --git a/tests/api-resources/chat/completions.test.ts b/tests/api-resources/chat/completions.test.ts
index 48e561f..358eb24 100644
--- a/tests/api-resources/chat/completions.test.ts
+++ b/tests/api-resources/chat/completions.test.ts
@@ -44,6 +44,7 @@ describe('resource completions', () => {
       seed: 0,
       stop: '\n',
       stream: true,
+      stream_options: { include_usage: true },
       temperature: 1,
       tool_choice: 'none',
       tools: [
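
The functional changes above are the new `stream_options` request parameter on chat completions and the widening of `language` to `(string & {}) | 'en' | ...`, which lets arbitrary language codes type-check while keeping autocomplete for the known literals; the 'jw' literal is also corrected to 'jv', the ISO 639-1 code for Javanese. A rough usage sketch against the updated types, where the client setup, model names, and the `toFile` import path are assumptions for illustration rather than anything this patch specifies:

import Groq, { toFile } from 'groq-sdk';

const groq = new Groq({ apiKey: process.env.GROQ_API_KEY });

async function main() {
  // `language` now also accepts any string via `(string & {})`, so codes outside
  // the literal list pass the type check without an SDK update.
  const transcription = await groq.audio.transcriptions.create({
    file: await toFile(Buffer.from('placeholder audio bytes'), 'sample.wav'),
    model: 'whisper-large-v3',
    language: 'jv', // Javanese; this patch replaces the old 'jw' literal with 'jv'
  });
  console.log(transcription.text);

  // `stream_options` should only be set together with `stream: true`; note that the
  // spec in this patch documents `include_usage` as currently unused.
  const stream = await groq.chat.completions.create({
    model: 'llama3-8b-8192', // illustrative model name
    messages: [{ role: 'user', content: 'Say hello.' }],
    stream: true,
    stream_options: { include_usage: true },
  });
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
  }
}

main();

The `(string & {})` member is the usual TypeScript trick for "string with suggestions": intersecting `string` with an empty object type keeps the literal union members visible to editor autocomplete instead of letting them collapse into plain `string`.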