
Commit f1974c4

Remove preview versions. Add examples.
1 parent 063335d commit f1974c4

72 files changed: 1204 additions, 8974 deletions


specification/ai/ContentUnderstanding/audioVisualContent.tsp

Lines changed: 34 additions & 33 deletions
@@ -28,7 +28,7 @@ model AudioVisualContent extends MediaContent {
   @doc("Height of each video frame in pixels, if applicable.")
   height?: int32;

-  @added(Versions.v2025_05_01_preview)
+  // @added(Versions.v2025_05_01_preview)
   @doc("List of camera shot changes in the video, represented by its timestamp in milliseconds. Only if returnDetails is true.")
   cameraShotTimesMs?: int64[];

@@ -38,14 +38,14 @@ model AudioVisualContent extends MediaContent {
   @doc("List of transcript phrases. Only if returnDetails is true.")
   transcriptPhrases?: TranscriptPhrase[];

-  @removed(Versions.v2025_05_01_preview)
-  @doc("List of faces in the video. Only if enableFace and returnDetails are true.")
-  faces?: ImageFace[];
+  // @removed(Versions.v2025_05_01_preview)
+  // @doc("List of faces in the video. Only if enableFace and returnDetails are true.")
+  // faces?: ImageFace[];

-  @added(Versions.v2025_05_01_preview)
-  @removed(Versions.v2025_11_01)
-  @doc("List of detected persons in the video. Only if enableFace and returnDetails are true.")
-  persons?: DetectedPerson[];
+  // @added(Versions.v2025_05_01_preview)
+  // @removed(Versions.v2025_11_01)
+  // @doc("List of detected persons in the video. Only if enableFace and returnDetails are true.")
+  // persons?: DetectedPerson[];

   @added(Versions.v2025_11_01)
   @doc("List of detected content segments. Only if enableSegment is true.")

@@ -96,38 +96,39 @@ model TranscriptWord {
   span?: ContentSpan;
 }

-@added(Versions.v2025_05_01_preview)
-@removed(Versions.v2025_11_01)
-@doc("Audio visual segment, such as a scene, chapter, etc.")
-model AudioVisualSegment {
-  @doc("Segment ID.")
-  segmentId: string;
+// @added(Versions.v2025_05_01_preview)
+// @removed(Versions.v2025_11_01)
+// @doc("Audio visual segment, such as a scene, chapter, etc.")
+// model AudioVisualSegment {
+//   @doc("Segment ID.")
+//   segmentId: string;

-  @doc("Start time of the segment in milliseconds.")
-  startTimeMs: int64;
+//   @doc("Start time of the segment in milliseconds.")
+//   startTimeMs: int64;

-  @doc("End time of the segment in milliseconds.")
-  endTimeMs: int64;
+//   @doc("End time of the segment in milliseconds.")
+//   endTimeMs: int64;

-  @doc("Short description of the segment.")
-  description: string;
+//   @doc("Short description of the segment.")
+//   description: string;

-  @doc("Span of the segment in the markdown content.")
-  span?: ContentSpan;
-}
+//   @doc("Span of the segment in the markdown content.")
+//   span?: ContentSpan;
+// }

-@added(Versions.v2025_05_01_preview)
-@doc("Detected person.")
-model DetectedPerson {
-  @doc("Person identifier in the optional person directory if found. Otherwise, each unknown person is assigned a unique `Person-{Number}`.")
-  personId?: string;
+// @added(Versions.v2025_05_01_preview)
+// @removed(Versions.v2025_11_01)
+// @doc("Detected person.")
+// model DetectedPerson {
+//   @doc("Person identifier in the optional person directory if found. Otherwise, each unknown person is assigned a unique `Person-{Number}`.")
+//   personId?: string;

-  @doc("Confidence of the person identification, if a person directory is provided.")
-  confidence?: float32;
+//   @doc("Confidence of the person identification, if a person directory is provided.")
+//   confidence?: float32;

-  @doc("Encoded source that identifies the position of the person in the input content.")
-  source?: SourceExpression;
-}
+//   @doc("Encoded source that identifies the position of the person in the input content.")
+//   source?: SourceExpression;
+// }

 @added(Versions.v2025_11_01)
 @doc("Detected audio/visual content segment.")
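Every block commented out in this file references Versions.v2025_05_01_preview. The likely reason, implied by the commit title rather than stated in the file itself, is that the preview entries are being dropped from the service's Versions enum, and any @added/@removed decorator that still points at a deleted enum member would no longer compile. A minimal, hypothetical sketch of the @typespec/versioning pattern involved (the Example namespace and member names below are illustrative, not part of the ContentUnderstanding spec):

// Hypothetical sketch of the @typespec/versioning pattern; Example, VideoResult,
// and segments are illustrative names, not part of the ContentUnderstanding spec.
import "@typespec/versioning";

using TypeSpec.Versioning;

@versioned(Versions)
namespace Example;

enum Versions {
  // If a preview entry such as v2025_05_01_preview is deleted from this enum,
  // every @added/@removed decorator that references it has to go as well,
  // which is what the commented-out lines above are doing.
  v2025_11_01: "2025-11-01",
}

model VideoResult {
  // Present in every declared version.
  height?: int32;

  // Only part of the 2025-11-01 (and later) API surface.
  @added(Versions.v2025_11_01)
  segments?: string[];
}

Commenting the preview-only declarations out rather than deleting them removes them from the compiled API surface while keeping them visible in the source.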

specification/ai/ContentUnderstanding/client.tsp

Lines changed: 22 additions & 8 deletions
@@ -16,20 +16,42 @@ namespace ClientCustomizations;
 interface ContentUnderstandingClient {
   // Flatten all ContentAnalyzers operations to client level
   // LRO operations will automatically get "begin_" prefix in Python
+
   analyze is ContentAnalyzers.analyze;
+
   analyzeBinary is ContentAnalyzers.analyzeBinary;
+
   copyAnalyzer is ContentAnalyzers.copy;
+
+  #suppress "@azure-tools/typespec-azure-core/use-standard-names" "Doesn't fit standard naming"
   createAnalyzer is ContentAnalyzers.createOrReplace;
+
   deleteAnalyzer is ContentAnalyzers.delete;
+
+  #suppress "@azure-tools/typespec-azure-core/use-standard-operations" "Doesn't fit standard ops"
   deleteResult is ContentAnalyzers.deleteResult;
+
   getAnalyzer is ContentAnalyzers.get;
+
+  #suppress "@azure-tools/typespec-azure-core/use-standard-operations" "Doesn't fit standard ops"
   getDefaults is ContentAnalyzers.getDefaults;
+
+  #suppress "@azure-tools/typespec-azure-core/use-standard-operations" "Doesn't fit standard ops"
   getOperationStatus is ContentAnalyzers.getOperationStatus;
+
+  #suppress "@azure-tools/typespec-azure-core/use-standard-operations" "Doesn't fit standard ops"
   getResult is ContentAnalyzers.getResult;
+
+  #suppress "@azure-tools/typespec-azure-core/use-standard-operations" "Doesn't fit standard ops"
   getResultFile is ContentAnalyzers.getResultFile;
+
   grantCopyAuthorization is ContentAnalyzers.grantCopyAuthorization;
+
   listAnalyzers is ContentAnalyzers.list;
+
   updateAnalyzer is ContentAnalyzers.update;
+
+  #suppress "@azure-tools/typespec-azure-core/use-standard-operations" "Doesn't fit standard ops"
   updateDefaults is ContentAnalyzers.updateDefaults;
 }

@@ -53,8 +75,6 @@ interface ContentUnderstandingClient {
 @@clientName(AnalyzeInput.range, "inputRange");
 @@clientName(AnalyzeBinaryRequest.range, "inputRange");

-//@@clientName(ContentFieldDefinition.$ref, "reference", "javascript");
-
 // Mark polling operations as internal - client generators automatically handle
 // the polling pattern for long-running operations. When users call analyze(), the
 // generated SDK returns a poller that internally uses these endpoints to check status

@@ -73,9 +93,3 @@ interface ContentUnderstandingClient {
   "getAnalyzers",
   "csharp"
 );
-
-// Client-only docs clarifying that these properties accept raw binary bytes (not base64 strings) in Python SDK.
-@@clientDoc(AnalyzeInput.data,
-  "Raw image bytes. Provide bytes-like object; do not base64-encode. Only one of url or data should be specified.",
-  DocumentationMode.replace
-);
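Two customization mechanisms carry most of this file's changes: #suppress directives, which silence a single linter diagnostic on the declaration that follows and require a written justification, and @@-prefixed augment decorators such as @@clientName, which attach client-side metadata to an existing declaration without editing the service definition. A small, hypothetical, self-contained sketch of both (the Demo* names are illustrative and not part of this spec):

// Hypothetical, self-contained sketch; all Demo* names are illustrative only.
import "@azure-tools/typespec-azure-core";
import "@azure-tools/typespec-client-generator-core";

using Azure.ClientGenerator.Core;

namespace DemoService {
  model Thing {
    id: string;
    range?: string;
  }

  op getThing(id: string): Thing;
}

namespace DemoCustomizations {
  interface DemoClient {
    // Silence one linter rule for this declaration only, with a justification,
    // instead of disabling the rule for the whole project.
    #suppress "@azure-tools/typespec-azure-core/use-standard-operations" "Doesn't fit standard ops"
    getThing is DemoService.getThing;
  }
}

// Augment decorator: rename the wire property `range` to `inputRange` in
// generated clients while leaving the service model itself untouched.
@@clientName(DemoService.Thing.range, "inputRange");

The #suppress lines added in this commit follow the same shape: rule identifier first, justification second, scoped to a single operation rather than the whole project.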

specification/ai/ContentUnderstanding/documentContent.tsp

Lines changed: 14 additions & 13 deletions
@@ -42,9 +42,10 @@ model DocumentContent extends MediaContent {
   @doc("List of figures in the document. Only if enableLayout and returnDetails are true.")
   figures?: DocumentFigure[];

-  @added(Versions.v2025_05_01_preview)
-  @doc("List of detected persons in the document. Only if enableFace and returnDetails are true.")
-  persons?: DetectedPerson[];
+  // @added(Versions.v2025_05_01_preview)
+  // @removed(Versions.v2025_11_01)
+  // @doc("List of detected persons in the document. Only if enableFace and returnDetails are true.")
+  // persons?: DetectedPerson[];

   @added(Versions.v2025_11_01)
   @doc("List of annotations in the document. Only if enableAnnotations and returnDetails are true.")

@@ -488,18 +489,18 @@ union DocumentTableCellKind {
   description: "description",
 }

-@removed(Versions.v2025_05_01_preview)
-@doc("Face in an image.")
-model ImageFace {
-  @doc("Face identifier.")
-  faceId?: string;
+// @removed(Versions.v2025_05_01_preview)
+// @doc("Face in an image.")
+// model ImageFace {
+//   @doc("Face identifier.")
+//   faceId?: string;

-  @doc("Confidence of predicting the face.")
-  confidence?: float32;
+//   @doc("Confidence of predicting the face.")
+//   confidence?: float32;

-  @doc("Encoded source that identifies the position of the face in the content.")
-  source?: SourceExpression;
-}
+//   @doc("Encoded source that identifies the position of the face in the content.")
+//   source?: SourceExpression;
+// }

 @added(Versions.v2025_11_01)
 @doc("Annotation in a document, such as a strikethrough or a comment.")

Lines changed: 7 additions & 3 deletions
@@ -2,16 +2,20 @@
   "title": "Analyze URL",
   "operationId": "ContentAnalyzers_Analyze",
   "parameters": {
-    "api-version": "2025-05-01-preview",
+    "api-version": "2025-11-01",
     "analyzerId": "myAnalyzer",
     "body": {
-      "url": "https://host.com/doc.pdf"
+      "inputs": [
+        {
+          "url": "https://host.com/doc.pdf"
+        }
+      ]
     }
   },
   "responses": {
     "202": {
       "headers": {
-        "Operation-Location": "https://myendpoint.cognitiveservices.azure.com/contentunderstanding/analyzerResults/3b31320d-8bab-4f88-b19c-2322a7f11034?api-version=2025-05-01-preview"
+        "Operation-Location": "https://myendpoint.cognitiveservices.azure.com/contentunderstanding/analyzerResults/3b31320d-8bab-4f88-b19c-2322a7f11034?api-version=2025-11-01"
       },
       "body": {
         "id": "3b31320d-8bab-4f88-b19c-2322a7f11034",

Lines changed: 2 additions & 2 deletions
@@ -2,14 +2,14 @@
   "title": "Analyze File",
   "operationId": "ContentAnalyzers_AnalyzeBinary",
   "parameters": {
-    "api-version": "2025-05-01-preview",
+    "api-version": "2025-11-01",
     "analyzerId": "myAnalyzer",
     "input": "RXhhbXBsZSBGaWxl"
   },
   "responses": {
     "202": {
       "headers": {
-        "Operation-Location": "https://myendpoint.cognitiveservices.azure.com/contentunderstanding/analyzerResults/3b31320d-8bab-4f88-b19c-2322a7f11034?api-version=2025-05-01-preview"
+        "Operation-Location": "https://myendpoint.cognitiveservices.azure.com/contentunderstanding/analyzerResults/3b31320d-8bab-4f88-b19c-2322a7f11034?api-version=2025-11-01"
      },
       "body": {
         "id": "3b31320d-8bab-4f88-b19c-2322a7f11034",

Lines changed: 44 additions & 0 deletions
@@ -0,0 +1,44 @@
+{
+  "title": "Copy Analyzer",
+  "operationId": "ContentAnalyzers_Copy",
+  "parameters": {
+    "api-version": "2025-11-01",
+    "analyzerId": "targetAnalyzer",
+    "body": {
+      "sourceAzureResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.CognitiveServices/accounts/myResource",
+      "sourceRegion": "westus2",
+      "sourceAnalyzerId": "sourceAnalyzer"
+    }
+  },
+  "responses": {
+    "202": {
+      "headers": {
+        "Operation-Location": "https://myendpoint.cognitiveservices.azure.com/contentunderstanding/analyzers/targetAnalyzer/operations/3b31320d-8bab-4f88-b19c-2322a7f11034?api-version=2025-11-01"
+      },
+      "body": {
+        "analyzerId": "targetAnalyzer",
+        "description": "My analyzer",
+        "status": "creating",
+        "createdAt": "2025-05-01T18:46:36.051Z",
+        "lastModifiedAt": "2025-05-01T18:46:36.051Z",
+        "baseAnalyzerId": "prebuilt-document",
+        "config": {
+          "enableOcr": true,
+          "enableLayout": true,
+          "returnDetails": true
+        },
+        "fieldSchema": {
+          "name": "MyForm",
+          "description": "My form",
+          "fields": {
+            "Company": {
+              "type": "string",
+              "description": "Name of company."
+            }
+          },
+          "definitions": {}
+        }
+      }
+    }
+  }
+}
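Beyond the api-version bump, the Analyze URL example earlier in this commit also shows a request-shape change: the analyze body now wraps the document reference in an inputs array instead of passing a top-level url. A rough, hypothetical TypeSpec sketch of the shape that example implies; only the url, data, and range property names are corroborated elsewhere in this commit (client.tsp), and the AnalyzeRequest wrapper name is invented here for illustration:

// Hypothetical sketch only: the property names url, data, and range appear elsewhere
// in this commit; AnalyzeRequest and the doc strings are invented for illustration.
@doc("One item to analyze. Only one of url or data should be specified.")
model AnalyzeInput {
  @doc("URL of the content to analyze.")
  url?: url;

  @doc("Raw bytes of the content to analyze.")
  data?: bytes;

  @doc("Range of the content to analyze, such as pages or a time span.")
  range?: string;
}

@doc("Request body shape implied by the Analyze URL example: one or more inputs.")
model AnalyzeRequest {
  inputs: AnalyzeInput[];
}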
