
Commit 6692634

feat(tracing): strip inline media from messages (#18413)
This is the functional portion addressing JS-1002. Before text messages are truncated to fit their byte limit, any inline base64-encoded media properties are filtered out. This makes it possible for the message to still be included in the span, indicating to the user that a media object was present, without overflowing the allotted buffer for sending data. If a message is still too large after its media has been filtered, the fallback remains to simply drop it. If any truncation occurs, `client.recordDroppedEvent()` is called with reason `before_send`, category `attachment`, and the count of messages affected.

Re JS-1002
Re GH-17810
1 parent 5c5c7d4 commit 6692634
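
For illustration, here is a minimal sketch of the stripping step described above, written against the Anthropic-style message shape used in the scenario below. The helper name `stripInlineMedia` and its signature are assumptions made for this sketch, not the SDK's actual code; the `'[Filtered]'` placeholder and the drop-reporting details come from the commit message and its test.

// Minimal sketch, not the SDK's implementation: replace inline base64 media
// payloads with a placeholder before messages are serialized and truncated
// for the span attribute. `stripInlineMedia` is a hypothetical helper name.
function stripInlineMedia(messages) {
  let filteredCount = 0;

  const stripped = messages.map(message => {
    // Plain string content carries no inline media; leave it untouched.
    if (!Array.isArray(message.content)) {
      return message;
    }

    const content = message.content.map(part => {
      // Anthropic-style inline media part:
      // { type: 'image', source: { type: 'base64', media_type: 'image/png', data: '...' } }
      if (part && part.type === 'image' && part.source && part.source.type === 'base64') {
        filteredCount++;
        return { ...part, source: { ...part.source, data: '[Filtered]' } };
      }
      return part;
    });

    return { ...message, content };
  });

  // Per the commit message, when anything is filtered or truncated the SDK
  // reports it via `client.recordDroppedEvent()` with reason `before_send`,
  // category `attachment`, and the number of affected messages.
  return { stripped, filteredCount };
}

Applied to the scenario's first user message below, such a helper would leave everything intact except the `data` string, which becomes `'[Filtered]'`, the same value the updated test asserts inside `gen_ai.request.messages`.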

4 files changed (+687 -26 lines)

dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-media-truncation.mjs

Lines changed: 79 additions & 0 deletions
@@ -0,0 +1,79 @@
import { instrumentAnthropicAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

class MockAnthropic {
  constructor(config) {
    this.apiKey = config.apiKey;
    this.baseURL = config.baseURL;

    // Create messages object with create method
    this.messages = {
      create: this._messagesCreate.bind(this),
    };
  }

  /**
   * Create a mock message
   */
  async _messagesCreate(params) {
    // Simulate processing time
    await new Promise(resolve => setTimeout(resolve, 10));

    return {
      id: 'msg-truncation-test',
      type: 'message',
      role: 'assistant',
      content: [
        {
          type: 'text',
          text: 'This is the number **3**.',
        },
      ],
      model: params.model,
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: {
        input_tokens: 10,
        output_tokens: 15,
      },
    };
  }
}

async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const mockClient = new MockAnthropic({
      apiKey: 'mock-api-key',
    });

    const client = instrumentAnthropicAiClient(mockClient);

    // Send the image showing the number 3
    await client.messages.create({
      model: 'claude-3-haiku-20240307',
      max_tokens: 1024,
      messages: [
        {
          role: 'user',
          content: [
            {
              type: 'image',
              source: {
                type: 'base64',
                media_type: 'image/png',
                data: 'base64-mumbo-jumbo'.repeat(100),
              },
            },
          ],
        },
        {
          role: 'user',
          content: 'what number is this?',
        },
      ],
      temperature: 0.7,
    });
  });
}

run();

dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts

Lines changed: 48 additions & 0 deletions
@@ -661,4 +661,52 @@ describe('Anthropic integration', () => {
      });
    },
  );

  createEsmAndCjsTests(__dirname, 'scenario-media-truncation.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
    test('truncates media attachment, keeping all other details', async () => {
      await createRunner()
        .ignore('event')
        .expect({
          transaction: {
            transaction: 'main',
            spans: expect.arrayContaining([
              expect.objectContaining({
                data: expect.objectContaining({
                  'gen_ai.operation.name': 'messages',
                  'sentry.op': 'gen_ai.messages',
                  'sentry.origin': 'auto.ai.anthropic',
                  'gen_ai.system': 'anthropic',
                  'gen_ai.request.model': 'claude-3-haiku-20240307',
                  'gen_ai.request.messages': JSON.stringify([
                    {
                      role: 'user',
                      content: [
                        {
                          type: 'image',
                          source: {
                            type: 'base64',
                            media_type: 'image/png',
                            data: '[Filtered]',
                          },
                        },
                      ],
                    },
                    {
                      role: 'user',
                      content: 'what number is this?',
                    },
                  ]),
                }),
                description: 'messages claude-3-haiku-20240307',
                op: 'gen_ai.messages',
                origin: 'auto.ai.anthropic',
                status: 'ok',
              }),
            ]),
          },
        })
        .start()
        .completed();
    });
  });
});
