Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-21 11:49:59 +00:00
feat(Anthropic Chat Model Node): Fetch models dynamically & support thinking (#13543)
@@ -0,0 +1,60 @@
import type {
	ILoadOptionsFunctions,
	INodeListSearchItems,
	INodeListSearchResult,
} from 'n8n-workflow';

export interface AnthropicModel {
	id: string;
	display_name: string;
	type: string;
	created_at: string;
}

export async function searchModels(
	this: ILoadOptionsFunctions,
	filter?: string,
): Promise<INodeListSearchResult> {
	const response = (await this.helpers.httpRequestWithAuthentication.call(this, 'anthropicApi', {
		url: 'https://api.anthropic.com/v1/models',
		headers: {
			'anthropic-version': '2023-06-01',
		},
	})) as { data: AnthropicModel[] };

	const models = response.data || [];
	let results: INodeListSearchItems[] = [];

	if (filter) {
		for (const model of models) {
			if (model.id.toLowerCase().includes(filter.toLowerCase())) {
				results.push({
					name: model.display_name,
					value: model.id,
				});
			}
		}
	} else {
		results = models.map((model) => ({
			name: model.display_name,
			value: model.id,
		}));
	}

	// Sort models with more recent ones first (claude-3 before claude-2)
	results = results.sort((a, b) => {
		const modelA = models.find((m) => m.id === a.value);
		const modelB = models.find((m) => m.id === b.value);

		if (!modelA || !modelB) return 0;

		// Sort by created_at date, most recent first
		const dateA = new Date(modelA.created_at);
		const dateB = new Date(modelB.created_at);
		return dateB.getTime() - dateA.getTime();
	});

	return {
		results,
	};
}
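For context, a minimal sketch of how a listSearch method like this is typically consumed in an n8n node: the function is exposed under the node's methods.listSearch, and a resourceLocator parameter references it by name through searchListMethod. This wiring is not part of the diff above; the import path, constant names, and omitted node-description fields are illustrative assumptions.

// Illustrative wiring only; the import path and names below are assumptions,
// not taken from this commit.
import type { INodeProperties, INodeType } from 'n8n-workflow';
import { searchModels } from './methods/loadModels';

// The model picker: a resourceLocator whose "From List" mode calls searchModels.
export const modelProperty: INodeProperties = {
	displayName: 'Model',
	name: 'model',
	type: 'resourceLocator',
	default: { mode: 'list', value: '' },
	modes: [
		{
			displayName: 'From List',
			name: 'list',
			type: 'list',
			// Points the searchable dropdown at the searchModels function above
			typeOptions: { searchListMethod: 'searchModels', searchable: true },
		},
		{ displayName: 'ID', name: 'id', type: 'string' },
	],
};

// On the node class, the function is registered under methods.listSearch so the
// UI can invoke it by the name given in searchListMethod.
export const nodeMethods: INodeType['methods'] = {
	listSearch: { searchModels },
};

At runtime the selected model id would then be read with something like this.getNodeParameter('model', itemIndex, '', { extractValue: true }), which resolves the resourceLocator to the plain model id string.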