Commit 03daecc

Add AWS prices from API (#208)
1 parent fcc53ea · commit 03daecc

File tree

6 files changed: +714 −67 lines


README.md

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ The following providers are currently supported:
 
 - [Anthropic](prices/providers/anthropic.yml) - 15 models
 - [Avian](prices/providers/avian.yml) - 4 models
-- [AWS Bedrock](prices/providers/aws.yml) - 4 models
+- [AWS Bedrock](prices/providers/aws.yml) - 30 models
 - [Microsoft Azure](prices/providers/azure.yml) - 50 models
 - [Cerebras](prices/providers/cerebras.yml) - 4 models
 - [Cohere](prices/providers/cohere.yml) - 5 models

packages/js/src/data.ts

Lines changed: 307 additions & 31 deletions
@@ -469,26 +469,12 @@ export const data: Provider[] = [
     ],
     models: [
       {
-        id: 'meta.llama3-8b-instruct-v1:0',
-        match: {
-          contains: 'llama3-8b-instruct-v1',
-        },
-        prices: {
-          input_mtok: 0.3,
-          output_mtok: 0.6,
-        },
-      },
-      {
-        id: 'nova-lite-v1',
-        name: 'Nova Lite 1.0',
+        id: 'amazon.nova-lite-v1:0',
+        name: 'Nova Lite',
         description:
           'Amazon Nova Lite 1.0 is a very low-cost multimodal model from Amazon that focused on fast processing of image, video, and text inputs to generate text output. Amazon Nova Lite can handle real-time customer interactions, document analysis, and visual question-answering tasks with high accuracy.',
         match: {
-          or: [
-            {
-              contains: 'nova-lite-v1',
-            },
-          ],
+          contains: 'nova-lite',
         },
         prices: {
           input_mtok: 0.06,
@@ -497,16 +483,12 @@ export const data: Provider[] = [
         },
       },
       {
-        id: 'nova-micro-v1',
-        name: 'Nova Micro 1.0',
+        id: 'amazon.nova-micro-v1:0',
+        name: 'Nova Micro',
         description:
           'Amazon Nova Micro 1.0 is a text-only model that delivers the lowest latency responses in the Amazon Nova family of models at a very low cost. With a context length of 128K tokens and optimized for speed and cost, Amazon Nova Micro excels at tasks such as text summarization, translation, content classification, interactive chat, and brainstorming. It has simple mathematical reasoning and coding abilities.',
         match: {
-          or: [
-            {
-              contains: 'nova-micro-v1',
-            },
-          ],
+          contains: 'nova-micro',
         },
         prices: {
           input_mtok: 0.035,
@@ -515,23 +497,317 @@ export const data: Provider[] = [
         },
       },
       {
-        id: 'nova-pro-v1',
-        name: 'Nova Pro 1.0',
+        id: 'amazon.nova-premier-v1:0',
+        name: 'Nova Premier',
+        match: {
+          contains: 'nova-premier',
+        },
+        prices: {
+          input_mtok: 2.5,
+          cache_read_mtok: 0.625,
+          output_mtok: 12.5,
+        },
+      },
+      {
+        id: 'amazon.nova-pro-v1:0',
+        name: 'Nova Pro',
         description:
           'Amazon Nova Pro 1.0 is a capable multimodal model from Amazon focused on providing a combination of accuracy, speed, and cost for a wide range of tasks. As of December 2024, it achieves state-of-the-art performance on key benchmarks including visual question answering (TextVQA) and video understanding (VATEX).',
         match: {
-          or: [
-            {
-              contains: 'nova-pro-v1',
-            },
-          ],
+          contains: 'nova-pro',
         },
         prices: {
           input_mtok: 0.8,
           cache_read_mtok: 0.2,
           output_mtok: 3.2,
         },
       },
+      {
+        id: 'amazon.nova-sonic-v1:0',
+        name: 'Nova Sonic',
+        match: {
+          contains: 'nova-sonic',
+        },
+        prices: {
+          input_mtok: 0.06,
+          output_mtok: 0.24,
+          input_audio_mtok: 3.4,
+          output_audio_mtok: 13.6,
+        },
+      },
+      {
+        id: 'amazon.titan-embed-text-v1',
+        name: 'Titan Embeddings G1 - Text',
+        match: {
+          contains: 'titan-embed-text',
+        },
+        prices: {
+          input_mtok: 0.1,
+        },
+      },
+      {
+        id: 'amazon.titan-text-express-v1',
+        name: 'Titan Text G1 - Express',
+        match: {
+          contains: 'titan-text-express',
+        },
+        prices: {
+          input_mtok: 0.2,
+          output_mtok: 0.6,
+        },
+      },
+      {
+        id: 'amazon.titan-text-lite-v1',
+        name: 'Titan Text G1 - Lite',
+        match: {
+          contains: 'titan-text-lite',
+        },
+        prices: {
+          input_mtok: 0.15,
+          output_mtok: 0.2,
+        },
+      },
+      {
+        id: 'anthropic.claude-3-haiku-20240307-v1:0',
+        name: 'Claude 3 Haiku',
+        match: {
+          contains: 'claude-3-haiku-20240307',
+        },
+        prices: {
+          input_mtok: 0.25,
+        },
+      },
+      {
+        id: 'deepseek.r1-v1:0',
+        name: 'DeepSeek-R1',
+        match: {
+          contains: 'r1',
+        },
+        prices: {
+          input_mtok: 1.35,
+          output_mtok: 5.4,
+        },
+      },
+      {
+        id: 'meta.llama3-1-70b-instruct-v1:0',
+        name: 'Llama 3.1 70B Instruct',
+        match: {
+          contains: 'llama3-1-70b-instruct',
+        },
+        prices: {
+          input_mtok: 0.72,
+          output_mtok: 0.72,
+        },
+      },
+      {
+        id: 'meta.llama3-1-8b-instruct-v1:0',
+        name: 'Llama 3.1 8B Instruct',
+        match: {
+          contains: 'llama3-1-8b-instruct',
+        },
+        prices: {
+          input_mtok: 0.22,
+          output_mtok: 0.22,
+        },
+      },
+      {
+        id: 'meta.llama3-2-11b-instruct-v1:0',
+        name: 'Llama 3.2 11B Instruct',
+        match: {
+          contains: 'llama3-2-11b-instruct',
+        },
+        prices: {
+          input_mtok: 0.16,
+          output_mtok: 0.16,
+        },
+      },
+      {
+        id: 'meta.llama3-2-1b-instruct-v1:0',
+        name: 'Llama 3.2 1B Instruct',
+        match: {
+          contains: 'llama3-2-1b-instruct',
+        },
+        prices: {
+          input_mtok: 0.1,
+          output_mtok: 0.1,
+        },
+      },
+      {
+        id: 'meta.llama3-2-3b-instruct-v1:0',
+        name: 'Llama 3.2 3B Instruct',
+        match: {
+          contains: 'llama3-2-3b-instruct',
+        },
+        prices: {
+          input_mtok: 0.15,
+          output_mtok: 0.15,
+        },
+      },
+      {
+        id: 'meta.llama3-2-90b-instruct-v1:0',
+        name: 'Llama 3.2 90B Instruct',
+        match: {
+          contains: 'llama3-2-90b-instruct',
+        },
+        prices: {
+          input_mtok: 0.72,
+          output_mtok: 0.72,
+        },
+      },
+      {
+        id: 'meta.llama3-3-70b-instruct-v1:0',
+        name: 'Llama 3.3 70B Instruct',
+        match: {
+          contains: 'llama3-3-70b-instruct',
+        },
+        prices: {
+          input_mtok: 0.72,
+          output_mtok: 0.72,
+        },
+      },
+      {
+        id: 'meta.llama3-70b-instruct-v1:0',
+        name: 'Llama 3 70B Instruct',
+        match: {
+          contains: 'llama3-70b-instruct',
+        },
+        prices: {
+          input_mtok: 2.65,
+          output_mtok: 3.5,
+        },
+      },
+      {
+        id: 'meta.llama3-8b-instruct-v1:0',
+        name: 'Llama 3 8B Instruct',
+        match: {
+          contains: 'llama3-8b-instruct',
+        },
+        prices: {
+          input_mtok: 0.3,
+          output_mtok: 0.6,
+        },
+      },
+      {
+        id: 'meta.llama4-maverick-17b-instruct-v1:0',
+        name: 'Llama 4 Maverick 17B Instruct',
+        match: {
+          contains: 'llama4-maverick-17b-instruct',
+        },
+        prices: {
+          input_mtok: 0.24,
+          output_mtok: 0.97,
+        },
+      },
+      {
+        id: 'meta.llama4-scout-17b-instruct-v1:0',
+        name: 'Llama 4 Scout 17B Instruct',
+        match: {
+          contains: 'llama4-scout-17b-instruct',
+        },
+        prices: {
+          input_mtok: 0.17,
+          output_mtok: 0.66,
+        },
+      },
+      {
+        id: 'mistral.mistral-7b-instruct-v0:2',
+        name: 'Mistral 7B Instruct',
+        match: {
+          contains: 'mistral-7b-instruct',
+        },
+        prices: {
+          input_mtok: 0.15,
+          output_mtok: 0.2,
+        },
+      },
+      {
+        id: 'mistral.mistral-large-2402-v1:0',
+        name: 'Mistral Large (24.02)',
+        match: {
+          contains: 'mistral-large-2402',
+        },
+        prices: {
+          input_mtok: 4,
+          output_mtok: 12,
+        },
+      },
+      {
+        id: 'mistral.mistral-small-2402-v1:0',
+        name: 'Mistral Small (24.02)',
+        match: {
+          contains: 'mistral-small-2402',
+        },
+        prices: {
+          input_mtok: 1,
+          output_mtok: 3,
+        },
+      },
+      {
+        id: 'mistral.mixtral-8x7b-instruct-v0:1',
+        name: 'Mixtral 8x7B Instruct',
+        match: {
+          contains: 'mixtral-8x7b-instruct',
+        },
+        prices: {
+          input_mtok: 0.45,
+          output_mtok: 0.7,
+        },
+      },
+      {
+        id: 'mistral.pixtral-large-2502-v1:0',
+        name: 'Pixtral Large (25.02)',
+        match: {
+          contains: 'pixtral-large-2502',
+        },
+        prices: {
+          input_mtok: 2,
+          output_mtok: 6,
+        },
+      },
+      {
+        id: 'openai.gpt-oss-120b-1:0',
+        name: 'gpt-oss-120b',
+        match: {
+          contains: 'gpt-oss-120b',
+        },
+        prices: {
+          input_mtok: 0.15,
+          output_mtok: 0.6,
+        },
+      },
+      {
+        id: 'openai.gpt-oss-20b-1:0',
+        name: 'gpt-oss-20b',
+        match: {
+          contains: 'gpt-oss-20b',
+        },
+        prices: {
+          input_mtok: 0.07,
+          output_mtok: 0.3,
+        },
+      },
+      {
+        id: 'qwen.qwen3-32b-v1:0',
+        name: 'Qwen3 32B (dense)',
+        match: {
+          contains: 'qwen3-32b',
+        },
+        prices: {
+          input_mtok: 0.15,
+          output_mtok: 0.6,
+        },
+      },
+      {
+        id: 'qwen.qwen3-coder-30b-a3b-v1:0',
+        name: 'Qwen3-Coder-30B-A3B-Instruct',
+        match: {
+          contains: 'qwen3-coder-30b-a3b',
+        },
+        prices: {
+          input_mtok: 0.15,
+          output_mtok: 0.6,
+        },
+      },
     ],
   },
   {
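
The new entries all follow the same shape as the existing ones: a canonical Bedrock model id, a `match.contains` substring used to recognize ids that embed the model name, and prices expressed per million tokens (`input_mtok`, `output_mtok`, plus optional `cache_read_mtok` and, for Nova Sonic, audio rates). Below is a minimal sketch, assuming only that shape, of how such an entry could be looked up and turned into a cost estimate; `findModel`, `estimateCost`, and the example id are hypothetical helpers, not part of this commit or the package's API.

```ts
// Hypothetical sketch: the Model/Prices shapes are inferred from the diff above;
// all *_mtok fields are assumed to be USD per 1,000,000 tokens.

interface Prices {
  input_mtok?: number;
  output_mtok?: number;
  cache_read_mtok?: number;
  input_audio_mtok?: number;
  output_audio_mtok?: number;
}

interface Model {
  id: string;
  name?: string;
  description?: string;
  match: { contains: string };
  prices: Prices;
}

// Return the first entry whose match.contains substring occurs in the given id,
// e.g. the id 'us.amazon.nova-pro-v1:0' contains 'nova-pro'.
function findModel(models: Model[], modelId: string): Model | undefined {
  return models.find((m) => modelId.includes(m.match.contains));
}

// Cost estimate for one request: tokens / 1e6 * price-per-million.
function estimateCost(prices: Prices, inputTokens: number, outputTokens: number): number {
  return (
    (inputTokens / 1_000_000) * (prices.input_mtok ?? 0) +
    (outputTokens / 1_000_000) * (prices.output_mtok ?? 0)
  );
}

// Example: 10,000 input and 2,000 output tokens on Nova Pro (0.8 / 3.2 USD per
// million tokens) comes to 0.008 + 0.0064 = 0.0144 USD.
```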
