@@ -1102,17 +1102,20 @@ def test_metadata(self, create_datadreamer):
            assert llm.citation[0].endswith("year={2020}\n}")
            assert llm.citation[1].startswith("@article{ouyang2022training")
            assert llm.citation[1].endswith("year={2022}\n}")
-            llm = OpenAI("gpt-4")
-            assert llm.model_card == "https://cdn.openai.com/papers/gpt-4-system-card.pdf"
-            assert llm.license == "https://openai.com/policies"
-            assert isinstance(llm.citation, list)
-            assert len(llm.citation) == 2
-            assert llm.citation[0].startswith("@article{OpenAI2023GPT4TR,")
-            assert llm.citation[0].endswith(
-                "url={https://api.semanticscholar.org/CorpusID:257532815}\n}"
-            )
-            assert llm.citation[1].startswith("@article{ouyang2022training")
-            assert llm.citation[1].endswith("year={2022}\n}")
+            for gpt_4_model_name in ["gpt-4", "gpt-4o", "gpt-4o-mini"]:
+                llm = OpenAI(gpt_4_model_name)
+                assert (
+                    llm.model_card == "https://cdn.openai.com/papers/gpt-4-system-card.pdf"
+                )
+                assert llm.license == "https://openai.com/policies"
+                assert isinstance(llm.citation, list)
+                assert len(llm.citation) == 2
+                assert llm.citation[0].startswith("@article{OpenAI2023GPT4TR,")
+                assert llm.citation[0].endswith(
+                    "url={https://api.semanticscholar.org/CorpusID:257532815}\n}"
+                )
+                assert llm.citation[1].startswith("@article{ouyang2022training")
+                assert llm.citation[1].endswith("year={2022}\n}")

    def test_count_tokens(self, create_datadreamer):
        with create_datadreamer():
@@ -1122,6 +1125,10 @@ def test_count_tokens(self, create_datadreamer):
    def test_get_max_context_length(self, create_datadreamer):
        with create_datadreamer():
            # Check max context length
+            llm = OpenAI("gpt-4o")
+            assert llm.get_max_context_length(max_new_tokens=0) == 127982
+            llm = OpenAI("gpt-4o-mini")
+            assert llm.get_max_context_length(max_new_tokens=0) == 127982
            llm = OpenAI("gpt-4")
            assert llm.get_max_context_length(max_new_tokens=0) == 8174
            llm = OpenAI("gpt-4-turbo-2024-04-09")
@@ -1136,6 +1143,10 @@ def test_get_max_context_length(self, create_datadreamer):
    def test_get_max_output_length(self, create_datadreamer):
        with create_datadreamer():
            # Check max output length
+            llm = OpenAI("gpt-4o")
+            assert llm._get_max_output_length() == 4096
+            llm = OpenAI("gpt-4o-mini")
+            assert llm._get_max_output_length() == 16384
            llm = OpenAI("gpt-4")
            assert llm._get_max_output_length() is None
            llm = OpenAI("gpt-4-turbo-2024-04-09")
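
The expected values in the context-length hunk follow a simple pattern: each assertion is the model's advertised context window minus a small fixed allowance (8192 - 18 = 8174 for gpt-4, 128000 - 18 = 127982 for gpt-4o and gpt-4o-mini). Below is a minimal sketch of that arithmetic for reference only; the 18-token allowance, the window sizes, and the expected_max_context_length helper are inferred from the asserted numbers, not taken from the DataDreamer source.

# Sketch only: reproduces the numbers asserted above under the assumption that
# the limit is "advertised context window - fixed allowance - max_new_tokens".
# The window sizes and the 18-token allowance are assumptions inferred from the
# asserted values (8192 - 8174 == 128000 - 127982 == 18), not from the library.
ASSUMED_CONTEXT_WINDOWS = {"gpt-4": 8192, "gpt-4o": 128_000, "gpt-4o-mini": 128_000}
ASSUMED_ALLOWANCE = 18


def expected_max_context_length(model_name: str, max_new_tokens: int = 0) -> int:
    """Compute the value the test expects for a given model and generation budget."""
    return ASSUMED_CONTEXT_WINDOWS[model_name] - ASSUMED_ALLOWANCE - max_new_tokens


assert expected_max_context_length("gpt-4") == 8174
assert expected_max_context_length("gpt-4o") == 127982
assert expected_max_context_length("gpt-4o-mini") == 127982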