@@ -100,13 +100,15 @@ def test_ai21_llm_sync(langchain_community, request_vcr):
     llm.invoke("Why does everyone in Bikini Bottom hate Plankton?")


+@flaky(until=1754218112, reason="Problematic test that needs fixing")
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_openai_chat_model_sync_call_langchain_openai(langchain_openai, request_vcr):
     chat = langchain_openai.ChatOpenAI(temperature=0, max_tokens=256)
     with request_vcr.use_cassette("openai_chat_completion_sync_call.yaml"):
         chat.invoke(input=[langchain.schema.HumanMessage(content="When do you use 'whom' instead of 'who'?")])


+@flaky(until=1754218112, reason="Problematic test that needs fixing")
 @pytest.mark.skipif(LANGCHAIN_VERSION < (0, 3), reason="Requires at least LangChain 0.3")
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_openai_chat_model_sync_generate(langchain_openai, request_vcr):
@@ -128,6 +130,7 @@ def test_openai_chat_model_sync_generate(langchain_openai, request_vcr):
     )


+@flaky(until=1754218112, reason="Problematic test that needs fixing")
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_openai_chat_model_vision_generate(langchain_openai, request_vcr):
     """
@@ -472,6 +475,7 @@ def test_streamed_llm(langchain_openai, streamed_response_responder):
         pass


+@flaky(until=1754218112, reason="Problematic test that needs fixing")
 @pytest.mark.snapshot(
     ignores=IGNORE_FIELDS,
     token="tests.contrib.langchain.test_langchain.test_streamed_chain",
@@ -496,6 +500,7 @@ async def test_astreamed_chain(langchain_core, langchain_openai, async_streamed_
         pass


+@flaky(until=1754218112, reason="Problematic test that needs fixing")
 @pytest.mark.snapshot(
     ignores=IGNORE_FIELDS,
     token="tests.contrib.langchain.test_langchain.test_streamed_chat",
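Each affected test gains the same marker, `@flaky(until=1754218112, reason="Problematic test that needs fixing")`, where `until` appears to be a Unix-timestamp deadline (roughly early August 2025). Below is a minimal sketch of what such a time-bounded flaky decorator could look like, assuming a project-local helper that turns failures into xfails until the deadline; the name, signature, and behavior are assumptions for illustration, not the repository's actual implementation.

```python
# Hypothetical sketch of a time-bounded "flaky" marker; the real helper in the
# test suite may differ. Sync tests only; async tests would need an
# async-aware wrapper.
import functools
import time

import pytest


def flaky(until, reason=None):
    """Mark a test as flaky until a Unix-timestamp deadline.

    Before the deadline, failures are reported as xfail; once the deadline
    passes, the test runs normally and failures surface as usual.
    """

    def decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            if time.time() >= until:
                # Deadline passed: no special handling, the flakiness must be fixed.
                return test_func(*args, **kwargs)
            try:
                return test_func(*args, **kwargs)
            except Exception:
                # Still within the grace period: report as an expected failure.
                pytest.xfail(reason or "known flaky test")

        return wrapper

    return decorator
```

Under this sketch, stacking `@flaky(...)` above `@pytest.mark.snapshot(...)` would tolerate failures in these tests until the deadline and then revert to normal reporting.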