[GH-ISSUE #4576] Tried Agentic chunking using Ollama but got error #28631

Open
opened 2026-04-22 07:05:56 -05:00 by GiteaMirror · 0 comments
Owner

Originally created by @arunkumarm-git on GitHub (May 22, 2024).
Original GitHub issue: https://github.com/ollama/ollama/issues/4576

What is the issue?

Code:
from langchain_community.llms import Ollama
from langchain.chains import create_extraction_chain_pydantic
from langchain_core.pydantic_v1 import BaseModel
from typing import Optional, List

llm = Ollama(model='llama3')
from langchain import hub
prompt = hub.pull("wfh/proposal-indexing")

runnable = prompt | llm

class Sentences(BaseModel):
sentences: List[str]

# Extraction

extraction_chain = create_extraction_chain_pydantic(pydantic_schema=Sentences, llm=llm)

def get_propositions(text):
runnable_output = runnable.invoke({
"input": text
}).content

propositions = extraction_chain.run(runnable_output)[0].sentences
return propositions

essay_propositions = []

for i, para in enumerate(pdf_text_document):
propositions = get_propositions(para)

essay_propositions.extend(propositions)

Error:
File g:\AI arbiter V2\env\Lib\site-packages\urllib3\connectionpool.py:715, in HTTPConnectionPool.urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
714 # Make the request on the httplib connection object.
--> 715 httplib_response = self._make_request(
716 conn,
717 method,
718 url,
719 timeout=timeout_obj,
720 body=body,
721 headers=headers,
722 chunked=chunked,
723 )
725 # If we're going to release the connection in finally:, then
726 # the response doesn't need to know about the connection. Otherwise
727 # it will also try to release it and we'll have a double-release
728 # mess.

File g:\AI arbiter V2\env\Lib\site-packages\urllib3\connectionpool.py:467, in HTTPConnectionPool._make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
463 except BaseException as e:
464 # Remove the TypeError from the exception chain in
465 # Python 3 (including for exceptions like SystemExit).
466 # Otherwise it looks like a bug in the code.
--> 467 six.raise_from(e, None)
468 except (SocketTimeout, BaseSSLError, SocketError) as e:
...
503 except MaxRetryError as e:
504 if isinstance(e.reason, ConnectTimeoutError):
505 # TODO: Remove this in 3.0.0: see #2811

ConnectionError: ('Connection aborted.', ConnectionResetError(10054, 'An existing connection was forcibly closed by the remote host', None, 10054, None))

OS

Windows

GPU

Nvidia

CPU

AMD

Ollama version

0.1.30

Originally created by @arunkumarm-git on GitHub (May 22, 2024). Original GitHub issue: https://github.com/ollama/ollama/issues/4576 ### What is the issue? Code: from langchain_community.llms import Ollama from langchain.chains import create_extraction_chain_pydantic from langchain_core.pydantic_v1 import BaseModel from typing import Optional, List llm = Ollama(model='llama3') from langchain import hub prompt = hub.pull("wfh/proposal-indexing") runnable = prompt | llm class Sentences(BaseModel): sentences: List[str] # Extraction extraction_chain = create_extraction_chain_pydantic(pydantic_schema=Sentences, llm=llm) def get_propositions(text): runnable_output = runnable.invoke({ "input": text }).content propositions = extraction_chain.run(runnable_output)[0].sentences return propositions essay_propositions = [] for i, para in enumerate(pdf_text_document): propositions = get_propositions(para) essay_propositions.extend(propositions) Error: File g:\AI arbiter V2\env\Lib\site-packages\urllib3\connectionpool.py:715, in HTTPConnectionPool.urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw) [714](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:714) # Make the request on the httplib connection object. 
--> [715](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:715) httplib_response = self._make_request( [716](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:716) conn, [717](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:717) method, [718](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:718) url, [719](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:719) timeout=timeout_obj, [720](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:720) body=body, [721](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:721) headers=headers, [722](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:722) chunked=chunked, [723](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:723) ) [725](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:725) # If we're going to release the connection in ``finally:``, then [726](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:726) # the response doesn't need to know about the connection. Otherwise [727](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:727) # it will also try to release it and we'll have a double-release [728](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:728) # mess. 
File g:\AI arbiter V2\env\Lib\site-packages\urllib3\connectionpool.py:467, in HTTPConnectionPool._make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw) [463](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:463) except BaseException as e: [464](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:464) # Remove the TypeError from the exception chain in [465](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:465) # Python 3 (including for exceptions like SystemExit). [466](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:466) # Otherwise it looks like a bug in the code. --> [467](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:467) six.raise_from(e, None) [468](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/urllib3/connectionpool.py:468) except (SocketTimeout, BaseSSLError, SocketError) as e: ... [503](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/requests/adapters.py:503) except MaxRetryError as e: [504](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/requests/adapters.py:504) if isinstance(e.reason, ConnectTimeoutError): [505](file:///G:/AI%20arbiter%20V2/env/Lib/site-packages/requests/adapters.py:505) # TODO: Remove this in 3.0.0: see #2811 ConnectionError: ('Connection aborted.', ConnectionResetError(10054, 'An existing connection was forcibly closed by the remote host', None, 10054, None)) ### OS Windows ### GPU Nvidia ### CPU AMD ### Ollama version 0.1.30
GiteaMirror added the bug label 2026-04-22 07:05:56 -05:00
Sign in to join this conversation.
1 Participants
Notifications
Due Date
No due date set.
Dependencies

No dependencies set.

Reference: github-starred/ollama#28631