Skip to content

Commit 64a728c

Browse files
authored
Merge pull request #3 from explodinggradients/rag
chore: reordered some files
2 parents 5aaad70 + f199a1d commit 64a728c

File tree

249 files changed

+2947
-392
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

249 files changed

+2947
-392
lines changed

Langchain/chains.ipynb

Lines changed: 0 additions & 63 deletions
This file was deleted.

Langchain/langsmith-qa-evaluation.ipynb

Lines changed: 0 additions & 295 deletions
This file was deleted.
File renamed without changes.
File renamed without changes.

langchain/callback-manager.ipynb

Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
{
2+
"cells": [
3+
{
4+
"cell_type": "markdown",
5+
"id": "4ac3c236",
6+
"metadata": {},
7+
"source": [
8+
"# CallbackHandlers and CallbackManagers"
9+
]
10+
},
11+
{
12+
"cell_type": "code",
13+
"execution_count": 9,
14+
"id": "0c11f0ae",
15+
"metadata": {},
16+
"outputs": [],
17+
"source": [
18+
"from langchain.callbacks.manager import (\n",
19+
" trace_as_chain_group, \n",
20+
" atrace_as_chain_group,\n",
21+
")\n",
22+
"\n",
23+
"# Example usage:\n",
24+
"from langchain.chat_models import ChatOpenAI\n",
25+
"from langchain.chains import LLMChain\n",
26+
"from langchain.prompts import PromptTemplate\n",
27+
"\n",
28+
"llm = ChatOpenAI(temperature=0.9)\n",
29+
"prompt = PromptTemplate(\n",
30+
" input_variables=[\"question\"],\n",
31+
" template=\"What is the answer to {question}?\",\n",
32+
")\n",
33+
"chain = LLMChain(llm=llm, prompt=prompt)\n",
34+
"with trace_as_chain_group(\"my_group\") as group_manager:\n",
35+
" chain.run(question=\"What is your name?\", callbacks=group_manager)\n",
36+
" with trace_as_chain_group(\"inside_my_group\", callback_manager=group_manager) as sub_group:\n",
37+
" chain.run(question=\"What is your quest?\", callbacks=sub_group)\n",
38+
" chain.run(question=\"What is your favorite color?\", callbacks=sub_group)\n",
39+
" llm.predict(prompt.format(question=\"how is president?\"))\n",
40+
" llm.predict(prompt.format(question=\"how is president?\"), callbacks=sub_group)"
41+
]
42+
},
43+
{
44+
"cell_type": "code",
45+
"execution_count": 8,
46+
"id": "bc8310b2",
47+
"metadata": {},
48+
"outputs": [
49+
{
50+
"data": {
51+
"text/plain": [
52+
"'The president is the head of state and the highest-ranking government official in a country.'"
53+
]
54+
},
55+
"execution_count": 8,
56+
"metadata": {},
57+
"output_type": "execute_result"
58+
}
59+
],
60+
"source": [
61+
"llm.predict(prompt.format(question=\"how is president?\"))"
62+
]
63+
},
64+
{
65+
"cell_type": "code",
66+
"execution_count": null,
67+
"id": "d8cb6fce",
68+
"metadata": {},
69+
"outputs": [],
70+
"source": []
71+
}
72+
],
73+
"metadata": {
74+
"kernelspec": {
75+
"display_name": "Python 3 (ipykernel)",
76+
"language": "python",
77+
"name": "python3"
78+
},
79+
"language_info": {
80+
"codemirror_mode": {
81+
"name": "ipython",
82+
"version": 3
83+
},
84+
"file_extension": ".py",
85+
"mimetype": "text/x-python",
86+
"name": "python",
87+
"nbconvert_exporter": "python",
88+
"pygments_lexer": "ipython3",
89+
"version": "3.10.12"
90+
}
91+
},
92+
"nbformat": 4,
93+
"nbformat_minor": 5
94+
}

langchain/chains.ipynb

Lines changed: 191 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,191 @@
1+
{
2+
"cells": [
3+
{
4+
"cell_type": "markdown",
5+
"id": "f46200c8",
6+
"metadata": {},
7+
"source": [
8+
"# Chains - Intro\n",
9+
"\n",
10+
"Chains are a core concept in LangChain. They enable you to group complex LLM calls and other functionality together.\n",
11+
"\n",
12+
"\n",
13+
"The base interface is \n",
14+
"```python\n",
15+
"class Chain(BaseModel, ABC):\n",
16+
" \"\"\"Base interface that all chains should implement.\"\"\"\n",
17+
"\n",
18+
" memory: BaseMemory\n",
19+
" callbacks: Callbacks\n",
20+
"\n",
21+
" def __call__(\n",
22+
" self,\n",
23+
" inputs: Any,\n",
24+
" return_only_outputs: bool = False,\n",
25+
" callbacks: Callbacks = None,\n",
26+
" ) -> Dict[str, Any]:\n",
27+
" ...\n",
28+
"```"
29+
]
30+
},
31+
{
32+
"cell_type": "markdown",
33+
"id": "4b779141",
34+
"metadata": {},
35+
"source": [
36+
"## Getting Started\n",
37+
"\n",
38+
"`LLMChain` is the basic building block for chains."
39+
]
40+
},
41+
{
42+
"cell_type": "code",
43+
"execution_count": 7,
44+
"id": "f9207cec",
45+
"metadata": {},
46+
"outputs": [],
47+
"source": [
48+
"from langchain.llms import OpenAI\n",
49+
"from langchain.prompts import PromptTemplate\n",
50+
"from langchain.chat_models import ChatOpenAI\n",
51+
"\n",
52+
"llm = OpenAI(temperature=0.9)\n",
53+
"chat_llm = ChatOpenAI()\n",
54+
"\n",
55+
"prompt = PromptTemplate(\n",
56+
" input_variables=[\"product\"],\n",
57+
" template=\"What is a good name for a company that makes {product}?\",\n",
58+
")"
59+
]
60+
},
61+
{
62+
"cell_type": "code",
63+
"execution_count": 8,
64+
"id": "6d81a9b8",
65+
"metadata": {},
66+
"outputs": [
67+
{
68+
"data": {
69+
"text/plain": [
70+
"{'product': 'colorful vests', 'text': 'VibrantVests'}"
71+
]
72+
},
73+
"execution_count": 8,
74+
"metadata": {},
75+
"output_type": "execute_result"
76+
}
77+
],
78+
"source": [
79+
"from langchain.chains import LLMChain\n",
80+
"chain = LLMChain(llm=chat_llm, prompt=prompt)\n",
81+
"\n",
82+
"# Run the chain only specifying the input variable.\n",
83+
"chain({\"product\": \"colorful vests\"})"
84+
]
85+
},
86+
{
87+
"cell_type": "markdown",
88+
"id": "a3908709",
89+
"metadata": {},
90+
"source": [
91+
"## Custom Chain\n",
92+
"\n",
93+
"You have to implement the `input_keys` and `output_keys` properties and the `_call` method."
94+
]
95+
},
96+
{
97+
"cell_type": "code",
98+
"execution_count": 13,
99+
"id": "528baa20",
100+
"metadata": {},
101+
"outputs": [],
102+
"source": [
103+
"from langchain.chains.base import Chain\n",
104+
"\n",
105+
"class MyCustomChain(Chain):\n",
106+
" @property\n",
107+
" def input_keys(self):\n",
108+
" return [\"question\", \"answer\", \"contexts\"]\n",
109+
" \n",
110+
" @property\n",
111+
" def output_keys(self):\n",
112+
" return [\"score\"]\n",
113+
" \n",
114+
" def _call(self, inputs, run_manager = None):\n",
115+
" print(inputs)\n",
116+
" return {\"score\": 1}"
117+
]
118+
},
119+
{
120+
"cell_type": "code",
121+
"execution_count": 14,
122+
"id": "391033b6",
123+
"metadata": {},
124+
"outputs": [],
125+
"source": [
126+
"chain = MyCustomChain()"
127+
]
128+
},
129+
{
130+
"cell_type": "code",
131+
"execution_count": 15,
132+
"id": "598474ff",
133+
"metadata": {},
134+
"outputs": [
135+
{
136+
"name": "stdout",
137+
"output_type": "stream",
138+
"text": [
139+
"{'question': 'q', 'answer': 'a', 'contexts': ['a']}\n"
140+
]
141+
},
142+
{
143+
"data": {
144+
"text/plain": [
145+
"{'question': 'q', 'answer': 'a', 'contexts': ['a'], 'score': 1}"
146+
]
147+
},
148+
"execution_count": 15,
149+
"metadata": {},
150+
"output_type": "execute_result"
151+
}
152+
],
153+
"source": [
154+
"chain({\n",
155+
" \"question\": \"q\",\n",
156+
" \"answer\": \"a\",\n",
157+
" \"contexts\": [\"a\"]\n",
158+
"})"
159+
]
160+
},
161+
{
162+
"cell_type": "code",
163+
"execution_count": null,
164+
"id": "ab88ba75",
165+
"metadata": {},
166+
"outputs": [],
167+
"source": []
168+
}
169+
],
170+
"metadata": {
171+
"kernelspec": {
172+
"display_name": "Python 3 (ipykernel)",
173+
"language": "python",
174+
"name": "python3"
175+
},
176+
"language_info": {
177+
"codemirror_mode": {
178+
"name": "ipython",
179+
"version": 3
180+
},
181+
"file_extension": ".py",
182+
"mimetype": "text/x-python",
183+
"name": "python",
184+
"nbconvert_exporter": "python",
185+
"pygments_lexer": "ipython3",
186+
"version": "3.10.12"
187+
}
188+
},
189+
"nbformat": 4,
190+
"nbformat_minor": 5
191+
}
File renamed without changes.

0 commit comments

Comments (0)