.gitignore DELETED
@@ -1 +0,0 @@
- venv
 
.idea/.gitignore DELETED
@@ -1,8 +0,0 @@
- # Default ignored files
- /shelf/
- /workspace.xml
- # Editor-based HTTP Client requests
- /httpRequests/
- # Datasource local storage ignored files
- /dataSources/
- /dataSources.local.xml
 
.idea/Agents_Course_Final_Project.iml DELETED
@@ -1,14 +0,0 @@
- <?xml version="1.0" encoding="UTF-8"?>
- <module type="PYTHON_MODULE" version="4">
-   <component name="NewModuleRootManager">
-     <content url="file://$MODULE_DIR$">
-       <excludeFolder url="file://$MODULE_DIR$/venv" />
-     </content>
-     <orderEntry type="jdk" jdkName="$USER_HOME$/anaconda3" jdkType="Python SDK" />
-     <orderEntry type="sourceFolder" forTests="false" />
-   </component>
-   <component name="PyDocumentationSettings">
-     <option name="format" value="PLAIN" />
-     <option name="myDocStringFormat" value="Plain" />
-   </component>
- </module>
 
.idea/inspectionProfiles/Project_Default.xml DELETED
@@ -1,25 +0,0 @@
- <component name="InspectionProjectProfileManager">
-   <profile version="1.0">
-     <option name="myName" value="Project Default" />
-     <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
-       <option name="ignoredPackages">
-         <value>
-           <list size="12">
-             <item index="0" class="java.lang.String" itemvalue="blinker" />
-             <item index="1" class="java.lang.String" itemvalue="joblib" />
-             <item index="2" class="java.lang.String" itemvalue="Werkzeug" />
-             <item index="3" class="java.lang.String" itemvalue="tzdata" />
-             <item index="4" class="java.lang.String" itemvalue="MarkupSafe" />
-             <item index="5" class="java.lang.String" itemvalue="numpy" />
-             <item index="6" class="java.lang.String" itemvalue="click" />
-             <item index="7" class="java.lang.String" itemvalue="uuid" />
-             <item index="8" class="java.lang.String" itemvalue="pandas" />
-             <item index="9" class="java.lang.String" itemvalue="pytz" />
-             <item index="10" class="java.lang.String" itemvalue="itsdangerous" />
-             <item index="11" class="java.lang.String" itemvalue="Flask" />
-           </list>
-         </value>
-       </option>
-     </inspection_tool>
-   </profile>
- </component>
 
.idea/inspectionProfiles/profiles_settings.xml DELETED
@@ -1,6 +0,0 @@
- <component name="InspectionProjectProfileManager">
-   <settings>
-     <option name="USE_PROJECT_PROFILE" value="false" />
-     <version value="1.0" />
-   </settings>
- </component>
 
.idea/misc.xml DELETED
@@ -1,4 +0,0 @@
- <?xml version="1.0" encoding="UTF-8"?>
- <project version="4">
-   <component name="ProjectRootManager" version="2" project-jdk-name="$USER_HOME$/anaconda3" project-jdk-type="Python SDK" />
- </project>
 
.idea/modules.xml DELETED
@@ -1,8 +0,0 @@
- <?xml version="1.0" encoding="UTF-8"?>
- <project version="4">
-   <component name="ProjectModuleManager">
-     <modules>
-       <module fileurl="file://$PROJECT_DIR$/.idea/Agents_Course_Final_Project.iml" filepath="$PROJECT_DIR$/.idea/Agents_Course_Final_Project.iml" />
-     </modules>
-   </component>
- </project>
 
.idea/vcs.xml DELETED
@@ -1,6 +0,0 @@
- <?xml version="1.0" encoding="UTF-8"?>
- <project version="4">
-   <component name="VcsDirectoryMappings">
-     <mapping directory="" vcs="Git" />
-   </component>
- </project>
 
app.py CHANGED
@@ -1,200 +1,41 @@
  import os
  import gradio as gr
+ import requests
+ import inspect
  import pandas as pd
- from huggingface_hub import InferenceClient
  import asyncio
- from llama_index.embeddings.huggingface import HuggingFaceEmbedding
  from llama_index.tools.duckduckgo import DuckDuckGoSearchToolSpec
  from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
  from llama_index.core.agent.workflow import AgentWorkflow
- from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
- from llama_index.readers.web import SimpleWebPageReader
- import requests
- from huggingface_hub import InferenceClient

- from llama_index.readers.wikipedia import WikipediaReader
- from llama_index.core.agent.workflow import (
-     AgentInput,
-     AgentOutput,
-     ToolCall,
-     ToolCallResult,
-     AgentStream,
- )
- import requests
- from bs4 import BeautifulSoup
- from urllib.parse import urljoin
+
  # (Keep Constants as is)
  # --- Constants ---
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

-
  # --- Basic Agent Definition ---
  # ----- THIS IS WERE YOU CAN BUILD WHAT YOU WANT ------
  class BasicAgent:
      def __init__(self):
          self.llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
-         self.vision_llm = HuggingFaceInferenceAPI(model_name="CohereLabs/aya-vision-32b")
-         self.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
-         self.search_client = DuckDuckGoSearchToolSpec()
-         self.wiki_reader = WikipediaReader()
-         system_prompt = """
-         You are a helpful tool that uses the web to find out answers to specific questions in the manner that a human would.
-         Your answers should contain just ONE single word.
-
-         You have access to the following tools:
-
-         1. search_web: This uses DuckDuckGo to search the web. It's useful when you need to find generic info or links to
-         web pages;
-
-         2. search_wiki: Use this when you think searching Wikipedia directly is more useful;
-
-         3. webpage_reader: Use this to extract content from web pages;
-
-         4. describe_images: This tool will return descriptions of all the images on a web page. Use this to describe
-         images and figures;
-
-         5. Use multiply_nums, divide_nums, add_nums and subtract_nums for basic math operations.
-         """
-         self.agent = AgentWorkflow.from_tools_or_functions([self.search_web, self.search_wiki, self.webpage_reader,
-                                                              self.describe_images, self.multiply_nums, self.divide_nums,
-                                                              self.add_nums, self.subtract_nums],
-                                                             llm=self.llm,
-                                                             system_prompt=system_prompt)
+         self.search_tool = DuckDuckGoSearchToolSpec()
+         self.agent = AgentWorkflow.from_tools_or_functions([self.search_tool], llm=self.llm)
          print("BasicAgent initialized.")
-
+
      async def __call__(self, question: str) -> str:
-         handler = self.agent.run(user_msg=question)
-         # async for event in handler.stream_events():
-         #     if isinstance(event, AgentStream):
-         #         print(event.delta, end="", flush=True)
-         #     elif isinstance(event, ToolCallResult):
-         #         print(event.tool_name) # the tool name
-         #         print(event.tool_kwargs) # the tool kwargs
-         #         print(event.tool_output) # the tool output
-         response = await handler
-         return str(response)
-
-     def extract_image_urls(self, page_url):
-         try:
-             # Send HTTP GET request to the page
-             response = requests.get(page_url)
-             response.raise_for_status()  # Raise an error for bad status codes
-
-             # Parse HTML content
-             soup = BeautifulSoup(response.text, 'html.parser')
-
-             # Find all <img> tags
-             img_tags = soup.find_all('img')
-
-             # Extract and resolve image URLs
-             img_urls = []
-             for img in img_tags:
-                 src = img.get('src')
-                 if src:
-                     # Make relative URLs absolute
-                     full_url = urljoin(page_url, src)
-                     img_urls.append(full_url)
+         response = await self.agent.run(question)
+         return response

-             return img_urls
-
-         except requests.RequestException as e:
-             print(f"Request failed: {e}")
-             return []
-
-     async def describe_images(self, webpage_url: str) -> str:
-         """Extracts and describes images from an input webpage url based on a query."""
-
-         image_urls = self.extract_image_urls(webpage_url)
-         print("image urls: ", image_urls)
-         if len(image_urls) == 0:
-             return "Looks like there are no images on this webpage"
-
-         docs = []
-         for image_url in image_urls:
-             messages = [
-                 {
-                     "role": "user",
-                     "content": [
-                         {
-                             "type": "text",
-                             "text": "Describe this image in one sentence."
-                         },
-                         {
-                             "type": "image_url",
-                             "image_url": {
-                                 "url": image_url
-                             }
-                         }
-                     ]
-                 }
-             ]
-
-             # print(messages)
-
-             client = InferenceClient(
-                 provider="hyperbolic",
-                 api_key=os.getenv('INFERENCE_KEY'),
-             )
-
-             try:
-                 completion = client.chat.completions.create(
-                     model="Qwen/Qwen2.5-VL-7B-Instruct",
-                     messages=messages,
-                 )
-
-                 # print(completion.choices[0].message.content)
-                 docs.append(completion.choices[0].message.content)
-             except:
-                 continue
-         return str(docs)
-
-     async def search_wiki(self, query: str) -> str:
-         """Useful for browsing Wikipedia to look up specific info."""
-         reader = self.wiki_reader
-         documents = reader.load_data(pages=[query])
-         index = VectorStoreIndex.from_documents(documents, embed_model=self.embed_model)
-         search_res = index.as_query_engine(llm=self.llm).query(query)
-         return str(search_res)
-
-     async def search_web(self, query: str) -> str:
-         """Useful for using the web to answer questions. Keep the query very concise in order to get good results."""
-         client = self.search_client
-         search_res = client.duckduckgo_full_search(query)
-         return str(search_res)
-
-     async def webpage_reader(self, webpage_url: str) -> str:
-         """Useful for when you want to read and extract information from a specific webpage."""
-         documents = SimpleWebPageReader(html_to_text=True).load_data(
-             [webpage_url]
-         )
-         return str(documents)
-
-     async def multiply_nums(self, a: int, b: int) -> float:
-         """Useful for multiplying two numbers"""
-         return a * b
-
-     async def divide_nums(self, a: int, b: int) -> float:
-         """Useful for dividing two numbers"""
-         return a / b
-
-     async def add_nums(self, a: int, b: int) -> int:
-         """Useful for adding two numbers"""
-         return a + b
-
-     async def subtract_nums(self, a: int, b: int) -> int:
-         """Useful for subtracting two numbers"""
-         return a - b
-
- def run_and_submit_all(profile: gr.OAuthProfile | None):
+ def run_and_submit_all( profile: gr.OAuthProfile | None):
      """
      Fetches all questions, runs the BasicAgent on them, submits all answers,
      and displays the results.
      """
      # --- Determine HF Space Runtime URL and Repo URL ---
-     space_id = os.getenv("SPACE_ID") # Get the SPACE_ID for sending link to the code
+     space_id = os.getenv("SPACE_ID") # Get the SPACE_ID for sending link to the code

      if profile:
-         username = f"{profile.username}"
+         username= f"{profile.username}"
          print(f"User logged in: {username}")
      else:
          print("User not logged in.")
@@ -221,16 +62,16 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
          response.raise_for_status()
          questions_data = response.json()
          if not questions_data:
-             print("Fetched questions list is empty.")
-             return "Fetched questions list is empty or invalid format.", None
+             print("Fetched questions list is empty.")
+             return "Fetched questions list is empty or invalid format.", None
          print(f"Fetched {len(questions_data)} questions.")
      except requests.exceptions.RequestException as e:
          print(f"Error fetching questions: {e}")
          return f"Error fetching questions: {e}", None
      except requests.exceptions.JSONDecodeError as e:
-         print(f"Error decoding JSON response from questions endpoint: {e}")
-         print(f"Response text: {response.text[:500]}")
-         return f"Error decoding server response for questions: {e}", None
+         print(f"Error decoding JSON response from questions endpoint: {e}")
+         print(f"Response text: {response.text[:500]}")
+         return f"Error decoding server response for questions: {e}", None
      except Exception as e:
          print(f"An unexpected error occurred fetching questions: {e}")
          return f"An unexpected error occurred fetching questions: {e}", None
@@ -242,24 +83,22 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
      for item in questions_data:
          task_id = item.get("task_id")
          question_text = item.get("question")
-         question_text += "One-word answer only."
          if not task_id or question_text is None:
              print(f"Skipping item with missing task_id or question: {item}")
              continue
          try:
              submitted_answer = agent(question_text)
-             print("Answer: ", submitted_answer)
              answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
              results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
          except Exception as e:
-             print(f"Error running agent on task {task_id}: {e}")
-             results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": f"AGENT ERROR: {e}"})
+             print(f"Error running agent on task {task_id}: {e}")
+             results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": f"AGENT ERROR: {e}"})

      if not answers_payload:
          print("Agent did not produce any answers to submit.")
          return "Agent did not produce any answers to submit.", pd.DataFrame(results_log)

-     # 4. Prepare Submission
+     # 4. Prepare Submission
      submission_data = {"username": username.strip(), "agent_code": agent_code, "answers": answers_payload}
      status_update = f"Agent finished. Submitting {len(answers_payload)} answers for user '{username}'..."
      print(status_update)
@@ -340,16 +179,10 @@ with gr.Blocks() as demo:
      )

  if __name__ == "__main__":
-     # agent = BasicAgent()
-     # while True:
-     #     query = input("Ask a question here: ")
-     #     answ = asyncio.run(agent(query))
-     #     print(answ)
-
-     print("\n" + "-" * 30 + " App Starting " + "-" * 30)
+     print("\n" + "-"*30 + " App Starting " + "-"*30)
      # Check for SPACE_HOST and SPACE_ID at startup for information
      space_host_startup = os.getenv("SPACE_HOST")
-     space_id_startup = os.getenv("SPACE_ID") # Get SPACE_ID at startup
+     space_id_startup = os.getenv("SPACE_ID") # Get SPACE_ID at startup

      if space_host_startup:
          print(f"✅ SPACE_HOST found: {space_host_startup}")
@@ -357,14 +190,14 @@ if __name__ == "__main__":
      else:
          print("ℹ️ SPACE_HOST environment variable not found (running locally?).")

-     if space_id_startup: # Print repo URLs if SPACE_ID is found
+     if space_id_startup: # Print repo URLs if SPACE_ID is found
          print(f"✅ SPACE_ID found: {space_id_startup}")
          print(f"   Repo URL: https://huggingface.co/spaces/{space_id_startup}")
          print(f"   Repo Tree URL: https://huggingface.co/spaces/{space_id_startup}/tree/main")
      else:
          print("ℹ️ SPACE_ID environment variable not found (running locally?). Repo URL cannot be determined.")

-     print("-" * (60 + len(" App Starting ")) + "\n")
+     print("-"*(60 + len(" App Starting ")) + "\n")

      print("Launching Gradio Interface for Basic Agent Evaluation...")
-     demo.launch(debug=True, share=False)
+     demo.launch(debug=True, share=False)
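
A minimal sketch of how the simplified agent in the new app.py could be exercised locally (not part of the commit; it assumes the packages from requirements.txt are installed and that a Hugging Face token is available to HuggingFaceInferenceAPI). AgentWorkflow.from_tools_or_functions is typically given tool objects or plain callables, so this sketch expands the DuckDuckGoSearchToolSpec with to_tool_list(), whereas the committed code passes the spec object itself:

    import asyncio

    from llama_index.core.agent.workflow import AgentWorkflow
    from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
    from llama_index.tools.duckduckgo import DuckDuckGoSearchToolSpec

    # Same model as the committed BasicAgent; any HF Inference model you can access works here.
    llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")

    # Expand the tool spec into individual tools before handing it to the workflow.
    tools = DuckDuckGoSearchToolSpec().to_tool_list()
    agent = AgentWorkflow.from_tools_or_functions(tools, llm=llm)

    async def main() -> None:
        handler = agent.run(user_msg="What is the capital of France?")
        response = await handler  # awaiting the handler yields the final agent output
        print(str(response))

    if __name__ == "__main__":
        asyncio.run(main())
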
requirements.txt CHANGED
@@ -1,204 +1,8 @@
- aiofiles==24.1.0
- aiohappyeyeballs==2.6.1
- aiohttp==3.12.2
- aiosignal==1.3.2
- aiosqlite==0.21.0
- annotated-types==0.7.0
- anyio==4.9.0
- asgiref==3.8.1
- attrs==25.3.0
- Authlib==1.6.0
- backoff==2.2.1
- banks==2.1.2
- bcrypt==4.3.0
- beautifulsoup4==4.13.4
- build==1.2.2.post1
- cachetools==5.5.2
- certifi==2025.4.26
- cffi==1.17.1
- charset-normalizer==3.4.2
- chromadb==1.0.10
- chromedriver-autoinstaller==0.6.4
- click==8.2.1
- colorama==0.4.6
- coloredlogs==15.0.1
- cryptography==45.0.3
- cssselect==1.3.0
- dataclasses-json==0.6.7
- defusedxml==0.7.1
- Deprecated==1.2.18
- dirtyjson==1.0.8
- distro==1.9.0
- duckduckgo_search==6.4.2
- durationpy==0.10
- fastapi==0.115.9
- feedfinder2==0.0.4
- feedparser==6.0.11
- ffmpy==0.5.0
- filelock==3.18.0
- filetype==1.2.0
- flatbuffers==25.2.10
- frozenlist==1.6.0
- fsspec==2025.5.1
- google-auth==2.40.2
- googleapis-common-protos==1.70.0
- gradio==5.31.0
- gradio_client==1.10.1
- greenlet==3.2.2
- griffe==1.7.3
- groovy==0.1.2
- grpcio==1.71.0
- h11==0.16.0
- hf-xet==1.1.2
- html2text==2024.2.26
- httpcore==1.0.9
- httptools==0.6.4
- httpx==0.28.1
- huggingface-hub==0.32.2
- humanfriendly==10.0
- idna==3.10
- importlib_metadata==8.6.1
- importlib_resources==6.5.2
- itsdangerous==2.2.0
- jieba3k==0.35.1
- Jinja2==3.1.6
- jiter==0.10.0
- joblib==1.5.1
- jsonschema==4.24.0
- jsonschema-specifications==2025.4.1
- kubernetes==32.0.1
- llama-cloud==0.1.22
- llama-cloud-services==0.6.23
- llama-index==0.12.37
- llama-index-agent-openai==0.4.8
- llama-index-cli==0.4.1
- llama-index-core==0.12.37
- llama-index-embeddings-huggingface==0.5.4
- llama-index-embeddings-openai==0.3.1
- llama-index-indices-managed-llama-cloud==0.6.11
- llama-index-llms-huggingface-api==0.4.3
- llama-index-llms-openai==0.3.44
- llama-index-multi-modal-llms-openai==0.4.3
- llama-index-program-openai==0.3.1
- llama-index-question-gen-openai==0.3.0
- llama-index-readers-file==0.4.8
- llama-index-readers-llama-parse==0.4.0
- llama-index-readers-web==0.4.1
- llama-index-readers-wikipedia==0.3.0
- llama-index-tools-duckduckgo==0.3.0
- llama-index-vector-stores-chroma==0.4.1
- llama-parse==0.6.23
- lxml==5.4.0
- lxml_html_clean==0.4.2
- markdown-it-py==3.0.0
- markdownify==1.1.0
- MarkupSafe==3.0.2
- marshmallow==3.26.1
- mdurl==0.1.2
- mmh3==5.1.0
- mpmath==1.3.0
- multidict==6.4.4
- mypy_extensions==1.1.0
- nest-asyncio==1.6.0
- networkx==3.4.2
- newspaper3k==0.2.8
- nltk==3.9.1
- numpy==1.26.4
- oauthlib==3.2.2
- onnxruntime==1.16.3
- openai==1.82.0
- opentelemetry-api==1.33.1
- opentelemetry-exporter-otlp-proto-common==1.33.1
- opentelemetry-exporter-otlp-proto-grpc==1.33.1
- opentelemetry-instrumentation==0.54b1
- opentelemetry-instrumentation-asgi==0.54b1
- opentelemetry-instrumentation-fastapi==0.54b1
- opentelemetry-proto==1.33.1
- opentelemetry-sdk==1.33.1
- opentelemetry-semantic-conventions==0.54b1
- opentelemetry-util-http==0.54b1
- orjson==3.10.18
- outcome==1.3.0.post0
- overrides==7.7.0
- oxylabs==2.0.0
- packaging==25.0
- pandas==2.2.3
- pillow==11.2.1
- platformdirs==4.3.8
- playwright==1.52.0
- posthog==4.2.0
- primp==0.15.0
- propcache==0.3.1
- protobuf==5.29.4
- pyasn1==0.6.1
- pyasn1_modules==0.4.2
- pycparser==2.22
- pydantic==2.11.5
- pydantic_core==2.33.2
- pydub==0.25.1
- pyee==13.0.0
- Pygments==2.19.1
- pypdf==5.5.0
- PyPika==0.48.9
- pyproject_hooks==1.2.0
- PySocks==1.7.1
- python-dateutil==2.9.0.post0
- python-dotenv==1.1.0
- python-multipart==0.0.20
- pytz==2025.2
- PyYAML==6.0.2
- referencing==0.36.2
- regex==2024.11.6
- requests==2.32.3
- requests-file==2.1.0
- requests-oauthlib==2.0.0
- rich==14.0.0
- rpds-py==0.25.1
- rsa==4.9.1
- ruff==0.11.11
- safehttpx==0.1.6
- safetensors==0.5.3
- scikit-learn==1.6.1
- scipy==1.15.3
- selenium==4.33.0
- semantic-version==2.10.0
- sgmllib3k==1.0.0
- shellingham==1.5.4
- six==1.17.0
- sniffio==1.3.1
- sortedcontainers==2.4.0
- soupsieve==2.7
- spider-client==0.0.27
- SQLAlchemy==2.0.41
- starlette==0.45.3
- striprtf==0.0.26
- sympy==1.14.0
- syncio==0.0.4
- tenacity==9.1.2
- threadpoolctl==3.6.0
- tiktoken==0.9.0
- tinysegmenter==0.3
- tldextract==5.3.0
- tokenizers==0.15.2
- tomlkit==0.13.2
- torch==2.2.2
- tqdm==4.67.1
- transformers==4.36.2
- trio==0.30.0
- trio-websocket==0.12.2
- typer==0.16.0
- typing-inspect==0.9.0
- typing-inspection==0.4.1
- typing_extensions==4.13.2
- tzdata==2025.2
- urllib3==2.4.0
- uvicorn==0.34.2
- uvloop==0.21.0
- watchfiles==1.0.5
- websocket-client==1.8.0
- websockets==15.0.1
- wikipedia==1.4.0
- wrapt==1.17.2
- wsproto==1.2.0
- yarl==1.20.0
- zipp==3.22.0
+ gradio
+ requests
+ llama-index
+ llama-index-vector-stores-chroma
+ llama-index-llms-huggingface-api
+ llama-index-embeddings-huggingface
+ llama-index-tools-duckduckgo
+ syncio