Fix: none link and remove api from log (#452)
* remove APIs from log, fix no links

* fix template of pull request

close #430 close #411
ARajgor authored and nalaso committed Apr 17, 2024
1 parent 703c9c6 commit 1ff8934
Showing 5 changed files with 10 additions and 18 deletions.
9 changes: 0 additions & 9 deletions .github/pull_request_template.md
@@ -1,12 +1,3 @@
----
-name: Pull request
-about: Create a pull request to help us improve
-title: ''
-labels: ''
-assignees: ''
-
----
-
<!-- NOTE: A similar PR may already be submitted! Please search among the [Pull request] before creating one.
Thanks for submitting a pull request! Please provide enough information so that others can review your pull request:
2 changes: 0 additions & 2 deletions devika.py
@@ -2,8 +2,6 @@
DO NOT REARRANGE THE ORDER OF THE FUNCTION CALLS AND VARIABLE DECLARATIONS
AS IT MAY CAUSE IMPORT ERRORS AND OTHER ISSUES
"""
-# import eventlet
-# eventlet.monkey_patch()
from gevent import monkey
monkey.patch_all()
from src.init import init_devika
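The change above drops the commented-out eventlet lines and keeps gevent's monkey patching, which is also why the docstring insists on import order: `monkey.patch_all()` only has an effect if it runs before the modules it patches (socket, threading, ssl, ...) are imported. A minimal, self-contained sketch of the pattern (a hypothetical script, not Devika's actual entry point):

```python
# entry.py -- hypothetical standalone example of gevent monkey patching.
# patch_all() must run before anything that imports socket/threading/ssl,
# otherwise those modules keep their blocking, unpatched implementations.
from gevent import monkey
monkey.patch_all()

import socket  # now resolves to gevent's cooperative socket
import gevent


def fetch(host: str) -> str:
    # Plain blocking-looking code; gevent makes it cooperative after patching.
    sock = socket.create_connection((host, 80), timeout=5)
    sock.sendall(b"HEAD / HTTP/1.0\r\nHost: " + host.encode() + b"\r\n\r\n")
    data = sock.recv(128)
    sock.close()
    lines = data.decode(errors="replace").splitlines()
    return lines[0] if lines else ""


if __name__ == "__main__":
    jobs = [gevent.spawn(fetch, h) for h in ("example.com", "example.org")]
    gevent.joinall(jobs, timeout=10)
    print([j.value for j in jobs])
```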
11 changes: 5 additions & 6 deletions src/agents/agent.py
@@ -66,10 +66,10 @@ def __init__(self, base_model: str, search_engine: str, browser: Browser = None)
        self.engine = search_engine
        self.tokenizer = tiktoken.get_encoding("cl100k_base")

-    async def open_page(self, project_name, pdf_download_url):
+    async def open_page(self, project_name, url):
        browser = await Browser().start()

-        await browser.go_to(pdf_download_url)
+        await browser.go_to(url)
        _, raw = await browser.screenshot(project_name)
        data = await browser.extract_text()
        await browser.close()
@@ -79,9 +79,7 @@ async def open_page(self, project_name, pdf_download_url):
    def search_queries(self, queries: list, project_name: str) -> dict:
        results = {}

-
        knowledge_base = KnowledgeBase()
-        web_search = None

        if self.engine == "bing":
            web_search = BingSearch()
@@ -107,7 +105,8 @@

            link = web_search.get_first_link()
            print("\nLink :: ", link, '\n')
-
+            if not link:
+                continue
            browser, raw, data = loop.run_until_complete(self.open_page(project_name, link))
            emit_agent("screenshot", {"data": raw, "project_name": project_name}, False)
            results[query] = self.formatter.execute(data, project_name)
@@ -218,7 +217,7 @@ def subsequent_execute(self, prompt: str, project_name: str):
            deploy_url = deploy_metadata["deploy_url"]

            response = {
-                "message": "Done! I deployed your project on Netflify.",
+                "message": "Done! I deployed your project on Netlify.",
                "deploy_url": deploy_url
            }
            response = json.dumps(response, indent=4)
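The `if not link: continue` guard added above is the "fix no links" half of the commit: when a search engine returns nothing, the agent now skips that query instead of passing `None` to `browser.go_to()`. A rough, runnable sketch of the guard with stand-in classes (`StubSearch` and its canned results are hypothetical, not Devika's real search wrappers or browser step):

```python
from typing import Optional


class StubSearch:
    """Hypothetical stand-in for the project's BingSearch/GoogleSearch wrappers."""

    def __init__(self, canned: dict):
        self.canned = canned
        self.last_query: Optional[str] = None

    def search(self, query: str) -> None:
        self.last_query = query

    def get_first_link(self) -> Optional[str]:
        return self.canned.get(self.last_query)


def search_queries(queries: list, web_search: StubSearch) -> dict:
    results = {}
    for query in queries:
        web_search.search(query)
        link = web_search.get_first_link()
        if not link:  # no usable result: skip instead of opening a None URL
            continue
        results[query] = f"scraped:{link}"  # placeholder for the real browser/formatter step
    return results


if __name__ == "__main__":
    engine = StubSearch({"python gevent": "https://www.gevent.org", "zzz": None})
    print(search_queries(["python gevent", "zzz"], engine))
    # -> {'python gevent': 'scraped:https://www.gevent.org'}
```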
4 changes: 3 additions & 1 deletion src/browser/search.py
@@ -66,7 +66,7 @@ def get_first_link(self):
# self.query_result = None
#
# def search(self, query):
-# from duckduckgo_search import DDGS
+from duckduckgo_search import DDGS
# try:
# self.query_result = DDGS().text(query, max_results=5, region="us")
# print(self.query_result)
@@ -102,6 +102,8 @@ def _get_url(self, method, url, data):
                return resp.content
            if resp.status_code == (202, 301, 403):
                raise Exception(f"Error: {resp.status_code} rate limit error")
+            if not resp:
+                return None
        except Exception as error:
            if "timeout" in str(error).lower():
                raise TimeoutError("Duckduckgo timed out error")
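On the search side, `_get_url()` now returns `None` for an empty response, which is what eventually surfaces as the missing link the agent skips above. A simplified sketch of that behaviour using `requests` (a hypothetical helper; Devika's real client, parameters, and endpoints differ):

```python
from typing import Optional

import requests


def get_url(url: str, timeout: int = 10) -> Optional[bytes]:
    """Fetch a URL and return its body, or None when the response is empty."""
    try:
        resp = requests.get(url, timeout=timeout)
        if resp.status_code == 200:
            return resp.content
        if resp.status_code in (202, 301, 403):  # treated upstream as rate limiting
            raise Exception(f"Error: {resp.status_code} rate limit error")
        if not resp.content:  # empty body: signal "no result" to the caller
            return None
    except requests.Timeout:
        raise TimeoutError("Duckduckgo timed out error")
    return None


if __name__ == "__main__":
    body = get_url("https://example.org")
    print("no result" if body is None else f"got {len(body)} bytes")
```

Note that the sketch checks `status_code in (202, 301, 403)`; the unchanged context line in the diff compares with `==`, which can never match a tuple and so never raises the rate-limit error.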
2 changes: 2 additions & 0 deletions src/logger.py
@@ -67,6 +67,8 @@ def wrapper(*args, **kwargs):
                    logger.debug(f"{request.path} {request.method} - Response: File response")
                else:
                    response_summary = response.get_data(as_text=True)
+                    if 'settings' in request.path:
+                        response_summary = "*** Settings are not logged ***"
                    logger.debug(f"{request.path} {request.method} - Response: {response_summary}")
            except Exception as e:
                logger.exception(f"{request.path} {request.method} - {e})")
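The logger change is the "remove APIs from log" half of the commit: responses from settings routes, where API keys are returned, are replaced with a fixed placeholder before they reach the debug log. A minimal Flask sketch of the same redaction idea (hypothetical app, routes, and key names; Devika's real `route_logger` is a decorator factory around its own Logger):

```python
from functools import wraps

from flask import Flask, jsonify, request

app = Flask(__name__)


def route_logger(func):
    """Log each response, but never log the body of settings endpoints."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        response = func(*args, **kwargs)
        try:
            summary = response.get_data(as_text=True)
            if "settings" in request.path:
                summary = "*** Settings are not logged ***"  # keep API keys out of logs
            app.logger.debug(f"{request.path} {request.method} - Response: {summary}")
        except Exception as e:
            app.logger.exception(f"{request.path} {request.method} - {e}")
        return response
    return wrapper


@app.route("/api/settings", methods=["GET"])
@route_logger
def get_settings():
    return jsonify({"OPENAI_API_KEY": "sk-..."})  # dummy value, never logged verbatim


@app.route("/api/status", methods=["GET"])
@route_logger
def get_status():
    return jsonify({"ok": True})  # non-settings routes are logged normally


if __name__ == "__main__":
    app.run(debug=True)
```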
