author    Navan Chauhan <navanchauhan@gmail.com>  2023-10-08 02:07:14 -0600
committer GitHub <noreply@github.com>             2023-10-08 02:07:14 -0600
commit    4d2a0981a58f4c150972b7aca1c62d00f337405e (patch)
tree      b7d4683cf048699a6a52056cd0afe0eef1a8ce25 /autoAidModules
parent    990e5f6b211463ea27ce1ce159c7ffb7e4546c16 (diff)
parent    a8ffa7a710e09c471d1b1b101bf833d5369ccaf5 (diff)
Merge pull request #3 from navanchauhan/oct-8
select car and generate tasklist
Diffstat (limited to 'autoAidModules')
-rw-r--r--  autoAidModules/search_funcs.py  35
1 file changed, 30 insertions, 5 deletions
diff --git a/autoAidModules/search_funcs.py b/autoAidModules/search_funcs.py
index 241872e..e69ed47 100644
--- a/autoAidModules/search_funcs.py
+++ b/autoAidModules/search_funcs.py
@@ -1,16 +1,22 @@
+import os
+
from serpapi import GoogleSearch
from .sample_res import res
from boilerpy3 import extractors
from fake_useragent import UserAgent
+from langchain.llms import Bedrock
+from langchain.prompts.prompt import PromptTemplate
import requests
extractor = extractors.ArticleExtractor()
preferred_forums = {
- "BMW": ["bimmerforums.com"]
+ "BMW": ["bimmerforums.com"],
+ "Subaru": ["nasioc.com"]
}
+llm = Bedrock(model_id="anthropic.claude-instant-v1")
ua = UserAgent()
"""
@@ -29,7 +35,12 @@ Website data:
def find_preferred_forums(make):
    if make not in preferred_forums:
-        return None
+        template = "Human: If BMW: bimmerforums.com, Subaru: nasioc.com, Mazda: forum.miata.net What is the best forum for {make}? No more explanation\n\nAssistant: Then {make}:"
+        prompt = PromptTemplate(input_variables=["make"], template=template)
+        pred = llm.predict(prompt.format(make=make), max_tokens_to_sample=30, temperature=1, top_k=250, top_p=0.999)
+        make_url = pred.strip().split()[0]
+        print(f"Found {make_url} for {make}")
+        preferred_forums[make] = [make_url]
    return preferred_forums[make]
def get_preferred_forums(make):
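A quick sketch of how the new fallback in find_preferred_forums behaves; the Mazda output is illustrative only and assumes the Bedrock call succeeds:

    # Hypothetical usage; the printed values are illustrative, not from the source.
    from autoAidModules.search_funcs import find_preferred_forums, preferred_forums

    find_preferred_forums("BMW")    # known make: returns ["bimmerforums.com"], no LLM call
    find_preferred_forums("Mazda")  # unknown make: Claude Instant completes the few-shot
                                    # prompt and the first whitespace-separated token of
                                    # the reply is taken as the forum domain
    print(preferred_forums["Mazda"])  # the guess is cached, e.g. ["forum.miata.net"]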
@@ -41,6 +52,20 @@ def parse_page(url):
    content = extractor.get_content_from_url(url)
    return content
+def get_tasks_from_pages(pages: list = [], query: str = "", details: str = ""):
+    template = "Human: You are a beginner mechanic. You are trying to solve the problem of {query} and have a {details}.\n Generate simple tasks from the following pages:\n {pages}\n\nAssistant: I would try all of the following, one by one:\n\n- Have you tried turning your car on and off?\n- "
+    prompt_template = PromptTemplate(input_variables=["query", "details", "pages"], template=template)
+
+
+    pred = llm.predict(
+        prompt_template.format(
+            query=query, details=details, pages=pages
+        ), max_tokens_to_sample=501, temperature=1, top_k=250, top_p=0.999
+    )
+    pred = "- " + pred
+    print(pred)
+    return pred
+
def search_on_forum(forum, query, max_results: int = 5):
    params = {
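For context, a sketch of how the new get_tasks_from_pages helper might be called once page text has been extracted; the query, details, and URL below are invented for illustration:

    # Hypothetical call; inputs are invented for illustration.
    pages = [parse_page(url) for url in ["https://forums.nasioc.com/example-thread"]]
    tasks = get_tasks_from_pages(
        pages=pages,
        query="rattling noise at idle",
        details="2019 Subaru Crosstrek",
    )
    # `tasks` is a single bulleted string ("- ...") that continues the
    # "Have you tried turning your car on and off?" list seeded in the prompt.
    print(tasks)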
@@ -49,12 +74,12 @@ def search_on_forum(forum, query, max_results: int = 5):
"hl": "en",
"gl": "us",
"google_domain": "google.com",
- "api_key": "KEY"
+ "api_key": os.environ.get("SERP_API_KEY", "demo")
}
#search = GoogleSearch(params)
#results = search.get_dict()
- results = res
+ results = res # Debugging Data
if results["search_metadata"]['status'] == "Success":
data = []
for idx, result in enumerate(results["organic_results"]):
@@ -73,4 +98,4 @@ def search_on_forum(forum, query, max_results: int = 5):
            data.append(new_dict)
        return data
    else:
-        return []
\ No newline at end of file
+        return []
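With the SerpApi key now read from the environment, a sketch of what a live (non-debug) run would look like; note the commit as merged still returns the canned res data, so re-enabling the GoogleSearch call is my assumption about the intended production path:

    # Assumed live flow; the commit itself keeps the debugging data instead.
    import os
    os.environ["SERP_API_KEY"] = "your-serpapi-key"  # placeholder, set a real key

    from autoAidModules.search_funcs import search_on_forum

    hits = search_on_forum("nasioc.com", "subaru crosstrek rattling noise at idle")
    for hit in hits:
        print(hit)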