        # Style each node: queries render larger and in a distinct color
        # from result pages.
        for node, data in self.graph.nodes(data=True):
            if node in self.search_queries:
                node_color = color_map["query"]
                node_size = 15
            else:
                node_color = color_map["result"]
                node_size = 10

            # Truncate long labels and hover text to keep the graph readable.
            label = node[:30] + "..." if len(node) > 30 else node
            title = data.get("content", node)
            if title:
                title = title[:200] + "..." if len(title) > 200 else title

            net.add_node(
                node, label=label, color=node_color, size=node_size, title=title
            )

        # Connect every query to the results it produced.
        for edge in self.graph.edges():
            net.add_edge(edge[0], edge[1], color="#2d3436")

        try:
            net.write_html(output_file)
            logging.info(f"Successfully generated visualization at: {output_file}")
        except Exception as e:
            logging.error(f"Failed to generate visualization: {e}")

    def fetch_webpage(self, url):
        """Fetch webpage content, returning None on failure."""
        logging.debug(f"Fetching webpage: {url}")
        try:
            # A browser-like User-Agent avoids some trivial bot blocking.
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
            }
            response = requests.get(url, timeout=10, headers=headers)
            response.raise_for_status()  # treat HTTP error statuses as failures
            return response.text
        except Exception as e:
            logging.error(f"Error fetching {url}: {e}")
            return None
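
    # A thin retry wrapper around fetch_webpage for flaky connections; a
    # sketch, not part of the original (`retries` is an assumed parameter).
    def fetch_webpage_with_retries(self, url, retries=3):
        """Retry fetch_webpage up to `retries` times before giving up."""
        for attempt in range(1, retries + 1):
            content = self.fetch_webpage(url)
            if content is not None:
                return content
            logging.debug(f"Attempt {attempt}/{retries} failed for {url}")
        return None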

    def html_to_markdown(self, html_content):
        """Convert HTML to Markdown."""
        if not html_content:
            return None
        h2t = HTML2Text()
        h2t.ignore_links = False  # keep hyperlinks in the Markdown output
        h2t.ignore_images = True  # drop images; only text content is needed
        return h2t.handle(html_content)
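
    # Usage sketch (output shown roughly; exact spacing depends on the
    # html2text version):
    #
    #     md = self.html_to_markdown("<h1>Title</h1><p>Some text.</p>")
    #     # md is roughly "# Title\n\nSome text.\n\n"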

    def google_search(self, query, max_results=10):
        """Simulate a Google search (placeholder results only)."""
        logging.debug(f"Performing search for query: {query}")
        search_url = f"https://www.google.com/search?q={quote_plus(query)}"
        try:
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
            }
            response = requests.get(search_url, timeout=10, headers=headers)
            # The response is fetched but deliberately not parsed: scraping
            # Google's result markup is brittle, so placeholder URLs stand in
            # for real results (replace with an actual search API in production).
            results = []
            for i in range(max_results):
                results.append(f"https://example.com/result_{i}")
            return results
        except Exception as e:
            logging.error(f"Error in search: {e}")
            return []
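
    # The simulated search above can be swapped for a real backend. A minimal
    # sketch using the Google Custom Search JSON API (an assumption, not the
    # original design; `api_key` and `cx` are credentials you would supply):
    def google_search_api(self, query, api_key, cx, max_results=10):
        """Search via the Google Custom Search JSON API (sketch)."""
        try:
            response = requests.get(
                "https://www.googleapis.com/customsearch/v1",
                params={"key": api_key, "cx": cx, "q": query, "num": max_results},
                timeout=10,
            )
            response.raise_for_status()
            items = response.json().get("items", [])
            return [item["link"] for item in items]
        except Exception as e:
            logging.error(f"Error in API search: {e}")
            return []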

    def process_search_result(self, url, query):
        """Process a single search result."""
        logging.debug(f"Processing result URL: {url}")
        html_content = self.fetch_webpage(url)
        markdown_content = self.html_to_markdown(html_content)
        if markdown_content:
            # The graph is shared across worker threads, so guard mutation
            # with a lock.
            with self.graph_lock:
                self.graph.add_node(url, content=markdown_content[:500])
                self.graph.add_edge(query, url, weight=1)
        return {
            "url": url,
            "content": markdown_content[:500] if markdown_content else None,
        }
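
    # Shape of the returned dict (values illustrative):
    #
    #     {"url": "https://example.com/result_0", "content": "# Page Title..."}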

    def process_search_query(self, query):
        """Process a single search query."""
        logging.info(f"Processing query: {query}")
        urls = self.google_search(query)
        # Fetch and process every result URL concurrently.
        with ThreadPoolExecutor(max_workers=10) as executor:
            futures = [
                executor.submit(self.process_search_result, url, query)
                for url in urls
            ]
            results = [future.result() for future in futures]
        return {"query": query, "results": results}

    def process_question(self):
        """Main processing method"""