Complete Python SDK Example
Here’s a comprehensive Python implementation for working with the Fintool API:
import requests
import json
import os
from typing import List, Dict, Optional, Iterator


class FintoolClient:
    """Python client for the Fintool API"""

    def __init__(self, api_key: Optional[str] = None):
        """
        Initialize the Fintool client

        Args:
            api_key: Your Fintool API key. If not provided, reads from FINTOOL_API_KEY env var
        """
        self.api_key = api_key or os.getenv('FINTOOL_API_KEY')
        if not self.api_key:
            raise ValueError("API key must be provided or set in FINTOOL_API_KEY environment variable")
        self.base_url = "https://api.fintool.com"
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }

    def chat(
        self,
        messages: List[Dict[str, str]],
        stream: bool = False,
        filters: Optional[Dict] = None,
        include_citations: bool = True
    ):
        """
        Send a chat request to the Fintool API

        Args:
            messages: List of message dicts with 'role' and 'content'
            stream: Enable streaming responses
            filters: Optional filters for tickers, doc_types, dates
            include_citations: Include citation details in response

        Returns:
            Response dict or iterator of events if streaming
        """
        url = f"{self.base_url}/v2/chat"
        payload = {
            "messages": messages,
            "stream": stream,
            "include_citations": include_citations
        }
        if filters:
            payload["filters"] = filters

        if stream:
            return self._stream_chat(url, payload)
        else:
            response = requests.post(url, headers=self.headers, json=payload)
            response.raise_for_status()
            return response.json()

    def _stream_chat(self, url: str, payload: Dict) -> Iterator[Dict]:
        """Stream chat responses"""
        with requests.post(url, headers=self.headers, json=payload, stream=True) as response:
            response.raise_for_status()
            for line in response.iter_lines():
                if not line:
                    continue
                line = line.decode('utf-8')
                if line.startswith('data: '):
                    try:
                        data = json.loads(line[6:])
                        yield data
                    except json.JSONDecodeError:
                        continue

    def search(
        self,
        query: str,
        filters: Optional[Dict] = None,
        top_k: int = 10,
        rerank: bool = True
    ) -> Dict:
        """
        Search for relevant document chunks

        Args:
            query: Search query string
            filters: Optional filters for tickers, doc_types, dates
            top_k: Number of chunks to return
            rerank: Apply reranking for better relevance

        Returns:
            Dict with 'chunks', 'total_chunks', and 'processing_time_ms'
        """
        url = f"{self.base_url}/v1/search"
        payload = {
            "query": query,
            "top_k": top_k,
            "rerank": rerank
        }
        if filters:
            payload["filters"] = filters

        response = requests.post(url, headers=self.headers, json=payload)
        response.raise_for_status()
        return response.json()


# Example usage
if __name__ == "__main__":
    # Initialize client
    client = FintoolClient()

    # Example 1: Simple chat query
    print("Example 1: Simple Chat Query")
    print("-" * 50)
    response = client.chat(
        messages=[
            {"role": "user", "content": "What was Tesla's revenue in Q4 2024?"}
        ],
        filters={"tickers": ["TSLA"]}
    )
    print(f"Response: {response['message']['content']}")
    print("\nCitations:")
    for citation in response.get('citations', []):
        print(f"  - {citation['document_title']}, Page {citation['page_number']}")

    print("\n" + "=" * 50 + "\n")

    # Example 2: Streaming chat
    print("Example 2: Streaming Chat")
    print("-" * 50)
    for event in client.chat(
        messages=[
            {"role": "user", "content": "Analyze Microsoft's cloud business growth"}
        ],
        stream=True,
        filters={"tickers": ["MSFT"]}
    ):
        if event.get('type') == 'message':
            message = event.get('message', {})
            if 'thinking' in message:
                print(f"[Thinking] {message['thinking']}")
            if 'content' in message:
                print(f"Content: {message['content'][:100]}...")

    print("\n" + "=" * 50 + "\n")

    # Example 3: Search for chunks
    print("Example 3: Search for Document Chunks")
    print("-" * 50)
    search_results = client.search(
        query="Apple iPhone revenue",
        filters={"tickers": ["AAPL"]},
        top_k=5
    )
    print(f"Found {search_results['total_chunks']} chunks")
    print(f"Processing time: {search_results['processing_time_ms']}ms\n")
    for i, chunk in enumerate(search_results['chunks'][:3], 1):
        print(f"{i}. {chunk['document_title']}")
        print(f"   Page: {chunk['page_number']}, Relevance: {chunk['relevance_score']:.2f}")
        print(f"   Text: {chunk['text'][:150]}...\n")
Multi-turn Conversation
Here’s how to maintain conversation context across multiple turns:
from fintool_client import FintoolClient

client = FintoolClient()

# Conversation history
conversation = []

def ask_question(question: str):
    """Ask a question and maintain conversation context"""
    # Add user message
    conversation.append({"role": "user", "content": question})

    # Get response
    response = client.chat(
        messages=conversation,
        filters={"tickers": ["AAPL", "MSFT", "GOOGL"]}
    )

    # Add assistant response with metadata
    assistant_msg = response['message'].copy()
    conversation.append(assistant_msg)

    return response

# Multi-turn conversation
print("Q1: Compare Apple and Microsoft's revenue growth")
r1 = ask_question("Compare Apple and Microsoft's revenue growth in 2024")
print(f"A1: {r1['message']['content']}\n")

print("Q2: Which one is growing faster?")
r2 = ask_question("Which one is growing faster?")
print(f"A2: {r2['message']['content']}\n")

print("Q3: What about their profitability?")
r3 = ask_question("What about their profitability?")
print(f"A3: {r3['message']['content']}\n")
RAG Integration with OpenAI
Combine Fintool search with your own LLM:
import os
from typing import List

from openai import OpenAI
from fintool_client import FintoolClient

# Initialize clients
fintool = FintoolClient()
openai_client = OpenAI(api_key=os.getenv('OPENAI_API_KEY'))

def rag_query(question: str, tickers: List[str]):
    """
    Use Fintool for retrieval and OpenAI for generation

    Args:
        question: User's question
        tickers: List of stock tickers to search

    Returns:
        Generated response with citations
    """
    # Step 1: Retrieve relevant chunks from Fintool
    search_results = fintool.search(
        query=question,
        filters={"tickers": tickers},
        top_k=5,
        rerank=True
    )

    # Step 2: Build context from chunks
    context_parts = []
    for chunk in search_results['chunks']:
        context_parts.append(
            f"[{chunk['chunk_id']}] From {chunk['document_title']} (Page {chunk['page_number']}):\n"
            f"{chunk['text']}"
        )
    context = "\n\n".join(context_parts)

    # Step 3: Generate response with your LLM
    completion = openai_client.chat.completions.create(
        model="gpt-4",
        messages=[
            {
                "role": "system",
                "content": "You are a financial analyst. Answer questions using ONLY the provided context. "
                           "Include citation markers [chunk_id] when referencing information."
            },
            {
                "role": "user",
                "content": f"Context:\n{context}\n\nQuestion: {question}"
            }
        ]
    )
    response_text = completion.choices[0].message.content

    return {
        "answer": response_text,
        "chunks": search_results['chunks']
    }

# Usage
result = rag_query(
    question="What is Tesla's strategy for autonomous driving?",
    tickers=["TSLA"]
)
print(f"Answer: {result['answer']}\n")
print("Sources:")
for chunk in result['chunks']:
    print(f"  - {chunk['document_title']}, Page {chunk['page_number']}")
Async/Await Support
For async applications using httpx:
import httpx
import asyncio
import json
import os
from typing import List, Dict, Optional, AsyncIterator


class AsyncFintoolClient:
    """Async Python client for the Fintool API"""

    def __init__(self, api_key: Optional[str] = None):
        self.api_key = api_key or os.getenv('FINTOOL_API_KEY')
        if not self.api_key:
            raise ValueError("API key required")
        self.base_url = "https://api.fintool.com"
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }

    async def chat(
        self,
        messages: List[Dict[str, str]],
        filters: Optional[Dict] = None
    ) -> Dict:
        """Async chat request (non-streaming)"""
        url = f"{self.base_url}/v2/chat"
        payload = {
            "messages": messages,
            "stream": False
        }
        if filters:
            payload["filters"] = filters

        async with httpx.AsyncClient() as client:
            response = await client.post(url, headers=self.headers, json=payload)
            response.raise_for_status()
            return response.json()

    async def chat_stream(
        self,
        messages: List[Dict[str, str]],
        filters: Optional[Dict] = None
    ) -> AsyncIterator[Dict]:
        """Async streaming chat request.

        Kept separate from chat() because a single coroutine cannot both
        return a response dict and yield streamed events.
        """
        url = f"{self.base_url}/v2/chat"
        payload = {
            "messages": messages,
            "stream": True
        }
        if filters:
            payload["filters"] = filters

        async with httpx.AsyncClient() as client:
            async for event in self._stream_chat(client, url, payload):
                yield event

    async def _stream_chat(
        self,
        client: httpx.AsyncClient,
        url: str,
        payload: Dict
    ) -> AsyncIterator[Dict]:
        """Stream chat responses asynchronously"""
        async with client.stream('POST', url, headers=self.headers, json=payload) as response:
            response.raise_for_status()
            async for line in response.aiter_lines():
                if line.startswith('data: '):
                    try:
                        data = json.loads(line[6:])
                        yield data
                    except json.JSONDecodeError:
                        continue

    async def search(
        self,
        query: str,
        filters: Optional[Dict] = None,
        top_k: int = 10,
        rerank: bool = True
    ) -> Dict:
        """Async search request"""
        url = f"{self.base_url}/v1/search"
        payload = {
            "query": query,
            "top_k": top_k,
            "rerank": rerank
        }
        if filters:
            payload["filters"] = filters

        async with httpx.AsyncClient() as client:
            response = await client.post(url, headers=self.headers, json=payload)
            response.raise_for_status()
            return response.json()


# Example async usage
async def main():
    client = AsyncFintoolClient()

    # Non-streaming request
    response = await client.chat(
        messages=[{"role": "user", "content": "What is Apple's revenue?"}],
        filters={"tickers": ["AAPL"]}
    )
    print(response['message']['content'])

    # Streaming request
    async for event in client.chat_stream(
        messages=[{"role": "user", "content": "Analyze Microsoft"}]
    ):
        if event.get('type') == 'message':
            print(event['message'].get('content', ''))

# Run async code
asyncio.run(main())
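One payoff of the async client is issuing independent requests concurrently. A minimal sketch that fans several searches out with asyncio.gather, so total latency is roughly that of the slowest call:

async def parallel_searches():
    client = AsyncFintoolClient()
    queries = [
        "Apple iPhone revenue",
        "Microsoft Azure growth",
        "Alphabet advertising revenue"
    ]
    # Launch all searches at once and wait for every result
    results = await asyncio.gather(
        *(client.search(query=q, top_k=3) for q in queries)
    )
    for query, result in zip(queries, results):
        print(f"{query}: {result['total_chunks']} chunks")

asyncio.run(parallel_searches())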
Error Handling
Robust error handling example:
import time

import requests
from requests.exceptions import RequestException, Timeout, HTTPError

def safe_chat_query(client, query: str, max_retries: int = 3):
    """Chat query with error handling and retries"""
    for attempt in range(max_retries):
        try:
            response = client.chat(
                messages=[{"role": "user", "content": query}],
                filters={"tickers": ["AAPL"]}
            )
            return response
        except HTTPError as e:
            if e.response.status_code == 401:
                print("Authentication failed. Check your API key.")
                break
            elif e.response.status_code == 429:
                wait_time = 2 ** attempt  # Exponential backoff
                print(f"Rate limited. Waiting {wait_time}s before retry...")
                time.sleep(wait_time)
            elif e.response.status_code >= 500:
                print(f"Server error. Retry {attempt + 1}/{max_retries}")
                time.sleep(1)
            else:
                print(f"HTTP error: {e}")
                break
        except Timeout:
            print(f"Request timeout. Retry {attempt + 1}/{max_retries}")
            time.sleep(1)
        except RequestException as e:
            print(f"Request failed: {e}")
            break
    return None

# Usage
client = FintoolClient()
result = safe_chat_query(client, "What is Tesla's revenue?")
if result:
    print(result['message']['content'])
else:
    print("Failed to get response after retries")
Citation Extraction
Helper functions for working with citations:
import re
from typing import List, Dict, Optional, Tuple

def extract_citations(content: str, citations: List[Dict]) -> List[Tuple[str, Optional[Dict]]]:
    """
    Extract citation markers from content and match them with citation objects

    Args:
        content: Response content with citation markers like **[chunk_id]**
        citations: List of citation objects from the API response

    Returns:
        List of tuples (text_segment, citation_object); the citation is None
        for trailing text after the last marker
    """
    # Create citation map
    citation_map = {c['chunk_id']: c for c in citations}

    # Find all citation markers
    pattern = r'\*\*\[([^\]]+)\]\*\*'
    matches = re.finditer(pattern, content)

    results = []
    last_end = 0
    for match in matches:
        # Text before citation
        text = content[last_end:match.start()]
        chunk_id = match.group(1)
        if chunk_id in citation_map:
            results.append((text, citation_map[chunk_id]))
        last_end = match.end()

    # Add remaining text
    if last_end < len(content):
        results.append((content[last_end:], None))

    return results

def format_with_citations(content: str, citations: List[Dict]) -> str:
    """
    Format content with inline citations as footnotes

    Args:
        content: Response content with citation markers
        citations: List of citation objects

    Returns:
        Formatted string with footnotes
    """
    citation_map = {c['chunk_id']: c for c in citations}

    # Replace markers with footnote numbers
    counter = 1
    footnotes = []

    def replace_marker(match):
        nonlocal counter
        chunk_id = match.group(1)
        if chunk_id in citation_map:
            citation = citation_map[chunk_id]
            footnotes.append(
                f"[{counter}] {citation['document_title']}, "
                f"Page {citation['page_number']}"
            )
            result = f"[{counter}]"
            counter += 1
            return result
        return match.group(0)

    # Replace citation markers
    formatted_content = re.sub(r'\*\*\[([^\]]+)\]\*\*', replace_marker, content)

    # Add footnotes
    if footnotes:
        formatted_content += "\n\nSources:\n" + "\n".join(footnotes)

    return formatted_content

# Usage
client = FintoolClient()
response = client.chat(
    messages=[{"role": "user", "content": "What is Apple's revenue?"}]
)
formatted = format_with_citations(
    response['message']['content'],
    response.get('citations', [])
)
print(formatted)
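extract_citations pairs each text segment with its source, which is useful when you want to render citations yourself rather than as footnotes:

for text, citation in extract_citations(
    response['message']['content'],
    response.get('citations', [])
):
    if citation:
        print(f"{text.strip()} [source: {citation['document_title']}, "
              f"p. {citation['page_number']}]")
    else:
        print(text.strip())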
