# This script gathers upcoming events, recent emails, and journal entries;
# then it searches your flat-file KB for related notes based on tags;
# finally, it compiles all the information into a briefing.
#
# (Optionally, you can send the briefing text to an LLM API for enhancement.)
import os, glob, yaml
|
|
from datetime import datetime
|
|
|
|
# Placeholder function for the LLM API call
def generate_briefing(text):
    """Return an "enhanced" version of *text*.

    Stub implementation: prepends a fixed header to the briefing body.
    Replace with a real LLM API call if needed.
    """
    return "Enhanced Briefing:\n\n" + text
|
|
|
|
def get_upcoming_events():
    """Return the list of upcoming events.

    Each event is a dict with "title", "datetime", and "tags" keys.
    Currently hard-coded; in a real scenario this would fetch events
    from a calendar source.
    """
    return [
        {
            "title": "Project Meeting",
            "datetime": "2025-02-17 10:00",
            "tags": ["meeting", "projectX"],
        },
        {
            "title": "Code Review",
            "datetime": "2025-02-17 14:00",
            "tags": ["review", "code"],
        },
    ]
|
|
|
|
def get_recent_emails():
    """Return the list of recent emails.

    Each email is a dict with "subject", "content", and "tags" keys.
    Simulated data; could be replaced with a file read or an API call.
    """
    return [
        {
            "subject": "Follow-up on project",
            "content": "Let's discuss the new project approach",
            "tags": ["projectX"],
        },
    ]
|
|
|
|
def get_journal_entries():
    """Return the list of journal entries.

    Each entry is a dict with "date" and "content" keys. Simulated
    data; a real implementation might read from text files.
    """
    return [
        {
            "date": "2025-02-15",
            "content": "Had a breakthrough with the complex feature.",
        },
    ]
|
|
|
|
def search_kb_for_tags(tags, kb_dir="path/to/kb"):
    """Search the flat-file knowledge base for notes matching any of *tags*.

    Scans every Markdown file directly inside *kb_dir* for YAML front
    matter (an opening "---" at the start of the file plus a closing
    "---") and collects the notes whose "tags" metadata contains at
    least one of the requested tags.

    Args:
        tags: Iterable of tag strings to match against each note's tags.
        kb_dir: Directory containing the Markdown notes (not recursive).

    Returns:
        A list of dicts with "id", "tags", and "file" keys, one per
        matching note. "id" falls back to the file path when the front
        matter has no "id" field.
    """
    notes = []
    for md_file in glob.glob(os.path.join(kb_dir, "*.md")):
        with open(md_file, "r", encoding="utf-8") as f:
            content = f.read()

        # Notes without front matter cannot match — skip early.
        if not content.startswith("---"):
            continue
        end = content.find("---", 3)  # closing delimiter of the front matter
        if end == -1:
            continue

        try:
            metadata = yaml.safe_load(content[3:end])
        except yaml.YAMLError as e:
            # Only genuine YAML problems count as parse errors; other
            # exceptions (bugs, I/O issues) should surface normally.
            print(f"Error parsing {md_file}: {e}")
            continue

        # safe_load returns None for empty front matter and a scalar for
        # non-mapping documents; neither can carry a "tags" list.
        if not isinstance(metadata, dict):
            continue

        note_tags = metadata.get("tags")
        if note_tags is not None and any(tag in note_tags for tag in tags):
            notes.append(
                {"id": metadata.get("id", md_file), "tags": note_tags, "file": md_file}
            )
    return notes
|
|
|
|
def build_briefing():
    """Compile events, emails, journals, and related KB notes into a briefing.

    Gathers all source data, derives the tag set used to query the
    knowledge base, formats everything into a single text block, and
    passes it through generate_briefing() before returning.
    """
    events = get_upcoming_events()
    emails = get_recent_emails()
    journals = get_journal_entries()

    # Tags from events and emails drive the KB lookup.
    tags = set()
    for item in events + emails:
        if "tags" in item:
            tags.update(item["tags"])

    kb_notes = search_kb_for_tags(tags)

    # Assemble the briefing from parts, then join once at the end.
    parts = ["Upcoming Briefing:\n\n"]

    parts.append("Events:\n")
    parts.extend(f"- {event['datetime']}: {event['title']}\n" for event in events)

    parts.append("\nEmails:\n")
    parts.extend(f"- {email['subject']}: {email['content']}\n" for email in emails)

    parts.append("\nJournal Entries:\n")
    parts.extend(f"- {journal['date']}: {journal['content']}\n" for journal in journals)

    parts.append("\nRelated KB Notes:\n")
    parts.extend(
        f"- Note {note['id']} with tags {', '.join(note['tags'])}\n"
        for note in kb_notes
    )

    briefing_text = "".join(parts)

    # Optionally, send the briefing text to an LLM API to enhance it
    return generate_briefing(briefing_text)
|
|
|
|
def main():
    """Build the briefing and write it to briefing.txt in the working dir."""
    briefing = build_briefing()
    with open("briefing.txt", "w", encoding="utf-8") as out:
        out.write(briefing)
    print("Briefing generated and saved to briefing.txt")
|
|
|
|
# Script entry point: generate the briefing only when run directly.
if __name__ == "__main__":
    main()