"""Paperstack: sync a Notion database of papers with arXiv metadata, optionally
discover new papers via arXiv search and Semantic Scholar recommendations, and
use OpenAI to summarize abstracts and assign focus labels."""

import argparse
import os

from notion_utils import (
    get_notion_client,
    get_papers_from_notion,
    write_papers_to_notion,
)
from arxiv_utils import fill_papers_with_arxiv, search_arxiv_as_paper
from openai_utils import (
    get_focus_label_from_abstract,
    get_openai_client,
    summarize_abstract_with_openai,
)
from scholar_utils import get_recommended_arxiv_ids_from_semantic_scholar

ARXIV_SEARCH = """\
"adversarial attacks" OR "language model attacks" OR "LLM vulnerabilities" OR \
"AI security" OR "machine learning security" OR "jailbreak" OR "bypassing AI"\
"""


def main():
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "--notion-token",
        type=str,
        default=os.environ.get("NOTION_TOKEN"),
        help="Notion token",
    )
    parser.add_argument(
        "--database-id",
        type=str,
        default=os.environ.get("NOTION_DATABASE_ID"),
        help="Notion database id",
    )
    parser.add_argument(
        "--openai-token",
        type=str,
        default=os.environ.get("OPENAI_API_TOKEN"),
        help="OpenAI token",
    )
    parser.add_argument("--arxiv-search-query", type=str, default=ARXIV_SEARCH)
    parser.add_argument("--search-arxiv", action="store_true", default=False)
    parser.add_argument("--search-semantic-scholar", action="store_true", default=False)

    args = parser.parse_args()

    print("[+] Paperstack")

    notion_client = get_notion_client(args.notion_token)
    openai_client = get_openai_client(args.openai_token)

    # Get papers from Notion
    print(" |- Getting papers from Notion")
    papers = get_papers_from_notion(notion_client, args.database_id)

    # Fill in missing data from arXiv
    print(" |- Filling in missing data from arXiv")
    papers = fill_papers_with_arxiv(papers)

    if args.search_arxiv:
        # Search arXiv for new papers and deduplicate
        print(" |- Searching arXiv")
        existing_titles = [paper.title for paper in papers]
        for searched_paper in search_arxiv_as_paper(args.arxiv_search_query, max_results=5):
            if searched_paper.title not in existing_titles:
                print(f" |- {searched_paper.title[:50]}...")
                papers.append(searched_paper)

    if args.search_semantic_scholar:
        print(" |- Getting related papers from Semantic Scholar")
        recommended_papers = get_recommended_arxiv_ids_from_semantic_scholar(papers)
        papers.extend(fill_papers_with_arxiv(recommended_papers))
        print(f" |- {len(recommended_papers)} new papers")

    # Build summaries
    print(" |- Building summaries")
    for paper in papers:
        if not paper.summary and paper.abstract:
            print(f" |- {paper.title[:50]}...")
            paper.summary = summarize_abstract_with_openai(
                openai_client, paper.abstract
            )

    # Assigning focus labels
    print(" |- Assigning focus labels")
    for paper in papers:
        if not paper.focus:
            paper.focus = get_focus_label_from_abstract(openai_client, paper.abstract)
            print(f" |- {paper.focus}")

    print(f" |- Writing back to Notion [{len(papers)}]...")
    write_papers_to_notion(notion_client, args.database_id, papers)


if __name__ == "__main__":
    main()
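
# Example invocation (sketch only -- the filename "paperstack.py" is assumed
# here, not given by the repo; the flags and environment variable names come
# from the argument parser above):
#
#   NOTION_TOKEN=... NOTION_DATABASE_ID=... OPENAI_API_TOKEN=... \
#     python paperstack.py --search-arxiv --search-semantic-scholar
#
# Omitting both --search-arxiv and --search-semantic-scholar only refreshes and
# summarizes papers already present in the Notion database.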