Skip to content

Commit 347de2f

Browse files
authored
Update main.py
Replaced the GSA per-diem code in main.py with the nyt_movies code.
1 parent 8febe73 commit 347de2f

File tree

1 file changed

+56
-159
lines changed

1 file changed

+56
-159
lines changed

app/main.py

Lines changed: 56 additions & 159 deletions
Original file line numberDiff line numberDiff line change
@@ -1,195 +1,92 @@
11
#pipenv install fastapi httpx
22
#pipenv shell
3-
#uvicorn gsa_zip:app --reload
4-
#http://127.0.0.1:8000/call_gsa_api?zip_code=92011&year=2024
5-
#http://127.0.0.1:8000/call_gsa_api/calculate_perdiem?zip_code=92011&begin_date=2023-12-29&end_date=2024-01-02
3+
# uvicorn nyt_movies:app --reload
4+
# http://127.0.0.1:8000/call-external-api?begin_date=20240905&end_date=20240913
65

7-
from fastapi import FastAPI, Header
6+
from fastapi import FastAPI
87
import httpx
98
from datetime import datetime
10-
import asyncio # Import the asyncio module
119

1210
app = FastAPI()
1311

14-
X_API_KEY='imRuLjQ9Wvzte0ZsNWojJzGnnHdwlzMFR7TJZpXf' # your X API key
15-
BASE_URL='https://api.gsa.gov/travel/perdiem/v2'
12+
# NOTE(review): secret is hardcoded — move it to an environment variable;
# anyone with read access to the repo can use (and exhaust) this key.
API_KEY = 'uedPipKsm7yUoLxd1GEha30oBnpUtXza'  # your API key

# Default date window (YYYYMMDD) for exercising the endpoint by hand.
BEGIN_DATE = '20240905'
END_DATE = '20240911'

# Pre-URL-encoded filter query restricting results to movie reviews.
# Decoded: section_name:Movies AND type_of_material:Review
FQ = 'section_name%3AMovies AND type_of_material%3AReview'  # keyword

# Pre-URL-encoded, comma-separated list of fields to return per article.
F1 = 'document_type%2Cabstract%2Cweb_url%2Cheadline%2Ckeywords%2Cbyline'

EXTRA_FILTERS = 'sort=newest&page=0'  # newest first, first results page only

BASE_URL = 'https://api.nytimes.com/svc/search/v2/articlesearch.json'
1619

17-
import json
1820

19-
MONTH_NUMBERS = {
20-
'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6,
21-
'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12
22-
}
23-
24-
def parse_and_display_rates(data):
21+
def extract_article_info(data):
    """
    Extract the relevant fields from an NYT Article Search API response.

    Args:
        data (dict): Parsed JSON from the articlesearch endpoint; must
            contain data["response"]["docs"] (a list of article documents).

    Returns:
        list: One dict per article with keys 'abstract', 'web_url',
        'headlines', 'keywords', and 'author_name'.
    """
    current_time = datetime.now().strftime("%H:%M:%S")
    print("extract_article_info Run time is:", current_time)

    articles_info = []

    for article in data["response"]["docs"]:
        abstract = article.get("abstract")  # .get() tolerates missing keys
        web_url = article.get("web_url")

        # FIX: 'headline' can be absent on some documents; the previous
        # article["headline"] raised KeyError. Fall back to an empty dict.
        headline = article.get("headline") or {}
        grouped_headlines = {
            "headline": headline.get("main"),
            "print_headline": headline.get("print_headline"),
        }

        keywords = article.get("keywords", [])
        grouped_keywords = {
            "subject": [kw["value"] for kw in keywords if kw["name"] == "subject"],
            # Keep only the first creative_works entry, or None if absent.
            "creative_work": next(
                (kw["value"] for kw in keywords if kw["name"] == "creative_works"),
                None,
            ),
            "persons": [kw["value"] for kw in keywords if kw["name"] == "persons"],
        }

        # FIX: 'byline' may be missing or None, and byline['original'] may be
        # an explicit None — both crashed the old article["byline"] lookup.
        byline = article.get("byline") or {}
        author_name = (byline.get("original") or "").replace("By ", "")

        articles_info.append({
            "abstract": abstract,
            "web_url": web_url,
            "headlines": grouped_headlines,
            "keywords": grouped_keywords,
            "author_name": author_name,
        })
    return articles_info
7363

7464
@app.get("/")
async def root():
    """Landing route: log the hit time and return a greeting payload."""
    stamp = datetime.now().strftime("%H:%M:%S")
    print("Root Run time is:", stamp)
    return {"message": "Welcome to the FAST API"}
7969

8070
# @app.get("/items/{item_id}")
8171
# async def read_user_item(item_id: str, needy: str | None = None): # Make needy optional
8272
# item = {"item_id": item_id, "needy": needy}
8373
# return item
8474

8575

86-
@app.get("/call_gsa_api")
87-
async def call_gsa_api(zip_code:str, year:str):
76+
@app.get("/call-external-api")
async def call_external_api(begin_date: str, end_date: str):
    """
    Fetch NYT movie reviews published within a date window.

    Args:
        begin_date: Window start, YYYYMMDD (e.g. '20240905').
        end_date: Window end, YYYYMMDD.

    Returns:
        list: Per-article dicts from extract_article_info on success,
        otherwise a dict with 'error' and the upstream 'status_code'.
    """
    async with httpx.AsyncClient() as client:
        print("begin_date:", begin_date)
        print("end_date:", end_date)

        # FQ and F1 are already URL-encoded, so the query string is built by
        # hand rather than via httpx params= (which would double-encode).
        # BUG FIX: the Article Search field-list parameter is 'fl'; the old
        # 'f1' was silently ignored by the server, so the field restriction
        # never applied.
        url = (BASE_URL
               + '?begin_date=' + begin_date
               + '&end_date=' + end_date
               + '&fl=' + F1
               + '&fq=' + FQ
               + '&' + EXTRA_FILTERS
               + '&api-key=' + API_KEY)
        print("url:", url)

        response = await client.get(url)
        # Process the response as needed
        if response.status_code == 200:
            data = response.json()  # Parse the JSON response
            return extract_article_info(data)
        # Surface the upstream status to ease debugging (e.g. 429 rate limit).
        return {"error": "Failed to fetch data", "status_code": response.status_code}
114-
115-
116-
@app.get("/call_gsa_api/calculate_perdiem")
117-
async def call_gsa_api(zip_code:str, begin_date:str, end_date:str):
118-
async with httpx.AsyncClient() as client:
119-
120-
headers = {
121-
#"Authorization": "Bearer my_token", # Example authorization header
122-
#"Content-Type": "application/json", # Example content type header
123-
# Add more headers as needed
124-
"x-api-key": X_API_KEY
125-
}
126-
127-
ZIP = zip_code
128-
print("zip:", ZIP)
129-
130-
# Extract date components
131-
begin_YYYY, begin_MM, begin_day = begin_date.split("-")
132-
print("begin_YYYY:", begin_YYYY, "begin_MM:", begin_MM, "begin_DD:", begin_day)
133-
134-
end_YYYY, end_MM, end_day = end_date.split("-")
135-
print("end_YYYY:", end_YYYY, "end_MM:", end_MM, "end_DD:", end_day)
136-
137-
# Calculate number of days
138-
begin_num_of_days = (datetime(year=int(begin_YYYY), month=12, day=31) -
139-
datetime(year=int(begin_YYYY), month=int(begin_MM), day=int(begin_day))).days + 1
140-
print("begin_num_of_days:", begin_num_of_days)
141-
end_num_of_days = (datetime(year=int(end_YYYY), month=int(end_MM), day=int(end_day)) -
142-
datetime(year=int(end_YYYY), month=1, day=1)).days + 1
143-
print("end_num_of_days:", end_num_of_days)
144-
145-
# Make API calls for both years if necessary
146-
if begin_YYYY == end_YYYY:
147-
url = BASE_URL + '/rates/zip/' + ZIP + '/year/' + begin_YYYY
148-
responses = [await client.get(url, headers=headers)]
149-
else:
150-
url_begin_year = BASE_URL + '/rates/zip/' + ZIP + '/year/' + begin_YYYY
151-
url_end_year = BASE_URL + '/rates/zip/' + ZIP + '/year/' + end_YYYY
152-
responses = await asyncio.gather(
153-
client.get(url_begin_year, headers=headers),
154-
client.get(url_end_year, headers=headers)
155-
)
156-
157-
# Process responses and combine data
158-
extracted_data = {}
159-
for response in responses:
160-
if response.status_code == 200:
161-
json_data = response.json()
162-
year_data = parse_and_display_rates(json_data)
163-
extracted_data.update(year_data)
164-
else:
165-
return {"error": "Failed to fetch data for one or both years"}
166-
167-
begin_month_num = int(begin_MM) # Convert begin_MM to an integer
168-
print("begin_month_num:", begin_month_num)
169-
end_month_num = int(end_MM)
170-
print("end_month_num:", end_month_num)
171-
172-
# Calculate and add rate breakdowns (using daily_hotel_rates_by_month)
173-
extracted_data['begin_YYYY'] = begin_YYYY
174-
extracted_data['begin_MM'] = begin_MM
175-
extracted_data['begin_num_of_days']= begin_num_of_days
176-
177-
extracted_data['begin_daily_hotel_rate'] = extracted_data['daily_hotel_rates_by_month'][begin_month_num - 1]['value']
178-
extracted_data['begin_total_hotel'] = begin_num_of_days * extracted_data['begin_daily_hotel_rate']
179-
extracted_data['begin_daily_meals_rate'] = extracted_data['meals_rate']
180-
extracted_data['begin_total_meals'] = begin_num_of_days * extracted_data['begin_daily_meals_rate']
181-
182-
extracted_data['end_YYYY'] = end_YYYY
183-
extracted_data['end_MM'] = end_MM
184-
extracted_data['end_num_of_days']= end_num_of_days
185-
186-
extracted_data['end_daily_hotel_rate'] = extracted_data['daily_hotel_rates_by_month'][end_month_num - 1]['value']
187-
extracted_data['end_total_hotel'] = end_num_of_days * extracted_data['end_daily_hotel_rate']
188-
extracted_data['end_daily_meals_rate'] = extracted_data['meals_rate']
189-
extracted_data['end_total_meals'] = end_num_of_days * extracted_data['end_daily_meals_rate']
190-
191-
extracted_data['total_hotel'] = extracted_data['begin_total_hotel'] + extracted_data['end_total_hotel']
192-
extracted_data['total_meals'] = extracted_data['end_total_meals'] + extracted_data['end_total_meals']
193-
extracted_data['total_perdiem'] = extracted_data['total_hotel'] + extracted_data['total_meals']
194-
195-
return extracted_data

0 commit comments

Comments
 (0)