Compare commits
No commits in common. "thermokarst_jj_yttmtqskpotw" and "main" have entirely different histories.
thermokars
...
main
9 changed files with 0 additions and 3385 deletions
39
.gitignore
vendored
39
.gitignore
vendored
|
@@ -1,39 +0,0 @@
# Rust
/target/
**/*.rs.bk

# By default, Cargo will ignore "Cargo.lock" for libraries, but include for binaries
Cargo.lock

# Next line is for macOS Finder/Spotlight
.DS_Store

# VSCode/
.vscode/

# CLion
.idea/

# Other common
*.log
*.tmp
*.swp
*.swo
*.bak
*~

# dotenv, custom local env/config
.env
.env.*
*.local

# compiled output
*.out
*.exe
*.bin
*.o
*.obj

# Coverage/test/artifacts
target/coverage/
coverage/
2627
Cargo.lock
generated
2627
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
18
Cargo.toml
18
Cargo.toml
|
@@ -1,18 +0,0 @@
[package]
name = "trmnl"
version = "0.1.0"
edition = "2021"

[dependencies]
serde = { version = "1.0", features = ["derive"] }
toml = "0.8"
# Only keep one of blocking+async if all features converted
reqwest = { version = "0.12", features = ["json", "rustls-tls"] }
dirs = "5"
chrono = { version = "0.4", features = ["serde"] }
serde_json = "1.0"
chrono-tz = "0.8"
tokio = { version = "1.37", features = ["full"] }
scraper = "0.18"
ical = "0.8"
rrule = "0.14.0"
52
src/cache.rs
52
src/cache.rs
|
@ -1,52 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fs,
|
||||
path::PathBuf,
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct Cache<T> {
|
||||
pub timestamp: u64,
|
||||
pub data: T,
|
||||
}
|
||||
|
||||
fn get_cache_path(report_type: &str) -> PathBuf {
|
||||
let home = std::env::var("HOME").expect("HOME not set");
|
||||
let cache_dir = PathBuf::from(format!("{}/.local/state/trmnl", home));
|
||||
let _ = fs::create_dir_all(&cache_dir);
|
||||
cache_dir.join(format!("{}.json", report_type))
|
||||
}
|
||||
|
||||
use serde::de::DeserializeOwned;
|
||||
pub fn load_cache<T: Serialize + DeserializeOwned>(
|
||||
report_type: &str,
|
||||
expiry_secs: u64,
|
||||
) -> Option<T> {
|
||||
let path = get_cache_path(report_type);
|
||||
let contents = fs::read_to_string(&path).ok()?;
|
||||
let parsed: Cache<T> = serde_json::from_str(&contents).ok()?;
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
if now - parsed.timestamp <= expiry_secs {
|
||||
println!("(using cached {} report)", report_type);
|
||||
Some(parsed.data)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save_cache<T: Serialize>(report_type: &str, data: &T) {
|
||||
let path = get_cache_path(report_type);
|
||||
let to_save = Cache {
|
||||
timestamp: SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs(),
|
||||
data: data,
|
||||
};
|
||||
let json = serde_json::to_string_pretty(&to_save).unwrap();
|
||||
let _ = fs::write(path, json);
|
||||
}
|
188
src/calendar.rs
188
src/calendar.rs
|
@ -1,188 +0,0 @@
|
|||
use chrono::{DateTime, FixedOffset, NaiveDate, Offset, TimeZone, Utc};
|
||||
use ical::IcalParser;
|
||||
use rrule::{RRule, RRuleSet, Unvalidated};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::error::Error;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct CalendarEventSummary {
|
||||
pub start: Option<DateTime<FixedOffset>>,
|
||||
pub summary: String,
|
||||
pub all_day: bool,
|
||||
}
|
||||
|
||||
pub async fn fetch_next_events(
|
||||
calendar_url: &str,
|
||||
max_events: usize,
|
||||
tz_str: &str,
|
||||
) -> Result<Vec<CalendarEventSummary>, Box<dyn Error>> {
|
||||
let resp = reqwest::get(calendar_url).await?;
|
||||
let body = resp.bytes().await?;
|
||||
let ical_str = String::from_utf8_lossy(&body);
|
||||
|
||||
let parser = IcalParser::new(ical_str.as_bytes());
|
||||
let mut events: Vec<CalendarEventSummary> = Vec::new();
|
||||
|
||||
let tz: Option<chrono_tz::Tz> = tz_str.parse().ok();
|
||||
for calendar in parser {
|
||||
for evt in calendar?.events {
|
||||
let mut summary = None;
|
||||
let mut dtstart = None;
|
||||
let mut all_day = false;
|
||||
let mut rrule_str: Option<String> = None;
|
||||
let mut raw_dtstart: Option<String> = None;
|
||||
let mut dt_params: Option<Vec<(String, Vec<String>)>> = None;
|
||||
for prop in &evt.properties {
|
||||
match prop.name.as_str() {
|
||||
"SUMMARY" => summary = prop.value.clone(),
|
||||
"DTSTART" => {
|
||||
raw_dtstart = prop.value.clone();
|
||||
dt_params = prop.params.clone();
|
||||
if let Some(val) = &prop.value {
|
||||
// -------- Existing DTSTART parsing logic goes below ------
|
||||
// All-day check
|
||||
let is_all_day = prop
|
||||
.params
|
||||
.as_ref()
|
||||
.and_then(|params| params.iter().find(|(k, _)| k == "VALUE"))
|
||||
.and_then(|(_, v)| v.first())
|
||||
.is_some_and(|v| v == "DATE");
|
||||
|
||||
if is_all_day {
|
||||
if let Ok(date) = NaiveDate::parse_from_str(val, "%Y%m%d") {
|
||||
if let Some(tz) = tz {
|
||||
if let Some(dt) = tz
|
||||
.from_local_datetime(
|
||||
&date.and_hms_opt(0, 0, 0).unwrap(),
|
||||
)
|
||||
.single()
|
||||
{
|
||||
dtstart = Some(dt.with_timezone(&dt.offset().fix()));
|
||||
}
|
||||
}
|
||||
all_day = true;
|
||||
}
|
||||
} else if let Some(params) = &prop.params {
|
||||
// Check and handle TZID param!
|
||||
if let Some((_, tz_vec)) = params.iter().find(|(k, _)| k == "TZID")
|
||||
{
|
||||
let tz_id = &tz_vec[0];
|
||||
if let Ok(parsed_tz) = tz_id.parse::<chrono_tz::Tz>() {
|
||||
if let Ok(naive_dt) = chrono::NaiveDateTime::parse_from_str(
|
||||
val,
|
||||
"%Y%m%dT%H%M%S",
|
||||
) {
|
||||
let local_dt =
|
||||
parsed_tz.from_local_datetime(&naive_dt).single();
|
||||
if let Some(dt) = local_dt {
|
||||
dtstart =
|
||||
Some(dt.with_timezone(&dt.offset().fix()));
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if let Ok(dt) = DateTime::parse_from_rfc3339(val) {
|
||||
dtstart = Some(dt.with_timezone(dt.offset()));
|
||||
} else if let Ok(dt) =
|
||||
DateTime::parse_from_str(val, "%Y%m%dT%H%M%SZ")
|
||||
{
|
||||
dtstart = Some(dt.with_timezone(&Utc.fix()));
|
||||
}
|
||||
} else if val.ends_with('Z') && val.len() == 16 {
|
||||
// e.g. "20250522T181500Z" not RFC3339, convert to RFC3339
|
||||
let iso = format!(
|
||||
"{}-{}-{}T{}:{}:{}Z",
|
||||
&val[0..4],
|
||||
&val[4..6],
|
||||
&val[6..8],
|
||||
&val[9..11],
|
||||
&val[11..13],
|
||||
&val[13..15]
|
||||
);
|
||||
if let Ok(dt) = DateTime::parse_from_rfc3339(&iso) {
|
||||
dtstart = Some(dt.with_timezone(&Utc.fix()));
|
||||
}
|
||||
} else if let Ok(dt) = DateTime::parse_from_rfc3339(val) {
|
||||
dtstart = Some(dt.with_timezone(dt.offset()));
|
||||
} else if let Ok(dt) = DateTime::parse_from_str(val, "%Y%m%dT%H%M%S") {
|
||||
// No Z/zone, treat as in configured tz
|
||||
if let Some(tz) = tz {
|
||||
if let Some(dt2) =
|
||||
tz.from_local_datetime(&dt.naive_local()).single()
|
||||
{
|
||||
dtstart = Some(dt2.with_timezone(&dt2.offset().fix()));
|
||||
}
|
||||
}
|
||||
} else if let Ok(date) = NaiveDate::parse_from_str(val, "%Y%m%d") {
|
||||
// As a fallback, treat as all-day
|
||||
if let Some(tz) = tz {
|
||||
if let Some(dt2) = tz
|
||||
.from_local_datetime(&date.and_hms_opt(0, 0, 0).unwrap())
|
||||
.single()
|
||||
{
|
||||
dtstart = Some(dt2.with_timezone(&dt2.offset().fix()));
|
||||
}
|
||||
}
|
||||
all_day = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
"RRULE" => rrule_str = prop.value.clone(),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
// ----------- RRULE recurring event expansion block -----------
|
||||
if let (Some(ref s), Some(dt), Some(ref rrule_val), Some(_val)) = (
|
||||
summary.clone(),
|
||||
dtstart,
|
||||
rrule_str.as_ref(),
|
||||
raw_dtstart.as_ref(),
|
||||
) {
|
||||
// dtstart is FixedOffset, convert to Utc for rrule
|
||||
let dtstart_rrule = dt.with_timezone(&rrule::Tz::UTC);
|
||||
if let Ok(unvalid) = rrule_val.parse::<RRule<Unvalidated>>() {
|
||||
if let Ok(rrule) = unvalid.validate(dtstart_rrule) {
|
||||
let set = RRuleSet::new(dtstart_rrule).rrule(rrule);
|
||||
// Expand up to the next 20 future instances for each recurring event
|
||||
let now = Utc::now();
|
||||
let instances = set.all(1000);
|
||||
let occur_iter = instances
|
||||
.dates
|
||||
.iter()
|
||||
.filter(|t| **t > now)
|
||||
.take(max_events);
|
||||
for occ in occur_iter {
|
||||
let occ_fixed: DateTime<FixedOffset> =
|
||||
occ.with_timezone(&dt.offset().fix());
|
||||
events.push(CalendarEventSummary {
|
||||
start: Some(occ_fixed),
|
||||
summary: s.clone(),
|
||||
all_day,
|
||||
});
|
||||
}
|
||||
} else if dtstart_rrule > Utc::now() {
|
||||
eprintln!("[ERROR] Failed to validate RRULE: {:?}", rrule_val);
|
||||
}
|
||||
// Otherwise, ignore and continue
|
||||
} else {
|
||||
eprintln!("[ERROR] Failed to parse RRULE: {:?}", rrule_val);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Non-recurring event
|
||||
if let (Some(s), Some(dt)) = (summary.clone(), dtstart) {
|
||||
events.push(CalendarEventSummary {
|
||||
start: Some(dt),
|
||||
summary: s,
|
||||
all_day,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Filter to only future events
|
||||
let now = Utc::now();
|
||||
events.retain(|e| e.start.map(|s| s > now).unwrap_or(false));
|
||||
// Sort by time ascending, then take first max_events
|
||||
events.sort_by_key(|e| e.start);
|
||||
Ok(events.into_iter().take(max_events).collect())
|
||||
}
|
|
@ -1,40 +0,0 @@
|
|||
use dirs::config_dir;
|
||||
use serde::Deserialize;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct Config {
|
||||
pub cache_weather_secs: Option<u64>,
|
||||
pub cache_pollen_secs: Option<u64>,
|
||||
pub cache_calendar_secs: Option<u64>,
|
||||
pub weather_api_key: String,
|
||||
pub calendar_event_count: usize,
|
||||
pub location: String,
|
||||
pub timezone: String,
|
||||
pub pollen_zip: String,
|
||||
pub calendar_url: String,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn load() -> Result<Self, Box<dyn std::error::Error>> {
|
||||
// Try primary platform config_dir first (macOS: ~/Library/Application Support/trmnl/config.toml, Linux: ~/.config/trmnl/config.toml)
|
||||
let mut config_path = config_dir().ok_or("Could not find user config directory")?;
|
||||
config_path.push("trmnl/config.toml");
|
||||
if config_path.exists() {
|
||||
let contents = fs::read_to_string(&config_path)?;
|
||||
let config: Config = toml::from_str(&contents)?;
|
||||
return Ok(config);
|
||||
}
|
||||
// Fallback: try ~/.config/trmnl/config.toml explicitly
|
||||
if let Some(home_dir) = dirs::home_dir() {
|
||||
let fallback_path: PathBuf = home_dir.join(".config/trmnl/config.toml");
|
||||
if fallback_path.exists() {
|
||||
let contents = fs::read_to_string(&fallback_path)?;
|
||||
let config: Config = toml::from_str(&contents)?;
|
||||
return Ok(config);
|
||||
}
|
||||
}
|
||||
Err("Config file not found in either location".into())
|
||||
}
|
||||
}
|
199
src/main.rs
199
src/main.rs
|
@ -1,199 +0,0 @@
|
|||
mod cache;
|
||||
mod calendar;
|
||||
mod config;
|
||||
mod pollen;
|
||||
mod weather;
|
||||
|
||||
use calendar::fetch_next_events;
|
||||
use chrono::{DateTime, TimeZone, Utc};
|
||||
use chrono_tz::Tz;
|
||||
use config::Config;
|
||||
use pollen::fetch_pollen_api;
|
||||
use weather::{fetch_weather, geocode_city, WeatherSummary};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
let config = match Config::load() {
|
||||
Ok(cfg) => cfg,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to load config: {e}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
// Weather - with cache
|
||||
let cache_weather_secs = config.cache_weather_secs.unwrap_or(300);
|
||||
let weather: Option<WeatherSummary> =
|
||||
if let Some(w) = cache::load_cache::<WeatherSummary>("weather", cache_weather_secs) {
|
||||
Some(w)
|
||||
} else {
|
||||
match fetch_weather(&config.location, &config.weather_api_key).await {
|
||||
Ok(data) => {
|
||||
cache::save_cache("weather", &data);
|
||||
Some(data)
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to fetch weather: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
};
|
||||
let (_lat, _lon) = if weather.is_some() {
|
||||
match geocode_city(&config.location, &config.weather_api_key).await {
|
||||
Ok((lat, lon)) => (lat, lon),
|
||||
Err(_) => (0.0, 0.0),
|
||||
}
|
||||
} else {
|
||||
(0.0, 0.0)
|
||||
};
|
||||
if let Some(w) = &weather {
|
||||
if w.obs_time_unix > 0 {
|
||||
match config.timezone.parse::<Tz>() {
|
||||
Ok(tz) => {
|
||||
let dt_local = tz.from_utc_datetime(
|
||||
&Utc.timestamp_opt(w.obs_time_unix, 0).unwrap().naive_utc(),
|
||||
);
|
||||
println!(
|
||||
"Weather (as of {}):",
|
||||
dt_local.format("%Y-%m-%d %H:%M:%S %Z")
|
||||
);
|
||||
}
|
||||
Err(_) => {
|
||||
println!(
|
||||
"Weather (as of unix timestamp {}) (timezone parse error)",
|
||||
w.obs_time_unix
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
println!("Weather:");
|
||||
}
|
||||
println!(" - Temperature: {}", w.temp);
|
||||
println!(" - Conditions: {}", w.current_desc);
|
||||
println!(" - High: {}, Low: {}", w.high, w.low);
|
||||
println!(" - Forecast: {}", w.daily_desc);
|
||||
} else {
|
||||
println!("Weather: N/A");
|
||||
}
|
||||
|
||||
// Pollen - with cache
|
||||
let cache_pollen_secs = config.cache_pollen_secs.unwrap_or(300);
|
||||
match if let Some(p) = cache::load_cache::<pollen::PollenSummary>("pollen", cache_pollen_secs) {
|
||||
Ok(p)
|
||||
} else {
|
||||
match fetch_pollen_api(&config.pollen_zip).await {
|
||||
Ok(data) => {
|
||||
cache::save_cache("pollen", &data);
|
||||
Ok(data)
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
} {
|
||||
Ok(p) => {
|
||||
if let Ok(dt) = DateTime::parse_from_rfc3339(&p.forecast_date) {
|
||||
match config.timezone.parse::<Tz>() {
|
||||
Ok(tz) => {
|
||||
let local_time = dt.with_timezone(&tz);
|
||||
println!(
|
||||
"\nPollen.com ({})\n Forecast for {}:",
|
||||
p.location,
|
||||
local_time.format("%Y-%m-%d %H:%M:%S %Z")
|
||||
);
|
||||
}
|
||||
Err(_) => {
|
||||
println!(
|
||||
"\nPollen.com ({})\n Forecast for {} (timezone parse error):",
|
||||
p.location, p.forecast_date
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
println!(
|
||||
"\nPollen.com ({})\n Forecast for {}:",
|
||||
p.location, p.forecast_date
|
||||
);
|
||||
}
|
||||
|
||||
for day in &["Yesterday", "Today", "Tomorrow"] {
|
||||
if let Some(period) = p.periods.get(*day) {
|
||||
println!(
|
||||
" {:9}: {:>4.1} ({})",
|
||||
day,
|
||||
period.index,
|
||||
period.triggers.join(", ")
|
||||
);
|
||||
} else {
|
||||
println!(" {:9}: N/A", day);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
println!("\nFailed to fetch pollen (API): {e}");
|
||||
}
|
||||
}
|
||||
|
||||
// Calendar - with cache
|
||||
let cache_calendar_secs = config.cache_calendar_secs.unwrap_or(300);
|
||||
println!("\nUpcoming calendar events:");
|
||||
|
||||
match if let Some(events) =
|
||||
cache::load_cache::<Vec<calendar::CalendarEventSummary>>("calendar", cache_calendar_secs)
|
||||
{
|
||||
Ok(events)
|
||||
} else {
|
||||
match fetch_next_events(
|
||||
&config.calendar_url,
|
||||
config.calendar_event_count,
|
||||
&config.timezone,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(data) => {
|
||||
cache::save_cache("calendar", &data);
|
||||
Ok(data)
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
} {
|
||||
Ok(events) if !events.is_empty() => {
|
||||
for (i, event) in events.iter().enumerate() {
|
||||
let day = event
|
||||
.start
|
||||
.map(|dt| dt.format("%a %Y-%m-%d").to_string())
|
||||
.unwrap_or_else(|| "".to_string());
|
||||
let time = if event.all_day {
|
||||
"all day".to_string()
|
||||
} else if let Some(dt) = event.start {
|
||||
dt.format("%H:%M").to_string()
|
||||
} else {
|
||||
"".to_string()
|
||||
};
|
||||
println!("{}. {} {:>8} {}", i + 1, day, time, event.summary);
|
||||
}
|
||||
}
|
||||
_ => println!("No upcoming calendar events found."),
|
||||
}
|
||||
|
||||
println!();
|
||||
println!("MBTA Transit (placeholder):");
|
||||
println!(" - Red Line: Ashmont in 5 min, Braintree in 10 min");
|
||||
println!(" - Orange Line: Northbound in 7 min");
|
||||
println!(" - Bus 64: Due in 2 min");
|
||||
// In the future, fetch real data from an MBTA API here.
|
||||
|
||||
println!("Inbound Shopify packages:");
|
||||
let packages = get_shopify_packages(&config);
|
||||
for (i, pkg) in packages.iter().enumerate() {
|
||||
println!("{}. {}", i + 1, pkg);
|
||||
}
|
||||
}
|
||||
|
||||
// No longer needed (handled above)
|
||||
// fn get_calendar_events(_config: &Config, n: usize) -> Vec<String> { ... }
|
||||
|
||||
fn get_shopify_packages(_config: &Config) -> Vec<String> {
|
||||
vec![
|
||||
"Order #1234: Shipped - Arriving May 13".to_string(),
|
||||
"Order #5678: In transit - Arriving May 15".to_string(),
|
||||
]
|
||||
}
|
|
@ -1,81 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::error::Error;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "PascalCase")]
|
||||
struct PollenApiResponse {
|
||||
forecast_date: String,
|
||||
location: Option<Location>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Location {
|
||||
periods: Vec<Period>,
|
||||
#[serde(rename = "DisplayLocation")]
|
||||
display_location: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "PascalCase")]
|
||||
struct Period {
|
||||
r#type: String,
|
||||
index: f32,
|
||||
triggers: Vec<Trigger>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "PascalCase")]
|
||||
struct Trigger {
|
||||
name: String,
|
||||
#[allow(dead_code)]
|
||||
plant_type: String,
|
||||
#[allow(dead_code)]
|
||||
genus: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct PollenPeriod {
|
||||
pub index: f32,
|
||||
pub triggers: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct PollenSummary {
|
||||
pub location: String,
|
||||
pub forecast_date: String,
|
||||
pub periods: HashMap<String, PollenPeriod>, // "Yesterday", "Today", "Tomorrow"
|
||||
}
|
||||
|
||||
pub async fn fetch_pollen_api(zip: &str) -> Result<PollenSummary, Box<dyn Error>> {
|
||||
let url = format!("https://www.pollen.com/api/forecast/current/pollen/{}", zip);
|
||||
let resp = reqwest::Client::new()
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36")
|
||||
.header("Accept", "application/json, text/plain, */*")
|
||||
.header("Referer", &url)
|
||||
.header("Cookie", format!("geo={}", zip))
|
||||
.send().await?
|
||||
.text().await?;
|
||||
|
||||
let api: PollenApiResponse = serde_json::from_str(&resp)?;
|
||||
let loc = api.location.ok_or("No location in pollen.com response")?;
|
||||
let mut periods = HashMap::new();
|
||||
|
||||
for period in &loc.periods {
|
||||
let triggers: Vec<String> = period.triggers.iter().map(|t| t.name.clone()).collect();
|
||||
periods.insert(
|
||||
period.r#type.clone(),
|
||||
PollenPeriod {
|
||||
index: period.index,
|
||||
triggers,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
Ok(PollenSummary {
|
||||
location: loc.display_location,
|
||||
forecast_date: api.forecast_date,
|
||||
periods,
|
||||
})
|
||||
}
|
141
src/weather.rs
141
src/weather.rs
|
@ -1,141 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::error::Error;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct GeocodeResponse {
|
||||
lat: f64,
|
||||
lon: f64,
|
||||
#[allow(dead_code)]
|
||||
name: String,
|
||||
#[allow(dead_code)]
|
||||
country: String,
|
||||
}
|
||||
|
||||
// returns (lat, lon) for specified city name
|
||||
pub async fn geocode_city(city: &str, api_key: &str) -> Result<(f64, f64), Box<dyn Error>> {
|
||||
let client = reqwest::Client::new();
|
||||
let geocode_url = format!(
|
||||
"https://api.openweathermap.org/geo/1.0/direct?q={}&limit=1&appid={}",
|
||||
city, api_key
|
||||
);
|
||||
let geo_resp = client.get(&geocode_url).send().await?;
|
||||
let geo_text = geo_resp.text().await?;
|
||||
let geo_parsed: Result<Vec<GeocodeResponse>, serde_json::Error> =
|
||||
serde_json::from_str(&geo_text);
|
||||
match geo_parsed {
|
||||
Ok(geo_vec) => {
|
||||
if geo_vec.is_empty() {
|
||||
return Err(format!(
|
||||
"No geocoding result for city: {} (response: {})",
|
||||
city, geo_text
|
||||
)
|
||||
.into());
|
||||
}
|
||||
Ok((geo_vec[0].lat, geo_vec[0].lon))
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode geocoding response. Raw response: {geo_text}");
|
||||
Err(Box::new(e))
|
||||
}
|
||||
}
|
||||
}
|
||||
// ------------------ WEATHER -------------------------
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct WeatherCurrent {
|
||||
pub temp: f64,
|
||||
pub weather: Vec<WeatherDesc>,
|
||||
pub dt: i64, // Unix timestamp
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct WeatherDailyTemp {
|
||||
pub min: f64,
|
||||
pub max: f64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct WeatherDesc {
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct WeatherDaily {
|
||||
pub temp: WeatherDailyTemp,
|
||||
pub weather: Vec<WeatherDesc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct OneCallResult {
|
||||
pub current: WeatherCurrent,
|
||||
pub daily: Vec<WeatherDaily>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct WeatherSummary {
|
||||
pub temp: String,
|
||||
pub current_desc: String,
|
||||
pub high: String,
|
||||
pub low: String,
|
||||
pub daily_desc: String,
|
||||
pub obs_time_unix: i64,
|
||||
}
|
||||
|
||||
/// Returns WeatherSummary struct
|
||||
pub async fn fetch_weather(city: &str, api_key: &str) -> Result<WeatherSummary, Box<dyn Error>> {
|
||||
let (lat, lon) = geocode_city(city, api_key).await?;
|
||||
let client = reqwest::Client::new();
|
||||
// Get weather data from One Call 3.0
|
||||
let onecall_url = format!(
|
||||
"https://api.openweathermap.org/data/3.0/onecall?lat={}&lon={}&appid={}&units=imperial",
|
||||
lat, lon, api_key
|
||||
);
|
||||
let one_resp = client.get(&onecall_url).send().await?;
|
||||
let one_status = one_resp.status();
|
||||
let one_text = one_resp.text().await?;
|
||||
if !one_status.is_success() {
|
||||
return Err(format!("HTTP error {}: {}", one_status, one_text).into());
|
||||
}
|
||||
|
||||
let one_parsed: OneCallResult = match serde_json::from_str(&one_text) {
|
||||
Ok(data) => data,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to decode One Call weather response. Raw response: {one_text}");
|
||||
return Err(Box::new(e));
|
||||
}
|
||||
};
|
||||
|
||||
// Current conditions
|
||||
let temp = format!("{:.1}°F", one_parsed.current.temp);
|
||||
let current_desc = one_parsed
|
||||
.current
|
||||
.weather
|
||||
.first()
|
||||
.map(|w| w.description.clone())
|
||||
.unwrap_or_else(|| "N/A".to_string());
|
||||
let current_dt = one_parsed.current.dt; // UNIX timestamp, UTC
|
||||
|
||||
// Today's forecast is daily[0]
|
||||
let (high, low, daily_desc) = if let Some(today) = one_parsed.daily.first() {
|
||||
let high = format!("{:.1}°F", today.temp.max);
|
||||
let low = format!("{:.1}°F", today.temp.min);
|
||||
let desc = today
|
||||
.weather
|
||||
.first()
|
||||
.map(|w| w.description.clone())
|
||||
.unwrap_or_else(|| "N/A".to_string());
|
||||
(high, low, desc)
|
||||
} else {
|
||||
("N/A".to_string(), "N/A".to_string(), "N/A".to_string())
|
||||
};
|
||||
|
||||
Ok(WeatherSummary {
|
||||
temp,
|
||||
current_desc,
|
||||
high,
|
||||
low,
|
||||
daily_desc,
|
||||
obs_time_unix: current_dt,
|
||||
})
|
||||
}
|
Loading…
Add table
Reference in a new issue