nakas commited on
Commit
c134ae1
·
verified ·
1 Parent(s): be24fe4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -116
app.py CHANGED
@@ -12,92 +12,9 @@ from matplotlib.gridspec import GridSpec
12
  from windrose import WindroseAxes
13
  from datetime import datetime
14
 
15
- # Install Playwright browsers on startup
16
- def install_playwright_browsers():
17
- try:
18
- if not os.path.exists('/home/user/.cache/ms-playwright'):
19
- print("Installing Playwright browsers...")
20
- subprocess.run(
21
- [sys.executable, "-m", "playwright", "install", "chromium"],
22
- check=True,
23
- capture_output=True,
24
- text=True
25
- )
26
- print("Playwright browsers installed successfully")
27
- except Exception as e:
28
- print(f"Error installing browsers: {e}")
29
-
30
- # Install browsers when the module loads
31
  install_playwright_browsers()
32
 
33
- def scrape_weather_data(site_id, hours=720):
34
- """Scrape weather data from weather.gov timeseries"""
35
- url = f"https://www.weather.gov/wrh/timeseries?site={site_id}&hours={hours}&units=english&chart=on&headers=on&obs=tabular&hourly=false&pview=full&font=12&plot="
36
-
37
- try:
38
- with sync_playwright() as p:
39
- # Launch browser with minimal settings
40
- browser = p.chromium.launch(
41
- headless=True,
42
- args=['--no-sandbox', '--disable-dev-shm-usage']
43
- )
44
-
45
- # Create context with desktop user agent
46
- context = browser.new_context(
47
- user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
48
- )
49
-
50
- # Create new page and navigate
51
- page = context.new_page()
52
- response = page.goto(url)
53
- print(f"Response status: {response.status}")
54
-
55
- # Wait for content to load
56
- page.wait_for_selector('table', timeout=30000)
57
- time.sleep(5)
58
-
59
- # Get all text content
60
- print("Extracting data...")
61
- content = page.evaluate('''() => {
62
- const getTextContent = () => {
63
- const rows = [];
64
- const tables = document.getElementsByTagName('table');
65
- for (const table of tables) {
66
- if (table.textContent.includes('Date/Time')) {
67
- const headerRow = Array.from(table.querySelectorAll('th'))
68
- .map(th => th.textContent.trim());
69
-
70
- const dataRows = Array.from(table.querySelectorAll('tbody tr'))
71
- .map(row => Array.from(row.querySelectorAll('td'))
72
- .map(td => td.textContent.trim()));
73
-
74
- return {headers: headerRow, rows: dataRows};
75
- }
76
- }
77
- return null;
78
- };
79
-
80
- return getTextContent();
81
- }''')
82
-
83
- print(f"Found {len(content['rows'] if content else [])} rows of data")
84
- browser.close()
85
- return content
86
-
87
- except Exception as e:
88
- print(f"Error scraping data: {str(e)}")
89
- raise e
90
-
91
- def parse_date(date_str):
92
- """Parse date string to datetime"""
93
- try:
94
- # Handle format like "Feb 10, 10:00 am"
95
- # Add current year to the date string
96
- current_year = datetime.now().year
97
- return pd.to_datetime(f"{date_str}, {current_year}", format="%b %d, %I:%M %p, %Y")
98
- except:
99
- return pd.NaT
100
-
101
  def parse_weather_data(data):
102
  """Parse the weather data into a pandas DataFrame"""
103
  if not data or 'rows' not in data:
@@ -105,21 +22,21 @@ def parse_weather_data(data):
105
 
106
  df = pd.DataFrame(data['rows'])
107
 
108
- # Get all relevant columns
109
  columns = ['datetime', 'temp', 'dew_point', 'humidity', 'wind_chill',
110
  'wind_dir', 'wind_speed', 'snow_depth', 'snowfall_3hr',
111
- 'snowfall_6hr', 'snowfall_24hr']
112
 
113
- df = df.iloc[:, :11] # Take first 11 columns
114
  df.columns = columns
115
 
116
- # Convert numeric columns
117
  numeric_cols = ['temp', 'dew_point', 'humidity', 'wind_chill', 'snow_depth',
118
- 'snowfall_3hr', 'snowfall_6hr', 'snowfall_24hr']
119
  for col in numeric_cols:
120
  df[col] = pd.to_numeric(df[col], errors='coerce')
121
 
122
- # Parse wind data
123
  def parse_wind(x):
124
  if pd.isna(x): return np.nan, np.nan
125
  match = re.search(r'(\d+)G(\d+)', str(x))
@@ -131,7 +48,7 @@ def parse_weather_data(data):
131
  df['wind_speed'] = wind_data.apply(lambda x: x[0])
132
  df['wind_gust'] = wind_data.apply(lambda x: x[1])
133
 
134
- # Convert wind directions to degrees
135
  def parse_direction(direction):
136
  direction_map = {
137
  'N': 0, 'NNE': 22.5, 'NE': 45, 'ENE': 67.5,
@@ -149,20 +66,11 @@ def parse_weather_data(data):
149
 
150
  return df
151
 
152
- def create_wind_rose(df, ax):
153
- """Create a wind rose plot"""
154
- if not isinstance(ax, WindroseAxes):
155
- ax = WindroseAxes.from_ax(ax=ax)
156
- ax.bar(df['wind_dir_deg'].dropna(), df['wind_speed'].dropna(),
157
- bins=np.arange(0, 40, 5), normed=True, opening=0.8, edgecolor='white')
158
- ax.set_legend(title='Wind Speed (mph)')
159
- ax.set_title('Wind Rose')
160
-
161
  def create_plots(df):
162
- """Create all weather plots"""
163
- # Create figure with subplots
164
- fig = plt.figure(figsize=(20, 15))
165
- gs = GridSpec(3, 2, figure=fig)
166
 
167
  # Temperature plot
168
  ax1 = fig.add_subplot(gs[0, :])
@@ -186,24 +94,40 @@ def create_plots(df):
186
  ax2.grid(True)
187
  plt.setp(ax2.xaxis.get_majorticklabels(), rotation=45)
188
 
189
- # Snow depth plot
190
- ax3 = fig.add_subplot(gs[2, 0])
191
  ax3.plot(df['datetime'], df['snow_depth'], color='blue', label='Snow Depth')
192
- ax3.set_title('Snow Depth Over Time')
 
 
193
  ax3.set_xlabel('Date')
194
- ax3.set_ylabel('Snow Depth (inches)')
 
 
 
 
195
  ax3.grid(True)
196
  plt.setp(ax3.xaxis.get_majorticklabels(), rotation=45)
197
 
198
- # Daily new snow bar plot
199
- ax4 = fig.add_subplot(gs[2, 1])
200
- daily_snow = df.groupby('date')['snowfall_24hr'].max()
201
- ax4.bar(daily_snow.index, daily_snow.values, color='blue')
202
- ax4.set_title('Daily New Snow')
203
  ax4.set_xlabel('Date')
204
- ax4.set_ylabel('New Snow (inches)')
 
205
  plt.setp(ax4.xaxis.get_majorticklabels(), rotation=45)
206
 
 
 
 
 
 
 
 
 
 
207
  plt.tight_layout()
208
 
209
  # Create separate wind rose figure
@@ -226,7 +150,7 @@ def analyze_weather_data(site_id, hours):
226
  print("Parsing data...")
227
  df = parse_weather_data(raw_data)
228
 
229
- # Calculate statistics
230
  print("Calculating statistics...")
231
  stats = {
232
  'Temperature Range': f"{df['temp'].min():.1f}°F to {df['temp'].max():.1f}°F",
@@ -235,6 +159,8 @@ def analyze_weather_data(site_id, hours):
235
  'Max Wind Gust': f"{df['wind_gust'].max():.1f} mph",
236
  'Average Humidity': f"{df['humidity'].mean():.1f}%",
237
  'Current Snow Depth': f"{df['snow_depth'].iloc[0]:.1f} inches",
 
 
238
  'Total New Snow (24hr)': f"{df['snowfall_24hr'].sum():.1f} inches"
239
  }
240
 
@@ -256,7 +182,7 @@ def analyze_weather_data(site_id, hours):
256
  print(f"Error in analysis: {str(e)}")
257
  return f"Error analyzing data: {str(e)}", None, None
258
 
259
- # Create Gradio interface
260
  with gr.Blocks(title="Weather Station Data Analyzer") as demo:
261
  gr.Markdown("# Weather Station Data Analyzer")
262
  gr.Markdown("""
 
12
  from windrose import WindroseAxes
13
  from datetime import datetime
14
 
15
+ # Previous installation and scraping functions remain the same
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  install_playwright_browsers()
17
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
  def parse_weather_data(data):
19
  """Parse the weather data into a pandas DataFrame"""
20
  if not data or 'rows' not in data:
 
22
 
23
  df = pd.DataFrame(data['rows'])
24
 
25
+ # Updated columns list to include SWE
26
  columns = ['datetime', 'temp', 'dew_point', 'humidity', 'wind_chill',
27
  'wind_dir', 'wind_speed', 'snow_depth', 'snowfall_3hr',
28
+ 'snowfall_6hr', 'snowfall_24hr', 'swe']
29
 
30
+ df = df.iloc[:, :12] # Take first 12 columns including SWE
31
  df.columns = columns
32
 
33
+ # Convert numeric columns including SWE
34
  numeric_cols = ['temp', 'dew_point', 'humidity', 'wind_chill', 'snow_depth',
35
+ 'snowfall_3hr', 'snowfall_6hr', 'snowfall_24hr', 'swe']
36
  for col in numeric_cols:
37
  df[col] = pd.to_numeric(df[col], errors='coerce')
38
 
39
+ # Previous wind data parsing remains the same
40
  def parse_wind(x):
41
  if pd.isna(x): return np.nan, np.nan
42
  match = re.search(r'(\d+)G(\d+)', str(x))
 
48
  df['wind_speed'] = wind_data.apply(lambda x: x[0])
49
  df['wind_gust'] = wind_data.apply(lambda x: x[1])
50
 
51
+ # Previous wind direction parsing remains the same
52
  def parse_direction(direction):
53
  direction_map = {
54
  'N': 0, 'NNE': 22.5, 'NE': 45, 'ENE': 67.5,
 
66
 
67
  return df
68
 
 
 
 
 
 
 
 
 
 
69
  def create_plots(df):
70
+ """Create all weather plots including SWE"""
71
+ # Create figure with subplots - updated layout to include SWE
72
+ fig = plt.figure(figsize=(20, 20)) # Increased height for additional plot
73
+ gs = GridSpec(4, 2, figure=fig) # Added one more row for SWE
74
 
75
  # Temperature plot
76
  ax1 = fig.add_subplot(gs[0, :])
 
94
  ax2.grid(True)
95
  plt.setp(ax2.xaxis.get_majorticklabels(), rotation=45)
96
 
97
+ # Snow depth and SWE comparison plot
98
+ ax3 = fig.add_subplot(gs[2, :])
99
  ax3.plot(df['datetime'], df['snow_depth'], color='blue', label='Snow Depth')
100
+ ax3_twin = ax3.twinx()
101
+ ax3_twin.plot(df['datetime'], df['swe'], color='red', label='SWE')
102
+ ax3.set_title('Snow Depth and Snow Water Equivalent Over Time')
103
  ax3.set_xlabel('Date')
104
+ ax3.set_ylabel('Snow Depth (inches)', color='blue')
105
+ ax3_twin.set_ylabel('Snow Water Equivalent (inches)', color='red')
106
+ lines1, labels1 = ax3.get_legend_handles_labels()
107
+ lines2, labels2 = ax3_twin.get_legend_handles_labels()
108
+ ax3.legend(lines1 + lines2, labels1 + labels2, loc='upper right')
109
  ax3.grid(True)
110
  plt.setp(ax3.xaxis.get_majorticklabels(), rotation=45)
111
 
112
+ # Snow density plot (SWE/Snow Depth ratio)
113
+ ax4 = fig.add_subplot(gs[3, 0])
114
+ snow_density = (df['swe'] / df['snow_depth']) * 100 # Convert to percentage
115
+ ax4.plot(df['datetime'], snow_density, color='purple')
116
+ ax4.set_title('Snow Density (SWE/Snow Depth Ratio)')
117
  ax4.set_xlabel('Date')
118
+ ax4.set_ylabel('Snow Density (%)')
119
+ ax4.grid(True)
120
  plt.setp(ax4.xaxis.get_majorticklabels(), rotation=45)
121
 
122
+ # Daily new snow bar plot
123
+ ax5 = fig.add_subplot(gs[3, 1])
124
+ daily_snow = df.groupby('date')['snowfall_24hr'].max()
125
+ ax5.bar(daily_snow.index, daily_snow.values, color='blue')
126
+ ax5.set_title('Daily New Snow')
127
+ ax5.set_xlabel('Date')
128
+ ax5.set_ylabel('New Snow (inches)')
129
+ plt.setp(ax5.xaxis.get_majorticklabels(), rotation=45)
130
+
131
  plt.tight_layout()
132
 
133
  # Create separate wind rose figure
 
150
  print("Parsing data...")
151
  df = parse_weather_data(raw_data)
152
 
153
+ # Calculate statistics - updated to include SWE
154
  print("Calculating statistics...")
155
  stats = {
156
  'Temperature Range': f"{df['temp'].min():.1f}°F to {df['temp'].max():.1f}°F",
 
159
  'Max Wind Gust': f"{df['wind_gust'].max():.1f} mph",
160
  'Average Humidity': f"{df['humidity'].mean():.1f}%",
161
  'Current Snow Depth': f"{df['snow_depth'].iloc[0]:.1f} inches",
162
+ 'Current SWE': f"{df['swe'].iloc[0]:.2f} inches",
163
+ 'Snow Density': f"{(df['swe'].iloc[0] / df['snow_depth'].iloc[0] * 100):.1f}%",
164
  'Total New Snow (24hr)': f"{df['snowfall_24hr'].sum():.1f} inches"
165
  }
166
 
 
182
  print(f"Error in analysis: {str(e)}")
183
  return f"Error analyzing data: {str(e)}", None, None
184
 
185
+ # Gradio interface remains the same
186
  with gr.Blocks(title="Weather Station Data Analyzer") as demo:
187
  gr.Markdown("# Weather Station Data Analyzer")
188
  gr.Markdown("""