Merge pull request #50 from luftdaten-at/47-citycurrent-json-format-anpassen-an-stationcurrent

47: Adapt citycurrent JSON format to match stationcurrent
n11ik authored Dec 3, 2024
2 parents 3d62e75 + e596c62 commit 5f3fb9d
Showing 7 changed files with 102 additions and 13 deletions.
32 changes: 32 additions & 0 deletions code/alembic/versions/80277768fe1d_added_lat_lon_to_city.py
@@ -0,0 +1,32 @@
"""added lat, lon to city
Revision ID: 80277768fe1d
Revises: 2df6cd6bb99c
Create Date: 2024-12-03 14:20:12.262337
"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '80277768fe1d'
down_revision: Union[str, None] = '2df6cd6bb99c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('cities', sa.Column('lat', sa.Float(), nullable=True))
op.add_column('cities', sa.Column('lon', sa.Float(), nullable=True))
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('cities', 'lon')
op.drop_column('cities', 'lat')
# ### end Alembic commands ###
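For reference, a minimal sketch of applying or reverting this revision through Alembic's Python API; it assumes an alembic.ini in the working directory, which may not match this repository's layout, and the usual "alembic upgrade head" CLI call works just as well.

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")               # assumed config path; adjust to the repo layout
command.upgrade(cfg, "80277768fe1d")      # apply the added lat/lon columns
# command.downgrade(cfg, "2df6cd6bb99c")  # revert to the previous revision if needed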
2 changes: 1 addition & 1 deletion code/main.py
@@ -60,7 +60,7 @@
scheduler = BackgroundScheduler()

# Schedule the task every 5 minutes
import_sensor_community_data()
#import_sensor_community_data()
scheduler.add_job(import_sensor_community_data, 'interval', minutes=5)

# Start the scheduler
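With the direct call to import_sensor_community_data() commented out, the first import now only happens once the first 5-minute interval has elapsed. If an immediate first run is still wanted, APScheduler can fire the interval job right away via next_run_time; a small sketch, assuming the same BackgroundScheduler setup as in main.py:

from datetime import datetime
from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler()
scheduler.add_job(
    import_sensor_community_data,  # the project's import job referenced in main.py
    'interval',
    minutes=5,
    next_run_time=datetime.now(),  # run once immediately, then every 5 minutes
)
scheduler.start()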
7 changes: 6 additions & 1 deletion code/models.py
@@ -33,11 +33,16 @@ class City(Base):
country = relationship("Country", back_populates="cities")
locations = relationship("Location", back_populates="city")

def __init__(self, name, country_id, tz):
lat = Column(Float)
lon = Column(Float)

def __init__(self, name, country_id, tz, lat, lon):
self.name = name
self.slug = slugify(name)
self.country_id = country_id
self.tz = tz
self.lat = lat
self.lon = lon


class Location(Base):
53 changes: 51 additions & 2 deletions code/routers/city.py
@@ -1,11 +1,13 @@
from fastapi import APIRouter, Depends, HTTPException, Query
import json
from geopy.geocoders import Nominatim
from fastapi import APIRouter, Depends, HTTPException, Query, Response
from sqlalchemy.orm import Session
from database import get_db
from sqlalchemy import func, desc
from datetime import datetime
from zoneinfo import ZoneInfo

from models import City, Country, Station, Measurement, Values
from models import City, Country, Station, Measurement, Values, Location
from enums import Dimension


@@ -41,6 +43,53 @@ async def get_all_cities(db: Session = Depends(get_db)):
async def get_average_measurements_by_city(
city_slug: str = Query(..., description="The name of the city to get the average measurements for."),
db: Session = Depends(get_db)
):
db_city = db.query(City).filter(City.slug == city_slug).first()

if not db_city:
raise HTTPException(status_code=404, detail="City not found")

if not all([db_city.lat, db_city.lon]):
lat, lon = Nominatim(user_agent="api.luftdaten.at").geocode(city_slug)[1]
db_city.lat = lat
db_city.lon = lon
db.commit()

q = (
db.query(
Values.dimension,
func.avg(Values.value)
)
.join(Measurement)
.join(Location)
.join(City)
.filter(City.slug == city_slug)
.filter(Values.value != 'nan')
.group_by(Values.dimension)
)
j = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [db_city.lon, db_city.lat],
},
"properties": {
"city_slug": db_city.slug,
"country": db_city.country.name,
"timezone": db_city.tz,
"time": datetime.now(ZoneInfo('UTC')).replace(second=0, microsecond=0).isoformat(),
#"height": db_location.height,
"values":[{"dimension": dim, "value": val} for dim, val in q.all()]
}
}

return Response(content=json.dumps(j), media_type="application/geo+json")


@router.get("/currentold", tags=["city", "current"])
async def get_average_measurements_by_city_old(
city_slug: str = Query(..., description="The name of the city to get the average measurements for."),
db: Session = Depends(get_db)
):
# Look up the city in the database
city = db.query(City).filter(City.slug == city_slug).first()
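The new handler mirrors the station endpoint by returning a GeoJSON Feature: a Point geometry with the city's coordinates and one averaged value per dimension under properties. A client-side sketch of consuming it; the host and route path are assumptions, since the route decorator sits outside the shown hunk:

import requests  # assumed client library, not part of this commit

# Hypothetical URL and city slug; the actual prefix and path may differ.
resp = requests.get(
    "https://api.luftdaten.at/city/current",
    params={"city_slug": "wien"},
)
feature = resp.json()
lon, lat = feature["geometry"]["coordinates"]  # GeoJSON order is [lon, lat]
for entry in feature["properties"]["values"]:
    print(entry["dimension"], entry["value"])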
15 changes: 8 additions & 7 deletions code/routers/station.py
@@ -68,14 +68,15 @@ async def get_current_station_data_all(db: Session = Depends(get_db)):
async def get_history_station_data(
station_ids: str = None,
smooth: str = "100",
start: str = None,
start: str = Query(None, description="Supply in ISO format: YYYY-MM-DDThh:mm+xx:xx. Time is optional."),
db: Session = Depends(get_db)
):
"""
Returns the values from a single station in a given time.
"""

start_time = datetime.strptime(start, "%Y-%m-%dT%H:%M") if start else None
# TODO: which time zone should the user enter?
start_time = datetime.fromisoformat(start) if start else None
station_ids = station_ids.split(',') if station_ids else None

q = (
@@ -113,7 +114,7 @@ async def get_history_station_data(

csv = "timestamp,sid,latitude,longitude,pm1,pm25,pm10\n"
csv += "\n".join(
",".join([time.strftime("%Y-%m-%dT%H:%M")] + [str(o) for o in other])
",".join([time.isoformat()] + [str(o) for o in other])
for time, *other in q.all()
)

@@ -321,8 +322,8 @@ async def get_topn_stations_by_dim(
@router.get("/historical", response_class=Response, tags=["station"])
async def get_historical_station_data(
station_ids: str = Query(..., description="Comma-separated list of station devices"),
start: str = Query(None, description="Supply in format: YYYY-MM-DDThh:mm. Time is optional."),
end: str = Query(None, description="Supply in format: YYYY-MM-DDThh:mm. Time is optional."),
start: str = Query(None, description="Supply in ISO format: YYYY-MM-DDThh:mm+xx:xx. Time is optional."),
end: str = Query(None, description="Supply in ISO format: YYYY-MM-DDThh:mm+xx:xx. Time is optional."),
precision: Precision = Query(Precision.MAX, description="Precision of data points"),
city_slugs: str = Query(None, description="Comma-separated list of city_slugs"),
output_format: OutputFormat = Query(OutputFormat.CSV, description="Output format"),
@@ -334,8 +335,8 @@ async def get_historical_station_data(

# Convert start and end into datetime objects
try:
start_date = datetime.strptime(start, "%Y-%m-%dT%H:%M") if start else None
end_date = datetime.strptime(end, "%Y-%m-%dT%H:%M") if end else None
start_date = datetime.fromisoformat(start) if start else None
end_date = datetime.fromisoformat(end) if end else None
except ValueError:
raise HTTPException(status_code=400, detail="Invalid date format. Use ISO format: YYYY-MM-DDThh:mm+xx:xx")

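Switching from strptime with a fixed pattern to datetime.fromisoformat lets callers pass an explicit UTC offset, which also speaks to the TODO about which time zone the user should enter: naive inputs stay naive, offset-aware inputs keep their offset. A short illustration of what fromisoformat accepts (a trailing 'Z' only works on Python 3.11 and newer):

from datetime import datetime

print(datetime.fromisoformat("2024-12-03T14:20"))        # naive: 2024-12-03 14:20:00
print(datetime.fromisoformat("2024-12-03T14:20+01:00"))  # aware: 2024-12-03 14:20:00+01:00
print(datetime.fromisoformat("2024-12-03"))              # date only: midnight, naive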
4 changes: 3 additions & 1 deletion code/utils.py
@@ -67,7 +67,9 @@ def get_or_create_location(db: Session, lat: float, lon: float, height: float):
if city is None:
try:
timezone_str = tf.timezone_at(lng=float(lon), lat=float(lat))
city = City(name=city_name, country_id=country.id, tz=timezone_str)

clat, clon = Nominatim(user_agent="api.luftdaten.at").geocode(city_name)[1]
city = City(name=city_name, country_id=country.id, tz=timezone_str, lat=clat, lon=clon)
db.add(city)
db.commit()
db.refresh(city)
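Nominatim's geocode() returns None when nothing matches, and the public service expects roughly one request per second, so the tuple unpacking in get_or_create_location can raise for unknown city names. A defensive sketch using geopy's RateLimiter; this is an assumption about how one might harden the call, not part of the commit:

from geopy.geocoders import Nominatim
from geopy.extra.rate_limiter import RateLimiter

geocode = RateLimiter(Nominatim(user_agent="api.luftdaten.at").geocode, min_delay_seconds=1)

location = geocode(city_name)  # city_name as in get_or_create_location
if location is not None:
    clat, clon = location.latitude, location.longitude
else:
    clat = clon = None         # the lat/lon columns are nullable, so leaving them empty is safe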
2 changes: 1 addition & 1 deletion requirements.txt
@@ -43,7 +43,7 @@ setuptools==75.1.0
six==1.16.0
sniffio==1.3.1
SQLAlchemy==2.0.35
starlette==0.40.0
starlette
text-unidecode==1.3
timezonefinder==6.5.3
typing_extensions==4.12.2