Compare commits


2 Commits

Author SHA1 Message Date
Moritz Hölting 975193ff65 decrease docker image size by using alpine 2026-01-05 23:44:06 +01:00
Moritz Hölting bd22aeba67 consolidate scraper and refresh logic, and improve OpenAPI documentation 2026-01-05 23:44:00 +01:00
31 changed files with 799 additions and 618 deletions

View File

@@ -1,29 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT DISTINCT scraped_for, canteen FROM canteens_scraped WHERE scraped_for >= $1 AND scraped_for <= $2",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "scraped_for",
"type_info": "Date"
},
{
"ordinal": 1,
"name": "canteen",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Date",
"Date"
]
},
"nullable": [
false,
false
]
},
"hash": "65858112433addbff921108a5b110ffead845478d359af83b70d98ff8d1945f2"
}
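
The three deleted JSON files in this and the next two hunks are sqlx's offline query cache (generated by cargo sqlx prepare, one .sqlx/query-<hash>.json file per query); with SQLX_OFFLINE=true the query! macros type-check against this recorded metadata instead of a live database, so when a query disappears from the code its cache entry goes too. A minimal sketch of the kind of call the first entry backs, assuming the canteens_scraped table from the recorded query:

```rust
use chrono::NaiveDate;
use sqlx::PgPool;

// With SQLX_OFFLINE=true, this macro is checked against the cached
// .sqlx/query-<hash>.json metadata instead of a live Postgres instance.
async fn already_scraped(
    db: &PgPool,
    start: NaiveDate,
    end: NaiveDate,
) -> sqlx::Result<Vec<(NaiveDate, String)>> {
    let rows = sqlx::query!(
        "SELECT DISTINCT scraped_for, canteen FROM canteens_scraped \
         WHERE scraped_for >= $1 AND scraped_for <= $2",
        start,
        end
    )
    .fetch_all(db)
    .await?;
    Ok(rows.into_iter().map(|r| (r.scraped_for, r.canteen)).collect())
}
```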

View File

@@ -1,42 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT date, price_students, price_employees, price_guests FROM meals WHERE canteen = $1 AND LOWER(\"name\") = $2 AND is_latest = TRUE ORDER BY date DESC LIMIT $3;",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "date",
"type_info": "Date"
},
{
"ordinal": 1,
"name": "price_students",
"type_info": "Numeric"
},
{
"ordinal": 2,
"name": "price_employees",
"type_info": "Numeric"
},
{
"ordinal": 3,
"name": "price_guests",
"type_info": "Numeric"
}
],
"parameters": {
"Left": [
"Text",
"Text",
"Int8"
]
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "781e98dce280715896a347808d891ff02e2c609c161e9c935a76fa9d63e61696"
}

View File

@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE meals SET is_latest = FALSE WHERE date = $1 AND canteen = $2 AND is_latest = TRUE",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Date",
"Text"
]
},
"nullable": []
},
"hash": "f804f9c634a34945d7aa0cd3162b20ff9f1ff928912d871a708a088f2d011ba7"
}

Cargo.lock (generated, 554 lines changed)

File diff suppressed because it is too large.

View File

@@ -10,7 +10,7 @@ resolver = "2"
[workspace.package]
license = "MIT"
authors = ["Moritz Hölting"]
authors = ["Moritz Hölting <moritz@hoelting.dev>"]
repository = "https://github.com/moritz-hoelting/mensa-upb-api"
readme = "README.md"
@@ -20,9 +20,11 @@ chrono = "0.4.42"
dotenvy = "0.15.7"
futures = "0.3.31"
itertools = "0.14.0"
rust_decimal = "1.39.0"
serde = { version = "1.0.228", features = ["derive"] }
sqlx = "0.8.2"
strum = "0.27.2"
tokio = "1.48.0"
tracing = "0.1.43"
tokio = "1.49.0"
tracing = "0.1.44"
tracing-subscriber = "0.3.22"
utoipa = "5.4.0"

Dockerfile (new file, 69 lines)
View File

@@ -0,0 +1,69 @@
# -----------------------------
# Chef base
# -----------------------------
FROM rust:alpine AS chef
# SQLx offline mode
ENV SQLX_OFFLINE true
# Alpine build dependencies
RUN apk add --no-cache curl bash musl-dev openssl-dev pkgconfig
# Install cargo-chef
RUN curl -L --proto '=https' --tlsv1.2 -sSf \
https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
RUN cargo binstall cargo-chef -y
WORKDIR /app
# -----------------------------
# Planner
# -----------------------------
FROM chef AS planner
COPY . .
RUN OFFLINE=true cargo chef prepare --recipe-path recipe.json
# -----------------------------
# Builder
# -----------------------------
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json
COPY . .
RUN OFFLINE=true cargo build --release \
--bin mensa-upb-api \
--bin mensa-upb-scraper
# =====================================================
# Runtime image: scraper (cron-based)
# =====================================================
FROM alpine:latest AS scraper-runtime
WORKDIR /app
RUN apk add --no-cache ca-certificates tzdata dcron tini
RUN echo "0 0 * * * /app/mensa-upb-scraper >> /var/log/cron.log 2>&1" \
> /etc/crontabs/root && \
touch /var/log/cron.log
COPY --from=builder /app/target/release/mensa-upb-scraper /app/mensa-upb-scraper
ENTRYPOINT ["/sbin/tini", "--"]
CMD sh -c 'env > /etc/environment && crond -l 2 && tail -f /var/log/cron.log'
# =====================================================
# Runtime image: API
# =====================================================
FROM alpine:latest AS api-runtime
ARG UID=10001
RUN adduser -D -H -u "${UID}" appuser
USER appuser
COPY --from=builder /app/target/release/mensa-upb-api /bin/mensa-upb-api
ENV API_INTERFACE=0.0.0.0
EXPOSE 8080
CMD ["/bin/mensa-upb-api"]

View File

@@ -2,7 +2,8 @@ services:
api:
build:
context: .
dockerfile: ./web-api/Dockerfile
dockerfile: ./Dockerfile
target: api-runtime
image: mensa-upb-api:latest
ports:
- 8080:8080
@@ -16,7 +17,8 @@ services:
scraper:
build:
context: .
dockerfile: ./scraper/Dockerfile
dockerfile: ./Dockerfile
target: scraper-runtime
image: mensa-upb-scraper:latest
environment:
- DATABASE_URL=postgres://pguser:pgpass@postgres-mensa-upb/postgres

View File

@@ -1,2 +0,0 @@
.env
.gitignore

View File

@@ -5,8 +5,8 @@ license.workspace = true
authors.workspace = true
repository.workspace = true
readme.workspace = true
version = "0.2.0"
edition = "2021"
version = "0.2.1"
edition = "2024"
publish = false
[dependencies]
@@ -16,11 +16,10 @@ const_format = "0.2.33"
dotenvy = { workspace = true }
futures = { workspace = true }
itertools = { workspace = true }
num-bigint = "0.4.6"
reqwest = { version = "0.12.9", default-features = false, features = ["charset", "rustls-tls", "http2"] }
scraper = "0.25.0"
shared = { path = "../shared" }
sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "migrate", "chrono", "uuid", "bigdecimal"] }
sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "migrate", "chrono", "uuid", "rust_decimal"] }
strum = { workspace = true, features = ["derive"] }
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
tracing = { workspace = true }

View File

@@ -1,29 +0,0 @@
FROM rust:latest AS chef
RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
RUN cargo binstall cargo-chef -y
WORKDIR /app
FROM chef AS planner
COPY . .
RUN OFFLINE=true cargo chef prepare --bin mensa-upb-scraper --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --bin mensa-upb-scraper --release --recipe-path recipe.json
COPY . .
RUN OFFLINE=true cargo build --bin mensa-upb-scraper --release
FROM debian:bookworm-slim AS runtime
WORKDIR /app
RUN apt-get update -y && \
apt-get install -y ca-certificates cron
RUN echo "0 0 * * * /app/mensa-upb-scraper >> /var/log/cron.log 2>&1" > /etc/cron.d/mensa_upb_scraper
RUN chmod 0644 /etc/cron.d/mensa_upb_scraper
RUN crontab /etc/cron.d/mensa_upb_scraper
RUN touch /var/log/cron.log
COPY --from=builder /app/target/release/mensa-upb-scraper /app/mensa-upb-scraper
CMD env > /etc/environment && cron && tail -f /var/log/cron.log

View File

@@ -1,23 +0,0 @@
services:
scraper:
build: .
image: mensa-upb-scraper:latest
environment:
- DATABASE_URL=postgres://pguser:pgpass@postgres-mensa-upb-scraper/postgres
- "RUST_LOG=none,mensa_upb_scraper=info"
- TZ=Europe/Berlin
depends_on:
- postgres
postgres:
container_name: postgres-mensa-upb-scraper
image: postgres:17-alpine
environment:
- POSTGRES_USER=pguser
- POSTGRES_PASSWORD=pgpass
- POSTGRES_DB=postgres
volumes:
- db:/var/lib/postgresql/data
volumes:
db:

View File

@@ -1,9 +1,8 @@
use std::sync::LazyLock;
use num_bigint::BigInt;
use scraper::{ElementRef, Selector};
use shared::DishType;
use sqlx::types::BigDecimal;
use sqlx::types::Decimal;
use crate::util::normalize_price_bigdecimal;
@@ -20,9 +19,9 @@ static HTML_NUTRITIONS_SELECTOR: LazyLock<Selector> =
pub struct Dish {
pub name: String,
pub image_src: Option<String>,
pub price_students: BigDecimal,
pub price_employees: BigDecimal,
pub price_guests: BigDecimal,
pub price_students: Decimal,
pub price_employees: Decimal,
pub price_guests: Decimal,
pub vegetarian: bool,
pub vegan: bool,
pub dish_type: DishType,
@@ -32,22 +31,22 @@ pub struct Dish {
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)]
pub struct NutritionValues {
pub kjoule: Option<i32>,
pub protein: Option<BigDecimal>,
pub carbs: Option<BigDecimal>,
pub fat: Option<BigDecimal>,
pub protein: Option<Decimal>,
pub carbs: Option<Decimal>,
pub fat: Option<Decimal>,
}
impl Dish {
pub fn get_name(&self) -> &str {
&self.name
}
pub fn get_price_students(&self) -> &BigDecimal {
pub fn get_price_students(&self) -> &Decimal {
&self.price_students
}
pub fn get_price_employees(&self) -> &BigDecimal {
pub fn get_price_employees(&self) -> &Decimal {
&self.price_employees
}
pub fn get_price_guests(&self) -> &BigDecimal {
pub fn get_price_guests(&self) -> &Decimal {
&self.price_guests
}
pub fn get_image_src(&self) -> Option<&str> {
@@ -185,9 +184,9 @@ impl NutritionValues {
pub fn normalize(self) -> Self {
Self {
kjoule: self.kjoule,
protein: self.protein.map(|p| p.with_prec(6).with_scale(2)),
carbs: self.carbs.map(|c| c.with_prec(6).with_scale(2)),
fat: self.fat.map(|f| f.with_prec(6).with_scale(2)),
protein: self.protein.map(|p| p.normalize().round_dp(2)),
carbs: self.carbs.map(|c| c.normalize().round_dp(2)),
fat: self.fat.map(|f| f.normalize().round_dp(2)),
}
}
}
@@ -198,18 +197,18 @@ impl PartialOrd for Dish {
}
}
fn price_to_bigdecimal(s: Option<&str>) -> BigDecimal {
fn price_to_bigdecimal(s: Option<&str>) -> Decimal {
s.and_then(|p| {
p.trim_end_matches("")
.replace(',', ".")
.parse::<BigDecimal>()
.parse::<Decimal>()
.ok()
})
.map(normalize_price_bigdecimal)
.unwrap_or_else(|| BigDecimal::from_bigint(BigInt::from(99999), 2))
.unwrap_or_else(|| Decimal::from(99999))
}
fn grams_to_bigdecimal(s: &str) -> Option<BigDecimal> {
fn grams_to_bigdecimal(s: &str) -> Option<Decimal> {
s.trim_end_matches("g")
.replace(',', ".")
.trim()
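
The crate switch from bigdecimal to rust_decimal replaces with_prec(6).with_scale(2) by normalize().round_dp(2). One behavioral difference worth noting: with_scale(2) forced exactly two decimal places ("2.50"), while normalize().round_dp(2) strips trailing zeros and rounds to at most two ("2.5"); round_dp also defaults to banker's rounding. A small standalone sketch of the rust_decimal behavior being relied on:

```rust
use rust_decimal::Decimal;
use std::str::FromStr as _;

fn main() {
    // normalize() strips trailing zeros; round_dp(2) rounds to at most
    // two decimal places (banker's rounding / midpoint-nearest-even).
    let price = Decimal::from_str("2.500").unwrap();
    assert_eq!(price.normalize().round_dp(2).to_string(), "2.5");

    let fat = Decimal::from_str("45.6789").unwrap();
    assert_eq!(fat.normalize().round_dp(2).to_string(), "45.68");
}
```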

View File

@@ -10,7 +10,6 @@ pub use dish::Dish;
pub use menu::scrape_menu;
pub use refresh::check_refresh;
use shared::Canteen;
pub use util::scrape_canteens_at_days_and_insert;
#[derive(Debug, Clone)]
struct CustomError(String);

View File

@@ -1,11 +1,19 @@
use std::collections::HashSet;
use std::sync::LazyLock;
use anyhow::Result;
use chrono::{Duration, Utc};
use itertools::Itertools as _;
use mensa_upb_scraper::{util, FILTER_CANTEENS};
use futures::{future, StreamExt};
use mensa_upb_scraper::{check_refresh, util, FILTER_CANTEENS};
use shared::Canteen;
use strum::IntoEnumIterator as _;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::EnvFilter;
static CANTEENS: LazyLock<Vec<Canteen>> = LazyLock::new(|| {
Canteen::iter()
.filter(|c| !FILTER_CANTEENS.contains(c))
.collect::<Vec<_>>()
});
#[tokio::main]
async fn main() -> Result<()> {
@@ -13,40 +21,31 @@ async fn main() -> Result<()> {
let db = util::get_db()?;
tracing_subscriber::fmt::init();
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::WARN.into())
.from_env()
.expect("Invalid filter")
.add_directive("mensa_upb_scraper=debug".parse().unwrap());
tracing_subscriber::fmt().with_env_filter(env_filter).init();
sqlx::migrate!("../migrations").run(&db).await?;
tracing::info!("Starting up...");
let start_date = Utc::now().date_naive();
let end_date = (Utc::now() + Duration::days(6)).date_naive();
let already_scraped = sqlx::query!(
"SELECT DISTINCT scraped_for, canteen FROM canteens_scraped WHERE scraped_for >= $1 AND scraped_for <= $2",
start_date,
end_date
)
.fetch_all(&db)
.await?
.into_iter()
.map(|r| {
(
r.scraped_for,
r.canteen.parse::<Canteen>().expect("Invalid db entry"),
)
})
.collect::<HashSet<_>>();
let date_canteen_combinations = (0..7)
let handles = (0..7)
.map(|d| (Utc::now() + Duration::days(d)).date_naive())
.cartesian_product(Canteen::iter())
.filter(|entry @ (_, canteen)| {
!FILTER_CANTEENS.contains(canteen) && !already_scraped.contains(entry)
})
.collect::<Vec<_>>();
.map(|date| {
let db = db.clone();
tokio::spawn(async move { check_refresh(&db, date, &CANTEENS).await })
});
util::scrape_canteens_at_days_and_insert(&db, &date_canteen_combinations).await?;
future::join_all(handles).await;
futures::stream::iter((0..7).map(|d| (Utc::now() + Duration::days(d)).date_naive()))
.for_each_concurrent(None, async |date| {
check_refresh(&db, date, &CANTEENS).await;
})
.await;
tracing::info!("Finished scraping menu");
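
The rewritten entrypoint drops the precomputed date/canteen combinations and instead runs check_refresh once per upcoming day, concurrently over a stream. A minimal standalone sketch of the for_each_concurrent pattern used here, with a print standing in for the check_refresh call:

```rust
use chrono::{Duration, Utc};
use futures::StreamExt as _;

#[tokio::main]
async fn main() {
    // One async task per upcoming day; None = no concurrency limit.
    futures::stream::iter((0..7).map(|d| (Utc::now() + Duration::days(d)).date_naive()))
        .for_each_concurrent(None, |date| async move {
            // Stand-in for check_refresh(&db, date, &CANTEENS).await
            println!("refreshing menu for {date}");
        })
        .await;
}
```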

View File

@@ -12,9 +12,9 @@ use sqlx::QueryBuilder;
use strum::IntoEnumIterator as _;
use crate::{
Dish,
dish::NutritionValues,
util::{self, add_menu_to_db, normalize_price_bigdecimal},
Dish,
};
static NON_FILTERED_CANTEENS: LazyLock<Vec<Canteen>> = LazyLock::new(|| {
@@ -28,7 +28,7 @@ static NON_FILTERED_CANTEENS: LazyLock<Vec<Canteen>> = LazyLock::new(|| {
#[tracing::instrument(skip(db))]
pub async fn check_refresh(db: &sqlx::PgPool, date: NaiveDate, canteens: &[Canteen]) -> bool {
if date > Utc::now().date_naive() + chrono::Duration::days(7) {
if date > Utc::now().date_naive() + chrono::Duration::days(31) {
tracing::debug!("Not refreshing menu for date {date} as it is too far in the future");
return false;
}
@@ -148,13 +148,14 @@ fn needs_refresh(last_refreshed: chrono::DateTime<Utc>, date_entry: chrono::Naiv
}
}
#[tracing::instrument(skip(db, date, stale_dishes, new_dishes), fields(date = %date, stale_dish_count = %stale_dishes.len(), new_dish_count = %new_dishes.len()))]
async fn update_stale_dishes(
db: &sqlx::PgPool,
date: NaiveDate,
stale_dishes: &HashSet<&(Canteen, Dish)>,
new_dishes: &HashSet<&(Canteen, Dish)>,
canteens: &[Canteen],
) -> Result<(), sqlx::Error> {
) -> anyhow::Result<()> {
let mut tx = db.begin().await?;
if !stale_dishes.is_empty() {
@@ -169,6 +170,10 @@
.build()
.execute(&mut *tx)
.await?;
if new_dishes.is_empty() {
tracing::debug!("No new dishes to add after marking stale dishes");
}
}
let chunks = new_dishes
@@ -184,12 +189,9 @@
g.map(|(_, dish)| dish).cloned().collect::<Vec<_>>(),
)
})
.chain(
canteens
.iter()
.map(|canteen| (*canteen, Vec::new()))
.unique_by(|(c, _)| *c),
);
.chain(canteens.iter().map(|canteen| (*canteen, Vec::new())))
.unique_by(|(c, _)| *c)
.collect::<Vec<_>>();
for (canteen, menu) in new_dishes_iter {
add_menu_to_db(&mut tx, &date, canteen, menu).await?;

View File

@@ -4,7 +4,7 @@ use anyhow::Result;
use chrono::NaiveDate;
use futures::{Stream, StreamExt as _};
use shared::{Canteen, DishType};
use sqlx::{postgres::PgPoolOptions, types::BigDecimal, PgPool, PgTransaction};
use sqlx::{postgres::PgPoolOptions, types::Decimal, PgPool, PgTransaction};
use crate::{scrape_menu, Dish};
@@ -13,62 +13,6 @@ pub fn get_db() -> Result<PgPool> {
.connect_lazy(&env::var("DATABASE_URL").expect("missing DATABASE_URL env variable"))?)
}
pub async fn scrape_canteens_at_days_and_insert(
db: &PgPool,
date_canteen_combinations: &[(NaiveDate, Canteen)],
) -> Result<()> {
let (tx, mut rx) = tokio::sync::mpsc::channel::<(NaiveDate, Canteen, Vec<Dish>)>(128);
let mut transaction = db.begin().await?;
for (date, canteen) in date_canteen_combinations {
sqlx::query!(
"UPDATE meals SET is_latest = FALSE WHERE date = $1 AND canteen = $2 AND is_latest = TRUE",
date,
canteen.get_identifier()
)
.execute(&mut *transaction)
.await
.ok();
}
let insert_handle = tokio::spawn(async move {
while let Some((date, canteen, menu)) = rx.recv().await {
add_menu_to_db(&mut transaction, &date, canteen, menu).await?;
}
transaction.commit().await
});
let errs = scrape_canteens_at_days(date_canteen_combinations)
.then(|res| {
let tx = tx.clone();
async move {
match res {
Ok((date, canteen, menu)) => {
tx.send((date, canteen, menu)).await.ok();
Ok(())
}
Err(err) => {
tracing::error!("Error scraping menu: {err}");
Err(err)
}
}
}
})
.collect::<Vec<_>>()
.await;
drop(tx);
insert_handle.await??;
if let Some(err) = errs.into_iter().find_map(Result::err) {
return Err(err);
}
Ok(())
}
pub fn scrape_canteens_at_days<'a>(
date_canteen_combinations: &'a [(NaiveDate, Canteen)],
) -> impl Stream<Item = Result<(NaiveDate, Canteen, Vec<Dish>)>> + 'a {
@@ -125,6 +69,6 @@ pub async fn add_menu_to_db(
Ok(())
}
pub fn normalize_price_bigdecimal(price: BigDecimal) -> BigDecimal {
price.with_prec(6).with_scale(2)
pub fn normalize_price_bigdecimal(price: Decimal) -> Decimal {
price.normalize().round_dp(2)
}

View File

@@ -10,4 +10,5 @@ readme.workspace = true
[dependencies]
serde = { workspace = true, features = ["derive"] }
strum = { workspace = true, features = ["derive"] }
sqlx = { workspace = true }
sqlx = { workspace = true }
utoipa = { workspace = true }

View File

@@ -4,7 +4,18 @@ use serde::{Deserialize, Serialize};
use strum::EnumIter;
#[derive(
Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, EnumIter, Hash, Serialize, Deserialize,
Debug,
Clone,
Copy,
PartialEq,
Eq,
PartialOrd,
Ord,
EnumIter,
Hash,
Serialize,
Deserialize,
utoipa::ToSchema,
)]
#[serde(rename_all = "kebab-case")]
pub enum Canteen {

View File

@@ -1,32 +0,0 @@
# Include any files or directories that you don't want to be copied to your
# container here (e.g., local build artifacts, temporary files, etc.).
#
# For more help, visit the .dockerignore file reference guide at
# https://docs.docker.com/engine/reference/builder/#dockerignore-file
**/.DS_Store
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/charts
**/docker-compose*
**/compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/secrets.dev.yaml
**/values.dev.yaml
/bin
/target
LICENSE
README.md

View File

@@ -5,8 +5,8 @@ license.workspace = true
authors.workspace = true
repository.workspace = true
readme.workspace = true
version = "0.4.0"
edition = "2021"
version = "0.4.1"
edition = "2024"
publish = false
[dependencies]
@@ -14,16 +14,19 @@ actix-cors = "0.7.1"
actix-governor = { version = "0.10.0", features = ["log"] }
actix-web = "4.12.1"
anyhow = { workspace = true }
bigdecimal = { version = "0.4.9", features = ["serde"] }
chrono = { workspace = true, features = ["serde"] }
dotenvy = { workspace = true }
itertools = { workspace = true }
mensa-upb-scraper = { path = "../scraper" }
rust_decimal = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = "1.0.145"
shared = { path = "../shared" }
sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "migrate", "chrono", "uuid", "bigdecimal"] }
sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "migrate", "chrono", "uuid", "rust_decimal"] }
strum = { workspace = true, features = ["derive"] }
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
tracing = "0.1.43"
tracing-subscriber = { workspace = true, features = ["env-filter"] }
utoipa = { workspace = true, features = ["actix_extras", "chrono", "decimal"] }
utoipa-actix-web = "0.1.2"
utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }

View File

@@ -1,37 +0,0 @@
FROM rust:latest AS chef
RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
RUN cargo binstall cargo-chef -y
WORKDIR /app
FROM chef AS planner
COPY . .
RUN OFFLINE=true cargo chef prepare --bin mensa-upb-api --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --bin mensa-upb-api --release --recipe-path recipe.json
COPY . .
RUN OFFLINE=true cargo build --bin mensa-upb-api --release
FROM debian:bookworm-slim AS runtime
ARG UID=10001
RUN adduser \
--disabled-password \
--gecos "" \
--home "/nonexistent" \
--shell "/sbin/nologin" \
--no-create-home \
--uid "${UID}" \
appuser
USER appuser
COPY --from=builder /app/target/release/mensa-upb-api /bin/mensa-upb-api
ENV API_INTERFACE=0.0.0.0
EXPOSE 8080
# What the container should run when it is started.
CMD ["/bin/mensa-upb-api"]

View File

@@ -1,27 +0,0 @@
services:
api:
build: .
image: mensa-upb-api:latest
ports:
- 8080:8080
environment:
- DATABASE_URL=postgres://pguser:pgpass@postgres-mensa-upb-api/postgres
- "RUST_LOG=none,mensa_upb_api=info"
- TZ=Europe/Berlin
depends_on:
- postgres
postgres:
container_name: postgres-mensa-upb-api
image: postgres:17-alpine
environment:
- POSTGRES_USER=pguser
- POSTGRES_PASSWORD=pgpass
- POSTGRES_DB=postgres
volumes:
- db:/var/lib/postgresql/data
volumes:
db:

View File

@@ -1,9 +1,9 @@
use bigdecimal::BigDecimal;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use shared::Canteen;
use sqlx::prelude::FromRow;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, utoipa::ToSchema)]
pub struct Dish {
pub name: String,
pub image_src: Option<String>,
@@ -13,19 +13,27 @@ pub struct Dish {
pub canteens: Vec<Canteen>,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, utoipa::ToSchema)]
pub struct DishPrices {
pub students: BigDecimal,
pub employees: BigDecimal,
pub guests: BigDecimal,
pub students: Decimal,
pub employees: Decimal,
pub guests: Decimal,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, FromRow)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, FromRow, utoipa::ToSchema)]
#[schema(examples(
json!({
"kjoules": 1500,
"carbohydrates": "45.5",
"proteins": "30.0",
"fats": "10.0"
})
))]
pub struct DishNutrients {
pub kjoules: Option<i32>,
pub carbohydrates: Option<BigDecimal>,
pub proteins: Option<BigDecimal>,
pub fats: Option<BigDecimal>,
pub carbohydrates: Option<Decimal>,
pub proteins: Option<Decimal>,
pub fats: Option<Decimal>,
}
impl Dish {
@@ -52,9 +60,9 @@ impl PartialOrd for Dish {
impl DishPrices {
pub fn normalize(self) -> Self {
Self {
students: self.students.with_prec(5).with_scale(2),
employees: self.employees.with_prec(5).with_scale(2),
guests: self.guests.with_prec(5).with_scale(2),
students: self.students.normalize().round_dp(2),
employees: self.employees.normalize().round_dp(2),
guests: self.guests.normalize().round_dp(2),
}
}
}
@@ -63,9 +71,9 @@ impl DishNutrients {
pub fn normalize(self) -> Self {
Self {
kjoules: self.kjoules,
carbohydrates: self.carbohydrates.map(|v| v.with_prec(6).with_scale(2)),
proteins: self.proteins.map(|v| v.with_prec(6).with_scale(2)),
fats: self.fats.map(|v| v.with_prec(6).with_scale(2)),
carbohydrates: self.carbohydrates.map(|v| v.normalize().round_dp(2)),
proteins: self.proteins.map(|v| v.normalize().round_dp(2)),
fats: self.fats.map(|v| v.normalize().round_dp(2)),
}
}
}

View File

@@ -1,21 +1,21 @@
use actix_web::{
get,
web::{self, ServiceConfig},
HttpResponse, Responder,
};
use actix_web::{get, web, HttpResponse, Responder};
use chrono::NaiveDate;
use itertools::Itertools as _;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::PgPool;
use utoipa_actix_web::service_config::ServiceConfig;
use crate::{util, Menu};
use crate::{
util::{self, GenericServerError},
Menu,
};
pub fn configure(cfg: &mut ServiceConfig) {
cfg.service(menu);
}
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, utoipa::ToSchema)]
#[serde(rename_all = "camelCase")]
struct MenuQuery {
date: Option<NaiveDate>,
@@ -23,7 +23,35 @@ struct MenuQuery {
no_update: bool,
}
#[get("/menu/{canteen}")]
#[expect(dead_code)]
#[derive(utoipa::ToSchema)]
pub(super) struct InvalidCanteenError {
error: &'static str,
/// Which of the given canteen identifiers is invalid
invalid: Vec<String>,
}
#[utoipa::path(
summary = "Get menu of canteen(s)",
description = "Get the menu of a canteen(s) (at specified date).",
params(
("canteens" = String, Path, description = "Comma-separated list of canteen identifiers to get the menu for", example = "forum,academica"),
("date" = Option<NaiveDate>, Query, description = "Date to get the menu for (defaults to today)"),
("noUpdate" = Option<bool>, Query, description = "If set to true, the menu will not be updated before querying (default: false)", example = false),
),
responses(
(status = OK, description = "The menu of the specified canteen(s).", body = [Menu]),
(status = BAD_REQUEST, description = "Invalid canteen identifier.", body = InvalidCanteenError, example = json!({
"error": "Invalid canteen identifier",
"invalid": ["invalid_canteen_1", "invalid_canteen_2"]
})),
(status = INTERNAL_SERVER_ERROR, description = "Server failed to answer request.", body = GenericServerError, example = json!({
"error": "Failed to query database",
}))
)
)]
#[get("/menu/{canteens}")]
async fn menu(
path: web::Path<String>,
query: web::Query<MenuQuery>,

View File

@@ -1,24 +1,35 @@
use std::sync::OnceLock;
use actix_web::{
get,
web::{self, ServiceConfig},
HttpResponse, Responder,
};
use actix_web::{get, web, HttpResponse, Responder};
use chrono::NaiveDate;
use serde::Serialize;
use serde_json::json;
use sqlx::PgPool;
use utoipa_actix_web::service_config::ServiceConfig;
use crate::util::GenericServerError;
pub fn configure(cfg: &mut ServiceConfig) {
cfg.service(web::scope("/metadata").service(earliest_meal_date));
cfg.service(utoipa_actix_web::scope("/metadata").service(earliest_meal_date));
}
static EARLIEST_MEAL_DATE: OnceLock<NaiveDate> = OnceLock::new();
#[derive(Serialize, utoipa::ToSchema)]
struct DateResponse {
date: NaiveDate,
}
#[utoipa::path(summary = "Earliest meal date", description = "Get the date of the earliest meal saved.", responses(
(status = OK, description = "Get the date of the earliest meal saved.", body = DateResponse),
(status = INTERNAL_SERVER_ERROR, description = "Server failed to answer request.", body = GenericServerError)
))]
#[get("/earliest-meal-date")]
async fn earliest_meal_date(db: web::Data<PgPool>) -> impl Responder {
if let Some(earliest_date) = EARLIEST_MEAL_DATE.get() {
earliest_meal_date_ok_response(*earliest_date)
HttpResponse::Ok().json(DateResponse {
date: *earliest_date,
})
} else {
match sqlx::query_scalar!(
r#"SELECT MIN(date) AS "date!" FROM meals WHERE is_latest = TRUE;"#
@@ -28,7 +39,7 @@ async fn earliest_meal_date(db: web::Data<PgPool>) -> impl Responder {
{
Ok(date) => {
EARLIEST_MEAL_DATE.set(date).ok();
earliest_meal_date_ok_response(date)
HttpResponse::Ok().json(DateResponse { date })
}
Err(err) => {
tracing::error!("Failed to query datebase: {err}");
@@ -39,9 +50,3 @@
}
}
}
fn earliest_meal_date_ok_response(date: NaiveDate) -> HttpResponse {
HttpResponse::Ok().json(json!({
"date": date,
}))
}
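
The JSON body is now a typed DateResponse, and the result is still memoized in a OnceLock, presumably because the earliest date can only change if older data were backfilled. A minimal standalone sketch of this cache-on-first-success pattern, with a constant standing in for the database query:

```rust
use std::sync::OnceLock;

static EARLIEST: OnceLock<u32> = OnceLock::new();

fn earliest_meal_year() -> u32 {
    if let Some(cached) = EARLIEST.get() {
        return *cached; // served from the cache after the first call
    }
    let fetched = 2024; // stand-in for the database query
    // set() only fails if another caller raced us to it, so its result
    // can be ignored, just like in the handler above.
    EARLIEST.set(fetched).ok();
    fetched
}

fn main() {
    assert_eq!(earliest_meal_year(), 2024);
    assert_eq!(earliest_meal_year(), 2024);
}
```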

View File

@@ -1,8 +1,7 @@
use actix_web::{get, web::ServiceConfig, HttpResponse, Responder};
use itertools::Itertools as _;
use serde_json::json;
use actix_web::{get, HttpResponse, Responder};
use shared::Canteen;
use strum::IntoEnumIterator as _;
use utoipa_actix_web::service_config::ServiceConfig;
mod menu;
mod metadata;
@@ -17,11 +16,28 @@ pub fn configure(cfg: &mut ServiceConfig) {
.configure(price_history::configure);
}
#[derive(serde::Serialize, utoipa::ToSchema)]
struct IndexResponse {
/// The current version of the API.
version: &'static str,
/// A short description of the API.
description: &'static str,
/// A list of supported canteens.
supported_canteens: Vec<String>,
}
#[utoipa::path(summary = "Get API version and capabilities", description = "Get information about the api version and capabilities.", responses((status = 200, body = IndexResponse, example = json!(IndexResponse {
version: env!("CARGO_PKG_VERSION"),
description: env!("CARGO_PKG_DESCRIPTION"),
supported_canteens: Canteen::iter().map(|c| c.get_identifier().to_string()).collect::<Vec<String>>()
}))))]
#[get("/")]
async fn index() -> impl Responder {
HttpResponse::Ok().json(json!({
"version": env!("CARGO_PKG_VERSION"),
"description": env!("CARGO_PKG_DESCRIPTION"),
"supportedCanteens": Canteen::iter().map(|c| c.get_identifier().to_string()).collect_vec(),
}))
HttpResponse::Ok().json(IndexResponse {
version: env!("CARGO_PKG_VERSION"),
description: env!("CARGO_PKG_DESCRIPTION"),
supported_canteens: Canteen::iter()
.map(|c| c.get_identifier().to_string())
.collect(),
})
}
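
Replacing the ad-hoc json! body with a typed IndexResponse lets utoipa derive the response schema from the same definition serde serializes. One observable difference in this hunk: the old body used the key supportedCanteens, while the new struct, lacking a serde rename attribute, serializes as supported_canteens. A minimal sketch with hypothetical values:

```rust
use serde::Serialize;

// serde drives the serialization and utoipa derives the OpenAPI schema
// from the same struct definition.
#[derive(Serialize, utoipa::ToSchema)]
struct IndexResponse {
    version: &'static str,
    description: &'static str,
    supported_canteens: Vec<String>,
}

fn main() {
    let body = IndexResponse {
        version: "0.4.1",
        description: "API for the canteens of UPB",
        supported_canteens: vec!["forum".into(), "academica".into()],
    };
    // Prints snake_case keys, e.g. {"version":"0.4.1",...,"supported_canteens":["forum","academica"]}
    println!("{}", serde_json::to_string(&body).unwrap());
}
```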

View File

@@ -1,25 +1,32 @@
use actix_web::{
get,
web::{self, ServiceConfig},
HttpResponse, Responder,
};
use actix_web::{get, web, HttpResponse, Responder};
use chrono::NaiveDate;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::PgPool;
use utoipa_actix_web::service_config::ServiceConfig;
use crate::dish::DishNutrients;
use crate::{dish::DishNutrients, util::GenericServerError};
pub fn configure(cfg: &mut ServiceConfig) {
cfg.service(nutrition);
}
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, utoipa::ToSchema)]
#[serde(rename_all = "camelCase")]
struct NutritionQuery {
date: Option<NaiveDate>,
}
#[utoipa::path(
summary = "Get nutrition values of some dish",
description = "Query nutrition values of some dish (at certain date).",
params(("name" = String, Path, description = "Name of the dish to query nutrition values for", example = "Bratwurst mit Currysauce und Pommes Frites")),
responses(
(status = OK, description = "Get nutrition values of some dish.", body = DishNutrients),
(status = NOT_FOUND, description = "No dish with a matching name could be found.", body = GenericServerError),
(status = INTERNAL_SERVER_ERROR, description = "Server failed to answer request.", body = GenericServerError)
)
)]
#[get("/nutrition/{name}")]
async fn nutrition(
path: web::Path<String>,

View File

@@ -1,24 +1,25 @@
use std::collections::BTreeMap;
use actix_web::{
get,
web::{self, ServiceConfig},
HttpResponse, Responder,
};
use bigdecimal::BigDecimal;
use actix_web::{get, web, HttpResponse, Responder};
use chrono::NaiveDate;
use itertools::Itertools;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::{prelude::FromRow, PgPool};
use utoipa_actix_web::service_config::ServiceConfig;
use crate::{util, DishPrices};
use crate::{
endpoints::menu::InvalidCanteenError,
util::{self, GenericServerError},
DishPrices,
};
pub fn configure(cfg: &mut ServiceConfig) {
cfg.service(price_history);
}
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, utoipa::ToSchema)]
#[serde(rename_all = "camelCase")]
struct PriceHistoryQuery {
canteens: Option<String>,
@@ -29,11 +30,50 @@ struct PriceHistoryQuery {
struct PriceHistoryRow {
date: NaiveDate,
canteen: String,
price_students: BigDecimal,
price_employees: BigDecimal,
price_guests: BigDecimal,
price_students: Decimal,
price_employees: Decimal,
price_guests: Decimal,
}
#[utoipa::path(
summary = "Get price history of a dish",
description = "Query the price history of a dish (optionally filtered by canteen(s)).",
params(
("name" = String, Path, description = "Name of the dish to query price history for", example = "Bratwurst mit Currysauce und Pommes Frites"),
("canteens" = Option<String>, Query, description = "Comma-separated list of canteen identifiers to filter the price history by", example = "forum,academica"),
("limit" = Option<u32>, Query, description = "Maximum number of entries to return", minimum = 1, maximum = 1000, example = 100),
),
responses(
(status = OK, description = "Query the price history of a dish.", body = BTreeMap<String, BTreeMap<NaiveDate, DishPrices>>, example = json!({
"forum": {
"2024-06-01": {
"students": "2.50",
"employees": "3.50",
"guests": "4.50"
},
"2024-05-31": {
"students": "2.40",
"employees": "3.40",
"guests": "4.40"
}
},
"academica": {
"2024-06-01": {
"students": "2.60",
"employees": "3.60",
"guests": "4.60"
}
}
})),
(status = BAD_REQUEST, description = "Invalid canteen identifier.", body = InvalidCanteenError, example = json!({
"error": "Invalid canteen identifier",
"invalid": ["invalid_canteen_1", "invalid_canteen_2"]
})),
(status = INTERNAL_SERVER_ERROR, description = "Server failed to answer request.", body = GenericServerError, example = json!({
"error": "Failed to query database",
}))
)
)]
#[get("/price-history/{name}")]
async fn price_history(
path: web::Path<String>,
@@ -46,52 +86,13 @@ async fn price_history(
.as_deref()
.map(util::parse_canteens_comma_separated);
let dish_name = path.into_inner();
let limit = query.limit.unwrap_or(1000) as i64;
let limit = query.limit.unwrap_or(1000).clamp(1, 1000) as i64;
if let Some(canteens) = canteens {
if canteens.iter().all(Result::is_ok) {
let canteens = canteens.into_iter().filter_map(Result::ok).collect_vec();
if canteens.len() == 1 {
let canteen = canteens.into_iter().next().expect("length is 1");
let res = sqlx::query!(
r#"SELECT date, price_students, price_employees, price_guests FROM meals WHERE canteen = $1 AND LOWER("name") = $2 AND is_latest = TRUE ORDER BY date DESC LIMIT $3;"#,
canteen.get_identifier(),
dish_name.to_lowercase(),
limit,
)
.fetch_all(db)
.await;
match res {
Ok(recs) => {
let structured = recs
.into_iter()
.map(|r| {
(
r.date,
DishPrices {
students: r.price_students,
employees: r.price_employees,
guests: r.price_guests,
}
.normalize(),
)
})
.collect::<BTreeMap<_, _>>();
HttpResponse::Ok().json(structured)
}
Err(err) => {
tracing::error!("Failed to query database: {err:?}");
HttpResponse::InternalServerError().json(json!({
"error": "Failed to query database",
}))
}
}
} else {
let res = sqlx::query_as!(PriceHistoryRow,
let res = sqlx::query_as!(PriceHistoryRow,
r#"SELECT date, canteen, price_students, price_employees, price_guests FROM meals WHERE canteen = ANY($1) AND LOWER("name") = $2 AND is_latest = TRUE ORDER BY date DESC LIMIT $3;"#,
&canteens.iter().map(|c| c.get_identifier().to_string()).collect_vec(),
dish_name.to_lowercase(),
@@ -100,18 +101,17 @@
.fetch_all(db)
.await;
match res {
Ok(recs) => {
let structured = structure_multiple_canteens(recs);
match res {
Ok(recs) => {
let structured = structure_multiple_canteens(recs);
HttpResponse::Ok().json(structured)
}
Err(err) => {
tracing::error!("Failed to query database: {err:?}");
HttpResponse::InternalServerError().json(json!({
"error": "Failed to query database",
}))
}
HttpResponse::Ok().json(structured)
}
Err(err) => {
tracing::error!("Failed to query database: {err:?}");
HttpResponse::InternalServerError().json(json!({
"error": "Failed to query database",
}))
}
}
} else {
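
With the single-canteen special case removed, every request takes the same query_as! path and the rows are grouped into a canteen → date → prices map (the structure_multiple_canteens call above). A small standalone sketch of that grouping step, with strings standing in for NaiveDate and DishPrices:

```rust
use std::collections::BTreeMap;

struct Row {
    canteen: String,
    date: String,   // stand-in for chrono::NaiveDate
    prices: String, // stand-in for the DishPrices triple
}

// Hypothetical simplification of structure_multiple_canteens.
fn structure(rows: Vec<Row>) -> BTreeMap<String, BTreeMap<String, String>> {
    let mut out: BTreeMap<String, BTreeMap<String, String>> = BTreeMap::new();
    for r in rows {
        // entry()/or_default() creates the per-canteen map on first sight.
        out.entry(r.canteen).or_default().insert(r.date, r.prices);
    }
    out
}

fn main() {
    let rows = vec![
        Row { canteen: "forum".into(), date: "2024-06-01".into(), prices: "2.50".into() },
        Row { canteen: "forum".into(), date: "2024-05-31".into(), prices: "2.40".into() },
    ];
    assert_eq!(structure(rows)["forum"].len(), 2);
}
```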

View File

@@ -12,6 +12,13 @@ use mensa_upb_api::get_governor;
use sqlx::postgres::PgPoolOptions;
use tracing::{debug, error, info, level_filters::LevelFilter};
use tracing_subscriber::EnvFilter;
use utoipa::OpenApi as _;
use utoipa_actix_web::AppExt as _;
use utoipa_rapidoc::RapiDoc;
#[derive(utoipa::OpenApi)]
#[openapi(info(title = "Mensa UPB API"))]
struct ApiDoc;
#[tokio::main]
async fn main() -> Result<()> {
@@ -80,7 +87,13 @@ async fn main() -> Result<()> {
.wrap(Governor::new(&governor_conf))
.wrap(cors)
.app_data(web::Data::new(db.clone()))
.into_utoipa_app()
.openapi(ApiDoc::openapi())
.configure(mensa_upb_api::endpoints::configure)
.openapi_service(|api| {
RapiDoc::with_openapi("/api-docs/openapi.json", api).path("/rapidoc")
})
.into_app()
})
.bind((interface.as_str(), port))?
.run()
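
The server is now built through utoipa-actix-web, which collects the #[utoipa::path] metadata from the registered services into one OpenAPI document and serves a RapiDoc UI for it. A trimmed standalone sketch of that wiring, with a hypothetical /health endpoint in place of the real routes:

```rust
use actix_web::{get, App, HttpResponse, HttpServer, Responder};
use utoipa::OpenApi as _;
use utoipa_actix_web::AppExt as _;
use utoipa_rapidoc::RapiDoc;

#[derive(utoipa::OpenApi)]
#[openapi(info(title = "Mensa UPB API"))]
struct ApiDoc;

// Hypothetical endpoint standing in for the real routes.
#[utoipa::path(responses((status = OK, description = "Liveness probe")))]
#[get("/health")]
async fn health() -> impl Responder {
    HttpResponse::Ok().finish()
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            .into_utoipa_app()
            .openapi(ApiDoc::openapi())
            .service(health) // paths registered here end up in the spec
            .openapi_service(|api| {
                // Spec at /api-docs/openapi.json, UI at /rapidoc.
                RapiDoc::with_openapi("/api-docs/openapi.json", api).path("/rapidoc")
            })
            .into_app()
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```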

View File

@@ -7,7 +7,7 @@ use std::str::FromStr as _;
use crate::{Dish, DishPrices};
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[derive(Debug, Clone, Serialize, Deserialize, Default, utoipa::ToSchema)]
pub struct Menu {
date: NaiveDate,
main_dishes: Vec<Dish>,
@@ -27,21 +27,17 @@ impl Menu {
.map(|c| c.get_identifier().to_string())
.collect::<Vec<_>>();
let query_db = async || {
sqlx::query!(r#"SELECT name, array_agg(DISTINCT canteen ORDER BY canteen) AS "canteens!", dish_type AS "dish_type: DishType", image_src, price_students, price_employees, price_guests, vegan, vegetarian
if allow_refresh {
check_refresh(db, date, canteens).await;
};
let result = sqlx::query!(r#"SELECT name, array_agg(DISTINCT canteen ORDER BY canteen) AS "canteens!", dish_type AS "dish_type: DishType", image_src, price_students, price_employees, price_guests, vegan, vegetarian
FROM meals WHERE date = $1 AND canteen = ANY($2) AND is_latest = TRUE
GROUP BY name, dish_type, image_src, price_students, price_employees, price_guests, vegan, vegetarian
ORDER BY name"#,
date, &canteens_str)
.fetch_all(db)
.await
};
let mut result = query_db().await?;
if allow_refresh && check_refresh(db, date, canteens).await {
result = query_db().await?;
}
.await?;
let mut main_dishes = Vec::new();
let mut side_dishes = Vec::new();

View File

@@ -5,3 +5,9 @@ use shared::Canteen;
pub fn parse_canteens_comma_separated(s: &str) -> Vec<Result<Canteen, String>> {
s.split(',').map(Canteen::from_str).collect()
}
#[expect(dead_code)]
#[derive(utoipa::ToSchema)]
pub(crate) struct GenericServerError {
error: &'static str,
}