// (listing metadata from the source page — 729 lines, 20 KiB, Rust — preserved as a comment)
mod config;
|
|
mod question;
|
|
mod app_error;
|
|
mod auth;
|
|
|
|
use std::collections::HashMap;
|
|
use std::net::SocketAddr;
|
|
use std::sync::Arc;
|
|
|
|
use anyhow::{Result, Context};
|
|
use axum::http::StatusCode;
|
|
use axum::routing::{get, post};
|
|
use axum::{Router, Json};
|
|
use axum::extract::{State, Path, Query};
|
|
use chrono::{NaiveDate, Utc, Datelike, Duration, DateTime};
|
|
use futures::stream::{TryStreamExt, FuturesOrdered};
|
|
use rand::distributions::{Alphanumeric, DistString};
|
|
use serde::{Deserialize, Serialize};
|
|
use sqlx::PgPool;
|
|
use sqlx::postgres::{PgConnectOptions, PgPoolOptions};
|
|
use uuid::Uuid;
|
|
|
|
use crate::app_error::AppResult;
|
|
use crate::auth::User;
|
|
use crate::config::Config;
|
|
use crate::question::{FullQuestion, BasicQuestion, Question};
|
|
|
|
// Use mimalloc as the process-wide allocator (generally faster than the
// system allocator under multithreaded async workloads).
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
|
|
|
/// Entry point: load `./config.toml`, connect to Postgres, run pending
/// migrations, then serve the HTTP API until the process is stopped.
#[tokio::main]
async fn main() -> Result<()> {
    // Configuration is read once at startup; a missing or malformed file
    // aborts the process with context attached to the error.
    let config: Config = {
        let t = std::fs::read_to_string("./config.toml")
            .context("could not read config.toml")?;
        toml::from_str(&t)
            .context("could not deserialize config file")?
    };

    let options = PgConnectOptions::new()
        .host(&config.database.host)
        .port(config.database.port)
        .database(&config.database.name)
        .username(&config.database.username)
        .password(&config.database.password);

    let pool = PgPoolOptions::new()
        .connect_with(options)
        .await
        .context("could not connect to database")?;

    // sqlx::migrate!() embeds the migrations directory at compile time;
    // apply anything not yet run before accepting traffic.
    sqlx::migrate!()
        .run(&pool)
        .await?;

    let address: SocketAddr = config.server.address.parse()?;

    // All handlers share one Arc<AppState> (config + connection pool).
    let app = Router::new()
        .route(
            "/user",
            post(create_user)
                .delete(delete_user)
        )
        .route("/questions", get(get_data))
        .route("/question/:id/breakdown/:stat", get(stat_breakdown))
        .route("/vote", post(vote))
        .with_state(Arc::new(AppState {
            config,
            pool,
        }));

    let listener = tokio::net::TcpListener::bind(&address).await?;
    axum::serve(listener, app).await?;

    Ok(())
}
|
|
|
|
/// Shared application state as handed to handlers via axum's `State`.
pub type ArcState = Arc<AppState>;

pub struct AppState {
    // Parsed contents of config.toml.
    pub config: Config,
    // Postgres connection pool shared by all handlers.
    pub pool: PgPool,
}
|
|
|
|
/// Response body for `POST /user`.
#[derive(Serialize)]
struct CreateUserResponse {
    // UUIDv7 of the new user, rendered without hyphens (`simple` form).
    user_id: String,
    // Plaintext API key. Only its blake3 hash is stored server-side, so
    // this response is the caller's only chance to record the key.
    api_key: String,
}
|
|
|
|
async fn create_user(
|
|
state: State<ArcState>,
|
|
) -> AppResult<Json<CreateUserResponse>> {
|
|
let id = Uuid::now_v7();
|
|
let api_key = Alphanumeric.sample_string(&mut rand::thread_rng(), 32);
|
|
let api_key_hash = {
|
|
let mut hasher = blake3::Hasher::new();
|
|
hasher.update(api_key.as_bytes());
|
|
hasher.finalize().as_bytes().to_vec()
|
|
};
|
|
|
|
sqlx::query!(
|
|
// language=postgresql
|
|
"insert into users (id, api_key_hash) values ($1, $2)",
|
|
id,
|
|
api_key_hash,
|
|
)
|
|
.execute(&state.pool)
|
|
.await?;
|
|
|
|
Ok(Json(CreateUserResponse {
|
|
user_id: id.simple().to_string(),
|
|
api_key,
|
|
}))
|
|
}
|
|
|
|
/// `DELETE /user` — removes the requesting user's row.
///
/// The `User` extractor (see `auth` module) supplies the authenticated id,
/// so a caller can only ever delete themselves. Responds 204 No Content;
/// deleting an already-deleted user also succeeds (the delete is idempotent).
async fn delete_user(
    state: State<ArcState>,
    user: User,
) -> AppResult<StatusCode> {
    sqlx::query!(
        // language=postgresql
        "delete from users where id = $1",
        user.id,
    )
    .execute(&state.pool)
    .await?;

    Ok(StatusCode::NO_CONTENT)
}
|
|
|
|
// https://github.com/nox/serde_urlencoded/issues/26
|
|
// #[derive(Deserialize)]
|
|
// #[serde(tag = "v")]
|
|
// enum GetDataQuery {
|
|
// #[serde(rename = "1")]
|
|
// V1,
|
|
// #[serde(rename = "2")]
|
|
// V2 {
|
|
// #[serde(default)]
|
|
// page: Option<u32>,
|
|
// #[serde(default = "t")]
|
|
// include_current: bool,
|
|
// },
|
|
// }
|
|
|
|
// impl Default for GetDataQuery {
|
|
// fn default() -> Self {
|
|
// Self::V1
|
|
// }
|
|
// }
|
|
|
|
/// Query string for `GET /questions`.
///
/// Flattened stand-in for a `v`-tagged enum: serde_urlencoded cannot
/// deserialize internally-tagged enums from query strings (see the
/// commented-out version above and the linked issue).
#[derive(Deserialize)]
struct GetDataQuery {
    // API version selector; anything other than 2 falls back to v1.
    v: u32,
    // v2 only: 1-based page number. Missing or < 1 is treated as page 1.
    #[serde(default)]
    page: Option<u32>,
    // v2 only: also return the currently-active question. Defaults to true
    // when a query string is present but omits the field.
    #[serde(default = "t")]
    include_current: bool,
}
|
|
|
|
impl Default for GetDataQuery {
    /// Used when no query string is supplied at all: behaves as a v1 request.
    ///
    /// NOTE(review): `include_current` defaults to `false` here but to `true`
    /// via the serde field default above. Harmless today — the v1 path
    /// ignores the field — but confirm the asymmetry is intentional.
    fn default() -> Self {
        Self {
            v: 1,
            page: None,
            include_current: false,
        }
    }
}
|
|
|
|
// serde's `default = "…"` attribute takes a function path; this one simply
// supplies `true` for `GetDataQuery::include_current`.
const fn t() -> bool {
    true
}
|
|
|
|
/// Response body for `GET /questions`; variant mirrors the requested API
/// version. `untagged` means clients see either a bare array (v1) or an
/// object with `current`/`page`/`has_next` (v2).
#[derive(Serialize)]
#[serde(untagged)]
enum GetDataResult {
    // v1: every published question, newest first, no pagination.
    V1(Vec<Question>),
    // v2: one page of past questions plus pagination metadata.
    V2 {
        // The currently-active question, when requested and one exists.
        current: Option<Question>,
        // Up to PER_PAGE past (no-longer-active) questions.
        page: Vec<Question>,
        // True when at least one further page exists.
        has_next: bool,
    },
}
|
|
|
|
// Page size for the v2 `GET /questions` endpoint.
const PER_PAGE: usize = 10;
|
|
|
|
async fn get_data(
|
|
state: State<ArcState>,
|
|
user: User,
|
|
query: Option<Query<GetDataQuery>>,
|
|
) -> AppResult<Json<GetDataResult>> {
|
|
let Query(query) = query.unwrap_or_default();
|
|
match query.v {
|
|
2 => get_data_v2(
|
|
state,
|
|
user,
|
|
query.page,
|
|
query.include_current,
|
|
).await.map(Json),
|
|
_ => get_data_v1(state, user).await.map(Json),
|
|
}
|
|
|
|
// :c
|
|
// match query {
|
|
// GetDataQuery::V1 => get_data_v1(state, user).await.map(Json),
|
|
// GetDataQuery::V2 {
|
|
// page,
|
|
// include_current
|
|
// } => get_data_v2(
|
|
// state,
|
|
// user,
|
|
// page,
|
|
// include_current,
|
|
// ).await.map(Json),
|
|
// }
|
|
}
|
|
|
|
/// v2 implementation of `GET /questions`.
///
/// Returns up to `PER_PAGE` past (no-longer-active) questions, newest
/// first, a `has_next` flag, and — when `include_current` is set — the
/// question currently open for voting.
async fn get_data_v2(
    state: State<ArcState>,
    user: User,
    page: Option<u32>,
    include_current: bool,
) -> AppResult<GetDataResult> {
    // Client pages are 1-based; clamp to 1 and convert to a 0-based multiplier.
    let offset_mul = page.unwrap_or(1).max(1) - 1;

    // Over-fetch by 2 rows: one of slack because the active question (if it
    // lands in this window) is filtered out of the page below, and one more
    // to detect whether a further page exists.
    let questions = sqlx::query_as!(
        RawQuestion,
        // language=postgresql
        r#"
        select
            q.*,
            current_timestamp <= q.publish_date + interval '1 day' and current_timestamp > q.publish_date as "active!",
            (select answer from responses r where r.question_id = q.id and r.user_id = $1) as response
        from questions q
        where q.publish_date <= current_timestamp
        order by q.publish_date desc
        limit $2::bigint + 2 offset $3::bigint * $2::bigint
        "#,
        user.id,
        PER_PAGE as i64,
        offset_mul as i64,
    )
    .fetch_all(&state.pool)
    .await?;

    let mut current = None;

    // Resolve each raw row into an API Question (parse_question may issue a
    // per-question tally query); FuturesOrdered runs them concurrently while
    // preserving the newest-first ordering.
    let questions = questions.into_iter()
        .map(|question| {
            let state = Arc::clone(&state);

            async move {
                parse_question(&state, question).await
            }
        })
        .collect::<FuturesOrdered<_>>()
        .try_collect::<Vec<_>>()
        .await?;

    // Split the active question (at most one by construction of the voting
    // window) out of the page; it is only kept when the caller asked for it.
    let mut questions: Vec<Question> = questions.into_iter()
        .flat_map(|question| {
            if question.active {
                if include_current {
                    current = Some(question);
                }

                None
            } else {
                Some(question)
            }
        })
        .collect();

    // The active question may fall outside this page's window; fetch it
    // directly if it was requested but not encountered above.
    if current.is_none() && include_current {
        let raw_current = sqlx::query_as!(
            RawQuestion,
            r#"
            select
                q.*,
                true as "active!",
                (select answer from responses r where r.question_id = q.id and r.user_id = $1) as response
            from questions q
            where
                q.publish_date <= current_timestamp
                and current_timestamp <= q.publish_date + interval '1 day'
                and current_timestamp > q.publish_date
            "#,
            user.id,
        )
        .fetch_optional(&state.pool)
        .await?;

        if let Some(raw) = raw_current {
            current = Some(parse_question(&state, raw).await?);
        }
    }

    // Anything beyond PER_PAGE was only fetched to answer `has_next`.
    let has_next = questions.len() > PER_PAGE;
    questions.truncate(PER_PAGE);

    Ok(GetDataResult::V2 {
        current,
        has_next,
        page: questions,
    })
}
|
|
|
|
/// Row shape shared by the question queries: the `questions` columns plus
/// two computed columns (`active` and the requesting user's `response`).
struct RawQuestion {
    id: Uuid,
    publish_date: DateTime<Utc>,
    // Computed in SQL: publish_date < now <= publish_date + 1 day.
    active: bool,
    question_text: String,
    // Answer options; positions double as the answer indices used below.
    answers: Vec<String>,
    // The requesting user's answer index, if they have voted.
    response: Option<i16>,
    // Display name of whoever suggested the question, if any.
    suggester: Option<String>,
}
|
|
|
|
async fn parse_question(state: &AppState, question: RawQuestion) -> Result<Question> {
|
|
let basic = BasicQuestion {
|
|
id: question.id,
|
|
date: question.publish_date,
|
|
active: question.active,
|
|
text: question.question_text,
|
|
answers: question.answers,
|
|
suggester: question.suggester,
|
|
};
|
|
|
|
if question.response.is_some() || !question.active {
|
|
let mut responses: Vec<u64> = vec![0; basic.answers.len()];
|
|
let raw_responses = sqlx::query!(
|
|
// language=postgresql
|
|
"select r.answer, count(r.*) from responses r where r.question_id = $1 group by r.answer",
|
|
question.id,
|
|
)
|
|
.fetch_all(&state.pool)
|
|
.await?;
|
|
|
|
for response in raw_responses {
|
|
responses[response.answer as usize] = response.count.map(|c| c as u64).unwrap_or(0);
|
|
}
|
|
|
|
return Ok(FullQuestion {
|
|
basic,
|
|
responses,
|
|
response: question.response.map(|r| r as u16),
|
|
}.into());
|
|
}
|
|
|
|
Ok(basic.into())
|
|
}
|
|
|
|
/// Legacy v1 implementation of `GET /questions`: returns every published
/// question (the active one included), newest first, with no pagination.
async fn get_data_v1(
    state: State<ArcState>,
    user: User,
) -> AppResult<GetDataResult> {
    let questions = sqlx::query_as!(
        RawQuestion,
        // language=postgresql
        r#"
        select
            q.*,
            current_timestamp <= q.publish_date + interval '1 day' and current_timestamp > q.publish_date as "active!",
            (select answer from responses r where r.question_id = q.id and r.user_id = $1) as response
        from questions q
        where q.publish_date <= current_timestamp
        order by q.publish_date desc
        "#,
        user.id,
    )
    .fetch_all(&state.pool)
    .await?;

    // Resolve rows concurrently (parse_question may query per-question
    // tallies); FuturesOrdered preserves the newest-first ordering.
    let questions = questions.into_iter()
        .map(|question| {
            let state = Arc::clone(&state);

            async move {
                parse_question(&state, question).await
            }
        })
        .collect::<FuturesOrdered<_>>()
        .try_collect::<Vec<_>>()
        .await?;

    Ok(GetDataResult::V1(questions))
}
|
|
|
|
/// Request body for `POST /vote`.
#[derive(Deserialize)]
struct VoteRequest {
    question_id: Uuid,
    // 0-based index into the question's `answers` array.
    answer: u16,
    // Optional self-reported demographics stored alongside the vote.
    stats: VoteStats,
}
|
|
|
|
/// Optional self-reported demographics submitted with a vote; serialized
/// to a JSON blob on the response row. Every field defaults to `None`
/// when omitted (`#[serde(default)]` on the struct).
#[derive(Deserialize, Serialize, Default)]
#[serde(default)]
struct VoteStats {
    gender: Option<Gender>,
    birth_date: Option<NaiveDate>,
    // Numeric ids below presumably reference external game enumerations
    // (race/clan/world/job ids); not validated here — TODO confirm ranges.
    character_race: Option<u8>,
    character_clan: Option<u8>,
    // 0 = male, 1 = female per the breakdown mapping in stat_breakdown.
    character_gender: Option<u8>,
    character_home_world: Option<u32>,
    year_started_playing: Option<u16>,
    myers_briggs: Option<Mbti>,
    main_job: Option<u16>,
}
|
|
|
|
impl VoteStats {
|
|
fn clean(&mut self) {
|
|
let now = Utc::now().naive_utc();
|
|
|
|
self.year_started_playing = match self.year_started_playing {
|
|
Some(0..=2009) => None,
|
|
Some(year) if year as i32 > now.year() => None,
|
|
x => x,
|
|
};
|
|
|
|
self.birth_date = match self.birth_date {
|
|
Some(date) if date > now.date() => None,
|
|
Some(date) if date < (now - Duration::days(365 * 115)).date() => None,
|
|
x => x,
|
|
};
|
|
}
|
|
}
|
|
|
|
/// Self-reported gender options accepted in [`VoteStats`]; serialized in
/// snake_case (e.g. `non_binary`).
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
enum Gender {
    Female,
    NonBinary,
    Male,
}
|
|
|
|
/// Myers-Briggs personality types; serialized as lowercase strings
/// (`enfj`, `intp`, …) via snake_case renaming.
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
enum Mbti {
    Enfj,
    Enfp,
    Entj,
    Entp,
    Esfj,
    Esfp,
    Estj,
    Estp,
    Infj,
    Infp,
    Intj,
    Intp,
    Isfj,
    Isfp,
    Istj,
    Istp,
}
|
|
|
|
async fn vote(
|
|
state: State<ArcState>,
|
|
user: User,
|
|
Json(mut req): Json<VoteRequest>,
|
|
) -> AppResult<StatusCode> {
|
|
// look up question
|
|
let question = sqlx::query!(
|
|
// language=postgresql
|
|
r#"
|
|
select
|
|
current_timestamp <= q.publish_date + interval '1 day' and current_timestamp > q.publish_date as "active!",
|
|
exists(select 1 from responses r where r.question_id = q.id and r.user_id = $1) as "voted!"
|
|
from questions q
|
|
where q.id = $1
|
|
"#,
|
|
req.question_id,
|
|
)
|
|
.fetch_optional(&state.pool)
|
|
.await?
|
|
.context("no such question")?;
|
|
|
|
if !question.active {
|
|
return Err(anyhow::anyhow!("that question is not open for voting").into());
|
|
}
|
|
|
|
if question.voted {
|
|
return Err(anyhow::anyhow!("you have already voted for that question").into());
|
|
}
|
|
|
|
req.stats.clean();
|
|
let stats = serde_json::to_value(&req.stats)?;
|
|
sqlx::query!(
|
|
// language=postgresql
|
|
"insert into responses (question_id, user_id, answer, country, stats) values ($1, $2, $3, $4, $5)",
|
|
req.question_id,
|
|
user.id,
|
|
req.answer as i16,
|
|
user.country,
|
|
stats,
|
|
)
|
|
.execute(&state.pool)
|
|
.await
|
|
.map_err(|_| anyhow::anyhow!("you have either already voted or the question id is invalid"))?;
|
|
|
|
Ok(StatusCode::NO_CONTENT)
|
|
}
|
|
|
|
/// A statistic as physically stored per response: `Country` is its own
/// column; every other variant is a key in the `stats` JSON blob (named
/// after the matching [`VoteStats`] field).
#[derive(Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
enum Statistic {
    Country,
    Gender,
    BirthDate,
    CharacterRace,
    CharacterClan,
    CharacterGender,
    CharacterHomeWorld,
    YearStartedPlaying,
    MyersBriggs,
    MainJob,
}
|
|
|
|
impl Statistic {
    /// Storage key for this statistic: the `country` column name, or the
    /// JSON field name inside `stats` (matching [`VoteStats`] serde names).
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Country => "country",
            Self::Gender => "gender",
            Self::BirthDate => "birth_date",
            Self::CharacterRace => "character_race",
            Self::CharacterClan => "character_clan",
            Self::CharacterGender => "character_gender",
            Self::CharacterHomeWorld => "character_home_world",
            Self::YearStartedPlaying => "year_started_playing",
            Self::MyersBriggs => "myers_briggs",
            Self::MainJob => "main_job",
        }
    }
}
|
|
|
|
/// A breakdown dimension a client may request via the URL path. Mostly
/// 1:1 with [`Statistic`], except `Age` and `ZodiacSign` are both derived
/// from the stored birth date.
#[derive(Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
enum Breakdown {
    Country,
    Gender,
    Age,
    ZodiacSign,
    CharacterRace,
    CharacterClan,
    CharacterGender,
    CharacterHomeWorld,
    YearStartedPlaying,
    MyersBriggs,
    MainJob,
}
|
|
|
|
impl Breakdown {
    /// The underlying stored statistic this breakdown is computed from
    /// (`Age` and `ZodiacSign` both map to the stored birth date).
    pub fn as_statistic(&self) -> Statistic {
        match self {
            Self::Country => Statistic::Country,
            Self::Gender => Statistic::Gender,
            Self::Age => Statistic::BirthDate,
            Self::ZodiacSign => Statistic::BirthDate,
            Self::CharacterRace => Statistic::CharacterRace,
            Self::CharacterClan => Statistic::CharacterClan,
            Self::CharacterGender => Statistic::CharacterGender,
            Self::CharacterHomeWorld => Statistic::CharacterHomeWorld,
            Self::YearStartedPlaying => Statistic::YearStartedPlaying,
            Self::MyersBriggs => Statistic::MyersBriggs,
            Self::MainJob => Statistic::MainJob,
        }
    }
}
|
|
|
|
/// One grouped row from the breakdown queries.
struct BreakdownQueryResult {
    // Raw group key: country code, age in whole years, or a JSON field
    // value, depending on which query produced the row.
    key: String,
    // Answer index this count applies to.
    answer: i16,
    // Number of responses with this (key, answer) pair.
    count: i64,
}
|
|
|
|
/// `GET /question/:id/breakdown/:stat` — vote counts for one question
/// grouped by a demographic dimension.
///
/// Returns `{ group key → [count per answer index] }`. While the question
/// is still active, only users who have voted on it may see the breakdown.
async fn stat_breakdown(
    state: State<ArcState>,
    user: User,
    Path((question_id, stat)): Path<(Uuid, Breakdown)>,
) -> AppResult<Json<HashMap<String, Vec<u64>>>> {
    let question = sqlx::query!(
        // language=postgresql
        r#"
        select
            answers,
            publish_date,
            exists(select 1 from responses where question_id = $1 and user_id = $2) as "voted!",
            current_timestamp <= publish_date + interval '1 day' and current_timestamp > publish_date as "active!"
        from questions
        where id = $1"#,
        question_id,
        user.id,
    )
    .fetch_optional(&state.pool)
    .await?
    .context("no such question")?;

    // While voting is open, only voters may peek at the breakdown.
    if question.active && !question.voted {
        return Err(anyhow::anyhow!("you have not voted for this question").into());
    }

    // Three query shapes: country lives in its own column; age is computed
    // in SQL from the stored birth date; every other stat is a plain JSON
    // field keyed by Statistic::as_str().
    let records = if stat == Breakdown::Country {
        sqlx::query_as!(
            BreakdownQueryResult,
            // language=postgresql
            r#"select country as "key!", answer, count(*) as "count!" from responses where question_id = $1 group by country, answer order by answer"#,
            question_id,
        )
        .fetch_all(&state.pool)
        .await?
    } else if stat == Breakdown::Age {
        // Age in whole years, measured at the question's publish date ($2)
        // rather than "now" — presumably so the breakdown stays stable after
        // the question closes; confirm that intent.
        sqlx::query_as!(
            BreakdownQueryResult,
            // language=postgresql
            r#"
            select
                trunc(extract(days from ($2::timestamptz - (stats->>$3)::date)) / 365)::text as "key!",
                answer,
                count(*) as "count!"
            from responses
            where question_id = $1
                and stats->>$3 is not null
            group by 1, answer
            order by answer
            "#,
            question_id,
            question.publish_date,
            stat.as_statistic().as_str(),
        )
        .fetch_all(&state.pool)
        .await?
    } else {
        sqlx::query_as!(
            BreakdownQueryResult,
            // language=postgresql
            r#"select stats->>$2 as "key!", answer, count(*) as "count!" from responses where question_id = $1 and stats->>$2 is not null group by stats->>$2, answer order by answer"#,
            question_id,
            stat.as_statistic().as_str(),
        )
        .fetch_all(&state.pool)
        .await?
    };

    // group key → per-answer counts, sized from the question's answer list.
    let mut map: HashMap<String, Vec<u64>> = HashMap::new();
    let mut add_to_map = |key: String, rec: &BreakdownQueryResult| {
        let answers = map.entry(key)
            .or_insert_with(|| vec![0; question.answers.len()]);
        // Out-of-range answer indices are silently dropped.
        if let Some(answer) = answers.get_mut(rec.answer as usize) {
            *answer += rec.count as u64;
        }
    };

    // handle all the stats that require special processing
    match &stat {
        Breakdown::Age => {
            // Collapse exact ages into decade buckets; unparsable keys
            // (null/garbage birth dates) are skipped.
            for rec in records {
                let years: u32 = match rec.key.parse() {
                    Ok(y) => y,
                    Err(_) => continue,
                };

                let range = match years {
                    0..=9 => "0 to 9",
                    10..=19 => "10 to 19",
                    20..=29 => "20 to 29",
                    30..=39 => "30 to 39",
                    40..=49 => "40 to 49",
                    50..=59 => "50 to 59",
                    60..=69 => "60 to 69",
                    70..=79 => "70 to 79",
                    80..=89 => "80 to 89",
                    90..=99 => "90 to 99",
                    100.. => "100+",
                };

                add_to_map(range.to_string(), &rec);
            }
        }
        Breakdown::ZodiacSign => {
            // Derive the western zodiac sign from the birth date; encode
            // month+day as MMDD for a single range match.
            for rec in records {
                let Ok(date) = NaiveDate::parse_from_str(&rec.key, "%Y-%m-%d") else {
                    continue;
                };

                let birthday = (date.month() * 100) + date.day();

                #[allow(clippy::zero_prefixed_literal)]
                let sign = match birthday {
                    0000..=0120 => "capricorn",
                    0121..=0219 => "aquarius",
                    0220..=0320 => "pisces",
                    0321..=0420 => "aries",
                    0421..=0521 => "taurus",
                    0522..=0621 => "gemini",
                    0622..=0723 => "cancer",
                    0724..=0823 => "leo",
                    0824..=0923 => "virgo",
                    0924..=1023 => "libra",
                    1024..=1122 => "scorpio",
                    1123..=1221 => "sagittarius",
                    1222..=1231 => "capricorn",
                    _ => continue,
                };

                add_to_map(sign.to_string(), &rec);
            }
        }
        Breakdown::CharacterGender => {
            // Stored as a numeric flag; translate to a readable key and
            // skip anything else.
            for rec in records {
                let key = match rec.key.as_str() {
                    "1" => "female",
                    "0" => "male",
                    _ => continue,
                };

                add_to_map(key.to_string(), &rec);
            }
        }
        _ => {
            // Everything else uses the stored value verbatim as the key.
            for rec in records {
                add_to_map(rec.key.clone(), &rec);
            }
        }
    }

    Ok(Json(map))
}
|