Commit

✨ can view all now
jayy-lmao committed Jan 30, 2020
1 parent ce7ecf1 commit 09dadfd
Showing 20 changed files with 243 additions and 208 deletions.
3 changes: 1 addition & 2 deletions api/src/data/cult/create_cult.rs
@@ -1,6 +1,5 @@

use crate::type_defs::{Cult,NewCult};
use crate::db::get_db_conn;
use crate::type_defs::{Cult, NewCult};

pub fn create_cult(data: NewCult) -> Cult {
let conn = get_db_conn();
16 changes: 16 additions & 0 deletions api/src/data/cult/get_cult_all.rs
@@ -0,0 +1,16 @@
extern crate postgres;
use crate::db::get_db_conn;
use crate::type_defs::Cult;

pub fn get_cult_all() -> Vec<Cult> {
let mut vec = Vec::new();
let conn = get_db_conn();
for row in &conn.query("SELECT id, name, cult FROM cults", &[]).unwrap() {
let cult = Cult {
id: row.get(0),
name: row.get(1),
};
vec.push(cult);
}
vec
}
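
A minimal usage sketch of the new get_cult_all (not part of the commit), assuming the function is in scope at the call site and that Cult's name field is a String:

// Hypothetical caller; the surrounding module and Cult's field types are assumptions.
fn list_cults() {
  for cult in get_cult_all() {
    println!("cult {}: {}", cult.id, cult.name);
  }
}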
28 changes: 12 additions & 16 deletions api/src/data/cult/get_cult_by_id.rs
@@ -9,10 +9,7 @@ use std::collections::HashMap;
pub fn get_cult_by_ids(hashmap: &mut HashMap<i32, Cult>, ids: Vec<i32>) {
let conn = get_db_conn();
for row in &conn
.query(
"SELECT id, name FROM cults WHERE id = ANY($1)",
&[&ids],
)
.query("SELECT id, name FROM cults WHERE id = ANY($1)", &[&ids])
.unwrap()
{
let cult = Cult {
@@ -39,24 +36,23 @@ pub fn get_cult_by_ids(hashmap: &mut HashMap<i32, Cult>, ids: Vec<i32>) {
// }
// }


pub struct CultBatcher;

impl BatchFn<i32, Cult> for CultBatcher {
type Error = ();

fn load(&self, keys: &[i32]) -> BatchFuture<Cult, Self::Error> {
println!("load batch {:?}", keys);
let mut cult_hashmap = HashMap::new();
get_cult_by_ids(&mut cult_hashmap, keys.to_vec());
future::ready(keys.iter().map(|key| cult_hashmap[key].clone()).collect())
.unit_error()
.boxed()
}
type Error = ();

fn load(&self, keys: &[i32]) -> BatchFuture<Cult, Self::Error> {
println!("load batch {:?}", keys);
let mut cult_hashmap = HashMap::new();
get_cult_by_ids(&mut cult_hashmap, keys.to_vec());
future::ready(keys.iter().map(|key| cult_hashmap[key].clone()).collect())
.unit_error()
.boxed()
}
}

pub type CultLoader = Loader<i32, Cult, (), CultBatcher>;

pub fn get_loader() -> CultLoader {
Loader::new(CultBatcher)
Loader::new(CultBatcher)
}
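
The reshuffled CultBatcher still feeds the same dataloader; a rough sketch of two concurrent loads sharing one batch (assuming Loader::load returns a Result future, as the .unwrap() calls in mod.rs suggest, and that Cult's name is a String):

// Hypothetical caller: both ids are expected to show up in a single "load batch [1, 2]" print.
async fn two_cults(loader: &CultLoader) {
  let (a, b) = futures::join!(loader.load(1), loader.load(2));
  println!("{:?} {:?}", a.map(|c| c.name), b.map(|c| c.name));
}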
30 changes: 17 additions & 13 deletions api/src/data/cult/mod.rs
@@ -1,24 +1,28 @@
use crate::type_defs::{Cult, NewCult};

pub mod get_cult_by_id;
pub mod create_cult;
pub mod get_cult_all;
pub mod get_cult_by_id;
use get_cult_by_id::{get_loader, CultLoader};

#[derive(Clone)]
pub struct CultData {
cult_by_id: CultLoader,
cult_by_id: CultLoader,
}

impl CultData {
pub fn new() -> CultData {
CultData {
cult_by_id: get_loader(),
pub fn new() -> CultData {
CultData {
cult_by_id: get_loader(),
}
}
pub async fn cult_by_id(&self, id: i32) -> Cult {
self.cult_by_id.load(id).await.unwrap()
}
}
pub async fn cult_by_id(&self, id: i32) -> Cult {
self.cult_by_id.load(id).await.unwrap()
}
pub async fn create_cult(&self, data: NewCult) -> Cult {
create_cult::create_cult(data)
}
}
pub async fn create_cult(&self, data: NewCult) -> Cult {
create_cult::create_cult(data)
}
pub async fn get_all_cults(&self) -> Vec<Cult> {
get_cult_all::get_cult_all()
}
}
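
With get_all_cults added to CultData, callers can fetch every cult through the same object they already use for the id loader; a rough sketch (the calling context is an assumption, and Cult's name is assumed to be a String):

// Hypothetical async caller exercising both the new method and the loader-backed one.
async fn demo_cults(cults: &CultData) {
  let all = cults.get_all_cults().await;
  println!("{} cults total", all.len());
  let first = cults.cult_by_id(1).await; // goes through CultLoader
  println!("cult 1 is named {}", first.name);
}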
4 changes: 2 additions & 2 deletions api/src/data/mod.rs
@@ -1,5 +1,5 @@
mod person;
mod cult;
mod person;

pub use cult::CultData;
pub use person::PersonData;
pub use cult::CultData;
5 changes: 2 additions & 3 deletions api/src/data/person/create_person.rs
@@ -1,6 +1,5 @@

use crate::type_defs::{Person,NewPerson};
use crate::db::get_db_conn;
use crate::type_defs::{NewPerson, Person};

pub fn create_person(data: NewPerson) -> Person {
let conn = get_db_conn();
@@ -14,6 +13,6 @@ pub fn create_person(data: NewPerson) -> Person {
Person {
id: row.get(0),
name: row.get(1),
cult: row.get(2)
cult: row.get(2),
}
}
20 changes: 20 additions & 0 deletions api/src/data/person/get_person_all.rs
@@ -0,0 +1,20 @@
extern crate postgres;
use crate::db::get_db_conn;
use crate::type_defs::Person;

pub fn get_person_all() -> Vec<Person> {
let mut vec = Vec::new();
let conn = get_db_conn();
for row in &conn
.query("SELECT id, name, cult FROM persons", &[])
.unwrap()
{
let person = Person {
id: row.get(0),
name: row.get(1),
cult: row.get(2),
};
vec.push(person);
}
vec
}
21 changes: 10 additions & 11 deletions api/src/data/person/get_person_by_id.rs
@@ -24,24 +24,23 @@ pub fn get_person_by_ids(hashmap: &mut HashMap<i32, Person>, ids: Vec<i32>) {
}
}


pub struct PersonBatcher;

impl BatchFn<i32, Person> for PersonBatcher {
type Error = ();
type Error = ();

fn load(&self, keys: &[i32]) -> BatchFuture<Person, Self::Error> {
println!("load batch {:?}", keys);
let mut person_hashmap = HashMap::new();
get_person_by_ids(&mut person_hashmap, keys.to_vec());
future::ready(keys.iter().map(|key| person_hashmap[key].clone()).collect())
.unit_error()
.boxed()
}
fn load(&self, keys: &[i32]) -> BatchFuture<Person, Self::Error> {
println!("load batch {:?}", keys);
let mut person_hashmap = HashMap::new();
get_person_by_ids(&mut person_hashmap, keys.to_vec());
future::ready(keys.iter().map(|key| person_hashmap[key].clone()).collect())
.unit_error()
.boxed()
}
}

pub type PersonLoader = Loader<i32, Person, (), PersonBatcher>;

pub fn get_loader() -> PersonLoader {
Loader::new(PersonBatcher)
Loader::new(PersonBatcher)
}
70 changes: 34 additions & 36 deletions api/src/data/person/get_persons_by_cult_id.rs
@@ -6,21 +6,21 @@ use dataloader::{BatchFn, BatchFuture};
use futures::{future, FutureExt as _};

pub fn get_persons_by_cult_ids(person_vec: &mut Vec<Person>, cult_ids: Vec<i32>) {
let conn = get_db_conn();
for row in &conn
.query(
"SELECT id, name, cult FROM persons WHERE cult = ANY($1)",
&[&cult_ids],
)
.unwrap()
{
let person = Person {
id: row.get(0),
name: row.get(1),
cult: row.get(2),
};
person_vec.push(person);
}
let conn = get_db_conn();
for row in &conn
.query(
"SELECT id, name, cult FROM persons WHERE cult = ANY($1)",
&[&cult_ids],
)
.unwrap()
{
let person = Person {
id: row.get(0),
name: row.get(1),
cult: row.get(2),
};
person_vec.push(person);
}
}

// pub fn create_person(data: NewPerson) -> Person {
@@ -40,38 +40,36 @@ pub fn get_persons_by_cult_ids(person_vec: &mut Vec<Person>, cult_ids: Vec<i32>)
// }

fn copy_by_cult_id(vec: &Vec<Person>, id: i32) -> Vec<Person> {
let mut res = Vec::new();
for p in vec {
if p.cult == Some(id) {
res.push(p.clone());
let mut res = Vec::new();
for p in vec {
if p.cult == Some(id) {
res.push(p.clone());
}
}
}
res
res
}


pub struct PersonsBatcher;

impl BatchFn<i32, Vec<Person>> for PersonsBatcher {
type Error = ();
type Error = ();

fn load(&self, keys: &[i32]) -> BatchFuture<Vec<Person>, Self::Error> {
println!("load batch {:?}", keys);
let mut person_vec = Vec::new();
get_persons_by_cult_ids(&mut person_vec, keys.to_vec());
future::ready(
keys.iter()
.map(|&key|
copy_by_cult_id(&person_vec, key)
fn load(&self, keys: &[i32]) -> BatchFuture<Vec<Person>, Self::Error> {
println!("load batch {:?}", keys);
let mut person_vec = Vec::new();
get_persons_by_cult_ids(&mut person_vec, keys.to_vec());
future::ready(
keys.iter()
.map(|&key| copy_by_cult_id(&person_vec, key))
.collect(),
)
.collect())
.unit_error()
.boxed()
}
.unit_error()
.boxed()
}
}

pub type PersonsLoader = Loader<i32, Vec<Person>, (), PersonsBatcher>;

pub fn get_loader() -> PersonsLoader {
Loader::new(PersonsBatcher)
Loader::new(PersonsBatcher)
}
42 changes: 23 additions & 19 deletions api/src/data/person/mod.rs
@@ -1,29 +1,33 @@
use crate::type_defs::{Person, NewPerson};
use crate::type_defs::{NewPerson, Person};

pub mod create_person;
pub mod get_person_all;
pub mod get_person_by_id;
pub mod get_persons_by_cult_id;
pub mod create_person;

#[derive(Clone)]
pub struct PersonData {
person_by_id: get_person_by_id::PersonLoader,
persons_by_cult_id: get_persons_by_cult_id::PersonsLoader,
person_by_id: get_person_by_id::PersonLoader,
persons_by_cult_id: get_persons_by_cult_id::PersonsLoader,
}

impl PersonData {
pub fn new() -> PersonData {
PersonData {
person_by_id: get_person_by_id::get_loader(),
persons_by_cult_id: get_persons_by_cult_id::get_loader(),
pub fn new() -> PersonData {
PersonData {
person_by_id: get_person_by_id::get_loader(),
persons_by_cult_id: get_persons_by_cult_id::get_loader(),
}
}
pub async fn person_by_id(&self, id: i32) -> Person {
self.person_by_id.load(id).await.unwrap()
}
pub async fn persons_by_cult_id(&self, id: i32) -> Vec<Person> {
self.persons_by_cult_id.load(id).await.unwrap()
}
}
pub async fn person_by_id(&self, id: i32) -> Person {
self.person_by_id.load(id).await.unwrap()
}
pub async fn persons_by_cult_id(&self, id: i32) -> Vec<Person> {
self.persons_by_cult_id.load(id).await.unwrap()
}
pub async fn create_person(&self, data: NewPerson) -> Person {
create_person::create_person(data)
}
}
pub async fn create_person(&self, data: NewPerson) -> Person {
create_person::create_person(data)
}
pub async fn get_all_persons(&self) -> Vec<Person> {
get_person_all::get_person_all()
}
}
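
PersonData gets the matching treatment: get_all_persons sits next to the loader-backed lookups. A rough sketch of exercising them together (the calling context is an assumption):

// Hypothetical async caller: list everyone, then batch-load one cult's members.
async fn demo_persons(persons: &PersonData) {
  let everyone = persons.get_all_persons().await;
  println!("{} persons total", everyone.len());
  let members = persons.persons_by_cult_id(1).await; // goes through PersonsLoader
  println!("cult 1 has {} members", members.len());
}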
5 changes: 2 additions & 3 deletions api/src/db/mod.rs
@@ -1,6 +1,5 @@

extern crate postgres;
use postgres::{ Connection, TlsMode };
use postgres::{Connection, TlsMode};
use std::env;

pub fn get_db_conn() -> Connection {
@@ -9,4 +8,4 @@ pub fn get_db_conn() -> Connection {
let conn = Connection::connect(&pg_connection_string[..], TlsMode::None).unwrap();
println!("Connection is fine");
conn
}
}
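
get_db_conn still hands back a synchronous Connection from the older postgres crate API (Connection + TlsMode); a standalone sketch of the same call, with a placeholder URL since the real environment variable name is elided from this hunk:

extern crate postgres;
use postgres::{Connection, TlsMode};

// Hypothetical: the URL is a placeholder, not the value the commit actually reads from the environment.
fn example_conn() -> Connection {
  Connection::connect("postgres://postgres:password@localhost:5432/cults", TlsMode::None).unwrap()
}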