feat: add initial startup
This commit is contained in:
parent
186ae62d6d
commit
9f33bdf4d3
@ -1,6 +1,6 @@
|
||||
-- Add migration script here
|
||||
CREATE TABLE FuzzyHashes(
|
||||
`id` SERIAL NOT NULL PRIMARY KEY
|
||||
`hash` BIGINT NOT NULL,
|
||||
`post_id` CHAR(25) NOT NULL
|
||||
id SERIAL NOT NULL PRIMARY KEY,
|
||||
hash BIGINT NOT NULL,
|
||||
post_id CHAR(25) NOT NULL
|
||||
)
|
27
src/db.rs
27
src/db.rs
@ -5,11 +5,34 @@ use sqlx::migrate::Migrator;
|
||||
|
||||
static MIGRATOR: Migrator = sqlx::migrate!(); // defaults to "./migrations"
|
||||
|
||||
pub struct FuzzyHash {
|
||||
pub id: i32,
|
||||
pub hash: i64,
|
||||
pub post_id: String,
|
||||
}
|
||||
|
||||
pub async fn connect(uri: &str) -> anyhow::Result<Pool<Postgres>> {
|
||||
Ok(PgPoolOptions::new()
|
||||
.connect(uri).await?)
|
||||
}
|
||||
|
||||
pub async fn migrate(pool: Pool<Postgres>) -> anyhow::Result<()> {
|
||||
Ok(MIGRATOR.run(&pool).await?)
|
||||
pub async fn migrate(pool: &Pool<Postgres>) -> anyhow::Result<()> {
|
||||
Ok(MIGRATOR.run(pool).await?)
|
||||
}
|
||||
|
||||
pub async fn get_all_hashes(pool: &Pool<Postgres>) -> anyhow::Result<Vec<FuzzyHash>> {
|
||||
let rows = sqlx::query!("SELECT id, hash, post_id FROM fuzzyhashes")
|
||||
.fetch_all(pool).await?;
|
||||
|
||||
let mut data = vec![];
|
||||
|
||||
for row in rows {
|
||||
data.push(FuzzyHash{
|
||||
id: row.id,
|
||||
hash: row.hash,
|
||||
post_id: row.post_id,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(data)
|
||||
}
|
48
src/main.rs
48
src/main.rs
@ -1,32 +1,36 @@
|
||||
use std::fs;
|
||||
use crate::fuzzy::tree::HashDistance;
|
||||
use std::{env};
|
||||
use dotenvy::dotenv;
|
||||
use env_logger::TimestampPrecision;
|
||||
use log::warn;
|
||||
|
||||
mod fuzzy;
|
||||
mod db;
|
||||
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
_ = env_logger::builder().is_test(true).try_init();
|
||||
let paths = fs::read_dir("./example").unwrap();
|
||||
let tree = fuzzy::tree::Tree::new();
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
dotenv().ok();
|
||||
env_logger::builder()
|
||||
.format_timestamp(Some(TimestampPrecision::Millis))
|
||||
.is_test(true)
|
||||
.init();
|
||||
|
||||
for path in paths {
|
||||
let hash = fuzzy::image::generate_hash(path.unwrap().path().to_str().unwrap()).await.unwrap();
|
||||
println!("{}", hash);
|
||||
tree.add(hash).await;
|
||||
}
|
||||
|
||||
let paths = fs::read_dir("./example").unwrap();
|
||||
for path in paths {
|
||||
let path_str = path.unwrap().path();
|
||||
let hash = fuzzy::image::generate_hash(path_str.to_str().unwrap()).await.unwrap();
|
||||
let founds = tree.find(vec![HashDistance{
|
||||
distance: 10,
|
||||
hash
|
||||
}]).await;
|
||||
|
||||
println!("{} -> {:?}", path_str.to_str().unwrap(), founds)
|
||||
let db_url = env::var("DATABASE_URL").expect("'DATABASE_URL' is required");
|
||||
|
||||
let pool = db::connect(db_url.as_str()).await?;
|
||||
|
||||
db::migrate(&pool).await?;
|
||||
|
||||
let fuzzy_hashes = db::get_all_hashes(&pool).await?;
|
||||
let fuzzy_tree = fuzzy::tree::Tree::new();
|
||||
|
||||
for fuzzy_hash in fuzzy_hashes {
|
||||
let exist = fuzzy_tree.add(fuzzy_hash.hash).await;
|
||||
|
||||
if exist {
|
||||
warn!("found already existing hash: {}", fuzzy_hash.hash)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user