feat: add initial startup
commit 9f33bdf4d3
parent 186ae62d6d
@@ -1,6 +1,6 @@
 -- Add migration script here
 CREATE TABLE FuzzyHashes(
-    `id` SERIAL NOT NULL PRIMARY KEY
-    `hash` BIGINT NOT NULL,
-    `post_id` CHAR(25) NOT NULL
+    id SERIAL NOT NULL PRIMARY KEY,
+    hash BIGINT NOT NULL,
+    post_id CHAR(25) NOT NULL
 )
src/db.rs (27 lines changed)
@@ -5,11 +5,34 @@ use sqlx::migrate::Migrator;
 
 static MIGRATOR: Migrator = sqlx::migrate!(); // defaults to "./migrations"
 
+pub struct FuzzyHash {
+    pub id: i32,
+    pub hash: i64,
+    pub post_id: String,
+}
+
 pub async fn connect(uri: &str) -> anyhow::Result<Pool<Postgres>> {
     Ok(PgPoolOptions::new()
         .connect(uri).await?)
 }
 
-pub async fn migrate(pool: Pool<Postgres>) -> anyhow::Result<()> {
-    Ok(MIGRATOR.run(&pool).await?)
+pub async fn migrate(pool: &Pool<Postgres>) -> anyhow::Result<()> {
+    Ok(MIGRATOR.run(pool).await?)
+}
+
+pub async fn get_all_hashes(pool: &Pool<Postgres>) -> anyhow::Result<Vec<FuzzyHash>> {
+    let rows = sqlx::query!("SELECT id, hash, post_id FROM fuzzyhashes")
+        .fetch_all(pool).await?;
+
+    let mut data = vec![];
+
+    for row in rows {
+        data.push(FuzzyHash{
+            id: row.id,
+            hash: row.hash,
+            post_id: row.post_id,
+        });
+    }
+
+    Ok(data)
 }
src/main.rs (48 lines changed)
@@ -1,32 +1,36 @@
-use std::fs;
-use crate::fuzzy::tree::HashDistance;
+use std::{env};
+use dotenvy::dotenv;
+use env_logger::TimestampPrecision;
+use log::warn;
 
 mod fuzzy;
 mod db;
 
 
 #[tokio::main]
-async fn main() {
-    _ = env_logger::builder().is_test(true).try_init();
-    let paths = fs::read_dir("./example").unwrap();
-    let tree = fuzzy::tree::Tree::new();
+async fn main() -> anyhow::Result<()> {
+    dotenv().ok();
+    env_logger::builder()
+        .format_timestamp(Some(TimestampPrecision::Millis))
+        .is_test(true)
+        .init();
 
-    for path in paths {
-        let hash = fuzzy::image::generate_hash(path.unwrap().path().to_str().unwrap()).await.unwrap();
-        println!("{}", hash);
-        tree.add(hash).await;
-    }
+    let db_url = env::var("DATABASE_URL").expect("'DATABASE_URL' is required");
 
-    let paths = fs::read_dir("./example").unwrap();
-    for path in paths {
-        let path_str = path.unwrap().path();
-        let hash = fuzzy::image::generate_hash(path_str.to_str().unwrap()).await.unwrap();
-        let founds = tree.find(vec![HashDistance{
-            distance: 10,
-            hash
-        }]).await;
+    let pool = db::connect(db_url.as_str()).await?;
 
-        println!("{} -> {:?}", path_str.to_str().unwrap(), founds)
+    db::migrate(&pool).await?;
+
+    let fuzzy_hashes = db::get_all_hashes(&pool).await?;
+    let fuzzy_tree = fuzzy::tree::Tree::new();
+
+    for fuzzy_hash in fuzzy_hashes {
+        let exist = fuzzy_tree.add(fuzzy_hash.hash).await;
+
+        if exist {
+            warn!("found already existing hash: {}", fuzzy_hash.hash)
+        }
     }
 
+    Ok(())
 }