refactor:updated cli commands
yellowHatpro committed Aug 27, 2024
1 parent fc32450 commit c77bcae
Showing 3 changed files with 41 additions and 53 deletions.
21 changes: 7 additions & 14 deletions src/cli/args.rs
@@ -18,20 +18,13 @@ pub struct CliArgs {
#[derive(Subcommand, Debug)]
pub enum Commands {
///Queue a single URL to be archived in Internet Archive History
QueueURL {
url: Option<String>,
},
/// Queue a Edit Data row to be archived in Internet Archive History
QueueEditData {
row_id: Option<i32>,
},
/// Queue a Edit Note row to be archived in Internet Archive History
QueueEditNote {
row_id: Option<i32>,
},
CheckStatus {
job_id: Option<String>,
},
QueueURL { url: String },
/// Queue an Edit Data row to be archived in Internet Archive History
QueueEditData { row_id: i32 },
/// Queue an Edit Note row to be archived in Internet Archive History
QueueEditNote { row_id: i32 },
/// Check the archival status of any URL by `job_id`
CheckStatus { job_id: String },
/// Start the app to poll from Edit Data and Edit Note tables. It is the default behaviour
Poll,
}
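With the `Option` wrappers gone, clap's derive API enforces these arguments at parse time, so the manual "please pass …" guards in `src/cli/mod.rs` become unnecessary. A minimal sketch (not part of the commit) of how the refactored subcommands behave; the `CliArgs` body is an assumption inferred from the `match &args.command` in `src/cli/mod.rs`, and clap 4's derive API is assumed:

```rust
// Minimal sketch, not part of the commit: how the refactored subcommands behave
// once the `Option` wrappers are gone. The `CliArgs` body is an assumption
// inferred from the `match &args.command` in src/cli/mod.rs; clap 4 is assumed.
use clap::{Parser, Subcommand};

#[derive(Parser, Debug)]
pub struct CliArgs {
    #[command(subcommand)]
    pub command: Option<Commands>,
}

#[derive(Subcommand, Debug)]
pub enum Commands {
    /// Queue a single URL to be archived in Internet Archive History
    QueueURL { url: String },
    /// Check the archival status of any URL by `job_id`
    CheckStatus { job_id: String },
    /// Start the app to poll from Edit Data and Edit Note tables (default)
    Poll,
}

fn main() {
    // With `url: String` instead of `Option<String>`, a `queue-url` invocation
    // without a value is rejected by clap with a usage error before the handler runs.
    let args = CliArgs::parse();
    println!("{:?}", args.command);
}
```

Turning a missing argument into a parse-time usage error is what lets this commit delete the `let Some(...) else` checks from `start` below.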
55 changes: 20 additions & 35 deletions src/cli/mod.rs
@@ -1,4 +1,5 @@
use crate::cli::args::{CliArgs, Commands};
use crate::cli::utils::check_before_inserting_url;
use clap::Parser;
use colorize::AnsiColor;
use sqlx::PgPool;
@@ -10,13 +11,6 @@ pub async fn start(pool: &PgPool) {
let args = CliArgs::parse();
match &args.command {
Some(Commands::QueueEditData { row_id }) => {
// Argument check
// TODO: Concider making the argument mandatory by removing the `Option`?
let Some(row_id) = row_id else {
println!("{}", "Please pass row id".red());
return;
};

match utils::insert_edit_data_row_to_internet_archive_urls(*row_id, pool).await {
// We got urls!
Ok(true) => println!(
@@ -37,13 +31,6 @@ pub async fn start(pool: &PgPool) {
}

Some(Commands::QueueEditNote { row_id }) => {
// Argument check
// TODO: Concider making the argument mandatory by removing the `Option`?
let Some(row_id) = row_id else {
println!("{}", "Please pass row id".red());
return;
};

match utils::insert_edit_note_row_to_internet_archive_urls(*row_id, pool).await {
// We got urls!
Ok(true) => println!(
@@ -62,15 +49,16 @@ pub async fn start(pool: &PgPool) {
}
}
}
Some(Commands::QueueURL { url }) => {
// Argument check
// TODO: Concider making the argument mandatory by removing the `Option`?
let Some(url) = url else {
println!("{}", "Please pass URL".red());
return;
};
Some(Commands::QueueURL { url }) => match check_before_inserting_url(url, pool).await {
Ok(false) => {
println!("{}", "URL is already queued: ".red(),);
}
Err(err) => {
println!("{}", "Some error occurred".red());
eprintln!("{err}");
}

match utils::insert_url_to_internet_archive_urls(url, pool).await {
Ok(true) => match utils::insert_url_to_internet_archive_urls(url, pool).await {
Ok(id) => println!(
"{} {}",
"URL queued in internet_archive_urls, id: ".green(),
@@ -81,20 +69,17 @@ pub async fn start(pool: &PgPool) {
println!("{}", "Some error occurred".red());
eprintln!("{err}");
}
}
}
},
},
Some(Commands::CheckStatus { job_id }) => {
// Argument check
// TODO: Concider making the argument mandatory by removing the `Option`?
let Some(job_id) = job_id else {
println!("{}", "Please pass job id".red());
return;
};

println!("Job id: {:?}", job_id);
utils::get_job_id_status(job_id.to_owned(), pool)
.await
.unwrap();
match utils::get_job_id_status(job_id.as_str(), pool).await {
Ok(res) => {
println!("Status: {}", res.status)
}
Err(e) => {
println!("Failed: {}", e)
}
}
}

Some(Commands::Poll) | None => {
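The net effect on the `QueueURL` branch is easier to see outside the diff: the URL is checked against the poller's dedup rule first, and the insert only happens when that check passes. A sketch under the signatures shown in this commit (`check_before_inserting_url` returning `Result<bool, Error>`, `insert_url_to_internet_archive_urls` returning the new row's id); the colourised output of the real handler is simplified here:

```rust
// Sketch of the reworked QueueURL flow in isolation; `utils` is src/cli/utils.rs.
use sqlx::PgPool;

use crate::cli::utils;

pub async fn handle_queue_url(url: &str, pool: &PgPool) {
    match utils::check_before_inserting_url(url, pool).await {
        // The poller's dedup check says this URL is already queued: do nothing.
        Ok(false) => println!("URL is already queued"),
        // New URL: insert it and report the internet_archive_urls row id.
        Ok(true) => match utils::insert_url_to_internet_archive_urls(url, pool).await {
            Ok(id) => println!("URL queued in internet_archive_urls, id: {id}"),
            Err(err) => eprintln!("Some error occurred: {err}"),
        },
        Err(err) => eprintln!("Some error occurred: {err}"),
    }
}
```

Routing the manual command through the same `should_insert_url_to_internet_archive_urls` check as the poller means URLs queued from the CLI obey the same dedup rules as URLs picked up by polling.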
18 changes: 14 additions & 4 deletions src/cli/utils.rs
@@ -1,4 +1,8 @@
use crate::archival::archival_response::ArchivalStatusResponse;
use crate::archival::error::ArchivalError;
use crate::archival::utils::make_archival_status_request;
use crate::poller;
use crate::poller::utils::should_insert_url_to_internet_archive_urls;
use colorize::AnsiColor;
use mb_rs::schema::{EditData, EditNote};
use sqlx::{Error, PgPool};
@@ -18,6 +22,10 @@ pub async fn insert_url_to_internet_archive_urls(url: &str, pool: &PgPool) -> Re
.map(|result| result.id)
}

pub async fn check_before_inserting_url(url: &str, pool: &PgPool) -> Result<bool, Error> {
should_insert_url_to_internet_archive_urls(url, pool).await
}

/// This function takes in an `edit_data` `row_id`, extract the urls contained inside, then insert them into the `internet_archive_urls` table
pub async fn insert_edit_data_row_to_internet_archive_urls(
row_id: i32,
@@ -84,8 +92,10 @@ pub async fn insert_edit_note_row_to_internet_archive_urls(
Ok(!urls.is_empty())
}

pub async fn get_job_id_status(job_id: String, _pool: &PgPool) -> Result<&str, Error> {
// TODO: Concider using &str for job_id?
println!("job_id: {},", job_id);
Ok("")
pub async fn get_job_id_status(
job_id: &str,
_pool: &PgPool,
) -> Result<ArchivalStatusResponse, ArchivalError> {
let status = make_archival_status_request(job_id).await?;
Ok(status)
}

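`get_job_id_status` now borrows the job id and returns a typed response instead of a stub. A hedged call-site sketch of the new signature; the `status` field on `ArchivalStatusResponse` is assumed from the `res.status` print in `src/cli/mod.rs`, and `_pool` is unused because the function goes straight to the archival status request:

```rust
// Hypothetical call site for the new signature; mirrors what the CheckStatus
// handler in src/cli/mod.rs does after this commit.
use sqlx::PgPool;

use crate::cli::utils::get_job_id_status;

pub async fn print_job_status(job_id: &str, pool: &PgPool) {
    match get_job_id_status(job_id, pool).await {
        // ArchivalStatusResponse is assumed to carry a displayable `status` field.
        Ok(res) => println!("Status: {}", res.status),
        Err(e) => println!("Failed: {}", e),
    }
}
```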