fix: more renames

Elijah McMorris 2025-02-03 12:47:44 -08:00
parent 5aab89ff0d
commit 051d18494b
Signed by: NexVeridian
SSH key fingerprint: SHA256:bsA1SKZxuEcEVHAy3gY1HUeM5ykRJl0U0kQHQn0hMg8
12 changed files with 441 additions and 322 deletions

View file

@@ -47,7 +47,7 @@ fn csv_merge() -> Result<(), Error> {
 fn ark_plan(ticker: Ticker) -> Result<(), Error> {
     println!("Starting: {:#?}", ticker);
-    let sec = Duration::from_secs(rand::thread_rng().gen_range(30 * 60..=4 * 60 * 60));
+    let sec = Duration::from_secs(rand::rng().random_range(30 * 60..=4 * 60 * 60));
     // sleep(sec).await;
     thread::sleep(sec);
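Note: the added line uses the rand 0.9 names, where rand::rng() supersedes rand::thread_rng() and random_range supersedes gen_range. A minimal standalone sketch of the renamed API, assuming rand 0.9:

    use rand::Rng;
    use std::time::Duration;

    fn random_delay() -> Duration {
        // rng() is the 0.9 name for thread_rng(); random_range is the 0.9 name for gen_range.
        let secs = rand::rng().random_range(30 * 60..=4 * 60 * 60);
        Duration::from_secs(secs)
    }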

View file

@@ -16,7 +16,7 @@ pub mod df;
 mod format;
 pub mod ticker;
-#[derive(Debug, Default, EnumString, Clone, Copy, PartialEq)]
+#[derive(Debug, Default, EnumString, Clone, Copy, PartialEq, Eq)]
 pub enum Source {
     // Reads Parquet file if exists
     Read,
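Note: several enums in this commit (Source here, and Reader, Ticker, DataSource below) gain Eq next to PartialEq. Eq is a marker trait with no methods; deriving it states that the equality is total, which clippy's derive_partial_eq_without_eq lint flags and which APIs like HashSet and HashMap keys require (together with Hash). A small illustrative sketch with a hypothetical enum, not the crate's Source:

    use std::collections::HashSet;

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    enum Mode {
        Read,
        Write,
    }

    fn main() {
        // Eq (with Hash) is what lets the enum act as a set or map key.
        let mut seen = HashSet::new();
        seen.insert(Mode::Read);
        assert!(seen.contains(&Mode::Read));
        assert_ne!(Mode::Read, Mode::Write);
    }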
@@ -132,12 +132,7 @@ impl Ark {
     fn concat_df(dfs: Vec<DF>) -> Result<DF, Error> {
         // with dedupe
-        let df = concat(
-            dfs.lazy(),
-            UnionArgs {
-                ..Default::default()
-            },
-        )?;
+        let df = concat(dfs.lazy(), Default::default())?;
         Self::dedupe(df.into())
     }
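Note: the old call spelled out UnionArgs with struct-update syntax but overrode nothing, so it built exactly the default value; passing Default::default() is the same argument with the type inferred from concat's signature. The same simplification appears again in the CSV-merge hunk further down. A tiny sketch of the equivalence, using an illustrative Args struct rather than Polars' actual UnionArgs:

    #[derive(Debug, Default, PartialEq, Eq)]
    struct Args {
        parallel: bool,
        rechunk: bool,
    }

    fn main() {
        // Struct-update syntax with no fields overridden is just the default value.
        let verbose = Args { ..Default::default() };
        let short: Args = Default::default();
        assert_eq!(verbose, short);
    }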
@@ -378,6 +373,10 @@ impl Ark {
             .str()
             .replace_all(lit("."), lit(""), true)
             .str()
+            .replace_all(lit(" &CURITY"), lit(""), true)
+            .str()
+            .replace_all(lit(" &"), lit(""), true)
+            .str()
             .replace(lit("HLDGS"), lit(""), true)
             .str()
             .replace(lit("HOLDINGS"), lit(""), true)
@@ -394,6 +393,8 @@ impl Ark {
             .str()
             .replace(lit(" ADR"), lit(""), true)
             .str()
+            .replace(lit("DR"), lit(""), true)
+            .str()
             .replace(lit(" SA"), lit(""), true)
             .str()
             .replace(lit(" NV"), lit(""), true)
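Note: these two hunks extend the company-name cleanup chain with more literal removals; the final true argument asks for literal rather than regex matching. A standalone sketch of the same expression pattern on a toy frame, assuming polars with the lazy and strings features; the column name is made up:

    use polars::prelude::*;

    fn clean_names(df: DataFrame) -> PolarsResult<DataFrame> {
        df.lazy()
            .with_column(
                col("company")
                    .str()
                    .replace_all(lit("."), lit(""), true) // literal (non-regex) match
                    .str()
                    .replace(lit("HOLDINGS"), lit(""), true),
            )
            .collect()
    }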
@@ -561,13 +562,7 @@ impl Ark {
             dfs.push(LazyCsvReader::new(x).finish()?);
         }
-        let mut df = concat(
-            dfs,
-            UnionArgs {
-                ..Default::default()
-            },
-        )?
-        .into();
+        let mut df = concat(dfs, Default::default())?.into();
         if Self::read_parquet(&ticker, path.as_ref()).is_ok() {
             let df_old = Self::read_parquet(&ticker, path.as_ref())?;
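Note: the same Default::default() shortening is applied to the CSV-merge path. A hypothetical standalone version of the step, assuming polars with the lazy and csv features; the paths are made up and the exact path argument type accepted by LazyCsvReader::new varies across polars versions:

    use polars::prelude::*;

    fn merge_csvs(paths: &[&str]) -> PolarsResult<DataFrame> {
        let mut dfs = Vec::new();
        for p in paths {
            // Each CSV becomes a LazyFrame; nothing is read until collect().
            dfs.push(LazyCsvReader::new(*p).finish()?);
        }
        // Default UnionArgs, matching the simplified call above.
        concat(dfs, Default::default())?.collect()
    }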
@@ -611,7 +606,7 @@ mod tests {
             ],
         )?;
-        Ark::write_df_parquet("data/test/ARKW.parquet".into(), test_df.clone().into())?;
+        Ark::write_df_parquet("data/test/ARKW.parquet".into(), test_df.into())?;
         let read = Ark::new(Source::Read, Ticker::ARKW, Some("data/test".to_owned()))?.collect()?;
         fs::remove_file("data/test/ARKW.parquet")?;
@@ -637,7 +632,7 @@ mod tests {
                 Some("ARKB"),
             ],
         )?;
-        Ark::write_df_parquet("data/test/ARKF.parquet".into(), test_df.clone().into())?;
+        Ark::write_df_parquet("data/test/ARKF.parquet".into(), test_df.into())?;
         let read = Ark::new(Source::Read, Ticker::ARKF, Some("data/test".to_owned()))?.collect()?;
         fs::remove_file("data/test/ARKF.parquet")?;

View file

@@ -6,7 +6,7 @@ use reqwest::blocking::Client;
 use serde_json::Value;
 use std::io::Cursor;
-#[derive(Debug, Clone, Copy, PartialEq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum Reader {
     Csv,
     Json,

View file

@@ -10,27 +10,27 @@ pub enum DF {
 impl From<LazyFrame> for DF {
     fn from(lf: LazyFrame) -> Self {
-        DF::LazyFrame(lf)
+        Self::LazyFrame(lf)
     }
 }
 impl From<DataFrame> for DF {
     fn from(df: DataFrame) -> Self {
-        DF::DataFrame(df)
+        Self::DataFrame(df)
     }
 }
 impl DF {
     pub fn collect(self) -> anyhow::Result<DataFrame, Error> {
         match self {
-            DF::LazyFrame(x) => Ok(x.collect()?),
-            DF::DataFrame(x) => Ok(x),
+            Self::LazyFrame(x) => Ok(x.collect()?),
+            Self::DataFrame(x) => Ok(x),
         }
     }
     pub fn lazy(self) -> LazyFrame {
         match self {
-            DF::LazyFrame(x) => x,
-            DF::DataFrame(x) => x.lazy(),
+            Self::LazyFrame(x) => x,
+            Self::DataFrame(x) => x.lazy(),
         }
     }
 }
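Note: the renames this commit is named for replace the enclosing type's name with Self inside its own impl blocks, the style clippy's use_self lint favors: the code no longer repeats the type name, so renaming the type later touches fewer places. An illustrative sketch with a hypothetical enum, not the crate's DF:

    enum Frame {
        Lazy(i64),
        Eager(i64),
    }

    impl Frame {
        // Self is an alias for Frame inside this impl block.
        fn value(self) -> i64 {
            match self {
                Self::Lazy(x) | Self::Eager(x) => x,
            }
        }
    }

    fn main() {
        assert_eq!(Frame::Lazy(7).value(), 7);
        assert_eq!(Frame::Eager(3).value(), 3);
    }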

View file

@@ -6,7 +6,7 @@ use strum_macros::EnumIter;
 use crate::util::df::DF;
 #[allow(clippy::upper_case_acronyms, non_camel_case_types)]
-#[derive(Debug, strum_macros::Display, EnumIter, Clone, Copy, PartialEq)]
+#[derive(Debug, strum_macros::Display, EnumIter, Clone, Copy, PartialEq, Eq)]
 pub enum Ticker {
     ARKW,
     MKFG,
@@ -16,7 +16,7 @@ pub enum Ticker {
 impl Ticker {
     pub fn all(mut df: DF) -> Result<DF, Error> {
-        for ticker in Ticker::iter() {
+        for ticker in Self::iter() {
             df = ticker.format(df)?;
         }
         Ok(df)
@@ -24,10 +24,10 @@ impl Ticker {
     pub fn format(&self, df: DF) -> Result<DF, Error> {
         match self {
-            Ticker::ARKW => Self::arkw(df),
-            Ticker::MKFG => Self::mkfg(df),
-            Ticker::XYZ => Self::xyz(df),
-            Ticker::CASH_USD => Self::cash_usd(df),
+            Self::ARKW => Self::arkw(df),
+            Self::MKFG => Self::mkfg(df),
+            Self::XYZ => Self::xyz(df),
+            Self::CASH_USD => Self::cash_usd(df),
         }
     }

View file

@@ -1,6 +1,6 @@
 use strum_macros::EnumIter;
-#[derive(Clone, Copy, PartialEq)]
+#[derive(Clone, Copy, PartialEq, Eq)]
 pub enum DataSource {
     ArkVenture,
     Ark,
@@ -10,7 +10,7 @@ pub enum DataSource {
 }
 #[allow(clippy::upper_case_acronyms, non_camel_case_types)]
-#[derive(Debug, Default, strum_macros::Display, EnumIter, Clone, Copy, PartialEq)]
+#[derive(Debug, Default, strum_macros::Display, EnumIter, Clone, Copy, PartialEq, Eq)]
 pub enum Ticker {
     ARKVX,
@@ -46,72 +46,64 @@ pub enum Ticker {
 }
 impl Ticker {
-    pub fn value(&self) -> &str {
+    pub const fn value(&self) -> &str {
         match *self {
-            Ticker::ARKVX => "ARK_VENTURE_FUND_ARKVX_HOLDINGS.csv",
+            Self::ARKVX => "ARK_VENTURE_FUND_ARKVX_HOLDINGS.csv",
-            Ticker::ARKF => "FINTECH_INNOVATION",
-            Ticker::ARKG => "GENOMIC_REVOLUTION",
-            Ticker::ARKK => "INNOVATION",
-            Ticker::ARKQ => "AUTONOMOUS_TECH._&_ROBOTICS",
-            Ticker::ARKW => "NEXT_GENERATION_INTERNET",
-            Ticker::ARKX => "SPACE_EXPLORATION_&_INNOVATION",
+            Self::ARKF => "FINTECH_INNOVATION",
+            Self::ARKG => "GENOMIC_REVOLUTION",
+            Self::ARKK => "INNOVATION",
+            Self::ARKQ => "AUTONOMOUS_TECH._&_ROBOTICS",
+            Self::ARKW => "NEXT_GENERATION_INTERNET",
+            Self::ARKX => "SPACE_EXPLORATION_&_INNOVATION",
-            Ticker::ARKA => "ARKA",
-            Ticker::ARKZ => "ARKZ",
-            Ticker::ARKC => "ARKC",
-            Ticker::ARKD => "ARKD",
-            Ticker::ARKY => "ARKY",
-            Ticker::ARKB => "21SHARES_BITCOIN",
+            Self::ARKA => "ARKA",
+            Self::ARKZ => "ARKZ",
+            Self::ARKC => "ARKC",
+            Self::ARKD => "ARKD",
+            Self::ARKY => "ARKY",
+            Self::ARKB => "21SHARES_BITCOIN",
-            Ticker::PRNT => "THE_3D_PRINTING",
-            Ticker::IZRL => "ISRAEL_INNOVATIVE_TECHNOLOGY",
+            Self::PRNT => "THE_3D_PRINTING",
+            Self::IZRL => "ISRAEL_INNOVATIVE_TECHNOLOGY",
-            Ticker::EUROPE_ARKI => "artificial-intelligence-robotics",
-            Ticker::EUROPE_ARKG => "genomic-revolution",
-            Ticker::EUROPE_ARKK => "innovation",
+            Self::EUROPE_ARKI => "artificial-intelligence-robotics",
+            Self::EUROPE_ARKG => "genomic-revolution",
+            Self::EUROPE_ARKK => "innovation",
-            Ticker::CYBR => "cybersecurity-and-data-privacy",
-            Ticker::CYCL => "circular-economy-enablers",
-            Ticker::FOOD => "sustainable-future-of-food",
-            Ticker::LIFE => "environmental-impact-100",
-            Ticker::LUSA => "usa-environmental-impact",
-            Ticker::NFRA => "global-sustainable-infrastructure",
-            Ticker::PMNT => "digital-payments-economy",
+            Self::CYBR => "cybersecurity-and-data-privacy",
+            Self::CYCL => "circular-economy-enablers",
+            Self::FOOD => "sustainable-future-of-food",
+            Self::LIFE => "environmental-impact-100",
+            Self::LUSA => "usa-environmental-impact",
+            Self::NFRA => "global-sustainable-infrastructure",
+            Self::PMNT => "digital-payments-economy",
         }
     }
-    pub fn data_source(&self) -> DataSource {
+    pub const fn data_source(&self) -> DataSource {
         match *self {
-            Ticker::ARKVX => DataSource::ArkVenture,
+            Self::ARKVX => DataSource::ArkVenture,
-            Ticker::ARKF
-            | Ticker::ARKG
-            | Ticker::ARKK
-            | Ticker::ARKQ
-            | Ticker::ARKW
-            | Ticker::ARKX => DataSource::Ark,
-            Ticker::ARKA
-            | Ticker::ARKZ
-            | Ticker::ARKC
-            | Ticker::ARKD
-            | Ticker::ARKY
-            | Ticker::ARKB => DataSource::Shares21,
-            Ticker::PRNT | Ticker::IZRL => DataSource::Ark,
-            Ticker::EUROPE_ARKI | Ticker::EUROPE_ARKG | Ticker::EUROPE_ARKK => {
-                DataSource::ArkEurope
+            Self::ARKF | Self::ARKG | Self::ARKK | Self::ARKQ | Self::ARKW | Self::ARKX => {
+                DataSource::Ark
             }
-            Ticker::CYBR
-            | Ticker::CYCL
-            | Ticker::FOOD
-            | Ticker::LIFE
-            | Ticker::LUSA
-            | Ticker::NFRA
-            | Ticker::PMNT => DataSource::Rize,
+            Self::ARKA | Self::ARKZ | Self::ARKC | Self::ARKD | Self::ARKY | Self::ARKB => {
+                DataSource::Shares21
+            }
+            Self::PRNT | Self::IZRL => DataSource::Ark,
+            Self::EUROPE_ARKI | Self::EUROPE_ARKG | Self::EUROPE_ARKK => DataSource::ArkEurope,
+            Self::CYBR
+            | Self::CYCL
+            | Self::FOOD
+            | Self::LIFE
+            | Self::LUSA
+            | Self::NFRA
+            | Self::PMNT => DataSource::Rize,
         }
     }
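Note: both lookup methods also become const fn, so they can be evaluated at compile time as well as at run time; their bodies only match on the variant and return literals, which is allowed in const code. A minimal sketch of what that enables, using a hypothetical two-variant enum and returning &'static str for simplicity:

    enum Fund {
        ARKK,
        ARKW,
    }

    impl Fund {
        // const fn: callable in const contexts and in ordinary code.
        const fn value(&self) -> &'static str {
            match *self {
                Self::ARKK => "INNOVATION",
                Self::ARKW => "NEXT_GENERATION_INTERNET",
            }
        }
    }

    // Evaluated at compile time because value() is const.
    const ARKK_FILE: &str = Fund::ARKK.value();

    fn main() {
        assert_eq!(ARKK_FILE, "INNOVATION");
        assert_eq!(Fund::ARKW.value(), "NEXT_GENERATION_INTERNET");
    }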