fix: nix updates, swap to attic, swap to nightly

This commit is contained in:
Elijah McMorris 2025-02-17 18:38:25 -08:00
parent 8df3ab1d74
commit 7671ab11f1
Signed by: NexVeridian
SSH key fingerprint: SHA256:bsA1SKZxuEcEVHAy3gY1HUeM5ykRJl0U0kQHQn0hMg8
10 changed files with 1184 additions and 661 deletions

View file

@@ -1,7 +1,3 @@
# [target.x86_64-unknown-linux-gnu]
# linker = "clang"
# rustflags = ["-C", "link-arg=-fuse-ld=/usr/bin/mold"]
[alias]
t = "nextest run"
# https://github.com/tikv/pprof-rs?tab=readme-ov-file#use-with-pprof
@@ -9,3 +5,12 @@ profile = "bench --bench bench -- --profile-time 10"
[build]
target-dir = "target/target"
[unstable]
codegen-backend = true
[profile.release]
codegen-backend = "cranelift"
[profile.dev]
codegen-backend = "cranelift"

View file

@@ -1,6 +1,3 @@
# https://github.com/nextest-rs/reuse-build-partition-example
# https://keliris.dev/articles/setup-rust-github-actions
name: crane
on:
@@ -24,5 +21,8 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client login nex https://nix.nexveridian.com ${{ secrets.ATTIC_TOKEN }}
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client cache create wikidata-to-surrealdb || true
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client use wikidata-to-surrealdb
- run: nix flake check --all-systems
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client push wikidata-to-surrealdb /nix/store/*/ || true

View file

@@ -40,7 +40,9 @@ jobs:
# Install Nix
- name: Install Nix
uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/magic-nix-cache-action@main
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client login nex https://nix.nexveridian.com ${{ secrets.ATTIC_TOKEN }}
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client cache create wikidata-to-surrealdb || true
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client use wikidata-to-surrealdb
# Set up BuildKit Docker container builder to be able to build
# multi-platform images and export cache
@@ -86,3 +88,5 @@ jobs:
docker load < result
docker tag ${{ env.STRIP_REPO_USERNAME }}:latest ${{ env.REGISTRY }}/${{ env.REPO }}:latest
docker push ${{ env.REGISTRY }}/${{ env.REPO }}:latest
- run: nix run -I nixpkgs=channel:nixos-unstable nixpkgs#attic-client push wikidata-to-surrealdb /nix/store/*/ || true

1629
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -8,18 +8,18 @@ license = "MIT OR Apache-2.0"
anyhow = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
surrealdb = { version = "2.0.2", features = ["protocol-http", "kv-mem"] }
tokio = { version = "1.39", features = ["fs", "time", "sync"] }
surrealdb = { version = "2.2.0", features = ["protocol-http", "kv-mem"] }
tokio = { version = "1.43", features = ["fs", "time", "sync"] }
futures = "0.3"
wikidata = "1.1"
bzip2 = { version = "0.4", features = ["tokio"] }
indicatif = "0.17"
rand = "0.8"
backon = { version = "1.2", features = ["tokio-sleep"] }
backon = { version = "1.3", features = ["tokio-sleep"] }
[dev-dependencies]
rstest = "0.22"
surrealdb = { version = "2.0.2", features = ["kv-mem"] }
surrealdb = { version = "2.2.0", features = ["kv-mem"] }
criterion = { version = "0.5", features = ["async_tokio"] }
pprof = { version = "0.13", features = ["criterion", "protobuf-codec"] }

49
flake.lock generated
View file

@@ -3,11 +3,11 @@
"advisory-db": {
"flake": false,
"locked": {
"lastModified": 1727353582,
"narHash": "sha256-2csMEEOZhvowVKZNBHk1kMJqk72ZMrPj9LQYCzP6EKs=",
"lastModified": 1739520703,
"narHash": "sha256-UqR1f9gThWNBCBobWet7T46vTSxkB6dVAdeqNBoF8mc=",
"owner": "rustsec",
"repo": "advisory-db",
"rev": "cb905e6e405834bdff1eb1e20c9b10edb5403889",
"rev": "ddccfe8aced779f7b54d27bbe7e122ecb1dda33a",
"type": "github"
},
"original": {
@@ -18,11 +18,11 @@
},
"crane": {
"locked": {
"lastModified": 1727316705,
"narHash": "sha256-/mumx8AQ5xFuCJqxCIOFCHTVlxHkMT21idpbgbm/TIE=",
"lastModified": 1739815359,
"narHash": "sha256-mjB72/7Fgk5bsIIKA4G9LkIb/u0Ci+VdOyQSgBuQtjo=",
"owner": "ipetkov",
"repo": "crane",
"rev": "5b03654ce046b5167e7b0bccbd8244cb56c16f0e",
"rev": "282159b2b0588b87a9dbcc40decc91dd5bed5c89",
"type": "github"
},
"original": {
@@ -36,14 +36,14 @@
"nixpkgs": [
"nixpkgs"
],
"rust-analyzer-src": []
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1727332394,
"narHash": "sha256-dBYQD4DPxu/hBndSbfMA5HhHrVnrxrW9Ju8R3augGzw=",
"lastModified": 1739774189,
"narHash": "sha256-ZupCPljRian/MbkNdwsfMOePRwd6OATKxp/45DE3EL0=",
"owner": "nix-community",
"repo": "fenix",
"rev": "60a35a47e8ae3721efaae2229ec6037e3fde2d17",
"rev": "6b5b5f59f75aa4743fe4c150acf7cb8f8cabf787",
"type": "github"
},
"original": {
@@ -57,11 +57,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1726560853,
"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
@@ -72,11 +72,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1727296349,
"narHash": "sha256-C3SRU3GMDNII9l16o4+nkybuxaDX4x5TBypwmmUBCo0=",
"lastModified": 1739742245,
"narHash": "sha256-oAgAV4fLWMIPqsEyZVVdk7c0SUdDOV1E7Epq0EIUQs4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "fe866c653c24adf1520628236d4e70bbb2fdd949",
"rev": "f0204ef4baa3b6317dee1c84ddeffbd293638836",
"type": "github"
},
"original": {
@@ -95,6 +95,23 @@
"nixpkgs": "nixpkgs"
}
},
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1739797572,
"narHash": "sha256-mNGd6sO4U2xpFl3yiivhJrzfxtQUri+FCi0lcYDE7HU=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "d10388096e2e02dbe3836460ba59930397d6c1e7",
"type": "github"
},
"original": {
"owner": "rust-lang",
"ref": "nightly",
"repo": "rust-analyzer",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,

112
flake.nix
View file

@@ -3,39 +3,63 @@
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
crane.url = "github:ipetkov/crane";
fenix = {
url = "github:nix-community/fenix";
inputs.nixpkgs.follows = "nixpkgs";
inputs.rust-analyzer-src.follows = "";
};
flake-utils.url = "github:numtide/flake-utils";
advisory-db = {
url = "github:rustsec/advisory-db";
flake = false;
};
};
outputs = { self, nixpkgs, crane, fenix, flake-utils, advisory-db, ... }:
flake-utils.lib.eachDefaultSystem (system:
outputs =
{
self,
nixpkgs,
crane,
fenix,
flake-utils,
advisory-db,
...
}:
flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};
inherit (pkgs) lib;
craneLib = crane.mkLib pkgs;
craneLib = (crane.mkLib pkgs).overrideToolchain (
p:
(
let
fp = fenix.packages.${system};
fpc = fp.complete;
in
(fp.combine [
fpc.cargo
fpc.rustc
fpc.clippy
fpc.rust-src
fpc.rustc
fpc.rustfmt
fpc.rustc-codegen-cranelift-preview
fp.targets.wasm32-unknown-unknown.latest.rust-std
])
)
);
# src = craneLib.cleanCargoSource ./.;
# src = ./.;
src =
let
jsonFilter = path: _type: builtins.match ".*json$" path != null;
surqlFilter = path: _type: builtins.match ".*surql$" path != null;
customFilter = path: type:
customFilter =
path: type:
(jsonFilter path type) || (surqlFilter path type) || (craneLib.filterCargoSources path type);
in
pkgs.lib.cleanSourceWith {
@@ -49,34 +73,32 @@
inherit src;
strictDeps = true;
buildInputs = [
# Add additional build inputs here
] ++ lib.optionals pkgs.stdenv.isDarwin [
# Additional darwin specific inputs can be set here
pkgs.libiconv
];
buildInputs =
[
# Add additional build inputs here
]
++ lib.optionals pkgs.stdenv.isDarwin [
# Additional darwin specific inputs can be set here
pkgs.libiconv
];
# Additional environment variables can be set directly
# MY_CUSTOM_VAR = "some value";
};
craneLibLLvmTools = craneLib.overrideToolchain
(fenix.packages.${system}.complete.withComponents [
"cargo"
"llvm-tools"
"rustc"
]);
# Build *just* the cargo dependencies, so we can reuse
# all of that work (e.g. via cachix) when running in CI
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
# Build the actual crate itself, reusing the dependency
# artifacts from above.
my-crate = craneLib.buildPackage (commonArgs // {
doCheck = false;
inherit cargoArtifacts;
});
my-crate = craneLib.buildPackage (
commonArgs
// {
doCheck = false;
inherit cargoArtifacts;
}
);
# Define the Docker image build
dockerImage = pkgs.dockerTools.buildImage {
@@ -99,10 +121,16 @@
# Note that this is done as a separate derivation so that
# we can block the CI if there are issues here, but not
# prevent downstream consumers from building our crate by itself.
my-crate-clippy = craneLib.cargoClippy (commonArgs // {
inherit cargoArtifacts;
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
});
my-crate-clippy = craneLib.cargoClippy (
commonArgs
// {
inherit cargoArtifacts;
cargoClippyExtraArgs = ''
--all-targets -- --deny warnings -W clippy::nursery -W rust-2018-idioms \
-A clippy::future_not_send -A clippy::option_if_let_else -A clippy::or_fun_call
'';
}
);
# my-crate-doc = craneLib.cargoDoc (commonArgs // {
# inherit cargoArtifacts;
@@ -126,17 +154,22 @@
# Run tests with cargo-nextest
# Consider setting `doCheck = false` on `my-crate` if you do not want
# the tests to run twice
my-crate-nextest = craneLib.cargoNextest (commonArgs // {
inherit cargoArtifacts;
partitions = 1;
partitionType = "count";
});
my-crate-nextest = craneLib.cargoNextest (
commonArgs
// {
inherit cargoArtifacts;
partitions = 1;
partitionType = "count";
}
);
};
packages = {
default = my-crate;
inherit my-crate
dockerImage;
inherit
my-crate
dockerImage
;
};
apps.default = flake-utils.lib.mkApp {
@@ -155,5 +188,6 @@
# pkgs.ripgrep
];
};
});
}
);
}

View file

@@ -109,8 +109,8 @@ impl CreateVersion {
batch_size: usize,
) -> Result<(), Error> {
match self {
CreateVersion::Bulk => self.create_bulk(db, chunk, pb, batch_size).await,
CreateVersion::BulkFilter => self.create_bulk_filter(db, chunk, pb, batch_size).await,
Self::Bulk => self.create_bulk(db, chunk, pb, batch_size).await,
Self::BulkFilter => self.create_bulk_filter(db, chunk, pb, batch_size).await,
}
}

View file

@@ -21,8 +21,8 @@ impl File_Format {
pub async fn reader(self, file: &str) -> Result<Box<dyn BufRead>, Error> {
let file = File::open(file)?;
match self {
File_Format::json => Ok(Box::new(BufReader::new(file))),
File_Format::bz2 => Ok(Box::new(BufReader::new(MultiBzDecoder::new(file)))),
Self::json => Ok(Box::new(BufReader::new(file))),
Self::bz2 => Ok(Box::new(BufReader::new(MultiBzDecoder::new(file)))),
}
}
}

View file

@@ -22,14 +22,12 @@ pub enum ClaimData {
impl ClaimData {
async fn from_cvd(cvd: ClaimValueData) -> Self {
match cvd {
ClaimValueData::Item(qid) => ClaimData::Thing(Thing::from(("Entity", Id::from(qid.0)))),
ClaimValueData::Item(qid) => Self::Thing(Thing::from(("Entity", Id::from(qid.0)))),
ClaimValueData::Property(pid) => {
ClaimData::Thing(Thing::from(("Property", Id::from(pid.0))))
Self::Thing(Thing::from(("Property", Id::from(pid.0))))
}
ClaimValueData::Lexeme(lid) => {
ClaimData::Thing(Thing::from(("Lexeme", Id::from(lid.0))))
}
_ => ClaimData::ClaimValueData(cvd),
ClaimValueData::Lexeme(lid) => Self::Thing(Thing::from(("Lexeme", Id::from(lid.0)))),
_ => Self::ClaimValueData(cvd),
}
}
}
@@ -46,7 +44,7 @@ pub struct Claim {
pub value: ClaimData,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct EntityMini {
// Table: Entity, Property, Lexeme
pub id: Option<Thing>,