diff --git a/.forgejo/workflows/ci.yaml b/.forgejo/workflows/ci.yaml
index 6d58a4e..e507c78 100644
--- a/.forgejo/workflows/ci.yaml
+++ b/.forgejo/workflows/ci.yaml
@@ -30,7 +30,8 @@ jobs:
             info=$(echo "$file" | sd '^result-([a-z0-9_-]+)\.([a-zA-Z]+)(?:-cross-([a-z0-9_-]+))?$' '$1\x1F$2\x1F$3')
             IFS=$'\x1F' read -r hostArch name crossArch <<< "$info"
             arch=${crossArch:-$hostArch}
-            containerArch=$(arch=$arch nix eval --raw --impure --expr '(import <nixpkgs> { system = builtins.getEnv "arch";}).go.GOARCH')
+            containerArch=$(arch=$arch nix eval --raw --impure -I nixpkgs=flake:nixpkgs --expr '(import <nixpkgs> { system = builtins.getEnv "arch";}).go.GOARCH')
+            echo "Processed image for $containerArch"
             mv $file images/image-$containerArch.tar.gz
           done
       - name: Upload artifact
@@ -44,16 +45,16 @@ jobs:
         run: podman login git.dirksys.ovh --username $USERNAME --password $PASSWORD
         env:
           USERNAME: ${{ github.actor }}
-          PASSWORD: ${{ secrets.GITHUB_TOKEN }}
+          PASSWORD: ${{ secrets.FORGEJO_REGISTRY_TOKEN }}
       - name: Push docker images
         if: (github.event_name == 'push' && github.ref == 'refs/heads/main') || github.event_name == 'workflow_dispatch'
         shell: bash
         run: |
           manifest_id=$(podman manifest create git.dirksys.ovh/dirk/bankserver:latest)
           sleep 1
-          for file in images/*; do
-            echo "Loading $file"
-            podman image load < $file
+          for file in $(ls images); do
+            echo "Loading images/$file"
+            podman image load < images/$file
             image_id=$(podman image ls --format '{{.ID}}' | head -n 2 | tail -1)
             architecture=$(podman image inspect $image_id | jq -r ".[0].Architecture")
             tag=$(podman image ls --format "{{.Tag}}" | head -n 2 | tail -1)
@@ -63,7 +64,8 @@ jobs:
             podman manifest add $manifest_id $image_id
           done
           echo "Pushing manifest"
-          podman manifest push git.dirksys.ovh/dirk/bankserver:latest
+          podman login --get-login git.dirksys.ovh
+          podman --log-level debug manifest push git.dirksys.ovh/dirk/bankserver:latest
       - name: Notify server
         if: (github.event_name == 'push' && github.ref == 'refs/heads/main') || github.event_name == 'workflow_dispatch'
         env:
diff --git a/Cargo.toml b/Cargo.toml
index fde2797..f178e01 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,6 +7,10 @@ edition = "2024"
 default = ["schemas"]
 schemas = ["dep:schemars"]
 
+[[bin]]
+name = "generate-schemas"
+required-features = ["schemas"]
+
 [dependencies]
 axum = "0.8"
 chrono = { version = "0.4.40", features = ["serde"] }
diff --git a/flake.nix b/flake.nix
index 0d71015..4ed0904 100644
--- a/flake.nix
+++ b/flake.nix
@@ -39,7 +39,13 @@
       overlay =
         pkgs:
         let
-          rustVersion = pkgs.pkgsBuildHost.rust-bin.stable.latest.minimal;
+          rustVersion = pkgs.pkgsBuildHost.rust-bin.fromRustupToolchain {
+            channel = "stable";
+            targets = [
+              pkgs.pkgsBuildHost.targetPlatform.rust.rustcTarget
+              pkgs.pkgsBuildHost.buildPlatform.rust.rustcTarget
+            ];
+          };
           rustPlatform = pkgs.makeRustPlatform {
             cargo = rustVersion;
             rustc = rustVersion;
diff --git a/out.txt b/out.txt
new file mode 100644
index 0000000..e69de29
diff --git a/package.nix b/package.nix
index 7eff101..41728bc 100644
--- a/package.nix
+++ b/package.nix
@@ -5,21 +5,72 @@
   redocly,
   yq-go,
   targetPlatform,
+  pkgsBuildBuild,
   rev ? "dirty",
 }:
+let
+  src = lib.fileset.toSource {
+    root = ./.;
+    fileset =
+      with lib.fileset;
+      intersection (gitTracked ./.) (unions [
+        (fileFilter (
+          file:
+          file.type == "directory"
+          || file.hasExt "rs"
+          || file.hasExt "toml"
+          || file.name == "Cargo.lock"
+          || file.hasExt "sql"
+          || file.name == "openapi-def.yaml"
+          || file.hasExt "html"
+        ) ./.)
+      ]);
+  };
+  cargoDeps = rustPlatform.importCargoLock {
+    lockFile = ./Cargo.lock;
+    outputHashes = {
+      "dbmigrator-0.4.4-alpha" = "sha256-Nwxw74IyZeZ9dODb+aneQmuQe0grO+g45B3zv1XaihE=";
+    };
+  };
+  schemas = pkgsBuildBuild.stdenv.mkDerivation {
+    pname = "bankingserver_schemas";
+    version = "unstable-${rev}";
+    inherit src cargoDeps;
+    cargoBuildType = "debug";
+    # "CARGO_TARGET_${stdenv.hostPlatform.rust.cargoEnvVarTarget}_LINKER" =
+    #   "${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc";
+    nativeBuildInputs = [
+      rustPlatform.cargoSetupHook
+      rustPlatform.rust.cargo
+    ];
+
+    buildPhase = ''
+      touch openapi.json
+      cargo build --bin generate-schemas
+      target/debug/generate-schemas
+      runHook postBuild
+    '';
+    installPhase = ''
+      mkdir -p $out/share/bankserver
+      cp -r schemas $out/share/bankserver/schemas
+    '';
+    doCheck = false;
+  };
+in
 rustPlatform.buildRustPackage {
   pname = "bankingserver";
   version = "unstable-${rev}";
-
-  src = lib.cleanSource ./.;
+  inherit src cargoDeps;
 
   nativeBuildInputs = [
+    schemas
     redocly
     yq-go
   ];
 
   preBuild = ''
-    cargo test --features schemas --target ${stdenv.buildPlatform.rust.rustcTarget}
+    echo "Schemas: ${schemas}"
+    cp -r ${schemas}/share/bankserver/schemas schemas
 
     yq eval-all -n 'load("openapi-def.yaml") *n load("schemas/schemas.json")' > openapi-temp.yaml
     redocly bundle openapi-temp.yaml -o openapi.json
@@ -27,11 +78,7 @@ rustPlatform.buildRustPackage {
 
   buildType = "debug";
 
-  useFetchCargoVendor = false;
-  cargoLock.lockFile = ./Cargo.lock;
-  cargoLock.outputHashes = {
-    "dbmigrator-0.4.4-alpha" = "sha256-Nwxw74IyZeZ9dODb+aneQmuQe0grO+g45B3zv1XaihE=";
-  };
+  cargoBuildFlags = "--bin bankserver";
 
   CARGO_BUILD_TARGET = targetPlatform.config;
 
diff --git a/src/bin/generate-schemas.rs b/src/bin/generate-schemas.rs
new file mode 100644
index 0000000..2f39dc6
--- /dev/null
+++ b/src/bin/generate-schemas.rs
@@ -0,0 +1,94 @@
+use schemars::{
+    SchemaGenerator,
+    generate::{Contract, SchemaSettings},
+    transform::{Transform, transform_subschemas},
+};
+use std::{
+    collections::{HashMap, HashSet},
+    path::PathBuf,
+};
+
+fn main() {
+    let directory = PathBuf::from(concat!(env!("CARGO_MANIFEST_DIR"), "/schemas"));
+    std::fs::create_dir_all(&directory).unwrap();
+    let mut settings = SchemaSettings::draft2020_12();
+    settings.definitions_path = "".to_owned();
+
+    let mut request_generator = SchemaGenerator::new(settings.clone());
+    settings.contract = Contract::Serialize;
+    let mut response_generator = SchemaGenerator::new(settings);
+
+    let mut request_schemas = HashMap::new();
+    let mut response_schemas = HashMap::new();
+
+    struct RefTransform;
+
+    impl schemars::transform::Transform for RefTransform {
+        fn transform(&mut self, schema: &mut schemars::Schema) {
+            if schema
+                .get("$ref")
+                .map(|value| value.as_str().map(|v| v.starts_with("#/")))
+                .flatten()
+                .unwrap_or(false)
+            {
+                if let Some(value) = schema.remove("$ref") {
+                    let mut value = value
+                        .as_str()
+                        .unwrap()
+                        .strip_prefix("#/")
+                        .unwrap()
+                        .to_owned();
+                    value.push_str(".json");
+                    schema.insert("$ref".into(), value.into());
+                }
+            }
+            transform_subschemas(self, schema);
+        }
+    }
+
+    bankserver::api::schemas().generate(
+        &mut request_generator,
+        &mut response_generator,
+        &mut request_schemas,
+        &mut response_schemas,
+    );
+    let mut request_defs = request_generator.take_definitions();
+    let mut response_defs = response_generator.take_definitions();
+
+    let request_keys: HashSet<String> = request_defs.keys().cloned().collect();
+    let response_keys: HashSet<String> = response_defs.keys().cloned().collect();
+
+    let mut schemas = HashMap::new();
+
+    for key in request_keys.union(&response_keys) {
+        let mut path = directory.join(key);
+        path.set_extension("json");
+        let schema = match (request_defs.remove(key), response_defs.remove(key)) {
+            (None, Some(schema)) | (Some(schema), None) => schema,
+            (Some(request_schema), Some(response_schema)) => {
+                if request_schema != response_schema {
+                    panic!("Diverging schema for {key}");
+                }
+                request_schema
+            }
+            _ => continue,
+        };
+        let mut schema: schemars::Schema = schema.try_into().unwrap();
+        RefTransform.transform(&mut schema);
+        schemas.insert(
+            key,
+            serde_json::json!({"$ref": format!("schemas/{key}.json")}),
+        );
+        std::fs::write(
+            path,
+            serde_json::to_string_pretty(schema.as_value()).unwrap(),
+        )
+        .unwrap();
+    }
+    std::fs::write(
+        directory.join("schemas.json"),
+        serde_json::to_string_pretty(&serde_json::json!({"components": {"schemas": schemas}}))
+            .unwrap(),
+    )
+    .unwrap();
+}
diff --git a/src/lib.rs b/src/lib.rs
index 7425428..3e1ca4f 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -6,101 +6,4 @@ pub use config::Config;
 pub use db::setup_db;
 
 #[cfg(test)]
-mod tests {
-
-    #[cfg(feature = "schemas")]
-    #[test]
-    fn generate_schemas() {
-        use schemars::{
-            SchemaGenerator,
-            generate::{Contract, SchemaSettings},
-            transform::{Transform, transform_subschemas},
-        };
-        use std::{
-            collections::{HashMap, HashSet},
-            path::PathBuf,
-        };
-        let directory = PathBuf::from(concat!(env!("CARGO_MANIFEST_DIR"), "/schemas"));
-        std::fs::create_dir_all(&directory).unwrap();
-        let mut settings = SchemaSettings::draft2020_12();
-        settings.definitions_path = "".to_owned();
-
-        let mut request_generator = SchemaGenerator::new(settings.clone());
-        settings.contract = Contract::Serialize;
-        let mut response_generator = SchemaGenerator::new(settings);
-
-        let mut request_schemas = HashMap::new();
-        let mut response_schemas = HashMap::new();
-
-        struct RefTransform;
-
-        impl schemars::transform::Transform for RefTransform {
-            fn transform(&mut self, schema: &mut schemars::Schema) {
-                if schema
-                    .get("$ref")
-                    .map(|value| value.as_str().map(|v| v.starts_with("#/")))
-                    .flatten()
-                    .unwrap_or(false)
-                {
-                    if let Some(value) = schema.remove("$ref") {
-                        let mut value = value
-                            .as_str()
-                            .unwrap()
-                            .strip_prefix("#/")
-                            .unwrap()
-                            .to_owned();
-                        value.push_str(".json");
-                        schema.insert("$ref".into(), value.into());
-                    }
-                }
-                transform_subschemas(self, schema);
-            }
-        }
-
-        crate::api::schemas().generate(
-            &mut request_generator,
-            &mut response_generator,
-            &mut request_schemas,
-            &mut response_schemas,
-        );
-        let mut request_defs = request_generator.take_definitions();
-        let mut response_defs = response_generator.take_definitions();
-
-        let request_keys: HashSet<String> = request_defs.keys().cloned().collect();
-        let response_keys: HashSet<String> = response_defs.keys().cloned().collect();
-
-        let mut schemas = HashMap::new();
-
-        for key in request_keys.union(&response_keys) {
-            let mut path = directory.join(key);
-            path.set_extension("json");
-            let schema = match (request_defs.remove(key), response_defs.remove(key)) {
-                (None, Some(schema)) | (Some(schema), None) => schema,
-                (Some(request_schema), Some(response_schema)) => {
-                    if request_schema != response_schema {
-                        panic!("Diverging schema for {key}");
-                    }
-                    request_schema
-                }
-                _ => continue,
-            };
-            let mut schema: schemars::Schema = schema.try_into().unwrap();
-            RefTransform.transform(&mut schema);
-            schemas.insert(
-                key,
-                serde_json::json!({"$ref": format!("schemas/{key}.json")}),
-            );
-            std::fs::write(
-                path,
-                serde_json::to_string_pretty(schema.as_value()).unwrap(),
-            )
-            .unwrap();
-        }
-        std::fs::write(
-            directory.join("schemas.json"),
-            serde_json::to_string_pretty(&serde_json::json!({"components": {"schemas": schemas}}))
-                .unwrap(),
-        )
-        .unwrap();
-    }
-}
+mod tests {}
diff --git a/test.sh b/test.sh
new file mode 100644
index 0000000..d76070d
--- /dev/null
+++ b/test.sh
@@ -0,0 +1,30 @@
+set -euxo pipefail
+
+mkdir images
+for file in result-*dockerImage*; do
+  if [ ! -f "$file" ]; then
+    continue
+  fi
+  info=$(echo "$file" | sd '^result-([a-z0-9_-]+)\.([a-zA-Z]+)(?:-cross-([a-z0-9_-]+))?$' '$1\x1F$2\x1F$3')
+  IFS=$'\x1F' read -r hostArch name crossArch <<< "$info"
+  arch=${crossArch:-$hostArch}
+  containerArch=$(arch=$arch nix eval --raw --impure -I nixpkgs=flake:nixpkgs --expr '(import <nixpkgs> { system = builtins.getEnv "arch";}).go.GOARCH')
+  echo "Processed image for $containerArch"
+  cp $file images/image-$containerArch.tar.gz
+done
+
+manifest_id=$(podman manifest create git.dirksys.ovh/dirk/bankserver:latest)
+sleep 1
+for file in $(ls images); do
+  echo "Loading images/$file"
+  podman image load < images/$file
+  image_id=$(podman image ls --format '{{.ID}}' | head -n 2 | tail -1)
+  architecture=$(podman image inspect $image_id | jq -r ".[0].Architecture")
+  tag=$(podman image ls --format "{{.Tag}}" | head -n 2 | tail -1)
+  tag="git.dirksys.ovh/dirk/bankserver:$tag-$architecture"
+  podman image untag $image_id
+  echo "Adding $architecture image to manifest"
+  podman manifest add $manifest_id $image_id
+done
+echo "Pushing manifest"
+podman --log-level=debug manifest push git.dirksys.ovh/dirk/bankserver:latest > out.txt