♻️🍱 config, cli, vanth, vanth_derive, varo: Reformatted code and added editor configuration files
- Added `.zed/settings.json` to configure rust-analyzer with `leptosfmt`.
- Created `rustfmt.toml` with max width 120 and comment wrapping settings.
- Applied consistent formatting across all modified crates using rustfmt.
- Reorganized import statements and improved code style in multiple modules.
parent 87957bfbf8
commit 9e7979931c
15 changed files with 141 additions and 161 deletions
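For illustration only, a minimal sketch of the style the new rustfmt settings normalize to; the `Sample` type below is invented for this example and is not part of the repository.

```rust
use std::fmt::{self, Debug, Formatter};
use std::marker::PhantomData;

struct Sample<T>(u64, u64, PhantomData<T>);

impl<T> Debug for Sample<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // With max_width = 120, a call like this is kept on a single line instead of being
        // wrapped argument-per-line, matching the Id<T> Debug hunk further down in this diff.
        write!(f, "Sample<{}>({:016x}{:016x})", std::any::type_name::<T>(), self.0, self.1)
    }
}
```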
.zed/settings.json (new file, 11 lines)

@@ -0,0 +1,11 @@
+{
+  "lsp": {
+    "rust-analyzer": {
+      "initialization_options": {
+        "rustfmt": {
+          "overrideCommand": ["leptosfmt", "--stdin", "--rustfmt"]
+        }
+      }
+    }
+  }
+}
@@ -122,13 +122,7 @@ impl<T: ?Sized> Hash for Id<T> {
 impl<T: ?Sized> Debug for Id<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let (high, low) = self.to_u128_pair();
-        write!(
-            f,
-            "Id<{}>({:016x}{:016x})",
-            std::any::type_name::<T>(),
-            high,
-            low
-        )
+        write!(f, "Id<{}>({:016x}{:016x})", std::any::type_name::<T>(), high, low)
     }
 }
 
@@ -146,7 +140,9 @@ pub struct ContentHash {
 impl ContentHash {}
 
 pub trait Entity {
-    fn entity_id() -> Id<dyn Entity> where Self: Sized;
+    fn entity_id() -> Id<dyn Entity>
+    where
+        Self: Sized;
 }
 
 pub trait Component: Send + Sync + 'static {
@@ -4,11 +4,11 @@ use std::fmt;
 
 use digest::Update;
 use serde::{
+    Serialize, Serializer,
     ser::{
         self, SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant, SerializeTuple,
         SerializeTupleStruct, SerializeTupleVariant,
     },
-    Serialize, Serializer,
 };
 
 use crate::ContentHash;

@@ -18,7 +18,9 @@ pub fn hash(value: &impl Serialize) -> ContentHash {
     let mut serializer = HashingSerializer { digest: &mut digest };
     // TODO: Don't unwrap.
     serializer.serialize_value(value).unwrap();
-    ContentHash { hash: *serializer.digest.finalize().as_bytes() }
+    ContentHash {
+        hash: *serializer.digest.finalize().as_bytes(),
+    }
 }
 
 /// A serializer that hashes the data instead of serializing it.

@@ -70,9 +72,7 @@ impl fmt::Display for Error {
 /// Converts the `usize` sequence length to a fixed length type,
 /// since we want the result to be portable.
 fn try_into_sequence_length(len: usize) -> Result<u128, Error> {
-    u128::try_from(len)
-        .ok()
-        .ok_or(Error::SequenceLengthTooLarge)
+    u128::try_from(len).ok().ok_or(Error::SequenceLengthTooLarge)
 }
 
 // Implement `serialize_$ty` for int types

@@ -189,14 +189,12 @@ impl<'a, T: Update> Serializer for HashingSerializer<'a, T> {
 
     fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
         let len = len.ok_or(Error::UndefinedSequenceLength)?;
-        self.digest
-            .update(&try_into_sequence_length(len)?.to_be_bytes());
+        self.digest.update(&try_into_sequence_length(len)?.to_be_bytes());
         Ok(self)
     }
 
     fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
-        self.digest
-            .update(&try_into_sequence_length(len)?.to_be_bytes());
+        self.digest.update(&try_into_sequence_length(len)?.to_be_bytes());
         Ok(self)
     }
 

@@ -205,8 +203,7 @@ impl<'a, T: Update> Serializer for HashingSerializer<'a, T> {
         _name: &'static str,
         len: usize,
     ) -> Result<Self::SerializeTupleStruct, Self::Error> {
-        self.digest
-            .update(&try_into_sequence_length(len)?.to_be_bytes());
+        self.digest.update(&try_into_sequence_length(len)?.to_be_bytes());
         Ok(self)
     }
 

@@ -218,25 +215,18 @@ impl<'a, T: Update> Serializer for HashingSerializer<'a, T> {
         len: usize,
     ) -> Result<Self::SerializeTupleVariant, Self::Error> {
         self.digest.update(&variant_index.to_be_bytes());
-        self.digest
-            .update(&try_into_sequence_length(len)?.to_be_bytes());
+        self.digest.update(&try_into_sequence_length(len)?.to_be_bytes());
         Ok(self)
     }
 
     fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
         let len = len.ok_or(Error::UndefinedSequenceLength)?;
-        self.digest
-            .update(&try_into_sequence_length(len)?.to_be_bytes());
+        self.digest.update(&try_into_sequence_length(len)?.to_be_bytes());
         Ok(self)
     }
 
-    fn serialize_struct(
-        self,
-        _name: &'static str,
-        len: usize,
-    ) -> Result<Self::SerializeStruct, Self::Error> {
-        self.digest
-            .update(&try_into_sequence_length(len)?.to_be_bytes());
+    fn serialize_struct(self, _name: &'static str, len: usize) -> Result<Self::SerializeStruct, Self::Error> {
+        self.digest.update(&try_into_sequence_length(len)?.to_be_bytes());
         Ok(self)
     }
 

@@ -248,8 +238,7 @@ impl<'a, T: Update> Serializer for HashingSerializer<'a, T> {
         len: usize,
     ) -> Result<Self::SerializeStructVariant, Self::Error> {
         self.digest.update(&variant_index.to_be_bytes());
-        self.digest
-            .update(&try_into_sequence_length(len)?.to_be_bytes());
+        self.digest.update(&try_into_sequence_length(len)?.to_be_bytes());
        Ok(self)
     }
 

@@ -267,9 +256,7 @@ impl<'a, T: Update> SerializeSeq for HashingSerializer<'a, T> {
     type Error = Error;
 
     fn serialize_element<V: ?Sized + Serialize>(&mut self, value: &V) -> Result<Self::Ok, Error> {
-        value.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+        value.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 

@@ -283,9 +270,7 @@ impl<'a, T: Update> SerializeTuple for HashingSerializer<'a, T> {
     type Error = Error;
 
     fn serialize_element<V: ?Sized + Serialize>(&mut self, value: &V) -> Result<Self::Ok, Error> {
-        value.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+        value.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 

@@ -299,9 +284,7 @@ impl<'a, T: Update> SerializeTupleStruct for HashingSerializer<'a, T> {
     type Error = Error;
 
     fn serialize_field<V: ?Sized + Serialize>(&mut self, value: &V) -> Result<Self::Ok, Error> {
-        value.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+        value.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 

@@ -315,9 +298,7 @@ impl<'a, T: Update> SerializeTupleVariant for HashingSerializer<'a, T> {
     type Error = Error;
 
     fn serialize_field<V: ?Sized + Serialize>(&mut self, value: &V) -> Result<Self::Ok, Error> {
-        value.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+        value.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 

@@ -331,16 +312,12 @@ impl<'a, T: Update> SerializeMap for HashingSerializer<'a, T> {
     type Error = Error;
 
     fn serialize_key<K: ?Sized + Serialize>(&mut self, key: &K) -> Result<Self::Ok, Error> {
-        key.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+        key.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 
     fn serialize_value<V: ?Sized + Serialize>(&mut self, value: &V) -> Result<Self::Ok, Error> {
-        value.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+        value.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 

@@ -353,14 +330,8 @@ impl<'a, T: Update> SerializeStruct for HashingSerializer<'a, T> {
     type Ok = ();
     type Error = Error;
 
-    fn serialize_field<V: ?Sized + Serialize>(
-        &mut self,
-        _key: &'static str,
-        value: &V,
-    ) -> Result<Self::Ok, Error> {
-        value.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+    fn serialize_field<V: ?Sized + Serialize>(&mut self, _key: &'static str, value: &V) -> Result<Self::Ok, Error> {
+        value.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 

@@ -373,14 +344,8 @@ impl<'a, T: Update> SerializeStructVariant for HashingSerializer<'a, T> {
     type Ok = ();
     type Error = Error;
 
-    fn serialize_field<V: ?Sized + Serialize>(
-        &mut self,
-        _key: &'static str,
-        value: &V,
-    ) -> Result<Self::Ok, Error> {
-        value.serialize(HashingSerializer {
-            digest: self.digest,
-        })?;
+    fn serialize_field<V: ?Sized + Serialize>(&mut self, _key: &'static str, value: &V) -> Result<Self::Ok, Error> {
+        value.serialize(HashingSerializer { digest: self.digest })?;
         Ok(())
     }
 
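As a usage sketch only (not part of the diff): the `hash` helper above runs a value's serde serialization through the `HashingSerializer` and returns a `ContentHash`. Something like the following is the intended call pattern, assuming serde's derive feature and the `vanth::hash` re-export introduced in the `lib.rs` hunk below.

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Foo {
    a: u32,
    b: String,
}

fn content_hash_example() {
    let foo = Foo { a: 1, b: "x".into() };
    // Hashes the structure via HashingSerializer instead of producing serialized bytes.
    let _hash = vanth::hash(&foo);
}
```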
@@ -3,13 +3,13 @@ use std::marker::PhantomData;
 /// Library crate for the `vanth` ECS-based database node.
 use bevy_app::{App, Plugin};
 use bevy_ecs::{prelude::*, query::QueryData};
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::{Deserialize, Serialize, de::DeserializeOwned};
 
 use crate::entity::EntityId;
 
-pub mod store;
 pub mod entity;
 pub mod hashing_serializer;
+pub mod store;
 
 pub use hashing_serializer::hash;
 pub use vanth_derive::Vanth;

@@ -40,9 +40,7 @@ impl Node {
     }
 
     // TODO
-    pub fn run() {
-
-    }
+    pub fn run() {}
 
     pub fn save(entity_id: impl Into<EntityId>) -> Result<()> {
         // TODO

@@ -58,7 +56,7 @@ impl Node {
 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct HashedValue {
     content_hash: ContentHash,
-    inner: Value
+    inner: Value,
 }
 
 impl From<Value> for HashedValue {

@@ -93,9 +91,9 @@ impl PartialEq for Ty {
     }
 }
 
-impl <T: AsRef<str>> PartialEq<T> for Ty {
+impl<T: AsRef<str>> PartialEq<T> for Ty {
     fn eq(&self, other: &T) -> bool {
         self.to_string() == *other.as_ref()
     }
 }
 

@@ -104,9 +102,7 @@ pub trait Vanth {
 }
 
 // TODO: Impl for different tuple sizes
-pub trait VanthTuple {
-
-}
+pub trait VanthTuple {}
 
 // #[derive(Clone, Debug, Deserialize, Serialize)]
 // pub struct EntityContents {

@@ -134,17 +130,17 @@ pub struct ContentHash {
 #[derive(Clone, Debug, Deserialize, Component, Serialize)]
 pub struct Reference<T: Clone + Serialize> {
     value: ReferenceValue,
-    _marker: PhantomData<T>
+    _marker: PhantomData<T>,
 }
 
 #[derive(Clone, Debug, Deserialize, Component, Serialize)]
 pub enum ReferenceValue {
     Absent,
     Retrieving(ReferenceRetrievalTask),
-    Present(Vec<u8>)
+    Present(Vec<u8>),
 }
 
-impl <T: Clone + Serialize> Reference<T> {
+impl<T: Clone + Serialize> Reference<T> {
     pub async fn take() -> T {
         todo!()
     }

@@ -155,9 +151,7 @@ impl <T: Clone + Serialize> Reference<T> {
 }
 
 #[derive(Component, Clone, Debug, Deserialize, Serialize)]
-pub struct ReferenceRetrievalTask {
-
-}
+pub struct ReferenceRetrievalTask {}
 
 impl Future for ReferenceRetrievalTask {
     type Output = Vec<u8>;

@@ -168,19 +162,19 @@ impl Future for ReferenceRetrievalTask {
 }
 
 pub struct Handle<T> {
-    _marker: PhantomData<T>
+    _marker: PhantomData<T>,
 }
 
 // TODO:
 // A trait is derivable for ECS components
-// The components must have a content hash, not the entity. For efficiency and ergonomics. This means that a hash of each relevant component must be stored in the Vanth component of the entity, in a `HashMap` or something. The ID of the component used by Vanth should be a method on the derived trait.
+// The components must have a content hash, not the entity. For efficiency and ergonomics. This means that a hash of
+// each relevant component must be stored in the Vanth component of the entity, in a `HashMap` or something. The ID of
+// the component used by Vanth should be a method on the derived trait.
 
 pub struct VanthPlugin;
 
 impl Plugin for VanthPlugin {
-    fn build(&self, app: &mut App) {
-
-    }
+    fn build(&self, app: &mut App) {}
 }
 
 // fn run_reference_tasks(tasks: Query<(&ReferenceGetTask<>)>) {
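Purely as a hypothetical sketch of the design described in the wrapped TODO comment above (none of these names exist in the crate yet): per-component content hashes could be kept on the entity's Vanth bookkeeping component, keyed by the component's type identifier.

```rust
use std::collections::HashMap;

/// Hypothetical bookkeeping component: one content hash per tracked component type.
struct VanthHashes {
    /// Keyed by the component's Vanth type name; the value is its 32-byte content hash.
    hashes: HashMap<String, [u8; 32]>,
}

impl VanthHashes {
    fn hash_of(&self, ty_name: &str) -> Option<&[u8; 32]> {
        self.hashes.get(ty_name)
    }
}
```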
@@ -188,6 +182,4 @@ impl Plugin for VanthPlugin {
 // }
 
 /// A world which Vanth entities live in. Lifetimes `'v` of [`Vanth<'v>`] types are tied to the lifetime of the `Root`.
-pub struct Root {
-
-}
+pub struct Root {}
@@ -1,11 +1,11 @@
 use std::{collections::HashMap, marker::PhantomData, path::PathBuf};
 
-use rusqlite::{Connection, params, named_params};
+use rusqlite::{Connection, named_params, params};
 
 use bevy_ecs::prelude::*;
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::{Deserialize, Serialize, de::DeserializeOwned};
 
-use crate::{hash, ComponentContents, ContentHash, Ty, Vanth};
+use crate::{ComponentContents, ContentHash, Ty, Vanth, hash};
 
 #[derive(Debug)]
 pub struct Store {

@@ -22,7 +22,7 @@ pub enum Error {
 
 impl From<serde_json::Error> for Error {
     fn from(err: serde_json::Error) -> Self {
         Error::Serializiation(err.to_string())
     }
 }
 

@@ -48,13 +48,15 @@ impl Store {
     }
 
     pub fn get_from_hash<T: Vanth + DeserializeOwned>(&mut self, content_hash: ContentHash) -> Result<Option<T>> {
-        let Some(raw) = self.get_raw_from_hash::<T>(content_hash)? else { return Ok(None) };
+        let Some(raw) = self.get_from_hash_raw::<T>(content_hash)? else {
+            return Ok(None);
+        };
 
         let deserialized: T = serde_json::from_slice(&raw)?;
         Ok(Some(deserialized))
     }
 
-    pub fn get_raw_from_hash<T: Vanth>(&mut self, content_hash: ContentHash) -> Result<Option<Vec<u8>>> {
+    pub fn get_from_hash_raw<T: Vanth>(&mut self, content_hash: ContentHash) -> Result<Option<Vec<u8>>> {
         self.backend.get_from_hash(T::ty(), content_hash)
     }
 

@@ -144,9 +146,11 @@ impl Backend for Sqlite {
         let table_name = Self::table_name(&ty);
         let query = format!("SELECT content FROM \"{}\" WHERE content_hash = :hash", table_name);
 
-        match self.conn.query_row(&query, named_params! {":hash": content_hash.hash.as_slice()}, |row| {
-            row.get::<_, Vec<u8>>(0)
-        }) {
+        match self
+            .conn
+            .query_row(&query, named_params! {":hash": content_hash.hash.as_slice()}, |row| {
+                row.get::<_, Vec<u8>>(0)
+            }) {
             Ok(content) => Ok(Some(content)),
             Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None),
             Err(e) => Err(e.into()),

@@ -181,7 +185,10 @@ impl Backend for Sqlite {
             "INSERT OR REPLACE INTO \"{}\" (content_hash, content) VALUES (:hash, :content)",
             table_name
         );
-        self.conn.execute(&query, named_params! {":hash": content_hash.hash.as_slice(), ":content": content})?;
+        self.conn.execute(
+            &query,
+            named_params! {":hash": content_hash.hash.as_slice(), ":content": content},
+        )?;
         Ok(())
     }
 

@@ -189,7 +196,8 @@ impl Backend for Sqlite {
         self.ensure_table_exists(&ty)?;
         let table_name = Self::table_name(&ty);
         let query = format!("DELETE FROM \"{}\" WHERE content_hash = :hash", table_name);
-        self.conn.execute(&query, named_params! {":hash": content_hash.hash.as_slice()})?;
+        self.conn
+            .execute(&query, named_params! {":hash": content_hash.hash.as_slice()})?;
         Ok(())
     }
 

@@ -209,28 +217,20 @@ pub struct Memory {
 
 impl Memory {
     pub fn new() -> Self {
-        Self {
-            tables: HashMap::new(),
-        }
+        Self { tables: HashMap::new() }
     }
 }
 
 impl Backend for Memory {
     fn get_from_hash(&mut self, ty: Ty, content_hash: ContentHash) -> Result<Option<Vec<u8>>> {
-        Ok(self.tables
-            .get(&ty)
-            .and_then(|table| table.get(&content_hash))
-            .cloned())
+        Ok(self.tables.get(&ty).and_then(|table| table.get(&content_hash)).cloned())
     }
 
     fn get_all_of_ty(&mut self, ty: Ty) -> Result<Vec<(ContentHash, Vec<u8>)>> {
-        Ok(self.tables
+        Ok(self
+            .tables
             .get(&ty)
-            .map(|table| {
-                table.iter()
-                    .map(|(k, v)| (*k, v.clone()))
-                    .collect()
-            })
+            .map(|table| table.iter().map(|(k, v)| (*k, v.clone())).collect())
             .unwrap_or_else(Vec::new))
     }
 
@@ -1,12 +1,10 @@
 use bevy_ecs::component::Component;
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::{Deserialize, Serialize, de::DeserializeOwned};
 use vanth::Vanth;
 
 // TODO: derive `Vanth`
 #[derive(Debug, Deserialize, Component, Serialize)]
-struct Foo {
-
-}
+struct Foo {}
 
 #[test]
 fn test_derive() {

@@ -34,5 +32,8 @@ fn test_derive() {
 
     assert_eq!(Bar::ty(), format!("{base}Bar"));
     assert_eq!(Foo::<Bar>::ty(), format!("{base}Foo<{base}Bar>"));
-    assert_eq!(Qux::<Bar, Foo<Bar>>::ty(), format!("{base}Qux<{base}Bar,{base}Foo<{base}Bar>>"));
+    assert_eq!(
+        Qux::<Bar, Foo<Bar>>::ty(),
+        format!("{base}Qux<{base}Bar,{base}Foo<{base}Bar>>")
+    );
 }

@@ -0,0 +1 @@
+
@@ -3,5 +3,5 @@ use vanth::{Component, Node, Reference};
 
 mod derive;
 mod fs;
-mod store;
 mod reference;
+mod store;
@@ -1,4 +1,9 @@
-use bevy_ecs::{component::Component, entity::Entity, event::{Event, EventWriter}, system::Query};
+use bevy_ecs::{
+    component::Component,
+    entity::Entity,
+    event::{Event, EventWriter},
+    system::Query,
+};
 
 #[derive(Event)]
 struct LevelUpEvent<T> {

@@ -10,10 +15,7 @@ struct FooTask {
     field: i32,
 }
 
-fn player_level_up(
-    mut ev_levelup: EventWriter<LevelUpEvent<i32>>,
-    query: Query<(Entity, &FooTask)>,
-) {
+fn player_level_up(mut ev_levelup: EventWriter<LevelUpEvent<i32>>, query: Query<(Entity, &FooTask)>) {
     for (entity, xp) in query.iter() {
         ev_levelup.write(LevelUpEvent::<i32> { inner: 5 });
     }
@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
-use vanth::{hash, store::Store, Vanth};
 use std::path::PathBuf;
 use tempfile::TempDir;
+use vanth::{Vanth, hash, store::Store};
 
 #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Vanth)]
 struct Foo {
@@ -1,7 +1,7 @@
 use proc_macro::TokenStream;
 use quote::quote;
-use syn::{parse_macro_input, DeriveInput, GenericParam, Generics};
 use syn::parse_quote;
+use syn::{DeriveInput, GenericParam, Generics, parse_macro_input};
 
 #[proc_macro_derive(Vanth)]
 pub fn vanth_derive(input: TokenStream) -> TokenStream {

@@ -11,15 +11,19 @@ pub fn vanth_derive(input: TokenStream) -> TokenStream {
 
     let mut generics = input.generics.clone();
 
-    let type_params: Vec<syn::Ident> = generics.params.iter().filter_map(|param| {
-        if let GenericParam::Type(type_param) = param {
-            Some(type_param.ident.clone())
-        } else {
-            None
-        }
-    }).collect();
+    let type_params: Vec<syn::Ident> = generics
+        .params
+        .iter()
+        .filter_map(|param| {
+            if let GenericParam::Type(type_param) = param {
+                Some(type_param.ident.clone())
+            } else {
+                None
+            }
+        })
+        .collect();
 
-    let mut where_clause = generics.where_clause.clone().unwrap_or_else(|| parse_quote!(where ));
+    let mut where_clause = generics.where_clause.clone().unwrap_or_else(|| parse_quote!(where));
     for tp in &type_params {
         where_clause.predicates.push(parse_quote!(#tp : vanth::Vanth));
     }
@@ -1,5 +1,5 @@
-use rand_core::{RngCore, SeedableRng};
 use rand_chacha::ChaCha8Rng;
+use rand_core::{RngCore, SeedableRng};
 use serde::{Deserialize, Serialize};
 use std::f32::consts::PI;
 use vanth_derive::Vanth;

@@ -10,11 +10,15 @@ pub struct Rng {
 }
 
 pub fn rng_new() -> Rng {
-    Rng { inner: ChaCha8Rng::from_seed([0u8; 32]) }
+    Rng {
+        inner: ChaCha8Rng::from_seed([0u8; 32]),
+    }
 }
 
 pub fn rng_from_seed(seed: [u8; 32]) -> Rng {
-    Rng { inner: ChaCha8Rng::from_seed(seed) }
+    Rng {
+        inner: ChaCha8Rng::from_seed(seed),
+    }
 }
 
 pub fn rng_set_stream(rng: &mut Rng, stream: u64) {

@@ -82,7 +86,7 @@ impl From<f32> for Score {
 #[derive(Clone, Debug, Deserialize, Serialize, Vanth)]
 pub struct OptimizationResult {
     /// List of pairs of evaluation score and Rng used to generate the value.
-    pub values: Vec<(Rng, f32)>
+    pub values: Vec<(Rng, f32)>,
 }
 
 pub fn optimize<T: Varo>(evaluator: impl Fn(T) -> Score, rng: &mut Rng, rounds: u32) -> OptimizationResult {
@@ -1,4 +1,4 @@
-use varo::{optimize, Rng, Score, Varo};
+use varo::{Rng, Score, Varo, optimize};
 
 #[test]
 fn test_optimize() {
rustfmt.toml (new file, 4 lines)

@@ -0,0 +1,4 @@
+edition = "2024"
+max_width = 120
+comment_width = 120
+wrap_comments = true