
Update request
tinrab committed Nov 17, 2023
1 parent 5008d21 commit 5abcaa0
Showing 35 changed files with 383 additions and 352 deletions.
15 changes: 12 additions & 3 deletions .github/workflows/test.yaml
@@ -30,8 +30,17 @@ jobs:
target/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }}

- name: Run lints
- name: Lint
run: ./develop.sh lint

- name: Run tests
run: ./develop.sh test
- name: Test
run: cargo test --workspace --all-targets --all-features

- name: Test - no default features
run: cargo test --workspace --all-targets --no-default-features

- name: Test doc
run: cargo test --workspace --doc --all-features

- name: Test doc - no default features
run: cargo test --workspace --doc --no-default-features
18 changes: 10 additions & 8 deletions bomboni_common/src/id/generator.rs
@@ -6,7 +6,7 @@ use std::{
use super::Id;

#[derive(Debug, Clone, Copy)]
pub struct IdGenerator {
pub struct Generator {
worker: u16,
next: u16,
}
@@ -16,11 +16,12 @@ pub struct IdGenerator {
const SLEEP_DURATION: Duration = Duration::from_secs(1);

#[cfg(feature = "tokio")]
pub type IdGeneratorArc = std::sync::Arc<tokio::sync::Mutex<IdGenerator>>;
pub type IdGeneratorArc = std::sync::Arc<tokio::sync::Mutex<Generator>>;

impl IdGenerator {
pub fn new(worker: u16) -> Self {
IdGenerator { next: 0, worker }
impl Generator {
#[must_use]
pub const fn new(worker: u16) -> Self {
Self { next: 0, worker }
}

/// Generates a new random id.
@@ -137,7 +138,7 @@ mod tests {

#[test]
fn it_works() {
let mut id_generator = IdGenerator::new(42);
let mut id_generator = Generator::new(42);
let id = id_generator.generate();
let (_timestamp, worker, sequence) = id.decode();
assert_eq!(worker, 42);
@@ -148,9 +149,10 @@
#[cfg(feature = "tokio")]
#[tokio::test]
async fn generate_multiple() {
let mut g = IdGenerator::new(1);

const N: usize = 10;

let mut g = Generator::new(1);

let mut ids = HashSet::new();
ids.extend(g.generate_multiple_async(N / 2).await);
g.next = u16::MAX - 1;
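For orientation, a minimal usage sketch of the renamed `Generator`, mirroring the tests in this diff; the `new`, `generate`, and `decode` calls come from the test code, while the exact import path is an assumption. Under the `tokio` feature, the `IdGeneratorArc` alias above wraps the same type in `Arc<Mutex<_>>` for shared use across tasks.

```rust
// Sketch only: the module path is assumed; the calls match the diff's tests.
use bomboni_common::id::{Generator, Id};

fn main() {
    let mut generator = Generator::new(42);
    let id: Id = generator.generate();
    let (_timestamp, worker, _sequence) = id.decode();
    assert_eq!(worker, 42);
}
```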
21 changes: 12 additions & 9 deletions bomboni_common/src/id/mod.rs
@@ -23,21 +23,23 @@ const SEQUENCE_BITS: i64 = 16;
pub struct Id(u128);

impl Id {
#[must_use]
pub const fn new(id: u128) -> Self {
Id(id)
Self(id)
}

/// Encodes the Id from parts.
#[must_use]
pub fn from_parts(time: SystemTime, worker: u16, sequence: u16) -> Self {
let timestamp = time.duration_since(UNIX_EPOCH).unwrap().as_secs() as u128;
let worker = worker as u128;
let sequence = sequence as u128;
let timestamp = u128::from(time.duration_since(UNIX_EPOCH).unwrap().as_secs());
let worker = u128::from(worker);
let sequence = u128::from(sequence);

assert!(timestamp < (1 << TIMESTAMP_BITS));
assert!(worker < (1 << WORKER_BITS));
assert!(sequence < (1 << SEQUENCE_BITS));

Id(
Self(
(timestamp & ((1 << TIMESTAMP_BITS) - 1)) << (WORKER_BITS + SEQUENCE_BITS)
| ((worker & ((1 << WORKER_BITS) - 1)) << SEQUENCE_BITS)
| (sequence & ((1 << SEQUENCE_BITS) - 1)),
@@ -61,6 +63,7 @@ impl Id {
/// assert_eq!(worker, 42);
/// assert_eq!(sequence, 1);
/// ```
#[must_use]
pub fn decode(self) -> (SystemTime, u16, u16) {
let timestamp = SystemTime::UNIX_EPOCH
+ Duration::from_secs((self.0 >> (WORKER_BITS + SEQUENCE_BITS)) as u64);
@@ -81,7 +84,7 @@ impl FromStr for Id {

fn from_str(s: &str) -> Result<Self, Self::Err> {
let value = u128::from_str_radix(s, 16)?;
Ok(Id::new(value))
Ok(Self::new(value))
}
}

@@ -114,7 +117,7 @@ impl<'de> Deserialize<'de> for Id {
{
use serde::de::Error;
let value = String::deserialize(deserializer)?;
value.parse::<Id>().map_err(|_| {
value.parse::<Self>().map_err(|_| {
<D as Deserializer<'de>>::Error::invalid_value(Unexpected::Str(value.as_str()), &"Id")
})
}
@@ -131,13 +134,13 @@ mod tests {
fn it_works() {
assert_eq!(
Id::from_parts(SystemTime::UNIX_EPOCH + Duration::from_secs(10), 1, 1),
Id(0b101000000000000000010000000000000001)
Id(0b1010_0000_0000_0000_0001_0000_0000_0000_0001)
);
let max_time = SystemTime::UNIX_EPOCH + Duration::from_secs(Duration::MAX.as_secs() / 2);
let id = Id::from_parts(max_time, 1, 1);
assert_eq!(
id,
Id(0b11111111111111111111111111111111111111111111111111111111111111100000000000000010000000000000001)
Id(0b111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_1111_0000_0000_0000_0001_0000_0000_0000_0001)
);
let (timestamp, worker, sequence) = id.decode();
assert_eq!(timestamp, max_time);
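To make the packing behind `from_parts` and `decode` concrete, here is a standalone round-trip sketch using the 16-bit worker and sequence fields visible in this diff; it omits the range assertions and masking done by the real implementation.

```rust
use std::time::{Duration, SystemTime, UNIX_EPOCH};

// Layout: | timestamp (high bits) | worker (16 bits) | sequence (16 bits) |
const WORKER_BITS: u32 = 16;
const SEQUENCE_BITS: u32 = 16;

fn pack(time: SystemTime, worker: u16, sequence: u16) -> u128 {
    let timestamp = u128::from(time.duration_since(UNIX_EPOCH).unwrap().as_secs());
    (timestamp << (WORKER_BITS + SEQUENCE_BITS))
        | (u128::from(worker) << SEQUENCE_BITS)
        | u128::from(sequence)
}

fn unpack(id: u128) -> (SystemTime, u16, u16) {
    // Timestamp is truncated to u64 seconds, matching `Id::decode`.
    let timestamp = UNIX_EPOCH + Duration::from_secs((id >> (WORKER_BITS + SEQUENCE_BITS)) as u64);
    let worker = ((id >> SEQUENCE_BITS) & 0xFFFF) as u16;
    let sequence = (id & 0xFFFF) as u16;
    (timestamp, worker, sequence)
}

fn main() {
    let time = UNIX_EPOCH + Duration::from_secs(10);
    let id = pack(time, 42, 1);
    assert_eq!(id, 0x0A_002A_0001);
    assert_eq!(unpack(id), (time, 42, 1));
}
```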
5 changes: 3 additions & 2 deletions bomboni_common/src/id/mysql.rs
@@ -30,9 +30,10 @@ impl<'q> Encode<'q, MySql> for Id {

impl<'q> Decode<'q, MySql> for Id {
fn decode(value: MySqlValueRef<'_>) -> Result<Self, BoxDynError> {
const MAX_SIZE: usize = std::mem::size_of::<u128>();

let mut bytes = <&[u8] as Decode<MySql>>::decode(value).map(ToOwned::to_owned)?;

const MAX_SIZE: usize = std::mem::size_of::<u128>();
assert!(bytes.len() <= MAX_SIZE, "invalid bytes length for `Id`");
let missing = MAX_SIZE - bytes.len();
if missing != 0 {
@@ -41,6 +42,6 @@ impl<'q> Decode<'q, MySql> for Id {
bytes = buf;
}

Ok(Id::new(u128::from_be_bytes(bytes.try_into().unwrap())))
Ok(Self::new(u128::from_be_bytes(bytes.try_into().unwrap())))
}
}
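The `Decode` impl above left-pads a shorter big-endian byte string with zeros before interpreting it as a `u128`. A standalone sketch of that step (illustration only, not the sqlx-facing code):

```rust
fn id_value_from_be_bytes(bytes: &[u8]) -> u128 {
    const MAX_SIZE: usize = std::mem::size_of::<u128>();
    assert!(bytes.len() <= MAX_SIZE, "invalid bytes length for `Id`");

    // Copy into the low end of a zeroed 16-byte buffer, keeping big-endian order.
    let mut buf = [0u8; MAX_SIZE];
    buf[MAX_SIZE - bytes.len()..].copy_from_slice(bytes);
    u128::from_be_bytes(buf)
}

fn main() {
    // A two-byte column value decodes to the same numeric id.
    assert_eq!(id_value_from_be_bytes(&[0x01, 0x02]), 0x0102);
}
```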
6 changes: 3 additions & 3 deletions bomboni_derive/src/lib.rs
@@ -50,7 +50,7 @@ pub fn parse_resource_name(input: TokenStream) -> TokenStream {

let mut parse_segments = quote!();
let mut had_optional = false;
for segment in resource.segments.iter() {
for segment in &resource.segments {
let name = &segment.name;
let ty = &segment.ty;
if is_option_type(ty) {
@@ -97,7 +97,7 @@ impl Parse for Resource {
impl Parse for Resource {
fn parse(input: ParseStream) -> Result<Self> {
let content;
Ok(Resource {
Ok(Self {
_bracket_token: syn::bracketed!(content in input),
segments: content.parse_terminated(Segment::parse, Token![,])?,
})
@@ -106,7 +106,7 @@

impl Parse for Segment {
fn parse(input: ParseStream) -> Result<Self> {
Ok(Segment {
Ok(Self {
span: input.span(),
name: input.parse()?,
_arrow_token: input.parse()?,
2 changes: 1 addition & 1 deletion bomboni_derive/src/utility.rs
@@ -1,6 +1,6 @@
use syn::Type;

pub(crate) fn is_option_type(ty: &Type) -> bool {
pub fn is_option_type(ty: &Type) -> bool {
if let Type::Path(type_path) = ty {
if let Some(segment) = type_path.path.segments.first() {
if segment.ident == "Option" {
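Since the diff truncates `is_option_type`, here is the full shape of such a helper as a runnable sketch (assumes `syn` 2 with its default features, so `parse_quote!` works outside a proc macro):

```rust
use syn::{parse_quote, Type};

// Returns true when the first path segment of a type path is `Option`;
// a fully qualified `std::option::Option` would not match, as in the helper above.
fn is_option_type(ty: &Type) -> bool {
    if let Type::Path(type_path) = ty {
        if let Some(segment) = type_path.path.segments.first() {
            return segment.ident == "Option";
        }
    }
    false
}

fn main() {
    let optional: Type = parse_quote!(Option<String>);
    let plain: Type = parse_quote!(String);
    assert!(is_option_type(&optional));
    assert!(!is_option_type(&plain));
}
```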
4 changes: 2 additions & 2 deletions bomboni_proto/Cargo.toml
@@ -23,7 +23,7 @@ json = ["dep:serde_json"]
thiserror = "1.0.50"
itertools = "0.12.0"
time = { version = "0.3.30", features = ["serde", "formatting", "parsing"] }
prost = "0.12.1"
prost = "0.12.2"
serde = { version = "1.0.192", features = ["derive"] }
pot = "3.0.0"

@@ -37,7 +37,7 @@ serde_json = "1.0.108"

[build-dependencies]
bomboni_prost = { path = "../bomboni_prost", version = "0.1.5" }
prost-build = "0.12.1"
prost-build = "0.12.2"

[lints]
workspace = true
72 changes: 36 additions & 36 deletions bomboni_proto/build.rs
@@ -1,27 +1,32 @@
use std::{error::Error, io::Write, path::PathBuf};

use bomboni_prost::{
compile,
config::{ApiConfig, CompileConfig},
};
// use bomboni_prost::{
// compile,
// config::{ApiConfig, CompileConfig},
// };

fn main() -> Result<(), Box<dyn Error + 'static>> {
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
let fd_path = out_dir.join("fd.pb");

#[cfg(feature = "testing")]
{
let mut config = prost_build::Config::new();
config
.file_descriptor_set_path(fd_path.clone())
.protoc_arg("--experimental_allow_proto3_optional")
.btree_map(["."])
.compile_protos(&["./tests/proto/tools.proto"], &["./tests/proto/"])?;
// #[cfg(feature = "testing")]
// {
// let fd_path = out_dir.join("test.pb");
// let mut config = prost_build::Config::new();
// config
// .file_descriptor_set_path(&fd_path)
// .protoc_arg("--experimental_allow_proto3_optional")
// .btree_map(["."])
// .enable_type_names()
// .type_name_domain(["."], "test.tools")
// .compile_protos(&["./tests/proto/tools.proto"], &["./tests/proto/"])?;

compile(CompileConfig {
..Default::default()
})?;
}
// // compile(CompileConfig {
// // api: ApiConfig::default(),
// // file_descriptor_set_path: out_dir.join(fd_path),
// // ..Default::default()
// // })?;
// }

let root_path = PathBuf::from("./proto");
let proto_paths: Vec<_> = [
@@ -38,16 +43,18 @@ fn main() -> Result<(), Box<dyn Error + 'static>> {
.map(|proto_path| root_path.join(proto_path))
.collect();

for proto_path in proto_paths.iter() {
for proto_path in &proto_paths {
println!("cargo:rerun-if-changed={}", proto_path.display());
}

let mut config = prost_build::Config::new();
config
.file_descriptor_set_path(fd_path)
.file_descriptor_set_path(&fd_path)
.compile_well_known_types()
.protoc_arg("--experimental_allow_proto3_optional")
.btree_map(["."]);
.btree_map(["."])
.enable_type_names()
.type_name_domain(["."], "type.googleapis.com");

for type_path in get_camel_cased_type_paths() {
config.type_attribute(
@@ -65,11 +72,11 @@ fn main() -> Result<(), Box<dyn Error + 'static>> {
);
}
for type_path in get_copy_type_paths() {
config.type_attribute(type_path, r#"#[derive(Copy)]"#);
config.type_attribute(type_path, r"#[derive(Copy)]");
}
config.type_attribute(
".google.rpc.Status",
r#"#[derive(::serde::Serialize, ::serde::Deserialize)]"#,
r"#[derive(::serde::Serialize, ::serde::Deserialize)]",
);
config.field_attribute(
".google.rpc.Status.details",
@@ -82,16 +89,11 @@

config.compile_protos(&proto_paths, &["./proto"])?;

std::io::stdout().flush().unwrap();

compile(CompileConfig {
api: ApiConfig {
domain: Some("type.googleapis.com".into()),
..Default::default()
},
file_descriptor_set_path: out_dir.join("fd.pb"),
..Default::default()
})?;
// compile(CompileConfig {
// api: ApiConfig::default(),
// file_descriptor_set_path: out_dir.join(fd_path),
// ..Default::default()
// })?;

Ok(())
}
@@ -110,17 +112,15 @@ fn get_camel_cased_type_paths() -> impl Iterator<Item = String> {
"LocalizedMessage",
]
.into_iter()
.map(|type_name| format!(".google.rpc.{}", type_name))
.map(|type_name| format!(".google.rpc.{type_name}"))
}

fn get_default_type_paths() -> impl Iterator<Item = String> {
["ErrorInfo.metadata"]
.into_iter()
.map(|type_name| format!(".google.rpc.{}", type_name))
std::iter::once("ErrorInfo.metadata").map(|type_name| format!(".google.rpc.{type_name}"))
}

fn get_copy_type_paths() -> impl Iterator<Item = String> {
["Timestamp", "Empty", "Duration"]
.into_iter()
.map(|type_name| format!(".google.protobuf.{}", type_name))
.map(|type_name| format!(".google.protobuf.{type_name}"))
}
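For reference, a trimmed build script sketch with the same prost-build 0.12 options toggled in this diff (`enable_type_names` plus `type_name_domain`); the single proto path here is a placeholder, not a file from this repository.

```rust
// build.rs sketch: placeholder proto path, otherwise the same options as above.
use std::{error::Error, path::PathBuf};

fn main() -> Result<(), Box<dyn Error>> {
    let out_dir = PathBuf::from(std::env::var("OUT_DIR")?);

    let mut config = prost_build::Config::new();
    config
        .file_descriptor_set_path(out_dir.join("fd.pb"))
        .compile_well_known_types()
        .protoc_arg("--experimental_allow_proto3_optional")
        .btree_map(["."])
        .enable_type_names()
        .type_name_domain(["."], "type.googleapis.com");

    config.compile_protos(&["./proto/example.proto"], &["./proto"])?;
    Ok(())
}
```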
4 changes: 2 additions & 2 deletions bomboni_proto/src/lib.rs
@@ -3,7 +3,7 @@ mod rpc;
pub mod serde;

/// Includes generated protobuf code.
/// Base path is specified with OUT_DIR environment variable.
/// Base path is specified with `OUT_DIR` environment variable.
#[macro_export]
macro_rules! include_proto {
($package: tt) => {
@@ -22,7 +22,7 @@ macro_rules! include_file_descriptor_set {
};
}

#[allow(unused_qualifications)]
#[allow(unused_qualifications, clippy::all, clippy::pedantic)]
pub mod google {
pub mod protobuf {
pub use super::super::protobuf::*;