From 0f6e93ab9b1d7d6b61d621cef8c3041dbe729596 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timoth=C3=A9e=20Haudebourg?= Date: Mon, 15 May 2023 12:09:35 +0200 Subject: [PATCH] Lexicon support (#140) --- Cargo.toml | 7 +- build/src/error/node_type_invalid.rs | 17 +- build/src/layout/primitive.rs | 6 + build/src/resource.rs | 190 +++--- core/src/layout.rs | 2 +- core/src/layout/primitive.rs | 52 +- core/src/layout/primitive/restriction.rs | 6 +- .../src/layout/primitive/restriction/bytes.rs | 24 + core/src/layout/primitive/restriction/cid.rs | 24 + core/src/to_rdf.rs | 2 + core/src/ty.rs | 3 + core/src/value.rs | 75 ++- core/src/value/numeric/real/rational.rs | 29 + load/Cargo.toml | 11 +- load/src/document.rs | 179 ++++++ load/src/document/json.rs | 147 +++++ load/src/document/json/lexicon.rs | 11 + load/src/document/json/schema.rs | 11 + load/src/document/nquads.rs | 24 + load/src/document/tldr.rs | 70 +++ load/src/document/turtle.rs | 30 + load/src/error.rs | 170 ++++++ load/src/lib.rs | 500 +-------------- load/src/source.rs | 24 +- modules/json-ld-context/src/command.rs | 2 + modules/json-schema/Cargo.toml | 5 +- modules/json-schema/src/command.rs | 13 +- modules/json-schema/src/import.rs | 50 +- modules/json-schema/src/lib.rs | 476 ++++++++------- modules/json-schema/src/schema.rs | 147 ++++- .../{from_serde_json.rs => from_syntax.rs} | 47 +- modules/json-schema/src/schema/validation.rs | 51 +- .../json-schema/tests/json_schema_generate.rs | 17 +- .../json-schema/tests/json_schema_import.rs | 7 +- modules/lexicon/.gitignore | 1 + modules/lexicon/.rustfmt.toml | 1 + modules/lexicon/Cargo.toml | 24 + modules/lexicon/README.md | 3 + .../examples/atproto/resolveHandle.json | 30 + modules/lexicon/src/export.rs | 1 + modules/lexicon/src/import.rs | 367 +++++++++++ modules/lexicon/src/import/array.rs | 77 +++ modules/lexicon/src/import/blob.rs | 58 ++ modules/lexicon/src/import/ipld.rs | 28 + modules/lexicon/src/import/ipld/bytes.rs | 58 ++ modules/lexicon/src/import/ipld/cid_link.rs | 50 ++ modules/lexicon/src/import/object.rs | 133 ++++ modules/lexicon/src/import/primitive.rs | 292 +++++++++ modules/lexicon/src/import/record.rs | 63 ++ modules/lexicon/src/import/reference.rs | 144 +++++ modules/lexicon/src/import/token.rs | 22 + modules/lexicon/src/import/xrpc.rs | 90 +++ modules/lexicon/src/import/xrpc/body.rs | 44 ++ modules/lexicon/src/import/xrpc/procedure.rs | 34 ++ modules/lexicon/src/import/xrpc/query.rs | 87 +++ .../lexicon/src/import/xrpc/subscription.rs | 189 ++++++ modules/lexicon/src/lib.rs | 573 ++++++++++++++++++ modules/lexicon/src/nsid.rs | 131 ++++ modules/lexicon/tests/t01.lexicon.json | 30 + modules/lexicon/tests/t01.rs | 14 + modules/rust/cli/src/main.rs | 77 ++- modules/rust/gen/src/context.rs | 100 ++- modules/rust/gen/src/error.rs | 5 +- modules/rust/gen/src/lib.rs | 2 +- modules/rust/gen/src/ty.rs | 12 +- modules/rust/gen/src/ty/generate.rs | 4 + modules/rust/macros/src/lib.rs | 6 +- modules/rust/macros/src/module.rs | 86 +-- modules/rust/prelude/src/ty.rs | 6 + schema/tldr.nq | 12 +- tldrc/src/main.rs | 11 +- 71 files changed, 4294 insertions(+), 1000 deletions(-) create mode 100644 core/src/layout/primitive/restriction/bytes.rs create mode 100644 core/src/layout/primitive/restriction/cid.rs create mode 100644 load/src/document.rs create mode 100644 load/src/document/json.rs create mode 100644 load/src/document/json/lexicon.rs create mode 100644 load/src/document/json/schema.rs create mode 100644 load/src/document/nquads.rs create mode 100644 load/src/document/tldr.rs 
create mode 100644 load/src/document/turtle.rs create mode 100644 load/src/error.rs rename modules/json-schema/src/schema/{from_serde_json.rs => from_syntax.rs} (89%) create mode 100644 modules/lexicon/.gitignore create mode 100644 modules/lexicon/.rustfmt.toml create mode 100644 modules/lexicon/Cargo.toml create mode 100644 modules/lexicon/README.md create mode 100644 modules/lexicon/examples/atproto/resolveHandle.json create mode 100644 modules/lexicon/src/export.rs create mode 100644 modules/lexicon/src/import.rs create mode 100644 modules/lexicon/src/import/array.rs create mode 100644 modules/lexicon/src/import/blob.rs create mode 100644 modules/lexicon/src/import/ipld.rs create mode 100644 modules/lexicon/src/import/ipld/bytes.rs create mode 100644 modules/lexicon/src/import/ipld/cid_link.rs create mode 100644 modules/lexicon/src/import/object.rs create mode 100644 modules/lexicon/src/import/primitive.rs create mode 100644 modules/lexicon/src/import/record.rs create mode 100644 modules/lexicon/src/import/reference.rs create mode 100644 modules/lexicon/src/import/token.rs create mode 100644 modules/lexicon/src/import/xrpc.rs create mode 100644 modules/lexicon/src/import/xrpc/body.rs create mode 100644 modules/lexicon/src/import/xrpc/procedure.rs create mode 100644 modules/lexicon/src/import/xrpc/query.rs create mode 100644 modules/lexicon/src/import/xrpc/subscription.rs create mode 100644 modules/lexicon/src/lib.rs create mode 100644 modules/lexicon/src/nsid.rs create mode 100644 modules/lexicon/tests/t01.lexicon.json create mode 100644 modules/lexicon/tests/t01.rs diff --git a/Cargo.toml b/Cargo.toml index d82b80a7..d0340766 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,7 @@ members = [ "modules/rust/macros", "modules/rust/cli", "modules/rust/prelude", + "modules/lexicon" ] [workspace.package] @@ -28,6 +29,7 @@ treeldr-json-ld-context = { path = "modules/json-ld-context", version = "0.1.0" treeldr-rust-gen = { path = "modules/rust/gen", version = "0.1.0" } treeldr-rust-macros = { path = "modules/rust/macros", version = "0.1.0" } treeldr-rust-prelude = { path = "modules/rust/prelude", version = "0.1.0" } +treeldr-lexicon = { path = "modules/lexicon", version = "0.1.0" } log = "0.4" iref = "2.2" @@ -45,14 +47,15 @@ thiserror = "1.0.31" decoded-char = "0.1.1" contextual = "0.1.3" json-ld = "0.14.0" -json-syntax = "0.9.1" +json-syntax = "0.9.4" +serde = "1.0" chrono = "0.4.19" quote = "1.0" proc-macro2 = "1.0" syn = "2.0.15" -clap = "3.0" +clap = "4.0" async-std = "1.12.0" nquads-syntax = "0.12.0" diff --git a/build/src/error/node_type_invalid.rs b/build/src/error/node_type_invalid.rs index 36c54191..73926a25 100644 --- a/build/src/error/node_type_invalid.rs +++ b/build/src/error/node_type_invalid.rs @@ -9,9 +9,20 @@ use super::NodeBindingTypeInvalid; #[derive(Debug)] pub struct NodeTypeInvalid { - pub id: Id, - pub expected: Type, - pub found: PropertyValues + id: Id, + expected: Type, + found: PropertyValues +} + +impl NodeTypeInvalid { + pub fn new( + id: Id, + expected: Type, + found: PropertyValues + ) -> Self { + // panic!("invalid type"); + Self { id, expected, found } + } } impl NodeTypeInvalid { diff --git a/build/src/layout/primitive.rs b/build/src/layout/primitive.rs index 8135c8f5..5f5d6555 100644 --- a/build/src/layout/primitive.rs +++ b/build/src/layout/primitive.rs @@ -104,6 +104,12 @@ impl BuildPrimitive for Primitive { Primitive::Url => Ok(treeldr::layout::RestrictedPrimitive::Url( restrictions.try_map(|r| r.build(id))?, )), + Primitive::Bytes => 
Ok(treeldr::layout::RestrictedPrimitive::Bytes( + restrictions.try_map(|r| r.build(id))?, + )), + Primitive::Cid => Ok(treeldr::layout::RestrictedPrimitive::Cid( + restrictions.try_map(|r| r.build(id))?, + )), } } } diff --git a/build/src/resource.rs b/build/src/resource.rs index 84319842..86b10271 100644 --- a/build/src/resource.rs +++ b/build/src/resource.rs @@ -311,11 +311,11 @@ impl Definition { if self.has_type(context, Type::Class(None)) { Ok(self.as_type()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::Class(None).into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::Class(None).into(), + self.data.type_.clone(), + )) } } @@ -326,11 +326,11 @@ impl Definition { if self.has_type(context, Type::Class(None)) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::Class(None).into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::Class(None).into(), + self.data.type_.clone(), + )) } } @@ -341,11 +341,11 @@ impl Definition { if self.has_type(context, ty::SubClass::DataType) { Ok(self.as_datatype()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: ty::SubClass::DataType.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + ty::SubClass::DataType.into(), + self.data.type_.clone(), + )) } } @@ -356,11 +356,11 @@ impl Definition { if self.has_type(context, ty::SubClass::DataType) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: ty::SubClass::DataType.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + ty::SubClass::DataType.into(), + self.data.type_.clone(), + )) } } @@ -371,11 +371,11 @@ impl Definition { if self.has_type(context, ty::SubClass::Restriction) { Ok(self.as_restriction()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: ty::SubClass::Restriction.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + ty::SubClass::Restriction.into(), + self.data.type_.clone(), + )) } } @@ -386,11 +386,11 @@ impl Definition { if self.has_type(context, Type::LayoutRestriction) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::LayoutRestriction.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::LayoutRestriction.into(), + self.data.type_.clone(), + )) } } @@ -401,11 +401,11 @@ impl Definition { if self.has_type(context, Type::DatatypeRestriction) { Ok(self.as_datatype_restriction()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::DatatypeRestriction.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::DatatypeRestriction.into(), + self.data.type_.clone(), + )) } } @@ -416,11 +416,11 @@ impl Definition { if self.has_type(context, Type::DatatypeRestriction) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::DatatypeRestriction.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::DatatypeRestriction.into(), + self.data.type_.clone(), + )) } } @@ -431,11 +431,11 @@ impl Definition { if self.has_type(context, Type::Property(None)) { Ok(self.as_property()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::Property(None).into(), - found: 
self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::Property(None).into(), + self.data.type_.clone(), + )) } } @@ -446,11 +446,11 @@ impl Definition { if self.has_type(context, Type::Property(None)) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::Property(None).into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::Property(None).into(), + self.data.type_.clone(), + )) } } @@ -461,11 +461,11 @@ impl Definition { if self.has_type(context, component::Type::Layout) { Ok(self.as_layout()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: component::Type::Layout.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + component::Type::Layout.into(), + self.data.type_.clone(), + )) } } @@ -476,11 +476,11 @@ impl Definition { if self.has_type(context, component::Type::Layout) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: component::Type::Layout.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + component::Type::Layout.into(), + self.data.type_.clone(), + )) } } @@ -491,11 +491,11 @@ impl Definition { if self.has_type(context, component::formatted::Type::LayoutField) { Ok(self.as_layout_field()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: component::formatted::Type::LayoutField.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + component::formatted::Type::LayoutField.into(), + self.data.type_.clone(), + )) } } @@ -506,11 +506,11 @@ impl Definition { if self.has_type(context, component::formatted::Type::LayoutField) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: component::formatted::Type::LayoutField.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + component::formatted::Type::LayoutField.into(), + self.data.type_.clone(), + )) } } @@ -521,11 +521,11 @@ impl Definition { if self.has_type(context, component::formatted::Type::LayoutVariant) { Ok(self.as_layout_variant()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: component::formatted::Type::LayoutVariant.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + component::formatted::Type::LayoutVariant.into(), + self.data.type_.clone(), + )) } } @@ -536,11 +536,11 @@ impl Definition { if self.has_type(context, component::formatted::Type::LayoutVariant) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: component::formatted::Type::LayoutVariant.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + component::formatted::Type::LayoutVariant.into(), + self.data.type_.clone(), + )) } } @@ -551,11 +551,11 @@ impl Definition { if self.has_type(context, Type::LayoutRestriction) { Ok(self.as_layout_restriction()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::LayoutRestriction.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::LayoutRestriction.into(), + self.data.type_.clone(), + )) } } @@ -566,11 +566,11 @@ impl Definition { if self.has_type(context, Type::LayoutRestriction) { Ok(treeldr::TId::new(self.data.id)) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: 
Type::LayoutRestriction.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::LayoutRestriction.into(), + self.data.type_.clone(), + )) } } @@ -581,11 +581,11 @@ impl Definition { if self.has_type(context, Type::List) { Ok(self.as_list()) } else { - Err(NodeTypeInvalid { - id: self.data.id, - expected: Type::List.into(), - found: self.data.type_.clone(), - }) + Err(NodeTypeInvalid::new( + self.data.id, + Type::List.into(), + self.data.type_.clone(), + )) } } diff --git a/core/src/layout.rs b/core/src/layout.rs index 554b759e..7704cc65 100644 --- a/core/src/layout.rs +++ b/core/src/layout.rs @@ -799,7 +799,7 @@ impl<'a, M> DescriptionBindingRef<'a, M> { pub fn value(&self) -> BindingValueRef<'a, M> { match self { - Self::DerivedFrom(_, p) => BindingValueRef::Type(p.ty()), + Self::DerivedFrom(_, p) => BindingValueRef::Layout(p.layout()), Self::Reference(_, v) => BindingValueRef::Layout(*v), Self::Struct(_, v) => BindingValueRef::Fields(v), Self::Enum(_, v) => BindingValueRef::Variants(v), diff --git a/core/src/layout/primitive.rs b/core/src/layout/primitive.rs index ed028b2d..9b7a76ea 100644 --- a/core/src/layout/primitive.rs +++ b/core/src/layout/primitive.rs @@ -111,6 +111,16 @@ pub enum Primitive { /// URL. #[iri("tldr:URL")] Url, + + /// Arbitrary bytes. + #[iri("tldr:Bytes")] + Bytes, + + /// CID (Content IDentifier). + /// + /// See + #[iri("tldr:CID")] + Cid, } impl Primitive { @@ -149,6 +159,8 @@ impl Primitive { Self::Iri => "iri", Self::Uri => "uri", Self::Url => "url", + Self::Bytes => "bytes", + Self::Cid => "content identifier", } } @@ -157,9 +169,47 @@ impl Primitive { Id::Iri(IriIndex::Iri(Term::TreeLdr(TreeLdr::Primitive(*self)))) } - pub fn ty(&self) -> TId { + pub fn layout(&self) -> TId { TId::new(self.id()) } + + pub fn natural_type_term(&self) -> Option { + use vocab::{Term, Xsd}; + match self { + Self::Boolean => Some(Term::Xsd(Xsd::Boolean)), + Self::Integer => Some(Term::Xsd(Xsd::Integer)), + Self::NonNegativeInteger => Some(Term::Xsd(Xsd::NonNegativeInteger)), + Self::NonPositiveInteger => Some(Term::Xsd(Xsd::NonPositiveInteger)), + Self::NegativeInteger => Some(Term::Xsd(Xsd::NegativeInteger)), + Self::PositiveInteger => Some(Term::Xsd(Xsd::PositiveInteger)), + Self::Float => Some(Term::Xsd(Xsd::Float)), + Self::Double => Some(Term::Xsd(Xsd::Double)), + Self::U64 => Some(Term::Xsd(Xsd::UnsignedLong)), + Self::U32 => Some(Term::Xsd(Xsd::UnsignedInt)), + Self::U16 => Some(Term::Xsd(Xsd::UnsignedShort)), + Self::U8 => Some(Term::Xsd(Xsd::UnsignedByte)), + Self::I64 => Some(Term::Xsd(Xsd::Long)), + Self::I32 => Some(Term::Xsd(Xsd::Int)), + Self::I16 => Some(Term::Xsd(Xsd::Short)), + Self::I8 => Some(Term::Xsd(Xsd::Byte)), + Self::Base64Bytes => Some(Term::Xsd(Xsd::Base64Binary)), + Self::HexBytes => Some(Term::Xsd(Xsd::HexBinary)), + Self::String => Some(Term::Xsd(Xsd::String)), + Self::Time => Some(Term::Xsd(Xsd::Time)), + Self::Date => Some(Term::Xsd(Xsd::Date)), + Self::DateTime => Some(Term::Xsd(Xsd::DateTime)), + Self::Iri => Some(Term::Xsd(Xsd::AnyUri)), + Self::Uri => Some(Term::Xsd(Xsd::AnyUri)), + Self::Url => Some(Term::Xsd(Xsd::AnyUri)), + Self::Bytes => None, + Self::Cid => None, + } + } + + pub fn natural_type(&self) -> Option> { + self.natural_type_term() + .map(|t| TId::new(Id::Iri(IriIndex::Iri(t)))) + } } impl fmt::Display for Primitive { diff --git a/core/src/layout/primitive/restriction.rs b/core/src/layout/primitive/restriction.rs index 23f3c31b..ac3f55c8 100644 --- 
a/core/src/layout/primitive/restriction.rs +++ b/core/src/layout/primitive/restriction.rs @@ -6,6 +6,8 @@ use locspan::Meta; pub mod base64_bytes; pub mod boolean; pub mod byte; +pub mod bytes; +pub mod cid; pub mod date; pub mod datetime; pub mod double; @@ -196,7 +198,9 @@ restricted_type! { DateTime: xsd_types::DateTime, Iri: iri::Iri, Uri: uri::Uri, - Url: url::Url + Url: url::Url, + Bytes: bytes::Bytes, + Cid: cid::Cid } /// Values of the `tldr:withRestrictions` property. diff --git a/core/src/layout/primitive/restriction/bytes.rs b/core/src/layout/primitive/restriction/bytes.rs new file mode 100644 index 00000000..ccfb9f0c --- /dev/null +++ b/core/src/layout/primitive/restriction/bytes.rs @@ -0,0 +1,24 @@ +use crate::layout::Primitive; + +use super::{template, RestrainableType}; + +#[derive(Debug)] +pub struct Bytes; + +pub type Restriction = template::none::Restriction; + +pub type RestrictionRef<'a> = template::none::RestrictionRef<'a, Bytes>; + +pub type Restrictions = template::none::Restrictions; + +pub type Conflict = template::none::Conflict; + +pub type Iter<'a, M> = template::none::Iter<'a, Bytes, M>; + +impl RestrainableType for Bytes { + const PRIMITIVE: Primitive = Primitive::Bytes; + + type RestrictionRef<'a> = RestrictionRef<'a>; + type Restrictions = Restrictions; + type RestrictionsIter<'a, M> = Iter<'a, M> where M: 'a; +} diff --git a/core/src/layout/primitive/restriction/cid.rs b/core/src/layout/primitive/restriction/cid.rs new file mode 100644 index 00000000..3da300d4 --- /dev/null +++ b/core/src/layout/primitive/restriction/cid.rs @@ -0,0 +1,24 @@ +use crate::layout::Primitive; + +use super::{template, RestrainableType}; + +#[derive(Debug)] +pub struct Cid; + +pub type Restriction = template::none::Restriction; + +pub type RestrictionRef<'a> = template::none::RestrictionRef<'a, Cid>; + +pub type Restrictions = template::none::Restrictions; + +pub type Conflict = template::none::Conflict; + +pub type Iter<'a, M> = template::none::Iter<'a, Cid, M>; + +impl RestrainableType for Cid { + const PRIMITIVE: Primitive = Primitive::Cid; + + type RestrictionRef<'a> = RestrictionRef<'a>; + type Restrictions = Restrictions; + type RestrictionsIter<'a, M> = Iter<'a, M> where M: 'a; +} diff --git a/core/src/to_rdf.rs b/core/src/to_rdf.rs index b8883425..afed0f09 100644 --- a/core/src/to_rdf.rs +++ b/core/src/to_rdf.rs @@ -562,6 +562,8 @@ impl<'a> IntoRdf for layout::primitive::RestrictionRef<'a> { Self::Iri(r) => r.into_rdf_with(vocabulary, generator, quads, options), Self::Uri(r) => r.into_rdf_with(vocabulary, generator, quads, options), Self::Url(r) => r.into_rdf_with(vocabulary, generator, quads, options), + Self::Bytes(r) => r.into_rdf_with(vocabulary, generator, quads, options), + Self::Cid(r) => r.into_rdf_with(vocabulary, generator, quads, options), } } } diff --git a/core/src/ty.rs b/core/src/ty.rs index 901cf40e..48a37773 100644 --- a/core/src/ty.rs +++ b/core/src/ty.rs @@ -136,6 +136,9 @@ impl From for Type { Term::TreeLdr(vocab::TreeLdr::Variant) => { component::formatted::Type::LayoutVariant.into() } + Term::TreeLdr(vocab::TreeLdr::LayoutRestriction) => { + node::Type::LayoutRestriction.into() + } t => Self::Other(OtherTypeId(TId::new(Id::Iri(IriIndex::Iri(t))))), } } diff --git a/core/src/value.rs b/core/src/value.rs index 2f587956..f6188388 100644 --- a/core/src/value.rs +++ b/core/src/value.rs @@ -4,6 +4,7 @@ use langtag::LanguageTag; use locspan::Meta; use rdf_types::IriVocabulary; pub use xsd_types::value::*; +use xsd_types::ParseRdf; use crate::{vocab, Id, 
IriIndex}; @@ -60,6 +61,35 @@ impl Literal { } } +macro_rules! from_rational { + ( $($ty:ty),* ) => { + $( + impl From<$ty> for Literal { + fn from(value: $ty) -> Self { + Self::Numeric(Numeric::Real(Real::Rational(value.into()))) + } + } + )* + }; +} + +from_rational! { + Decimal, + Integer, + NonNegativeInteger, + PositiveInteger, + NonPositiveInteger, + NegativeInteger, + Long, + Int, + Short, + Byte, + UnsignedLong, + UnsignedInt, + UnsignedShort, + UnsignedByte +} + impl From for Literal { fn from(value: String) -> Self { Self::String(value) @@ -94,22 +124,51 @@ impl> rdf_types::RdfDisplayWithContext for L pub enum InvalidLiteral { #[error("missing language tag")] MissingLanguageTag, + + #[error("invalid lexical value")] + InvalidLexicalValue(IriIndex), } impl TryFrom> for Literal { type Error = InvalidLiteral; fn try_from(value: vocab::Literal) -> Result { + macro_rules! match_type { + ( ($s:ident, $ty:ident): $($term:ident),* ) => { + match $ty { + IriIndex::Iri(vocab::Term::Xsd(vocab::Xsd::String)) => { + Ok(Literal::String($s.into_value())) + } + IriIndex::Iri(vocab::Term::Rdf(vocab::Rdf::LangString)) => { + Err(InvalidLiteral::MissingLanguageTag) + } + $( + IriIndex::Iri(vocab::Term::Xsd(vocab::Xsd::$term)) => { + Ok(Integer::parse_rdf(&$s).map_err(|_| InvalidLiteral::InvalidLexicalValue($ty))?.into()) + } + )* + ty => Ok(Literal::Other($s.into_value(), ty)), + } + }; + } + match value { vocab::Literal::String(s) => Ok(Literal::String(s.into_value())), - vocab::Literal::TypedString(s, Meta(ty, _)) => match ty { - IriIndex::Iri(vocab::Term::Xsd(vocab::Xsd::String)) => { - Ok(Literal::String(s.into_value())) - } - IriIndex::Iri(vocab::Term::Rdf(vocab::Rdf::LangString)) => { - Err(InvalidLiteral::MissingLanguageTag) - } - ty => Ok(Literal::Other(s.into_value(), ty)), + vocab::Literal::TypedString(s, Meta(ty, _)) => match_type! { (s, ty): + Decimal, + Integer, + NonNegativeInteger, + PositiveInteger, + NonPositiveInteger, + NegativeInteger, + Long, + Int, + Short, + Byte, + UnsignedLong, + UnsignedInt, + UnsignedShort, + UnsignedByte }, vocab::Literal::LangString(s, tag) => Ok(Literal::LangString(LangString::new( s.into_value(), diff --git a/core/src/value/numeric/real/rational.rs b/core/src/value/numeric/real/rational.rs index cc9f611e..906b3a8c 100644 --- a/core/src/value/numeric/real/rational.rs +++ b/core/src/value/numeric/real/rational.rs @@ -252,6 +252,35 @@ impl From for Rational { } } +macro_rules! from_integer { + ( $($ty:ty),* ) => { + $( + impl From<$ty> for Rational { + fn from(value: $ty) -> Self { + let n: BigInt = value.into(); + n.into() + } + } + )* + }; +} + +from_integer! 
{ + Integer, + NonNegativeInteger, + PositiveInteger, + NonPositiveInteger, + NegativeInteger, + Long, + Int, + Short, + Byte, + UnsignedLong, + UnsignedInt, + UnsignedShort, + UnsignedByte +} + impl TryFrom for super::Decimal { type Error = Rational; diff --git a/load/Cargo.toml b/load/Cargo.toml index f41c9cd1..7341b3a9 100644 --- a/load/Cargo.toml +++ b/load/Cargo.toml @@ -7,11 +7,11 @@ edition.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] -default = ["turtle"] +default = ["turtle", "json-schema", "lexicon"] turtle = ["turtle-syntax"] -json-schema = ["treeldr-json-schema", "serde_json"] +json-schema = ["treeldr-json-schema", "json-syntax"] json-ld-context = ["treeldr-json-ld-context"] -serde = ["dep:serde"] +lexicon = ["treeldr-lexicon", "json-syntax"] [dependencies] treeldr.workspace = true @@ -30,5 +30,6 @@ serde = { version = "1.0.154", features = ["derive"], optional = true } # Extensions. turtle-syntax = { version = "5.0.0", optional = true } treeldr-json-schema = { workspace = true, optional = true } -serde_json = { workspace = true, optional = true } -treeldr-json-ld-context = { workspace = true, optional = true } \ No newline at end of file +json-syntax = { workspace = true, features = ["serde"], optional = true } +treeldr-json-ld-context = { workspace = true, optional = true } +treeldr-lexicon = { workspace = true, optional = true } \ No newline at end of file diff --git a/load/src/document.rs b/load/src/document.rs new file mode 100644 index 00000000..dfb84978 --- /dev/null +++ b/load/src/document.rs @@ -0,0 +1,179 @@ +use std::{hash::Hash, path::Path}; + +use iref::IriBuf; +use locspan::Meta; +use rdf_types::{Generator, Id, VocabularyMut}; +use treeldr::{BlankIdIndex, IriIndex}; + +use crate::{source, BuildContext, Dataset, DisplayPath, FileId, LangError, LoadError, MimeType}; + +pub mod json; +pub mod nquads; +pub mod tldr; +#[cfg(feature = "turtle")] +pub mod turtle; + +pub enum Document { + TreeLdr(Box), + + NQuads(nquads_syntax::Document), + + #[cfg(feature = "turtle")] + Turtle(turtle_syntax::Document), + + Json(Box), +} + +pub enum DeclaredDocument { + TreeLdr(Box), + + NQuads(Dataset), + + #[cfg(feature = "turtle")] + Turtle(Dataset), + + Json(Box), +} + +impl Document { + /// Load the document located at the given `path`. + pub fn load<'f, P>( + files: &'f mut source::Files
<P>
, + filename: &Path, + ) -> Result<(Self, source::FileId), LoadError> + where + P: Clone + Eq + Hash + DisplayPath<'f> + for<'a> From<&'a Path>, + { + match files.load(&filename, None, None) { + Ok(file_id) => { + let document = match files.get(file_id).unwrap().mime_type() { + Some(type_) => Self::from_file_id(files, file_id, type_)?, + None => return Err(LoadError::UnrecognizedFormat(filename.to_owned())), + }; + + Ok((document, file_id)) + } + Err(e) => Err(LoadError::UnableToRead(filename.to_owned(), e)), + } + } + + /// Load a document from its content. + pub fn load_content<'f, P>( + files: &'f mut source::Files
<P>
, + source: P, + content: String, + base_iri: Option, + type_: MimeType, + ) -> Result<(Self, source::FileId), LoadError> + where + P: Clone + Eq + Hash + DisplayPath<'f>, + { + let file_id = files.load_content(source, base_iri, Some(type_), content); + let document = Self::from_file_id(files, file_id, type_)?; + Ok((document, file_id)) + } + + fn from_file_id<'f, P>( + files: &'f mut source::Files
<P>
, + file_id: FileId, + type_: MimeType, + ) -> Result + where + P: DisplayPath<'f>, + { + match type_ { + MimeType::TreeLdr => Ok(Self::TreeLdr(Box::new(tldr::import(files, file_id)?))), + MimeType::NQuads => Ok(Self::NQuads(nquads::import(files, file_id)?)), + #[cfg(feature = "turtle")] + MimeType::Turtle => Ok(Self::Turtle(turtle::import(files, file_id)?)), + #[cfg(feature = "json-schema")] + MimeType::Json(t) => Ok(Self::Json(Box::new(json::import(files, file_id, t)?))), + #[allow(unreachable_patterns)] + mime_type => Err(LoadError::UnsupportedMimeType(mime_type)), + } + } + + pub fn declare>( + self, + context: &mut BuildContext, + vocabulary: &mut V, + generator: &mut impl Generator, + ) -> Result { + match self { + Self::TreeLdr(mut d) => { + d.declare(context, vocabulary, generator)?; + Ok(DeclaredDocument::TreeLdr(d)) + } + Self::NQuads(d) => { + let dataset: Dataset = d + .into_iter() + .map(|Meta(quad, meta)| { + Meta( + quad.insert_into(vocabulary) + .map_predicate(|Meta(p, m)| Meta(Id::Iri(p), m)), + meta, + ) + }) + .collect(); + + use treeldr_build::Document; + dataset + .declare(&mut (), context, vocabulary, generator) + .map_err(LangError::NQuads)?; + Ok(DeclaredDocument::NQuads(dataset)) + } + #[cfg(feature = "turtle")] + Self::Turtle(d) => { + let dataset: Dataset = d + .build_triples_with(None, vocabulary, &mut *generator)? + .into_iter() + .map(|Meta(triple, meta)| { + Meta( + triple + .map_predicate(|Meta(p, m)| Meta(Id::Iri(p), m)) + .into_quad(None), + meta, + ) + }) + .collect(); + + use treeldr_build::Document; + dataset + .declare(&mut (), context, vocabulary, generator) + .map_err(LangError::NQuads)?; + Ok(DeclaredDocument::NQuads(dataset)) + } + Self::Json(d) => Ok(d.declare(context, vocabulary, generator)?), + } + } +} + +impl DeclaredDocument { + pub fn build>( + self, + context: &mut BuildContext, + vocabulary: &mut V, + generator: &mut impl Generator, + ) -> Result<(), LangError> { + match self { + Self::TreeLdr(d) => { + d.build(context, vocabulary, generator)?; + Ok(()) + } + Self::NQuads(d) => { + use treeldr_build::Document; + d.define(&mut (), context, vocabulary, generator) + .map_err(LangError::NQuads)?; + Ok(()) + } + #[cfg(feature = "turtle")] + Self::Turtle(d) => { + use treeldr_build::Document; + d.define(&mut (), context, vocabulary, generator) + .map_err(LangError::NQuads)?; + Ok(()) + } + Self::Json(_) => Ok(()), + } + } +} diff --git a/load/src/document/json.rs b/load/src/document/json.rs new file mode 100644 index 00000000..19c301d0 --- /dev/null +++ b/load/src/document/json.rs @@ -0,0 +1,147 @@ +use json_syntax::Parse; +use locspan::{Location, Meta}; +use rdf_types::{Generator, Id, VocabularyMut}; +use treeldr::{BlankIdIndex, IriIndex}; + +use crate::{source, BuildContext, Dataset, LangError, LoadError}; + +#[cfg(feature = "json-schema")] +pub mod schema; + +#[cfg(feature = "lexicon")] +pub mod lexicon; + +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +pub enum MimeType { + /// application/schema+json + JsonSchema, + + /// application/lexicon+json + Lexicon, +} + +impl MimeType { + pub fn name(&self) -> &'static str { + match self { + Self::JsonSchema => "application/schema+json", + Self::Lexicon => "application/lexicon+json", + } + } + + pub fn infer(json: &json_syntax::MetaValue) -> Option { + #[cfg(feature = "json-schema")] + if treeldr_json_schema::import::is_json_schema(json) { + return Some(Self::JsonSchema); + } + + #[cfg(feature = "lexicon")] + if treeldr_lexicon::import::is_lexicon_document(json) { + return Some(Self::Lexicon); + } + 
+ None + } +} + +pub enum Document { + #[cfg(feature = "json-schema")] + Schema(treeldr_json_schema::Schema), + + #[cfg(feature = "lexicon")] + Lexicon(treeldr_lexicon::LexiconDoc), +} + +impl Document { + pub fn declare>( + self, + context: &mut BuildContext, + vocabulary: &mut V, + generator: &mut impl Generator, + ) -> Result { + match self { + #[cfg(feature = "json-schema")] + Self::Schema(s) => { + treeldr_json_schema::import_schema(&s, None, context, vocabulary, generator)?; + Ok(crate::document::DeclaredDocument::Json(Box::new( + DeclaredDocument::Schema(s), + ))) + } + #[cfg(feature = "lexicon")] + Self::Lexicon(d) => { + let dataset: Dataset = d + .into_triples(vocabulary, &mut *generator) + .map(|triple| { + Meta( + triple + .map_subject(|s| Meta(s, source::Metadata::default())) + .map_predicate(|p| Meta(Id::Iri(p), source::Metadata::default())) + .map_object(|o| Meta(label_object(o), source::Metadata::default())) + .into_quad(None), + source::Metadata::default(), + ) + }) + .collect(); + + use treeldr_build::Document; + dataset + .declare(&mut (), context, vocabulary, generator) + .map_err(LangError::NQuads)?; + Ok(crate::document::DeclaredDocument::NQuads(dataset)) + } + } + } +} + +fn label_object( + object: rdf_types::Object>, +) -> rdf_types::meta::Object { + match object { + rdf_types::Object::Id(id) => rdf_types::meta::Object::Id(id), + rdf_types::Object::Literal(l) => rdf_types::meta::Object::Literal(label_literal(l)), + } +} + +fn label_literal( + literal: rdf_types::Literal, +) -> rdf_types::meta::Literal { + match literal { + rdf_types::Literal::String(s) => { + rdf_types::meta::Literal::String(Meta(s, source::Metadata::default())) + } + rdf_types::Literal::TypedString(s, t) => rdf_types::meta::Literal::TypedString( + Meta(s, source::Metadata::default()), + Meta(t, source::Metadata::default()), + ), + rdf_types::Literal::LangString(s, t) => rdf_types::meta::Literal::LangString( + Meta(s, source::Metadata::default()), + Meta(t, source::Metadata::default()), + ), + } +} + +pub enum DeclaredDocument { + #[cfg(feature = "json-schema")] + Schema(treeldr_json_schema::Schema), +} + +pub fn import
<P>
( + files: &source::Files
<P>
, + source_id: source::FileId, + mime_type: Option, +) -> Result { + let file = files.get(source_id).unwrap(); + let json = json_syntax::Value::parse_str(file.buffer(), |span| { + source::Metadata::Extern(Location::new(source_id, span)) + }) + .expect("invalid JSON"); + + match mime_type.or_else(|| MimeType::infer(&json)) { + #[cfg(feature = "json-schema")] + Some(MimeType::JsonSchema) => Ok(Document::Schema(schema::import(json))), + #[cfg(feature = "lexicon")] + Some(MimeType::Lexicon) => Ok(Document::Lexicon(lexicon::import(json))), + unsupported => Err(LoadError::UnsupportedMimeType(crate::MimeType::Json( + unsupported, + ))), + } +} diff --git a/load/src/document/json/lexicon.rs b/load/src/document/json/lexicon.rs new file mode 100644 index 00000000..30e90205 --- /dev/null +++ b/load/src/document/json/lexicon.rs @@ -0,0 +1,11 @@ +use crate::source; + +// impl From> for LangError { +// fn from(e: treeldr_json_schema::import::Error) -> Self { +// Self::JsonSchema(e) +// } +// } + +pub fn import(json: json_syntax::MetaValue) -> treeldr_lexicon::LexiconDoc { + json_syntax::from_meta_value(json).expect("invalid Lexicon schema") +} diff --git a/load/src/document/json/schema.rs b/load/src/document/json/schema.rs new file mode 100644 index 00000000..2904d05f --- /dev/null +++ b/load/src/document/json/schema.rs @@ -0,0 +1,11 @@ +use crate::{source, LangError}; + +impl From> for LangError { + fn from(e: treeldr_json_schema::import::Error) -> Self { + Self::JsonSchema(e) + } +} + +pub fn import(json: json_syntax::MetaValue) -> treeldr_json_schema::Schema { + json_syntax::from_meta_value(json).expect("invalid JSON Schema") +} diff --git a/load/src/document/nquads.rs b/load/src/document/nquads.rs new file mode 100644 index 00000000..54af2c38 --- /dev/null +++ b/load/src/document/nquads.rs @@ -0,0 +1,24 @@ +use locspan::{Location, Meta}; + +use crate::{source, DisplayPath, ParseError}; + +/// Import a N-Quads file. +pub fn import<'f, P>( + files: &'f source::Files
<P>
, + source_id: source::FileId, +) -> Result, Meta> +where + P: DisplayPath<'f>, +{ + use nquads_syntax::Parse; + let file = files.get(source_id).unwrap(); + match nquads_syntax::Document::parse_str(file.buffer().as_str(), |span| { + source::Metadata::Extern(Location::new(source_id, span)) + }) { + Ok(Meta(doc, _)) => { + log::debug!("parsing succeeded."); + Ok(doc) + } + Err(Meta(e, meta)) => Err(Meta(ParseError::NQuads(e), meta)), + } +} diff --git a/load/src/document/tldr.rs b/load/src/document/tldr.rs new file mode 100644 index 00000000..29d1eff6 --- /dev/null +++ b/load/src/document/tldr.rs @@ -0,0 +1,70 @@ +use locspan::{Location, Meta}; +use rdf_types::{Generator, VocabularyMut}; +use treeldr::{BlankIdIndex, IriIndex}; +use treeldr_syntax as syntax; + +use crate::{source, BuildContext, DisplayPath, LangError, ParseError}; + +/// TreeLDR document. +pub struct Document { + doc: syntax::Document, + local_context: syntax::build::LocalContext, +} + +impl Document { + pub fn declare>( + &mut self, + context: &mut BuildContext, + vocabulary: &mut V, + generator: &mut impl Generator, + ) -> Result<(), syntax::build::Error> { + use treeldr_build::Document; + self.doc + .declare(&mut self.local_context, context, vocabulary, generator) + } + + pub fn build>( + mut self, + context: &mut BuildContext, + vocabulary: &mut V, + generator: &mut impl Generator, + ) -> Result<(), syntax::build::Error> { + use treeldr_build::Document; + self.doc + .define(&mut self.local_context, context, vocabulary, generator) + } +} + +impl From> for LangError { + fn from(e: treeldr_syntax::build::Error) -> Self { + Self::TreeLdr(e) + } +} + +/// Import a TreeLDR file. +pub fn import<'f, P>( + files: &'f source::Files
<P>
, + source_id: source::FileId, +) -> Result> +where + P: DisplayPath<'f>, +{ + use syntax::Parse; + let file = files.get(source_id).unwrap(); + + log::debug!("ready for parsing."); + match syntax::Document::parse_str(file.buffer().as_str(), |span| { + source::Metadata::Extern(Location::new(source_id, span)) + }) { + Ok(doc) => { + log::debug!("parsing succeeded."); + Ok(Document { + doc: doc.into_value(), + local_context: syntax::build::LocalContext::new( + file.base_iri().map(|iri| iri.into()), + ), + }) + } + Err(e) => Err(e.map(ParseError::TreeLdr)), + } +} diff --git a/load/src/document/turtle.rs b/load/src/document/turtle.rs new file mode 100644 index 00000000..08dea011 --- /dev/null +++ b/load/src/document/turtle.rs @@ -0,0 +1,30 @@ +use locspan::{Location, Meta}; + +use crate::{source, DisplayPath, LangError, ParseError}; + +impl From> for LangError { + fn from(e: turtle_syntax::build::MetaError) -> Self { + Self::Turtle(e) + } +} + +/// Import a RDF Turtle file. +pub fn import<'f, P>( + files: &'f source::Files
<P>
, + source_id: source::FileId, +) -> Result, Meta> +where + P: DisplayPath<'f>, +{ + use turtle_syntax::Parse; + let file = files.get(source_id).unwrap(); + match turtle_syntax::Document::parse_str(file.buffer().as_str(), |span| { + source::Metadata::Extern(Location::new(source_id, span)) + }) { + Ok(Meta(doc, _)) => { + log::debug!("parsing succeeded."); + Ok(doc) + } + Err(Meta(e, meta)) => Err(Meta(ParseError::Turtle(e), meta)), + } +} diff --git a/load/src/error.rs b/load/src/error.rs new file mode 100644 index 00000000..7e3926d2 --- /dev/null +++ b/load/src/error.rs @@ -0,0 +1,170 @@ +use std::path::PathBuf; + +use codespan_reporting::term::termcolor::{ColorChoice, StandardStream}; +use locspan::{MaybeLocated, Meta}; +use rdf_types::Vocabulary; +use thiserror::Error; +use treeldr::{reporting::Diagnose, BlankIdIndex, IriIndex}; + +use crate::source; + +#[derive(Error, Debug)] +pub enum LoadError { + #[error("unsupported MIME type `{0}`")] + UnsupportedMimeType(source::MimeType), + + #[error("unrecognized format for file `{0}`")] + UnrecognizedFormat(PathBuf), + + #[error("unable to read file `{0}`: {1}")] + UnableToRead(PathBuf, std::io::Error), + + #[error("parse error")] + Parsing(#[from] Meta), +} + +#[derive(Debug)] +pub enum BuildAllError { + Declaration(LangError), + Link(LangError), + Build(treeldr_build::Error), +} + +impl treeldr::reporting::DiagnoseWithVocabulary for BuildAllError { + fn message( + &self, + vocabulary: &impl Vocabulary, + ) -> String { + match self { + Self::Declaration(e) => e.message(vocabulary), + Self::Link(e) => e.message(vocabulary), + Self::Build(e) => e.message(vocabulary), + } + } + + fn labels( + &self, + vocabulary: &impl Vocabulary, + ) -> Vec> { + match self { + Self::Declaration(e) => e.labels(vocabulary), + Self::Link(e) => e.labels(vocabulary), + Self::Build(e) => e.labels(vocabulary), + } + } + + fn notes( + &self, + vocabulary: &impl Vocabulary, + ) -> Vec { + match self { + Self::Declaration(e) => e.notes(vocabulary), + Self::Link(e) => e.notes(vocabulary), + Self::Build(e) => e.notes(vocabulary), + } + } +} + +#[derive(Debug)] +pub enum LangError { + TreeLdr(treeldr_syntax::build::Error), + NQuads(treeldr_build::Error), + #[cfg(feature = "turtle")] + Turtle(turtle_syntax::build::MetaError), + #[cfg(feature = "json-schema")] + JsonSchema(treeldr_json_schema::import::Error), +} + +impl treeldr::reporting::DiagnoseWithVocabulary for LangError { + fn message( + &self, + vocabulary: &impl Vocabulary, + ) -> String { + match self { + Self::TreeLdr(e) => e.message(vocabulary), + Self::NQuads(e) => e.message(vocabulary), + #[cfg(feature = "turtle")] + Self::Turtle(e) => e.to_string(), + #[cfg(feature = "json-schema")] + Self::JsonSchema(e) => e.message(vocabulary), + } + } + + fn labels( + &self, + vocabulary: &impl Vocabulary, + ) -> Vec> { + match self { + Self::TreeLdr(e) => e.labels(vocabulary), + Self::NQuads(e) => e.labels(vocabulary), + #[cfg(feature = "turtle")] + Self::Turtle(_) => Vec::new(), + #[cfg(feature = "json-schema")] + Self::JsonSchema(e) => e.labels(vocabulary), + } + } + + fn notes( + &self, + vocabulary: &impl Vocabulary, + ) -> Vec { + match self { + Self::TreeLdr(e) => e.notes(vocabulary), + Self::NQuads(e) => e.notes(vocabulary), + #[cfg(feature = "turtle")] + Self::Turtle(_) => Vec::new(), + #[cfg(feature = "json-schema")] + Self::JsonSchema(e) => e.notes(vocabulary), + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum ParseError { + #[error("TreeLDR syntax error")] + TreeLdr(Box>), + + #[error("Turtle syntax 
error")] + Turtle(Box>), + + #[error("N-Quads syntax error")] + NQuads(Box>), +} + +impl ParseError { + pub fn diagnostic( + self, + meta: source::Metadata, + ) -> codespan_reporting::diagnostic::Diagnostic { + match self { + Self::TreeLdr(e) => Meta(e, meta).diagnostic(), + Self::NQuads(e) => codespan_reporting::diagnostic::Diagnostic::error() + .with_message("parse error") + .with_labels(vec![meta + .optional_location() + .unwrap() + .as_primary_label() + .with_message(e.to_string())]), + Self::Turtle(e) => codespan_reporting::diagnostic::Diagnostic::error() + .with_message("parse error") + .with_labels(vec![meta + .optional_location() + .unwrap() + .as_primary_label() + .with_message(e.to_string())]), + } + } + + pub fn display_and_exit<'a, P: source::DisplayPath<'a>>( + self, + files: &'a source::Files
<P>
, + meta: source::Metadata, + ) { + let diagnostic = self.diagnostic(meta); + let writer = StandardStream::stderr(ColorChoice::Always); + let config = codespan_reporting::term::Config::default(); + codespan_reporting::term::emit(&mut writer.lock(), &config, files, &diagnostic) + .expect("diagnostic failed"); + std::process::exit(1); + } +} diff --git a/load/src/lib.rs b/load/src/lib.rs index ff265e31..a8567bfb 100644 --- a/load/src/lib.rs +++ b/load/src/lib.rs @@ -1,33 +1,27 @@ -use codespan_reporting::term::{ - self, - termcolor::{ColorChoice, StandardStream}, -}; -use iref::IriBuf; -use locspan::{Location, MaybeLocated, Meta}; -use rdf_types::{Generator, Vocabulary, VocabularyMut}; -use std::hash::Hash; -use std::path::{Path, PathBuf}; -use thiserror::Error; +use rdf_types::{Generator, VocabularyMut}; use treeldr::{ - reporting::Diagnose, vocab::{GraphLabel, Object}, BlankIdIndex, Id, IriIndex, }; -use treeldr_syntax as syntax; +mod document; +mod error; mod source; + +pub use document::Document; +pub use error::*; pub use source::*; pub use treeldr::reporting; pub type BuildContext = treeldr_build::Context; -/// Build all the given documents. -pub fn build_all>( +/// Declare all the given documents. +pub fn declare_all>( vocabulary: &mut V, generator: &mut impl Generator, build_context: &mut BuildContext, documents: Vec, -) -> Result, BuildAllError> { +) -> Result<(), BuildAllError> { build_context.apply_built_in_definitions(vocabulary, generator); let mut declared_documents = Vec::with_capacity(documents.len()); @@ -43,475 +37,23 @@ pub fn build_all>( .map_err(BuildAllError::Link)? } - build_context - .build(vocabulary, generator) - .map_err(BuildAllError::Build) -} - -#[derive(Error, Debug)] -pub enum LoadError { - #[error("unsupported MIME type `{0}`")] - UnsupportedMimeType(source::MimeType), - - #[error("unrecognized format for file `{0}`")] - UnrecognizedFormat(PathBuf), - - #[error("unable to read file `{0}`: {1}")] - UnableToRead(PathBuf, std::io::Error), - - #[error("parse error")] - Parsing(#[from] Meta), -} - -pub struct TreeLdrDocument { - doc: syntax::Document, - local_context: syntax::build::LocalContext, -} - -impl TreeLdrDocument { - fn declare>( - &mut self, - context: &mut BuildContext, - vocabulary: &mut V, - generator: &mut impl Generator, - ) -> Result<(), syntax::build::Error> { - use treeldr_build::Document; - self.doc - .declare(&mut self.local_context, context, vocabulary, generator) - } - - fn build>( - mut self, - context: &mut BuildContext, - vocabulary: &mut V, - generator: &mut impl Generator, - ) -> Result<(), syntax::build::Error> { - use treeldr_build::Document; - self.doc - .define(&mut self.local_context, context, vocabulary, generator) - } -} - -#[derive(Debug)] -pub enum BuildAllError { - Declaration(LangError), - Link(LangError), - Build(treeldr_build::Error), -} - -impl treeldr::reporting::DiagnoseWithVocabulary for BuildAllError { - fn message( - &self, - vocabulary: &impl Vocabulary, - ) -> String { - match self { - Self::Declaration(e) => e.message(vocabulary), - Self::Link(e) => e.message(vocabulary), - Self::Build(e) => e.message(vocabulary), - } - } - - fn labels( - &self, - vocabulary: &impl Vocabulary, - ) -> Vec> { - match self { - Self::Declaration(e) => e.labels(vocabulary), - Self::Link(e) => e.labels(vocabulary), - Self::Build(e) => e.labels(vocabulary), - } - } - - fn notes( - &self, - vocabulary: &impl Vocabulary, - ) -> Vec { - match self { - Self::Declaration(e) => e.notes(vocabulary), - Self::Link(e) => e.notes(vocabulary), - 
Self::Build(e) => e.notes(vocabulary), - } - } -} - -#[derive(Debug)] -pub enum LangError { - TreeLdr(syntax::build::Error), - NQuads(treeldr_build::Error), - #[cfg(feature = "turtle")] - Turtle(turtle_syntax::build::MetaError), - #[cfg(feature = "json-schema")] - JsonSchema(treeldr_json_schema::import::Error), -} - -impl From> for LangError { - fn from(e: syntax::build::Error) -> Self { - Self::TreeLdr(e) - } -} - -#[cfg(feature = "turtle")] -impl From> for LangError { - fn from(e: turtle_syntax::build::MetaError) -> Self { - Self::Turtle(e) - } -} - -#[cfg(feature = "json-schema")] -impl From> for LangError { - fn from(e: treeldr_json_schema::import::Error) -> Self { - Self::JsonSchema(e) - } -} - -impl treeldr::reporting::DiagnoseWithVocabulary for LangError { - fn message( - &self, - vocabulary: &impl Vocabulary, - ) -> String { - match self { - Self::TreeLdr(e) => e.message(vocabulary), - Self::NQuads(e) => e.message(vocabulary), - #[cfg(feature = "turtle")] - Self::Turtle(e) => e.to_string(), - #[cfg(feature = "json-schema")] - Self::JsonSchema(e) => e.message(vocabulary), - } - } - - fn labels( - &self, - vocabulary: &impl Vocabulary, - ) -> Vec> { - match self { - Self::TreeLdr(e) => e.labels(vocabulary), - Self::NQuads(e) => e.labels(vocabulary), - #[cfg(feature = "turtle")] - Self::Turtle(_) => Vec::new(), - #[cfg(feature = "json-schema")] - Self::JsonSchema(e) => e.labels(vocabulary), - } - } - - fn notes( - &self, - vocabulary: &impl Vocabulary, - ) -> Vec { - match self { - Self::TreeLdr(e) => e.notes(vocabulary), - Self::NQuads(e) => e.notes(vocabulary), - #[cfg(feature = "turtle")] - Self::Turtle(_) => Vec::new(), - #[cfg(feature = "json-schema")] - Self::JsonSchema(e) => e.notes(vocabulary), - } - } -} - -pub enum Document { - TreeLdr(Box), - - NQuads(nquads_syntax::Document), - - #[cfg(feature = "turtle")] - Turtle(turtle_syntax::Document), - - #[cfg(feature = "json-schema")] - JsonSchema(Box), -} - -pub enum DeclaredDocument { - TreeLdr(Box), - - NQuads(Dataset), - - #[cfg(feature = "turtle")] - Turtle(Dataset), - - #[cfg(feature = "json-schema")] - JsonSchema(Box), -} - -impl Document { - /// Load the document located at the given `path`. - pub fn load<'f, P>( - files: &'f mut source::Files
<P>
, - filename: &Path, - ) -> Result<(Self, source::FileId), LoadError> - where - P: Clone + Eq + Hash + DisplayPath<'f> + for<'a> From<&'a Path>, - { - match files.load(&filename, None, None) { - Ok(file_id) => { - let document = match files.get(file_id).unwrap().mime_type() { - Some(type_) => Self::from_file_id(files, file_id, type_)?, - None => return Err(LoadError::UnrecognizedFormat(filename.to_owned())), - }; - - Ok((document, file_id)) - } - Err(e) => Err(LoadError::UnableToRead(filename.to_owned(), e)), - } - } - - /// Lead a document from its content. - pub fn load_content<'f, P>( - files: &'f mut source::Files
<P>
, - source: P, - content: String, - base_iri: Option, - type_: MimeType, - ) -> Result<(Self, source::FileId), LoadError> - where - P: Clone + Eq + Hash + DisplayPath<'f>, - { - let file_id = files.load_content(source, base_iri, Some(type_), content); - let document = Self::from_file_id(files, file_id, type_)?; - Ok((document, file_id)) - } - - fn from_file_id<'f, P>( - files: &'f mut source::Files
<P>
, - file_id: FileId, - type_: MimeType, - ) -> Result - where - P: DisplayPath<'f>, - { - match type_ { - MimeType::TreeLdr => Ok(Self::TreeLdr(Box::new(import_treeldr(files, file_id)?))), - MimeType::NQuads => Ok(Self::NQuads(import_nquads(files, file_id)?)), - #[cfg(feature = "turtle")] - MimeType::Turtle => Ok(Self::Turtle(import_turtle(files, file_id)?)), - #[cfg(feature = "json-schema")] - MimeType::JsonSchema => Ok(Self::JsonSchema(Box::new(import_json_schema( - files, file_id, - )))), - #[allow(unreachable_patterns)] - mime_type => Err(LoadError::UnsupportedMimeType(mime_type)), - } - } - - fn declare>( - self, - context: &mut BuildContext, - vocabulary: &mut V, - generator: &mut impl Generator, - ) -> Result { - match self { - Self::TreeLdr(mut d) => { - d.declare(context, vocabulary, generator)?; - Ok(DeclaredDocument::TreeLdr(d)) - } - Self::NQuads(d) => { - let dataset: Dataset = d - .into_iter() - .map(|Meta(quad, meta)| { - Meta( - quad.insert_into(vocabulary) - .map_predicate(|Meta(p, m)| Meta(Id::Iri(p), m)), - meta, - ) - }) - .collect(); - - use treeldr_build::Document; - dataset - .declare(&mut (), context, vocabulary, generator) - .map_err(LangError::NQuads)?; - Ok(DeclaredDocument::NQuads(dataset)) - } - #[cfg(feature = "turtle")] - Self::Turtle(d) => { - let dataset: Dataset = d - .build_triples_with(None, vocabulary, &mut *generator)? - .into_iter() - .map(|Meta(triple, meta)| { - Meta( - triple - .map_predicate(|Meta(p, m)| Meta(Id::Iri(p), m)) - .into_quad(None), - meta, - ) - }) - .collect(); - - use treeldr_build::Document; - dataset - .declare(&mut (), context, vocabulary, generator) - .map_err(LangError::NQuads)?; - Ok(DeclaredDocument::NQuads(dataset)) - } - #[cfg(feature = "json-schema")] - Self::JsonSchema(s) => { - treeldr_json_schema::import_schema(&s, None, context, vocabulary, generator)?; - Ok(DeclaredDocument::JsonSchema(s)) - } - } - } -} - -impl DeclaredDocument { - fn build>( - self, - context: &mut BuildContext, - vocabulary: &mut V, - generator: &mut impl Generator, - ) -> Result<(), LangError> { - match self { - Self::TreeLdr(d) => { - d.build(context, vocabulary, generator)?; - Ok(()) - } - Self::NQuads(d) => { - use treeldr_build::Document; - d.define(&mut (), context, vocabulary, generator) - .map_err(LangError::NQuads)?; - Ok(()) - } - #[cfg(feature = "turtle")] - Self::Turtle(d) => { - use treeldr_build::Document; - d.define(&mut (), context, vocabulary, generator) - .map_err(LangError::NQuads)?; - Ok(()) - } - #[cfg(feature = "json-schema")] - Self::JsonSchema(_) => Ok(()), - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum ParseError { - #[error("TreeLDR syntax error")] - TreeLdr(Box>), - - #[error("Turtle syntax error")] - Turtle(Box>), - - #[error("N-Quads syntax error")] - NQuads(Box>), -} - -impl ParseError { - pub fn diagnostic( - self, - meta: source::Metadata, - ) -> codespan_reporting::diagnostic::Diagnostic { - match self { - Self::TreeLdr(e) => Meta(e, meta).diagnostic(), - Self::NQuads(e) => codespan_reporting::diagnostic::Diagnostic::error() - .with_message("parse error") - .with_labels(vec![meta - .optional_location() - .unwrap() - .as_primary_label() - .with_message(e.to_string())]), - Self::Turtle(e) => codespan_reporting::diagnostic::Diagnostic::error() - .with_message("parse error") - .with_labels(vec![meta - .optional_location() - .unwrap() - .as_primary_label() - .with_message(e.to_string())]), - } - } - - pub fn display_and_exit<'a, P: source::DisplayPath<'a>>( - self, - files: &'a source::Files
<P>
, - meta: source::Metadata, - ) { - let diagnostic = self.diagnostic(meta); - let writer = StandardStream::stderr(ColorChoice::Always); - let config = codespan_reporting::term::Config::default(); - term::emit(&mut writer.lock(), &config, files, &diagnostic).expect("diagnostic failed"); - std::process::exit(1); - } + Ok(()) } -/// Import a TreeLDR file. -pub fn import_treeldr<'f, P>( - files: &'f source::Files
<P>
, - source_id: source::FileId, -) -> Result> -where - P: DisplayPath<'f>, -{ - use syntax::Parse; - let file = files.get(source_id).unwrap(); +/// Build all the given documents. +pub fn build_all>( + vocabulary: &mut V, + generator: &mut impl Generator, + build_context: &mut BuildContext, + documents: Vec, +) -> Result, BuildAllError> { + declare_all(vocabulary, generator, build_context, documents)?; - log::debug!("ready for parsing."); - match syntax::Document::parse_str(file.buffer().as_str(), |span| { - source::Metadata::Extern(Location::new(source_id, span)) - }) { - Ok(doc) => { - log::debug!("parsing succeeded."); - Ok(TreeLdrDocument { - doc: doc.into_value(), - local_context: syntax::build::LocalContext::new( - file.base_iri().map(|iri| iri.into()), - ), - }) - } - Err(e) => Err(e.map(ParseError::TreeLdr)), - } + build_context + .build(vocabulary, generator) + .map_err(BuildAllError::Build) } /// RDF dataset. pub type Dataset = grdf::meta::BTreeDataset, GraphLabel, source::Metadata>; - -/// Import a N-Quads file. -pub fn import_nquads<'f, P>( - files: &'f source::Files
<P>
, - source_id: source::FileId, -) -> Result, Meta> -where - P: DisplayPath<'f>, -{ - use nquads_syntax::Parse; - let file = files.get(source_id).unwrap(); - match nquads_syntax::Document::parse_str(file.buffer().as_str(), |span| { - source::Metadata::Extern(Location::new(source_id, span)) - }) { - Ok(Meta(doc, _)) => { - log::debug!("parsing succeeded."); - Ok(doc) - } - Err(Meta(e, meta)) => Err(Meta(ParseError::NQuads(e), meta)), - } -} - -/// Import a RDF Turtle file. -#[cfg(feature = "turtle")] -pub fn import_turtle<'f, P>( - files: &'f source::Files
<P>
, - source_id: source::FileId, -) -> Result, Meta> -where - P: DisplayPath<'f>, -{ - use turtle_syntax::Parse; - let file = files.get(source_id).unwrap(); - match turtle_syntax::Document::parse_str(file.buffer().as_str(), |span| { - source::Metadata::Extern(Location::new(source_id, span)) - }) { - Ok(Meta(doc, _)) => { - log::debug!("parsing succeeded."); - Ok(doc) - } - Err(Meta(e, meta)) => Err(Meta(ParseError::Turtle(e), meta)), - } -} - -#[cfg(feature = "json-schema")] -pub fn import_json_schema
<P>
( - files: &source::Files
<P>
, - source_id: source::FileId, -) -> treeldr_json_schema::Schema { - let file = files.get(source_id).unwrap(); - let json: serde_json::Value = serde_json::from_str(file.buffer()).expect("invalid JSON"); - treeldr_json_schema::Schema::try_from(json).expect("invalid JSON Schema") -} diff --git a/load/src/source.rs b/load/src/source.rs index 57f3b870..c9ef501a 100644 --- a/load/src/source.rs +++ b/load/src/source.rs @@ -5,6 +5,8 @@ use std::hash::Hash; use std::ops::{Deref, Range}; use std::path::{Path, PathBuf}; +use crate::document; + pub trait DisplayPath<'a> { type Display: 'a + fmt::Display; @@ -63,29 +65,37 @@ pub enum MimeType { /// text/turtle Turtle, - /// application/schema+json - JsonSchema, + /// application/json + Json(Option), } impl MimeType { - fn name(&self) -> &'static str { + pub fn name(&self) -> &'static str { match self { Self::TreeLdr => "application/treeldr", Self::NQuads => "application/n-quads", Self::Turtle => "text/turtle", - Self::JsonSchema => "application/schema+json", + Self::Json(None) => "application/json", + Self::Json(Some(t)) => t.name(), } } - fn infer(source: &Path, _content: &str) -> Option { + pub fn infer(source: &Path, _content: &str) -> Option { + use std::ffi::OsStr; + source .extension() - .and_then(std::ffi::OsStr::to_str) + .and_then(OsStr::to_str) .and_then(|ext| match ext { "tldr" => Some(MimeType::TreeLdr), "nq" => Some(MimeType::NQuads), "ttl" => Some(MimeType::Turtle), - "json" => Some(MimeType::JsonSchema), + "json" => match source.file_stem().and_then(OsStr::to_str) { + Some(stem) if stem.ends_with(".schema") => { + Some(MimeType::Json(Some(document::json::MimeType::JsonSchema))) + } + _ => Some(MimeType::Json(None)), + }, _ => None, }) } diff --git a/modules/json-ld-context/src/command.rs b/modules/json-ld-context/src/command.rs index 1bbc9df3..af0a2479 100644 --- a/modules/json-ld-context/src/command.rs +++ b/modules/json-ld-context/src/command.rs @@ -70,6 +70,7 @@ impl fmt::Display for InvalidMountPointSyntax { } } +#[derive(Debug, Clone)] pub struct MountPoint { pub iri: IriBuf, pub path: PathBuf, @@ -110,6 +111,7 @@ impl fmt::Display for InvalidPrefixBinding { } } +#[derive(Debug, Clone)] pub struct PrefixBinding { pub prefix: String, pub iri: IriBuf, diff --git a/modules/json-schema/Cargo.toml b/modules/json-schema/Cargo.toml index 54266dae..19e73fdd 100644 --- a/modules/json-schema/Cargo.toml +++ b/modules/json-schema/Cargo.toml @@ -8,11 +8,12 @@ edition.workspace = true [dependencies] treeldr.workspace = true log.workspace = true -iref.workspace = true +iref = { workspace = true, features = ["serde"] } clap = { workspace = true, features = ["derive"] } derivative.workspace = true -serde_json.workspace = true +json-syntax = { workspace = true, features = ["serde"] } thiserror.workspace = true +serde = { workspace = true, features = ["derive"] } # For the import function. treeldr-build.workspace = true diff --git a/modules/json-schema/src/command.rs b/modules/json-schema/src/command.rs index 226fd10b..39a34a31 100644 --- a/modules/json-schema/src/command.rs +++ b/modules/json-schema/src/command.rs @@ -2,6 +2,7 @@ use crate::embedding; use contextual::WithContext; use embedding::Embedding; use iref::{Iri, IriBuf}; +use json_syntax::Print; use rdf_types::Vocabulary; use std::fmt; use treeldr::{BlankIdIndex, IriIndex, TId}; @@ -9,11 +10,11 @@ use treeldr::{BlankIdIndex, IriIndex, TId}; #[derive(clap::Args)] /// Generate a JSON Schema from a TreeLDR model. 
pub struct Command { - #[clap(multiple_occurrences(true), required(true))] + #[clap(required(true))] /// Layout schema to generate. layouts: Vec, - #[clap(short = 'e', multiple_occurrences(true))] + #[clap(short = 'e')] /// Layout schema to embed. embeds: Vec, @@ -33,7 +34,6 @@ pub enum Error { UndefinedLayout(IriBuf), NotALayout(Box>), InfiniteSchema(String), - Serialization(serde_json::Error), } impl fmt::Display for Error { @@ -43,7 +43,6 @@ impl fmt::Display for Error { Self::UndefinedLayout(iri) => write!(f, "undefined layout `{iri}`"), Self::NotALayout(e) => write!(f, "node `{}` is not a layout", e.0), Self::InfiniteSchema(iri) => write!(f, "infinite schema `{iri}`"), - Self::Serialization(e) => write!(f, "JSON serialization failed: {e}"), } } } @@ -116,10 +115,7 @@ impl Command { main_layout_ref, ) { Ok(json_schema) => { - println!( - "{}", - serde_json::to_string_pretty(&json_schema).map_err(Error::Serialization)? - ); + println!("{}", json_schema.pretty_print()); Ok(()) } @@ -129,7 +125,6 @@ impl Command { Err(crate::Error::InfiniteSchema(r)) => Err(Box::new(Error::InfiniteSchema( model.get(r).unwrap().id().with(vocabulary).to_string(), ))), - Err(crate::Error::Serialization(e)) => Err(Box::new(Error::Serialization(e))), } } } diff --git a/modules/json-schema/src/import.rs b/modules/json-schema/src/import.rs index 70d995c4..f767ec83 100644 --- a/modules/json-schema/src/import.rs +++ b/modules/json-schema/src/import.rs @@ -9,6 +9,14 @@ use treeldr::{metadata::Merge, vocab, BlankIdIndex, Id, IriIndex, Name}; use treeldr_build::{layout::Restrictions, Context}; use vocab::{LocQuad, Object, Term}; +/// Checks if the given JSON document is a JSON Schema. +pub fn is_json_schema(json: &json_syntax::Value) -> bool { + match json.as_object() { + Some(object) => object.get("$schema").next().is_some(), + None => false, + } +} + /// Import error. 
#[derive(Debug)] pub enum Error { @@ -50,8 +58,8 @@ pub fn import_schema< generator: &mut impl Generator, ) -> Result> { match schema { - Schema::True => todo!(), - Schema::False => { + Schema::Boolean(true) => todo!(), + Schema::Boolean(false) => { let id = generator.next(vocabulary); context.declare_layout(id, M::default()); Ok(id) @@ -79,8 +87,8 @@ pub fn import_sub_schema< generator: &mut impl Generator, ) -> Result> { match schema { - Schema::True => todo!(), - Schema::False => { + Schema::Boolean(true) => todo!(), + Schema::Boolean(false) => { let id = generator.next(vocabulary); context.declare_layout(id, M::default()); Ok(id) @@ -269,24 +277,18 @@ fn strip_json_schema_extension(iri: Iri) -> Iri { fn into_numeric( primitive: treeldr::layout::Primitive, - n: &serde_json::Number, + n: &json_syntax::Number, ) -> treeldr::value::Numeric { use treeldr::value; match primitive { - treeldr::layout::Primitive::Float => match n.as_f64() { - Some(d) => value::Float::new(d as f32).into(), - None => todo!(), - }, - treeldr::layout::Primitive::Double => match n.as_f64() { - Some(d) => value::Double::new(d).into(), - None => todo!(), - }, - treeldr::layout::Primitive::Integer => match xsd_types::Integer::from_str(&n.to_string()) { + treeldr::layout::Primitive::Float => value::Float::new(n.as_f64_lossy() as f32).into(), + treeldr::layout::Primitive::Double => value::Double::new(n.as_f64_lossy()).into(), + treeldr::layout::Primitive::Integer => match xsd_types::Integer::from_str(n) { Ok(n) => n.into(), Err(_) => todo!(), }, treeldr::layout::Primitive::NonNegativeInteger => { - match xsd_types::NonNegativeInteger::from_str(&n.to_string()) { + match xsd_types::NonNegativeInteger::from_str(n) { Ok(n) => n.into(), Err(_) => todo!(), } @@ -313,7 +315,7 @@ fn import_layout_description< > { let mut kind = LayoutKind::Unknown; if let Some(types) = &schema.validation.any.ty { - for ty in types { + for ty in types.as_slice() { let k = match ty { schema::Type::Null => todo!(), schema::Type::Boolean => LayoutKind::Boolean, @@ -351,11 +353,11 @@ fn import_layout_description< } match &schema.desc { - schema::Description::Definition { + schema::Description::Definition(schema::Definition { string, array, object, - } => { + }) => { if !string.is_empty() || !schema.validation.string.is_empty() { kind.refine(LayoutKind::String)?; } @@ -437,15 +439,17 @@ fn import_layout_description< let mut restrictions = treeldr_build::layout::Restrictions::default(); if let Some(cnst) = &schema.validation.any.cnst { - restrictions.primitive.insert(Meta( - Restriction::String(String::Pattern(cnst.to_string().into())), - M::default(), - )); + if let Some(cnst) = cnst.as_str() { + restrictions.primitive.insert(Meta( + Restriction::String(String::Pattern(cnst.to_string().into())), + M::default(), + )); + } } if let Some(pattern) = &schema.validation.string.pattern { restrictions.primitive.insert(Meta( - Restriction::String(String::Pattern(pattern.to_string().into())), + Restriction::String(String::Pattern(pattern.clone().into())), M::default(), )); } diff --git a/modules/json-schema/src/lib.rs b/modules/json-schema/src/lib.rs index 99c9e273..b83ba537 100644 --- a/modules/json-schema/src/lib.rs +++ b/modules/json-schema/src/lib.rs @@ -1,4 +1,5 @@ use contextual::WithContext; +use locspan::Meta; use rdf_types::Vocabulary; use treeldr::{layout, BlankIdIndex, IriIndex, MetaOption, Name, TId}; @@ -19,9 +20,6 @@ pub enum Error { #[error("infinite schema")] InfiniteSchema(TId), - - #[error("serialization failed: {0}")] - 
Serialization(serde_json::Error), } /// Generate a JSON Schema from a TreeLDR model. @@ -31,7 +29,7 @@ pub fn generate( embedding: &embedding::Configuration, type_property: Option<&str>, layout_ref: TId, -) -> Result { +) -> Result { // Check there are no cycles induced by the embedded layouts. let strongly_connected_layouts = treeldr::layout::StronglyConnectedLayouts::with_filter(model, |_, sub_layout_ref| { @@ -65,18 +63,18 @@ pub fn generate( if let Some(json_schema) = json_schema.as_object_mut() { json_schema.insert( - "$schema".into(), - "https://json-schema.org/draft/2020-12/schema".into(), + Meta("$schema".into(), ()), + Meta("https://json-schema.org/draft/2020-12/schema".into(), ()), ); let title = match layout.preferred_label() { Some(label) => label.to_string(), None => name.to_pascal_case(), }; - json_schema.insert("title".into(), title.into()); + json_schema.insert(Meta("title".into(), ()), Meta(title.into(), ())); // Generate the `$defs` section. - let mut defs = serde_json::Map::new(); + let mut defs = json_syntax::Object::new(); for layout_ref in embedding.indirect_layouts() { let name = model .get(layout_ref) @@ -95,10 +93,10 @@ pub fn generate( layout_ref, )?; - defs.insert(name, json_schema); + defs.insert(Meta(name.into(), ()), Meta(json_schema, ())); } if !defs.is_empty() { - json_schema.insert("$defs".into(), defs.into()); + json_schema.insert(Meta("$defs".into(), ()), Meta(defs.into(), ())); } } @@ -126,7 +124,7 @@ fn generate_layout( type_property: Option<&str>, required: Option<&mut bool>, layout_ref: TId, -) -> Result { +) -> Result { let layout = model.get(layout_ref).unwrap(); let mut schema = generate_layout_schema( vocabulary, @@ -139,14 +137,14 @@ fn generate_layout( if let Some(schema) = schema.as_object_mut() { schema.insert( - "$id".into(), - layout.id().with(vocabulary).to_string().into(), + Meta("$id".into(), ()), + Meta(layout.id().with(vocabulary).to_string().into(), ()), ); if let Some(description) = layout.comment().short_description() { schema.insert( - "description".into(), - remove_newlines(description.trim()).into(), + Meta("description".into(), ()), + Meta(remove_newlines(description.trim()).into(), ()), ); } } @@ -161,19 +159,19 @@ fn generate_layout_schema( type_property: Option<&str>, mut required: Option<&mut bool>, layout: treeldr::Ref, -) -> Result { +) -> Result { if let Some(required) = required.as_mut() { **required = layout.as_layout().description().is_required() } use treeldr::layout::Description; match layout.as_layout().description() { - Description::Never => Ok(serde_json::Value::Bool(false)), + Description::Never => Ok(json_syntax::Value::Boolean(false)), Description::Primitive(n) => Ok(generate_primitive_type(*n)), Description::Derived(d) => Ok(generate_derived_type(d.value())), Description::Reference(_) => { - let mut json = serde_json::Map::new(); - json.insert("type".into(), "string".into()); + let mut json = json_syntax::Object::new(); + json.insert(Meta("type".into(), ()), Meta("string".into(), ())); Ok(json.into()) } Description::Struct(s) => { @@ -248,11 +246,11 @@ fn generate_layout_schema( a.restrictions(), ), Description::Alias(alias_ref) => { - let mut json = serde_json::Map::new(); + let mut json = json_syntax::Object::new(); let alias = model.get(*alias_ref.value()).unwrap(); json.insert( - "$ref".into(), - alias.id().with(vocabulary).to_string().into(), + Meta("$ref".into(), ()), + Meta(alias.id().with(vocabulary).to_string().into(), ()), ); Ok(json.into()) } @@ -266,19 +264,22 @@ fn generate_struct( type_property: 
Option<&str>, name: &Name, s: &treeldr::layout::Struct, -) -> Result { - let mut json = serde_json::Map::new(); - let mut properties = serde_json::Map::new(); +) -> Result { + let mut json = json_syntax::Object::new(); + let mut properties = json_syntax::Object::new(); let mut required_properties = Vec::new(); if let Some(type_prop) = type_property { - let mut type_schema = serde_json::Map::new(); + let mut type_schema = json_syntax::Object::new(); - type_schema.insert("type".into(), "string".into()); - type_schema.insert("pattern".into(), name.to_pascal_case().into()); + type_schema.insert(Meta("type".into(), ()), Meta("string".into(), ())); + type_schema.insert( + Meta("pattern".into(), ()), + Meta(name.to_pascal_case().into(), ()), + ); - properties.insert(type_prop.into(), type_schema.into()); - required_properties.push(type_prop.into()); + properties.insert(Meta(type_prop.into(), ()), Meta(type_schema.into(), ())); + required_properties.push(Meta(type_prop.into(), ())); } for field_id in s.fields() { @@ -298,29 +299,39 @@ fn generate_struct( if let Some(obj) = layout_schema.as_object_mut() { if let Some(description) = field.preferred_label() { obj.insert( - "description".into(), - remove_newlines(description.lexical_form().trim()).into(), + Meta("description".into(), ()), + Meta( + remove_newlines(description.lexical_form().trim()).into(), + (), + ), ); } } - properties.insert(field.name().unwrap().to_camel_case(), layout_schema); + properties.insert( + Meta(field.name().unwrap().to_camel_case().into(), ()), + Meta(layout_schema, ()), + ); if required { - required_properties.push(serde_json::Value::from( - field.name().unwrap().to_camel_case(), + required_properties.push(Meta( + json_syntax::Value::from(field.name().unwrap().to_camel_case()), + (), )); } } - json.insert("type".into(), "object".into()); + json.insert(Meta("type".into(), ()), Meta("object".into(), ())); if !properties.is_empty() { - json.insert("properties".into(), properties.into()); + json.insert(Meta("properties".into(), ()), Meta(properties.into(), ())); } if !required_properties.is_empty() { - json.insert("required".into(), required_properties.into()); + json.insert( + Meta("required".into(), ()), + Meta(required_properties.into(), ()), + ); } Ok(json.into()) @@ -333,7 +344,7 @@ fn embed_layout( type_property: Option<&str>, required: Option<&mut bool>, layout_ref: TId, -) -> Result { +) -> Result { match embedding.get(layout_ref) { Embedding::Reference => generate_layout_ref( vocabulary, @@ -344,7 +355,7 @@ fn embed_layout( layout_ref, ), Embedding::Indirect => { - let mut json = serde_json::Map::new(); + let mut json = json_syntax::Object::new(); generate_layout_defs_ref(&mut json, model, layout_ref)?; Ok(json.into()) } @@ -360,22 +371,25 @@ fn embed_layout( } fn generate_layout_defs_ref( - json: &mut serde_json::Map, + json: &mut json_syntax::Object, model: &treeldr::MutableModel, layout_ref: TId, ) -> Result<(), Error> { json.insert( - "$ref".into(), - format!( - "#/$defs/{}", - model - .get(layout_ref) - .unwrap() - .as_component() - .name() - .ok_or(Error::NoLayoutName(layout_ref))? - ) - .into(), + Meta("$ref".into(), ()), + Meta( + format!( + "#/$defs/{}", + model + .get(layout_ref) + .unwrap() + .as_component() + .name() + .ok_or(Error::NoLayoutName(layout_ref))? 
+ ) + .into(), + (), + ), ); Ok(()) } @@ -387,7 +401,7 @@ fn generate_layout_ref( type_property: Option<&str>, mut required: Option<&mut bool>, layout_ref: TId, -) -> Result { +) -> Result { let layout = model.get(layout_ref).unwrap(); if let Some(required) = required.as_mut() { @@ -396,10 +410,10 @@ fn generate_layout_ref( use treeldr::layout::Description; match layout.as_layout().description() { - Description::Never => Ok(serde_json::Value::Bool(false)), + Description::Never => Ok(json_syntax::Value::Boolean(false)), Description::Reference(_) => { - let mut json = serde_json::Map::new(); - json.insert("type".into(), "string".into()); + let mut json = json_syntax::Object::new(); + json.insert(Meta("type".into(), ()), Meta("string".into(), ())); Ok(json.into()) } Description::Enum(enm) => { @@ -468,11 +482,11 @@ fn generate_layout_ref( a.restrictions(), ), Description::Struct(_) | Description::Alias(_) => { - let mut json = serde_json::Map::new(); + let mut json = json_syntax::Object::new(); let layout = model.get(layout_ref).unwrap(); json.insert( - "$ref".into(), - layout.id().with(vocabulary).to_string().into(), + Meta("$ref".into(), ()), + Meta(layout.id().with(vocabulary).to_string().into(), ()), ); Ok(json.into()) } @@ -485,11 +499,11 @@ fn generate_option_type( embedding: &embedding::Configuration, type_property: Option<&str>, item_layout_ref: TId, -) -> Result { - let mut def = serde_json::Map::new(); +) -> Result { + let mut def = json_syntax::Object::new(); - let mut null_schema = serde_json::Map::new(); - null_schema.insert("type".into(), "null".into()); + let mut null_schema = json_syntax::Object::new(); + null_schema.insert(Meta("type".into(), ()), Meta("null".into(), ())); let item_schema = generate_layout_ref( vocabulary, @@ -500,7 +514,13 @@ fn generate_option_type( item_layout_ref, )?; - def.insert("anyOf".into(), vec![null_schema.into(), item_schema].into()); + def.insert( + Meta("anyOf".into(), ()), + Meta( + vec![Meta(null_schema.into(), ()), Meta(item_schema, ())].into(), + (), + ), + ); Ok(def.into()) } @@ -511,8 +531,8 @@ fn generate_set_type( type_property: Option<&str>, item_layout_ref: TId, restrictions: &MetaOption, F>, -) -> Result { - let mut def = serde_json::Map::new(); +) -> Result { + let mut def = json_syntax::Object::new(); let item_schema = generate_layout_ref( vocabulary, model, @@ -521,9 +541,9 @@ fn generate_set_type( None, item_layout_ref, )?; - def.insert("type".into(), "array".into()); - def.insert("items".into(), item_schema); - def.insert("uniqueItems".into(), true.into()); + def.insert(Meta("type".into(), ()), Meta("array".into(), ())); + def.insert(Meta("items".into(), ()), Meta(item_schema, ())); + def.insert(Meta("uniqueItems".into(), ()), Meta(true.into(), ())); if let Some(restrictions) = restrictions.as_ref() { if !restrictions.cardinal().min().is_zero() { @@ -532,12 +552,12 @@ fn generate_set_type( .min() .try_into() .expect("minimum is too large"); - def.insert("minItems".into(), m.into()); + def.insert(Meta("minItems".into(), ()), Meta(m.into(), ())); } if let Some(m) = restrictions.cardinal().max() { let m: u64 = m.clone().try_into().expect("maximum is too large"); - def.insert("maxItems".into(), m.into()); + def.insert(Meta("maxItems".into(), ()), Meta(m.into(), ())); } } @@ -551,8 +571,8 @@ fn generate_map_type( type_property: Option<&str>, key_layout_ref: TId, value_layout_ref: TId, -) -> Result { - let mut def = serde_json::Map::new(); +) -> Result { + let mut def = json_syntax::Object::new(); let key_schema = generate_layout_ref( 
vocabulary, @@ -572,9 +592,12 @@ fn generate_map_type( value_layout_ref, )?; - def.insert("type".into(), "object".into()); - def.insert("propertyNames".into(), key_schema); - def.insert("additionalProperties".into(), value_schema); + def.insert(Meta("type".into(), ()), Meta("object".into(), ())); + def.insert(Meta("propertyNames".into(), ()), Meta(key_schema, ())); + def.insert( + Meta("additionalProperties".into(), ()), + Meta(value_schema, ()), + ); Ok(def.into()) } @@ -586,8 +609,8 @@ fn generate_one_or_many_type( type_property: Option<&str>, item_layout_ref: TId, restrictions: &MetaOption, F>, -) -> Result { - let mut def = serde_json::Map::new(); +) -> Result { + let mut def = json_syntax::Object::new(); let item_schema = generate_layout_ref( vocabulary, @@ -599,19 +622,25 @@ fn generate_one_or_many_type( )?; def.insert( - "oneOf".into(), - vec![ - item_schema, - generate_set_type( - vocabulary, - model, - embedding, - type_property, - item_layout_ref, - restrictions, - )?, - ] - .into(), + Meta("oneOf".into(), ()), + Meta( + vec![ + Meta(item_schema, ()), + Meta( + generate_set_type( + vocabulary, + model, + embedding, + type_property, + item_layout_ref, + restrictions, + )?, + (), + ), + ] + .into(), + (), + ), ); Ok(def.into()) @@ -624,8 +653,8 @@ fn generate_list_type( type_property: Option<&str>, item_layout_ref: TId, restrictions: &MetaOption, F>, -) -> Result { - let mut def = serde_json::Map::new(); +) -> Result { + let mut def = json_syntax::Object::new(); let item_schema = generate_layout_ref( vocabulary, model, @@ -634,8 +663,8 @@ fn generate_list_type( None, item_layout_ref, )?; - def.insert("type".into(), "array".into()); - def.insert("items".into(), item_schema); + def.insert(Meta("type".into(), ()), Meta("array".into(), ())); + def.insert(Meta("items".into(), ()), Meta(item_schema, ())); if let Some(restrictions) = restrictions.as_ref() { if !restrictions.cardinal().min().is_zero() { @@ -644,12 +673,12 @@ fn generate_list_type( .min() .try_into() .expect("minimum is too large"); - def.insert("minItems".into(), m.into()); + def.insert(Meta("minItems".into(), ()), Meta(m.into(), ())); } if let Some(m) = restrictions.cardinal().max() { let m: u64 = m.clone().try_into().expect("maximum is too large"); - def.insert("maxItems".into(), m.into()); + def.insert(Meta("maxItems".into(), ()), Meta(m.into(), ())); } } @@ -662,8 +691,8 @@ fn generate_enum_type( embedding: &embedding::Configuration, type_property: Option<&str>, enm: &layout::Enum, -) -> Result { - let mut def = serde_json::Map::new(); +) -> Result { + let mut def = json_syntax::Object::new(); let mut variants = Vec::with_capacity(enm.variants().len()); for variant_id in enm.variants() { let variant = model.get(**variant_id).unwrap(); @@ -679,266 +708,287 @@ fn generate_enum_type( None, layout_ref, )?; - variants.push(variant_json) + variants.push(Meta(variant_json, ())) } - def.insert("oneOf".into(), variants.into()); + def.insert(Meta("oneOf".into(), ()), Meta(variants.into(), ())); Ok(def.into()) } -fn generate_primitive_type(p: treeldr::layout::Primitive) -> serde_json::Value { +fn generate_primitive_type(p: treeldr::layout::Primitive) -> json_syntax::Value { use treeldr::layout::Primitive; - let mut def = serde_json::Map::new(); + let mut def = json_syntax::Object::new(); match p { Primitive::Boolean => { - def.insert("type".into(), "bool".into()); + def.insert(Meta("type".into(), ()), Meta("bool".into(), ())); } Primitive::Integer => { - def.insert("type".into(), "integer".into()); + 
def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); } Primitive::NonNegativeInteger => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), 0.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(0.into(), ())); } Primitive::NonPositiveInteger => { - def.insert("type".into(), "integer".into()); - def.insert("maximum".into(), 0.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(0.into(), ())); } Primitive::PositiveInteger => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), 1.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(1.into(), ())); } Primitive::NegativeInteger => { - def.insert("type".into(), "integer".into()); - def.insert("maximum".into(), (-1).into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("maximum".into(), ()), Meta((-1).into(), ())); } Primitive::I64 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i64::MIN.into()); - def.insert("maximum".into(), i64::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i64::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i64::MAX.into(), ())); } Primitive::I32 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i32::MIN.into()); - def.insert("maximum".into(), i32::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i32::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i32::MAX.into(), ())); } Primitive::I16 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i16::MIN.into()); - def.insert("maximum".into(), i16::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i16::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i16::MAX.into(), ())); } Primitive::I8 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i8::MIN.into()); - def.insert("maximum".into(), i8::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i8::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i8::MAX.into(), ())); } Primitive::U64 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u64::MIN.into()); - def.insert("maximum".into(), u64::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(u64::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u64::MAX.into(), ())); } Primitive::U32 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u32::MIN.into()); - def.insert("maximum".into(), u32::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(u32::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u32::MAX.into(), ())); } Primitive::U16 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u16::MIN.into()); - def.insert("maximum".into(), u16::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + 
def.insert(Meta("minimum".into(), ()), Meta(u16::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u16::MAX.into(), ())); } Primitive::U8 => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u8::MIN.into()); - def.insert("maximum".into(), u8::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(u8::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u8::MAX.into(), ())); } Primitive::Float => { - def.insert("type".into(), "number".into()); + def.insert(Meta("type".into(), ()), Meta("number".into(), ())); } Primitive::Double => { - def.insert("type".into(), "number".into()); + def.insert(Meta("type".into(), ()), Meta("number".into(), ())); } Primitive::Base64Bytes => { - def.insert("type".into(), "string".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); } Primitive::HexBytes => { - def.insert("type".into(), "string".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); } Primitive::String => { - def.insert("type".into(), "string".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); } Primitive::Time => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "time".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("time".into(), ())); } Primitive::Date => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "date".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("date".into(), ())); } Primitive::DateTime => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "date-time".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("date-time".into(), ())); } Primitive::Iri => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "iri".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("iri".into(), ())); } Primitive::Uri => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "uri".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("uri".into(), ())); } Primitive::Url => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "uri".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("uri".into(), ())); + } + Primitive::Bytes => { + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + } + Primitive::Cid => { + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); } } def.into() } -fn generate_derived_type(n: &treeldr::layout::primitive::Restricted) -> serde_json::Value { +fn generate_derived_type(n: &treeldr::layout::primitive::Restricted) -> json_syntax::Value { use treeldr::layout::RestrictedPrimitive; - let mut def = serde_json::Map::new(); + let mut def = json_syntax::Object::new(); match n { RestrictedPrimitive::Boolean(_) => { - def.insert("type".into(), "bool".into()); + def.insert(Meta("type".into(), ()), Meta("bool".into(), ())); } RestrictedPrimitive::Integer(_) => { - def.insert("type".into(), "integer".into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); } RestrictedPrimitive::NonNegativeInteger(_) => { 
- def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), 0.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(0.into(), ())); } RestrictedPrimitive::NonPositiveInteger(_) => { - def.insert("type".into(), "integer".into()); - def.insert("maximum".into(), 0.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(0.into(), ())); } RestrictedPrimitive::PositiveInteger(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), 1.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(1.into(), ())); } RestrictedPrimitive::NegativeInteger(_) => { - def.insert("type".into(), "integer".into()); - def.insert("maximum".into(), (-1).into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("maximum".into(), ()), Meta((-1).into(), ())); } RestrictedPrimitive::I64(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i64::MIN.into()); - def.insert("maximum".into(), i64::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i64::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i64::MAX.into(), ())); } RestrictedPrimitive::I32(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i32::MIN.into()); - def.insert("maximum".into(), i32::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i32::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i32::MAX.into(), ())); } RestrictedPrimitive::I16(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i16::MIN.into()); - def.insert("maximum".into(), i16::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i16::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i16::MAX.into(), ())); } RestrictedPrimitive::I8(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), i8::MIN.into()); - def.insert("maximum".into(), i8::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(i8::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(i8::MAX.into(), ())); } RestrictedPrimitive::U64(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u64::MIN.into()); - def.insert("maximum".into(), u64::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(u64::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u64::MAX.into(), ())); } RestrictedPrimitive::U32(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u32::MIN.into()); - def.insert("maximum".into(), u32::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(u32::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u32::MAX.into(), ())); } RestrictedPrimitive::U16(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u16::MIN.into()); - def.insert("maximum".into(), u16::MAX.into()); + def.insert(Meta("type".into(), ()), 
Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(u16::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u16::MAX.into(), ())); } RestrictedPrimitive::U8(_) => { - def.insert("type".into(), "integer".into()); - def.insert("minimum".into(), u8::MIN.into()); - def.insert("maximum".into(), u8::MAX.into()); + def.insert(Meta("type".into(), ()), Meta("integer".into(), ())); + def.insert(Meta("minimum".into(), ()), Meta(u8::MIN.into(), ())); + def.insert(Meta("maximum".into(), ()), Meta(u8::MAX.into(), ())); } RestrictedPrimitive::Float(_) => { - def.insert("type".into(), "number".into()); + def.insert(Meta("type".into(), ()), Meta("number".into(), ())); } RestrictedPrimitive::Double(_) => { - def.insert("type".into(), "number".into()); + def.insert(Meta("type".into(), ()), Meta("number".into(), ())); } RestrictedPrimitive::Base64Bytes(restrictions) => { - def.insert("type".into(), "string".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); if let Some(r) = restrictions.as_ref() { if let Some(pattern) = r.pattern() { match pattern.as_singleton() { Some(singleton) => { - def.insert("const".into(), singleton.into()); + def.insert(Meta("const".into(), ()), Meta(singleton.into(), ())); } None => { - def.insert("pattern".into(), pattern.to_string().into()); + def.insert( + Meta("pattern".into(), ()), + Meta(pattern.to_string().into(), ()), + ); } } } } } RestrictedPrimitive::HexBytes(restrictions) => { - def.insert("type".into(), "string".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); if let Some(r) = restrictions.as_ref() { if let Some(pattern) = r.pattern() { match pattern.as_singleton() { Some(singleton) => { - def.insert("const".into(), singleton.into()); + def.insert(Meta("const".into(), ()), Meta(singleton.into(), ())); } None => { - def.insert("pattern".into(), pattern.to_string().into()); + def.insert( + Meta("pattern".into(), ()), + Meta(pattern.to_string().into(), ()), + ); } } } } } RestrictedPrimitive::String(restrictions) => { - def.insert("type".into(), "string".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); if let Some(r) = restrictions.as_ref() { if let Some(pattern) = r.pattern() { match pattern.as_singleton() { Some(singleton) => { - def.insert("const".into(), singleton.into()); + def.insert(Meta("const".into(), ()), Meta(singleton.into(), ())); } None => { - def.insert("pattern".into(), pattern.to_string().into()); + def.insert( + Meta("pattern".into(), ()), + Meta(pattern.to_string().into(), ()), + ); } } } } } RestrictedPrimitive::Time(_) => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "time".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("time".into(), ())); } RestrictedPrimitive::Date(_) => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "date".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("date".into(), ())); } RestrictedPrimitive::DateTime(_) => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "date-time".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("date-time".into(), ())); } RestrictedPrimitive::Iri(_) => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "iri".into()); + def.insert(Meta("type".into(), ()), 
Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("iri".into(), ())); } RestrictedPrimitive::Uri(_) => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "uri".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("uri".into(), ())); } RestrictedPrimitive::Url(_) => { - def.insert("type".into(), "string".into()); - def.insert("format".into(), "uri".into()); + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + def.insert(Meta("format".into(), ()), Meta("uri".into(), ())); + } + RestrictedPrimitive::Bytes(_) => { + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); + } + RestrictedPrimitive::Cid(_) => { + def.insert(Meta("type".into(), ()), Meta("string".into(), ())); } } diff --git a/modules/json-schema/src/schema.rs b/modules/json-schema/src/schema.rs index 6416522f..067a47b5 100644 --- a/modules/json-schema/src/schema.rs +++ b/modules/json-schema/src/schema.rs @@ -1,15 +1,39 @@ use iref::{IriBuf, IriRefBuf}; +use serde::{Deserialize, Serialize}; use std::collections::BTreeMap; mod validation; pub use validation::*; -pub mod from_serde_json; +#[derive(Serialize, Deserialize)] +#[serde(untagged)] +pub enum OneOrMany { + One(T), + Many(Vec), +} + +impl OneOrMany { + pub fn into_vec(self) -> Vec { + match self { + Self::One(t) => vec![t], + Self::Many(v) => v, + } + } + + pub fn as_slice(&self) -> &[T] { + match self { + Self::One(t) => std::slice::from_ref(t), + Self::Many(v) => v, + } + } +} +// pub mod from_syntax; +#[derive(Serialize, Deserialize)] +#[serde(untagged)] #[allow(clippy::large_enum_variant)] pub enum Schema { - True, - False, + Boolean(bool), Ref(RefSchema), DynamicRef(DynamicRefSchema), Regular(RegularSchema), @@ -64,29 +88,38 @@ impl From for Schema { } /// Regular schema definition. +#[derive(Serialize, Deserialize)] pub struct RegularSchema { /// Meta schema properties. + #[serde(flatten)] pub meta_schema: MetaSchema, /// Schema identifier. + #[serde(rename = "$id")] pub id: Option, /// Meta data. + #[serde(flatten)] pub meta_data: MetaData, /// Schema description. + #[serde(flatten)] pub desc: Description, /// Schema validation. + #[serde(flatten)] pub validation: Validation, + #[serde(rename = "$anchor")] pub anchor: Option, + #[serde(rename = "$dynamicAnchor")] pub dynamic_anchor: Option, /// The "$defs" keyword reserves a location for schema authors to inline /// re-usable JSON Schemas into a more general schema. The keyword does not /// directly affect the validation result. + #[serde(rename = "$defs")] pub defs: Option>, } @@ -97,14 +130,16 @@ impl RegularSchema { } /// A Vocabulary for Basic Meta-Data Annotations. +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct MetaData { pub title: Option, pub description: Option, - pub default: Option, + pub default: Option, pub deprecated: Option, pub read_only: Option, pub write_only: Option, - pub examples: Option>, + pub examples: Option>, } impl MetaData { @@ -120,10 +155,12 @@ impl MetaData { } /// Meta-Schemas and Vocabularies. +#[derive(Serialize, Deserialize)] pub struct MetaSchema { /// The "$schema" keyword is both used as a JSON Schema dialect identifier /// and as the identifier of a resource which is itself a JSON Schema, which /// describes the set of valid schemas written for this particular dialect. 
+ #[serde(rename = "$schema")] pub schema: Option, /// The "$vocabulary" keyword is used in meta-schemas to identify the @@ -135,6 +172,7 @@ pub struct MetaSchema { /// understood by the implementation MUST be processed in a manner /// consistent with the semantic definitions contained within the /// vocabulary. + #[serde(rename = "$vocabulary")] pub vocabulary: Option>, } @@ -145,49 +183,102 @@ impl MetaSchema { } /// Schema defined with the `$ref` keyword. +#[derive(Serialize, Deserialize)] pub struct RefSchema { - pub meta_data: MetaData, + #[serde(rename = "$ref")] pub target: IriRefBuf, + + #[serde(flatten)] + pub meta_data: MetaData, } /// Schema defined with the `$dynamicRef` keyword. +#[derive(Serialize, Deserialize)] pub struct DynamicRefSchema { - pub meta_data: MetaData, + #[serde(rename = "$ref")] pub target: IriRefBuf, + + #[serde(flatten)] + pub meta_data: MetaData, } /// Schema description. +#[derive(Serialize, Deserialize)] +#[serde(untagged)] #[allow(clippy::large_enum_variant)] pub enum Description { - Definition { - string: StringEncodedData, - array: ArraySchema, - object: ObjectSchema, - }, - AllOf(Vec), - AnyOf(Vec), - OneOf(Vec), - Not(Box), - If { - condition: Box, - then: Option>, - els: Option>, - }, + AllOf(AllOf), + AnyOf(AnyOf), + OneOf(OneOf), + Not(Not), + If(IfThenElse), + Definition(Definition), +} + +#[derive(Serialize, Deserialize)] +pub struct AllOf { + #[serde(rename = "allOf")] + pub schemas: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct AnyOf { + #[serde(rename = "anyOf")] + pub schemas: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct OneOf { + #[serde(rename = "oneOf")] + pub schemas: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct Not { + #[serde(rename = "not")] + pub schema: Box, +} + +#[derive(Serialize, Deserialize)] +pub struct IfThenElse { + #[serde(rename = "if")] + pub condition: Box, + + pub then: Option>, + + #[serde(rename = "else")] + pub els: Option>, +} + +#[derive(Serialize, Deserialize)] +pub struct Definition { + #[serde(flatten)] + pub string: StringEncodedData, + + #[serde(flatten)] + pub array: ArraySchema, + + #[serde(flatten)] + pub object: ObjectSchema, +} + +impl Definition { + pub fn is_empty(&self) -> bool { + self.string.is_empty() && self.array.is_empty() && self.object.is_empty() + } } impl Description { pub fn is_empty(&self) -> bool { match self { - Self::Definition { - string, - array, - object, - } => string.is_empty() && array.is_empty() && object.is_empty(), + Self::Definition(def) => def.is_empty(), _ => false, } } } +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct ArraySchema { /// Validation succeeds if each element of the instance validates against /// the schema at the same position, if any. This keyword does not constrain @@ -240,6 +331,8 @@ impl ArraySchema { } /// Keywords for Applying Subschemas to Objects. +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct ObjectSchema { /// Validation succeeds if, for each name that appears in both the instance /// and as a name within this keyword's value, the child instance for that @@ -312,6 +405,8 @@ impl ObjectSchema { } /// A Vocabulary for the Contents of String-Encoded Data +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct StringEncodedData { /// Defines that the string SHOULD be interpreted as binary data and decoded /// using the encoding named by this property. 
diff --git a/modules/json-schema/src/schema/from_serde_json.rs b/modules/json-schema/src/schema/from_syntax.rs similarity index 89% rename from modules/json-schema/src/schema/from_serde_json.rs rename to modules/json-schema/src/schema/from_syntax.rs index ed2fec22..61d781c9 100644 --- a/modules/json-schema/src/schema/from_serde_json.rs +++ b/modules/json-schema/src/schema/from_syntax.rs @@ -1,6 +1,7 @@ use super::*; use iref::{IriBuf, IriRefBuf}; -use serde_json::Value; +use json_syntax::Value; +use locspan::Meta; use std::fmt; #[derive(Debug)] @@ -38,14 +39,14 @@ impl fmt::Display for Error { trait ValueTryInto: Sized { fn try_into_bool(self) -> Result; - fn try_into_number(self) -> Result; + fn try_into_number(self) -> Result; fn try_into_u64(self) -> Result { self.try_into_number()? .as_u64() .ok_or(Error::NotAPositiveInteger) } fn try_into_string(self) -> Result; - fn try_into_array(self) -> Result, Error>; + fn try_into_array(self) -> Result; fn try_into_schema_array(self) -> Result, Error> { let mut schemas = Vec::new(); @@ -69,7 +70,7 @@ trait ValueTryInto: Sized { Ok(Box::new(self.try_into_schema()?)) } - fn try_into_object(self) -> Result, Error>; + fn try_into_object(self) -> Result; fn try_into_uri(self) -> Result { IriBuf::from_string(self.try_into_string()?).map_err(|_| Error::InvalidUri) @@ -83,12 +84,12 @@ trait ValueTryInto: Sized { impl ValueTryInto for Value { fn try_into_bool(self) -> Result { match self { - Self::Bool(b) => Ok(b), + Self::Boolean(b) => Ok(b), _ => Err(Error::NotABoolean), } } - fn try_into_number(self) -> Result { + fn try_into_number(self) -> Result { match self { Self::Number(n) => Ok(n), _ => Err(Error::NotANumber), @@ -97,19 +98,19 @@ impl ValueTryInto for Value { fn try_into_string(self) -> Result { match self { - Self::String(s) => Ok(s), + Self::String(s) => Ok(s.into_string()), _ => Err(Error::NotAString), } } - fn try_into_array(self) -> Result, Error> { + fn try_into_array(self) -> Result { match self { Self::Array(a) => Ok(a), _ => Err(Error::NotAnArray), } } - fn try_into_object(self) -> Result, Error> { + fn try_into_object(self) -> Result { match self { Self::Object(o) => Ok(o), _ => Err(Error::NotAnObject), @@ -121,7 +122,7 @@ impl ValueTryInto for Value { } } -fn read_meta_data(value: &mut serde_json::Map) -> Result { +fn read_meta_data(value: &mut json_syntax::Object) -> Result { Ok(MetaData { title: value .remove("title") @@ -151,7 +152,7 @@ fn read_meta_data(value: &mut serde_json::Map) -> Result) -> Result { +fn read_meta_schema(value: &mut json_syntax::Object) -> Result { Ok(MetaSchema { schema: value .remove("$schema") @@ -172,7 +173,7 @@ fn read_meta_schema(value: &mut serde_json::Map) -> Result) -> Result { +fn read_description(value: &mut json_syntax::Object) -> Result { if let Some(all_of) = value.remove("allOf") { Ok(Description::AllOf(all_of.try_into_schema_array()?)) } else if let Some(any_of) = value.remove("anyOf") { @@ -203,7 +204,7 @@ fn read_description(value: &mut serde_json::Map) -> Result, + value: &mut json_syntax::Object, ) -> Result { Ok(StringEncodedData { content_encoding: value @@ -221,7 +222,7 @@ fn read_string_encoded_data_schema( }) } -fn read_array_schema(value: &mut serde_json::Map) -> Result { +fn read_array_schema(value: &mut json_syntax::Object) -> Result { Ok(ArraySchema { prefix_items: value .remove("prefixItems") @@ -242,7 +243,7 @@ fn read_array_schema(value: &mut serde_json::Map) -> Result) -> Result { +fn read_object_schema(value: &mut json_syntax::Object) -> Result { Ok(ObjectSchema { properties: 
value .remove("properties") @@ -288,7 +289,7 @@ fn read_object_schema(value: &mut serde_json::Map) -> Result) -> Result { +fn read_validation(value: &mut json_syntax::Object) -> Result { Ok(Validation { any: read_any_validation(value)?, numeric: read_numeric_validation(value)?, @@ -299,7 +300,7 @@ fn read_validation(value: &mut serde_json::Map) -> Result) -> Result { +fn read_any_validation(value: &mut json_syntax::Object) -> Result { Ok(AnyValidation { ty: value .remove("type") @@ -325,7 +326,7 @@ fn read_any_validation(value: &mut serde_json::Map) -> Result, + value: &mut json_syntax::Object, ) -> Result { Ok(NumericValidation { multiple_of: value @@ -352,7 +353,7 @@ fn read_numeric_validation( } fn read_string_validation( - value: &mut serde_json::Map, + value: &mut json_syntax::Object, ) -> Result { Ok(StringValidation { max_length: value @@ -371,7 +372,7 @@ fn read_string_validation( } fn read_array_validation( - value: &mut serde_json::Map, + value: &mut json_syntax::Object, ) -> Result { Ok(ArrayValidation { max_items: value @@ -398,7 +399,7 @@ fn read_array_validation( } fn read_object_validation( - value: &mut serde_json::Map, + value: &mut json_syntax::Object, ) -> Result { Ok(ObjectValidation { max_properties: value @@ -484,8 +485,8 @@ impl TryFrom for Schema { fn try_from(v: Value) -> Result { match v { - Value::Bool(true) => Ok(Self::True), - Value::Bool(false) => Ok(Self::False), + Value::Boolean(true) => Ok(Self::True), + Value::Boolean(false) => Ok(Self::False), Value::Object(mut obj) => { if let Some(value) = obj.remove("$ref") { let value = value.as_str().ok_or(Error::NotAString)?; diff --git a/modules/json-schema/src/schema/validation.rs b/modules/json-schema/src/schema/validation.rs index d07a8d76..03772f9b 100644 --- a/modules/json-schema/src/schema/validation.rs +++ b/modules/json-schema/src/schema/validation.rs @@ -1,6 +1,11 @@ use std::collections::BTreeMap; -#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] +use serde::{Deserialize, Serialize}; + +use super::OneOrMany; + +#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum Type { Null, Boolean, @@ -11,12 +16,23 @@ pub enum Type { Object, } +#[derive(Serialize, Deserialize)] pub struct Validation { + #[serde(flatten)] pub any: AnyValidation, + + #[serde(flatten)] pub numeric: NumericValidation, + + #[serde(flatten)] pub string: StringValidation, + + #[serde(flatten)] pub array: ArrayValidation, + + #[serde(flatten)] pub object: ObjectValidation, + pub format: Option, } @@ -32,47 +48,55 @@ impl Validation { } /// Validation Keywords for Any Instance Type. +#[derive(Serialize, Deserialize)] pub struct AnyValidation { - pub ty: Option>, - pub enm: Option>, - pub cnst: Option, + #[serde(rename = "type")] + pub ty: Option>, + + #[serde(rename = "enum")] + pub enm: Option>, + + #[serde(rename = "const")] + pub cnst: Option, } /// Validation Keywords for Numeric Instances (number and integer). +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct NumericValidation { /// The value of "multipleOf" MUST be a number, strictly greater than 0. /// /// A numeric instance is valid only if division by this keyword's value /// results in an integer. - pub multiple_of: Option, + pub multiple_of: Option, /// The value of "maximum" MUST be a number, representing an inclusive upper /// limit for a numeric instance. 
/// /// If the instance is a number, then this keyword validates only if the /// instance is less than or exactly equal to "maximum". - pub maximum: Option, + pub maximum: Option, /// The value of "exclusiveMaximum" MUST be a number, representing an /// exclusive upper limit for a numeric instance. /// /// If the instance is a number, then the instance is valid only if it has a /// value strictly less than (not equal to) "exclusiveMaximum". - pub exclusive_maximum: Option, + pub exclusive_maximum: Option, /// The value of "minimum" MUST be a number, representing an inclusive lower /// limit for a numeric instance. /// /// If the instance is a number, then this keyword validates only if the /// instance is greater than or exactly equal to "minimum". - pub minimum: Option, + pub minimum: Option, /// The value of "exclusiveMinimum" MUST be a number, representing an /// exclusive lower limit for a numeric instance. /// /// If the instance is a number, then the instance is valid only if it has a /// value strictly greater than (not equal to) "exclusiveMinimum". - pub exclusive_minimum: Option, + pub exclusive_minimum: Option, } impl NumericValidation { @@ -86,6 +110,8 @@ impl NumericValidation { } /// Validation Keywords for Strings +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct StringValidation { /// A string instance is valid against this keyword if its length is less /// than, or equal to, the value of this keyword. @@ -120,6 +146,8 @@ impl StringValidation { } /// Validation Keywords for Arrays +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct ArrayValidation { /// The value of this keyword MUST be a non-negative integer. /// @@ -181,6 +209,8 @@ impl ArrayValidation { } /// Validation Keywords for Objects +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct ObjectValidation { /// An object instance is valid against "maxProperties" if its number of /// properties is less than, or equal to, the value of this keyword. @@ -223,7 +253,8 @@ impl ObjectValidation { } } -#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum Format { /// A string instance is valid against this attribute if it is a valid /// representation according to the "date-time" production. 
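Because `OneOrMany` is `#[serde(untagged)]`, a `"type"` keyword deserializes whether it is written as a single name or as an array of names, and `as_slice` gives consumers such as the importer's `for ty in types.as_slice()` loop a uniform view of either shape. A small sketch (hypothetical test, not part of the patch):

	#[test]
	fn one_or_many_as_slice() {
		// A lone type name and a list of type names expose the same slice API.
		let one = OneOrMany::One(Type::String);
		let many = OneOrMany::Many(vec![Type::String, Type::Null]);
		assert_eq!(one.as_slice(), &[Type::String]);
		assert_eq!(many.as_slice(), &[Type::String, Type::Null]);
	}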
diff --git a/modules/json-schema/tests/json_schema_generate.rs b/modules/json-schema/tests/json_schema_generate.rs index e854e372..ab8d0e72 100644 --- a/modules/json-schema/tests/json_schema_generate.rs +++ b/modules/json-schema/tests/json_schema_generate.rs @@ -1,4 +1,5 @@ use iref::Iri; +use json_syntax::{BorrowUnordered, Parse as ParseJson, Print}; use rdf_types::IriVocabulary; use static_iref::iri; use treeldr::{Id, TId}; @@ -73,16 +74,17 @@ impl Test { ) .expect("unable to generate JSON Schema"); - let expected: serde_json::Value = - serde_json::from_str(expected_output).expect("invalid JSON"); + let expected = json_syntax::Value::parse_str(expected_output, |_| ()) + .expect("invalid JSON") + .into_value(); - let success = output == expected; + let success = output.unordered() == expected.unordered(); if !success { eprintln!( "output:\n{}\nexpected:\n{}", - serde_json::to_string_pretty(&output).unwrap(), - serde_json::to_string_pretty(&expected).unwrap() + output.pretty_print(), + expected.pretty_print() ); } @@ -137,10 +139,7 @@ impl Test { ) .expect("unable to generate JSON Schema"); - eprintln!( - "output:\n{}", - serde_json::to_string_pretty(&output).unwrap() - ); + eprintln!("output:\n{}", output.pretty_print()); } } } diff --git a/modules/json-schema/tests/json_schema_import.rs b/modules/json-schema/tests/json_schema_import.rs index 2f2340ba..d65130c2 100644 --- a/modules/json-schema/tests/json_schema_import.rs +++ b/modules/json-schema/tests/json_schema_import.rs @@ -1,5 +1,6 @@ use contextual::WithContext; use iref::Iri; +use json_syntax::Parse; use locspan::Meta; use rdf_types::{BlankIdBuf, IriVocabularyMut, VocabularyMut}; use treeldr::{ @@ -34,8 +35,10 @@ fn import_json_schema>( grdf::BTreeDataset, Id, ) { - let json: serde_json::Value = serde_json::from_str(content).expect("invalid JSON"); - let input = treeldr_json_schema::Schema::try_from(json).expect("invalid JSON Schema"); + let json = json_syntax::Value::parse_str(content, |_| ()) + .expect("invalid JSON") + .into_value(); + let input = json_syntax::from_value(json).expect("invalid JSON Schema"); let mut context: Context<()> = Context::new(); let mut generator = rdf_types::generator::Blank::new_with_prefix("t".to_string()); diff --git a/modules/lexicon/.gitignore b/modules/lexicon/.gitignore new file mode 100644 index 00000000..c41cc9e3 --- /dev/null +++ b/modules/lexicon/.gitignore @@ -0,0 +1 @@ +/target \ No newline at end of file diff --git a/modules/lexicon/.rustfmt.toml b/modules/lexicon/.rustfmt.toml new file mode 100644 index 00000000..18d655e2 --- /dev/null +++ b/modules/lexicon/.rustfmt.toml @@ -0,0 +1 @@ +hard_tabs = true \ No newline at end of file diff --git a/modules/lexicon/Cargo.toml b/modules/lexicon/Cargo.toml new file mode 100644 index 00000000..4443309e --- /dev/null +++ b/modules/lexicon/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "treeldr-lexicon" +description = "TreeLDR Lexicon support" +version.workspace = true +authors.workspace = true +edition.workspace = true + +[dependencies] +treeldr.workspace = true +log.workspace = true +iref.workspace = true +static-iref.workspace = true +contextual.workspace = true +clap = { workspace = true, features = ["derive"] } +derivative.workspace = true +thiserror.workspace = true +json-syntax = { workspace = true, features = ["serde"] } +serde = { workspace = true, features = ["derive"] } +serde_repr = "0.1" +rdf-types.workspace = true + +[dev-dependencies] +treeldr-rust-prelude.workspace = true +treeldr-rust-macros.workspace = true \ No newline at end of 
file diff --git a/modules/lexicon/README.md b/modules/lexicon/README.md new file mode 100644 index 00000000..7808a1a8 --- /dev/null +++ b/modules/lexicon/README.md @@ -0,0 +1,3 @@ +# Lexicon module for TreeLDR + +This library provides support for the [Lexicon](https://atproto.com/guides/lexicon) schema definition language defined by [The AT Protocol](https://atproto.com/). \ No newline at end of file diff --git a/modules/lexicon/examples/atproto/resolveHandle.json b/modules/lexicon/examples/atproto/resolveHandle.json new file mode 100644 index 00000000..b21d4dcb --- /dev/null +++ b/modules/lexicon/examples/atproto/resolveHandle.json @@ -0,0 +1,30 @@ +{ + "lexicon": 1, + "id": "com.atproto.identity.resolveHandle", + "defs": { + "main": { + "type": "query", + "description": "Provides the DID of a repo.", + "parameters": { + "type": "params", + "properties": { + "handle": { + "type": "string", + "format": "handle", + "description": "The handle to resolve. If not supplied, will resolve the host's own handle." + } + } + }, + "output": { + "encoding": "application/json", + "schema": { + "type": "object", + "required": ["did"], + "properties": { + "did": {"type": "string", "format": "did"} + } + } + } + } + } +} \ No newline at end of file diff --git a/modules/lexicon/src/export.rs b/modules/lexicon/src/export.rs new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/modules/lexicon/src/export.rs @@ -0,0 +1 @@ + diff --git a/modules/lexicon/src/import.rs b/modules/lexicon/src/import.rs new file mode 100644 index 00000000..72dae560 --- /dev/null +++ b/modules/lexicon/src/import.rs @@ -0,0 +1,367 @@ +use contextual::DisplayWithContext; +use iref::{AsIri, Iri, IriBuf}; +use rdf_types::{ + BlankIdVocabulary, Generator, Id, IriVocabulary, Literal, Object, Triple, Vocabulary, + VocabularyMut, +}; +use treeldr::vocab; + +use crate::{ + LexAnyUserType, LexBlob, LexBoolean, LexInteger, LexIpldType, LexObject, LexPrimitive, + LexPrimitiveArray, LexRef, LexRefUnion, LexRefVariant, LexString, LexUnknown, LexXrpcBody, + LexXrpcParametersProperty, LexXrpcSubscriptionMessage, LexiconDoc, Nsid, ObjectProperty, +}; + +mod array; +mod blob; +mod ipld; +mod object; +mod primitive; +mod record; +mod reference; +mod token; +mod xrpc; + +/// Checks if the given JSON document is a supported Lexicon document. 
+pub fn is_lexicon_document(json: &json_syntax::Value) -> bool {
+    match json.as_object() {
+        Some(object) => match object.get("lexicon").next() {
+            Some(value) => match value.as_number() {
+                Some(number) => number.as_str() == "1",
+                None => false,
+            },
+            None => false,
+        },
+        None => false,
+    }
+}
+
+pub type OutputSubject<V> = Id<<V as IriVocabulary>::Iri, <V as BlankIdVocabulary>::BlankId>;
+pub type OutputPredicate<V> = <V as IriVocabulary>::Iri;
+pub type OutputObject<V> = Object<OutputSubject<V>, Literal<String, <V as IriVocabulary>::Iri>>;
+pub type OutputTriple<V> = Triple<OutputSubject<V>, OutputPredicate<V>, OutputObject<V>>;
+
+trait RdfId<V: VocabularyMut> {
+    fn rdf_id(&self, vocabulary: &mut V, namespace: Iri) -> OutputSubject<V>;
+}
+
+struct Context {
+    base_iri: IriBuf,
+}
+
+impl Context {
+    fn resolve_reference(&self, r: &str) -> IriBuf {
+        match r.split_once('#') {
+            Some((prefix, fragment)) => {
+                if prefix.is_empty() {
+                    let mut iri = self.base_iri.clone();
+                    iri.set_fragment(Some(fragment.try_into().unwrap()));
+                    iri
+                } else {
+                    let mut iri = Nsid::new(prefix).unwrap().as_iri();
+                    iri.set_fragment(Some(fragment.try_into().unwrap()));
+                    iri
+                }
+            }
+            None => Nsid::new(r).unwrap().as_iri(),
+        }
+    }
+}
+
+pub struct IntoTriples<'v, V: Vocabulary, G> {
+    vocabulary: &'v mut V,
+    generator: G,
+    stack: Vec<Item<V>>,
+    pending: Vec<OutputTriple<V>>,
+    context: Context,
+}
+
+impl<'v, V: Vocabulary, G> IntoTriples<'v, V, G> {
+    pub fn new(doc: LexiconDoc, vocabulary: &'v mut V, generator: G) -> Self {
+        let base_iri = doc.id.as_iri();
+
+        Self {
+            vocabulary,
+            generator,
+            stack: vec![Item::Doc(doc)],
+            pending: Vec::new(),
+            context: Context { base_iri },
+        }
+    }
+}
+
+impl<'v, V: VocabularyMut, G: Generator<V>> Iterator for IntoTriples<'v, V, G>
+where
+    V::Iri: Clone,
+    V::BlankId: Clone,
+    OutputTriple<V>: DisplayWithContext<V>,
+{
+    type Item = Triple<
+        Id<V::Iri, V::BlankId>,
+        V::Iri,
+        Object<Id<V::Iri, V::BlankId>, Literal<String, V::Iri>>,
+    >;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if let Some(triple) = self.pending.pop() {
+            // eprintln!("{} .", triple.with(&*self.vocabulary));
+            return Some(triple);
+        }
+
+        while let Some(item) = self.stack.pop() {
+            item.process(
+                self.vocabulary,
+                &mut self.generator,
+                &mut self.stack,
+                &mut self.pending,
+                &self.context,
+            );
+
+            if let Some(triple) = self.pending.pop() {
+                // eprintln!("{} .", triple.with(&*self.vocabulary));
+                return Some(triple);
+            }
+        }
+
+        None
+    }
+}
+
+enum Item<V: Vocabulary> {
+    Doc(LexiconDoc),
+    UserType(OutputSubject<V>, LexAnyUserType),
+    XrpcParametersProperty(OutputSubject<V>, LexXrpcParametersProperty),
+    XrpcBody(OutputSubject<V>, LexXrpcBody),
+    XrpcSubscriptionMessage(OutputSubject<V>, LexXrpcSubscriptionMessage),
+    Primitive(OutputSubject<V>, LexPrimitive),
+    PrimitiveArray(OutputSubject<V>, LexPrimitiveArray),
+    RefVariant(OutputSubject<V>, LexRefVariant),
+    Ref(OutputSubject<V>, LexRef),
+    RefUnion(OutputSubject<V>, LexRefUnion),
+    Boolean(OutputSubject<V>, LexBoolean),
+    Integer(OutputSubject<V>, LexInteger),
+    String(OutputSubject<V>, LexString),
+    Object(OutputSubject<V>, LexObject),
+    ObjectProperty(OutputSubject<V>, ObjectProperty),
+    Blob(OutputSubject<V>, LexBlob),
+    Ipld(OutputSubject<V>, LexIpldType),
+    Unknown(OutputSubject<V>, LexUnknown),
+}
+
+trait IntoItem<V: Vocabulary> {
+    fn into_item(self, id: OutputSubject<V>) -> Item<V>;
+}
+
+trait Process<V: VocabularyMut> {
+    fn process(
+        self,
+        vocabulary: &mut V,
+        generator: &mut impl Generator<V>,
+        stack: &mut Vec<Item<V>>,
+        triples: &mut Vec<OutputTriple<V>>,
+        context: &Context,
+        id: OutputSubject<V>,
+    ) where
+        V::Iri: Clone,
+        V::BlankId: Clone;
+}
+
+impl<V: VocabularyMut> Item<V> {
+    fn process(
+        self,
+        vocabulary: &mut V,
+        generator: &mut impl Generator<V>,
+        stack: &mut Vec<Item<V>>,
+        triples: &mut Vec<OutputTriple<V>>,
+        context: &Context,
+    ) where
+        V::Iri: Clone,
+        V::BlankId: Clone,
+    {
+        match self {
+            Self::Doc(doc) => {
+                if let Some(main) = doc.definitions.main {
+                    let iri = doc.id.as_iri();
+                    let id = Id::Iri(vocabulary.insert(iri.as_iri()));
+                    stack.push(Item::UserType(id, main))
+                }
+
+                for (suffix, ty) in doc.definitions.other {
+                    let iri =
+                        IriBuf::from_string(format!("{}#{}", doc.id.as_iri(), suffix)).unwrap();
+                    let id = Id::Iri(vocabulary.insert(iri.as_iri()));
+                    stack.push(Item::UserType(id, ty.into()))
+                }
+            }
+            Self::UserType(id, ty) => {
+                ty.process(vocabulary, generator, stack, triples, context, id)
+            }
+            Self::XrpcParametersProperty(id, p) => {
+                p.process(vocabulary, generator, stack, triples, context, id)
+            }
+            Self::XrpcBody(id, b) => b.process(vocabulary, generator, stack, triples, context, id),
+            Self::XrpcSubscriptionMessage(id, m) => {
+                m.process(vocabulary, generator, stack, triples, context, id)
+            }
+            Self::Primitive(id, p) => p.process(vocabulary, generator, stack, triples, context, id),
+            Self::PrimitiveArray(id, a) => {
+                a.process(vocabulary, generator, stack, triples, context, id)
+            }
+            Self::RefVariant(id, r) => {
+                r.process(vocabulary, generator, stack, triples, context, id)
+            }
+            Self::Ref(id, r) => r.process(vocabulary, generator, stack, triples, context, id),
+            Self::RefUnion(id, r) => r.process(vocabulary, generator, stack, triples, context, id),
+            Self::Object(id, o) => o.process(vocabulary, generator, stack, triples, context, id),
+            Self::ObjectProperty(id, p) => {
+                p.process(vocabulary, generator, stack, triples, context, id)
+            }
+            Self::Boolean(id, b) => b.process(vocabulary, generator, stack, triples, context, id),
+            Self::Integer(id, i) => i.process(vocabulary, generator, stack, triples, context, id),
+            Self::String(id, s) => s.process(vocabulary, generator, stack, triples, context, id),
+            Self::Blob(id, b) => b.process(vocabulary, generator, stack, triples, context, id),
+            Self::Ipld(id, i) => i.process(vocabulary, generator, stack, triples, context, id),
+            Self::Unknown(id, u) => u.process(vocabulary, generator, stack, triples, context, id),
+        }
+    }
+}
+
+impl<V: VocabularyMut> Process<V> for LexAnyUserType {
+    fn process(
+        self,
+        vocabulary: &mut V,
+        generator: &mut impl Generator<V>,
+        stack: &mut Vec<Item<V>>,
+        triples: &mut Vec<OutputTriple<V>>,
+        context: &Context,
+        id: OutputSubject<V>,
+    ) where
+        V::Iri: Clone,
+        V::BlankId: Clone,
+    {
+        match self {
+            Self::Record(r) => r.process(vocabulary, generator, stack, triples, context, id),
+            Self::Query(q) => q.process(vocabulary, generator, stack, triples, context, id),
+            Self::Procedure(p) => p.process(vocabulary, generator, stack, triples, context, id),
+            Self::Subscription(s) => s.process(vocabulary, generator, stack, triples, context, id),
+            Self::Array(a) => a.process(vocabulary, generator, stack, triples, context, id),
+            Self::Token(t) => t.process(vocabulary, generator, stack, triples, context, id),
+            Self::Object(o) => o.process(vocabulary, generator, stack, triples, context, id),
+            Self::Boolean(b) => b.process(vocabulary, generator, stack, triples, context, id),
+            Self::Integer(i) => i.process(vocabulary, generator, stack, triples, context, id),
+            Self::String(s) => s.process(vocabulary, generator, stack, triples, context, id),
+            Self::Bytes(b) => b.process(vocabulary, generator, stack, triples, context, id),
+            Self::CidLink(l) => l.process(vocabulary, generator, stack, triples, context, id),
+            Self::Unknown(u) => stack.push(Item::Unknown(id, u)),
+        }
+    }
+}
+
+fn nsid_name(s: &str) -> &str {
+    match s.rsplit_once(|c| matches!(c, '/' | '.'
| '#')) { + Some((_, r)) => r, + None => s, + } +} + +fn sub_id( + vocabulary: &mut V, + id: &OutputSubject, + name: &str, +) -> OutputSubject { + let parent_iri = vocabulary.iri(id.as_iri().unwrap()).unwrap(); + + let value = match parent_iri.as_str().split_once('#') { + Some((a, b)) => { + format!("{a}/{b}/{name}") + } + None => { + if parent_iri.path().segments().count() > 1 { + format!("{parent_iri}/{name}") + } else { + format!("{parent_iri}/main/{name}") + } + } + }; + + let iri = IriBuf::new(&value).unwrap(); + Id::Iri(vocabulary.insert(iri.as_iri())) +} + +fn build_rdf_list, I: IntoIterator>( + vocabulary: &mut V, + generator: &mut G, + triples: &mut Vec>, + items: I, + mut f: impl FnMut(&mut V, &mut G, &mut Vec>, I::Item) -> OutputObject, +) -> OutputSubject +where + I::IntoIter: DoubleEndedIterator, + V::Iri: Clone, + V::BlankId: Clone, +{ + let mut head = Id::Iri(vocabulary.insert(vocab::Rdf::Nil.as_iri())); + + for item in items.into_iter().rev() { + let first = f(vocabulary, generator, triples, item); + + let node = generator.next(vocabulary); + + triples.push(Triple( + node.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::Rdf::List.as_iri()))), + )); + + triples.push(Triple( + node.clone(), + vocabulary.insert(vocab::Rdf::First.as_iri()), + first, + )); + + triples.push(Triple( + node.clone(), + vocabulary.insert(vocab::Rdf::Rest.as_iri()), + Object::Id(head), + )); + + head = node + } + + head +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn sub_id_1() { + let parent_id = Id::Iri(IriBuf::new("lexicon:app.bsky.embed").unwrap()); + let id = sub_id(&mut (), &parent_id, "external"); + assert_eq!( + id.into_iri().unwrap().as_str(), + "lexicon:app.bsky.embed/main/external" + ) + } + + #[test] + fn sub_id_2() { + let parent_id = Id::Iri(IriBuf::new("lexicon:app.bsky.embed#external").unwrap()); + let id = sub_id(&mut (), &parent_id, "foo"); + assert_eq!( + id.into_iri().unwrap().as_str(), + "lexicon:app.bsky.embed/external/foo" + ) + } + + #[test] + fn sub_id_3() { + let parent_id = Id::Iri(IriBuf::new("lexicon:app.bsky.embed/external/foo").unwrap()); + let id = sub_id(&mut (), &parent_id, "bar"); + assert_eq!( + id.into_iri().unwrap().as_str(), + "lexicon:app.bsky.embed/external/foo/bar" + ) + } +} diff --git a/modules/lexicon/src/import/array.rs b/modules/lexicon/src/import/array.rs new file mode 100644 index 00000000..acafa6f4 --- /dev/null +++ b/modules/lexicon/src/import/array.rs @@ -0,0 +1,77 @@ +use iref::{AsIri, IriBuf}; +use rdf_types::{Generator, Id, Literal, Object, Triple, Vocabulary, VocabularyMut}; +use treeldr::vocab; + +use crate::{ArrayItem, ArrayNonPrimitiveItem, LexArray}; + +use super::{nsid_name, Context, IntoItem, Item, OutputSubject, OutputTriple, Process}; + +impl> Process for LexArray { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + 
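+// A sketch of what `build_rdf_list` (defined above in import.rs) emits for a
+// two-element input, assuming the generator hands out fresh blank nodes _:b0
+// and _:b1. Items are consumed in reverse, so the tail of the list is linked
+// before its head, and `f` converts each item into the `rdf:first` object:
+//
+//   _:b0 rdf:type  rdf:List .
+//   _:b0 rdf:first f(item1) .
+//   _:b0 rdf:rest  rdf:nil .
+//   _:b1 rdf:type  rdf:List .
+//   _:b1 rdf:first f(item0) .
+//   _:b1 rdf:rest  _:b0 .
+//
+// The function returns _:b1, the head of the list, or rdf:nil for an empty
+// input.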
vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + let item_iri = IriBuf::from_string(format!( + "{}/items", + vocabulary.iri(id.as_iri().unwrap()).unwrap() + )) + .unwrap(); + let item_id = Id::Iri(vocabulary.insert(item_iri.as_iri())); + stack.push(self.items.into_item(item_id.clone())); + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Array.as_iri()), + Object::Id(item_id), + )); + } +} + +impl IntoItem for ArrayItem { + fn into_item(self, id: OutputSubject) -> Item { + match self { + Self::Ref(r) => Item::RefVariant(id, r), + Self::Primitive(p) => Item::Primitive(id, p), + Self::Ipld(i) => Item::Ipld(id, i), + Self::NonPrimitive(n) => n.into_item(id), + } + } +} + +impl IntoItem for ArrayNonPrimitiveItem { + fn into_item(self, id: OutputSubject) -> Item { + match self { + Self::Blob(b) => Item::Blob(id, b), + } + } +} diff --git a/modules/lexicon/src/import/blob.rs b/modules/lexicon/src/import/blob.rs new file mode 100644 index 00000000..b0746b31 --- /dev/null +++ b/modules/lexicon/src/import/blob.rs @@ -0,0 +1,58 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::LexBlob; + +use super::{nsid_name, Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexBlob { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + if self.accept.is_some() { + log::warn!("blob `accept` constraint not yet supported") + } + + if self.max_size.is_some() { + log::warn!("blob `max_size` constraint not yet supported") + } + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Alias.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::Primitive::Bytes.as_iri()))), + )); + } +} diff --git a/modules/lexicon/src/import/ipld.rs b/modules/lexicon/src/import/ipld.rs new file mode 100644 index 00000000..293cf7ae --- /dev/null +++ b/modules/lexicon/src/import/ipld.rs @@ -0,0 +1,28 @@ +use rdf_types::{Generator, VocabularyMut}; + +use crate::LexIpldType; + +use super::{Context, Item, OutputSubject, OutputTriple, Process}; + +mod bytes; +mod cid_link; + +impl Process for LexIpldType { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + match self { + Self::Bytes(b) => b.process(vocabulary, generator, stack, triples, context, id), + Self::CidLink(l) => l.process(vocabulary, generator, stack, triples, context, id), + } + } +} diff --git a/modules/lexicon/src/import/ipld/bytes.rs b/modules/lexicon/src/import/ipld/bytes.rs new file mode 100644 index 00000000..f5cc12b7 --- /dev/null +++ b/modules/lexicon/src/import/ipld/bytes.rs @@ -0,0 +1,58 @@ +use 
iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::{import::nsid_name, LexBytes}; + +use super::{Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexBytes { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + if self.min_size.is_some() { + log::warn!("bytes `min_size` constraint not yet supported") + } + + if self.max_size.is_some() { + log::warn!("bytes `max_size` constraint not yet supported") + } + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Alias.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::Primitive::Bytes.as_iri()))), + )); + } +} diff --git a/modules/lexicon/src/import/ipld/cid_link.rs b/modules/lexicon/src/import/ipld/cid_link.rs new file mode 100644 index 00000000..0f87eab6 --- /dev/null +++ b/modules/lexicon/src/import/ipld/cid_link.rs @@ -0,0 +1,50 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::{import::nsid_name, LexCidLink}; + +use super::{Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexCidLink { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Alias.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::Primitive::Cid.as_iri()))), + )); + } +} diff --git a/modules/lexicon/src/import/object.rs b/modules/lexicon/src/import/object.rs new file mode 100644 index 00000000..40d8b6a1 --- /dev/null +++ b/modules/lexicon/src/import/object.rs @@ -0,0 +1,133 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::{LexObject, ObjectNonPrimitiveProperty, ObjectProperty}; + +use super::{ + build_rdf_list, nsid_name, sub_id, Context, Item, OutputSubject, OutputTriple, Process, +}; + +impl Process for LexObject { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, 
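+// Both IPLD forms above reduce to plain primitive aliases in the output graph
+// (illustrative pseudo-triples; the `tldr:` prefix stands for the TreeLDR
+// vocabulary used throughout):
+//
+//   <id> rdf:type tldr:Layout ; tldr:name "..." ; tldr:alias tldr:Bytes .  // `bytes`
+//   <id> rdf:type tldr:Layout ; tldr:name "..." ; tldr:alias tldr:Cid .    // `cid-link`
+//
+// The `min_size`/`max_size` constraints of `bytes` are only logged for now.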
+ ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if !self.nullable.is_empty() { + log::warn!("object `nullable` constraint not yet supported") + } + + let fields_id = build_rdf_list( + vocabulary, + generator, + triples, + self.properties, + |vocabulary, generator, triples, (name, prop)| { + let f_id = generator.next(vocabulary); + + triples.push(Triple( + f_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Field.as_iri()))), + )); + + triples.push(Triple( + f_id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String(name.clone())), + )); + + let item_id = sub_id(vocabulary, &id, &name); + stack.push(Item::ObjectProperty(item_id.clone(), prop)); + + let t_id = generator.next(vocabulary); + triples.push(Triple( + t_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + if self.required.contains(&name) { + triples.push(Triple( + t_id.clone(), + vocabulary.insert(vocab::TreeLdr::Required.as_iri()), + Object::Id(item_id), + )); + } else { + triples.push(Triple( + t_id.clone(), + vocabulary.insert(vocab::TreeLdr::Option.as_iri()), + Object::Id(item_id), + )); + }; + + triples.push(Triple( + f_id.clone(), + vocabulary.insert(vocab::TreeLdr::Format.as_iri()), + Object::Id(t_id), + )); + + Object::Id(f_id) + }, + ); + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Fields.as_iri()), + Object::Id(fields_id), + )); + } +} + +impl Process for ObjectProperty { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + match self { + ObjectProperty::Ref(r) => r.process(vocabulary, generator, stack, triples, context, id), + ObjectProperty::Primitive(p) => { + p.process(vocabulary, generator, stack, triples, context, id) + } + ObjectProperty::NonPrimitive(ObjectNonPrimitiveProperty::Array(a)) => { + a.process(vocabulary, generator, stack, triples, context, id) + } + ObjectProperty::NonPrimitive(ObjectNonPrimitiveProperty::Blob(b)) => { + b.process(vocabulary, generator, stack, triples, context, id) + } + ObjectProperty::Ipld(i) => { + i.process(vocabulary, generator, stack, triples, context, id) + } + } + } +} diff --git a/modules/lexicon/src/import/primitive.rs b/modules/lexicon/src/import/primitive.rs new file mode 100644 index 00000000..6c05dcb7 --- /dev/null +++ b/modules/lexicon/src/import/primitive.rs @@ -0,0 +1,292 @@ +use contextual::WithContext; +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, Vocabulary, VocabularyMut}; +use treeldr::vocab; + +use crate::{LexBoolean, LexInteger, LexPrimitive, LexString, LexUnknown}; + +use super::{ + build_rdf_list, nsid_name, Context, IntoItem, Item, OutputSubject, OutputTriple, Process, +}; + +impl Process for LexPrimitive { + fn process( + self, + _vocabulary: &mut V, + _generator: &mut impl Generator, + stack: &mut Vec>, + _triples: &mut Vec>, + _context: &Context, + id: OutputSubject, 
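+// For orientation, an object with one required property `did` comes out of
+// `LexObject::process` (object.rs, above) roughly as the following
+// pseudo-triples, where _:f and _:t are generator-fresh nodes and the
+// property layout itself is pushed on the stack as a separate item:
+//
+//   <obj> tldr:fields ( _:f ) .
+//   _:f   rdf:type tldr:Field ; tldr:name "did" ; tldr:format _:t .
+//   _:t   rdf:type tldr:Layout ; tldr:required <obj/did> .
+//
+// Properties not listed in `required` get `tldr:option` instead of
+// `tldr:required`.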
+ ) where + V::Iri: Clone, + V::BlankId: Clone, + { + match self { + LexPrimitive::Boolean(b) => stack.push(Item::Boolean(id, b)), + LexPrimitive::Integer(i) => stack.push(Item::Integer(id, i)), + LexPrimitive::String(s) => stack.push(Item::String(id, s)), + LexPrimitive::Unknown(u) => stack.push(Item::Unknown(id, u)), + } + } +} + +impl IntoItem for LexPrimitive { + fn into_item(self, id: OutputSubject) -> Item { + Item::Primitive(id, self) + } +} + +impl Process for LexBoolean { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if self.const_.is_some() { + log::warn!("boolean `const` constraint not yet supported") + } + + if self.default.is_some() { + log::warn!("boolean `default` constraint not yet supported") + } + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Alias.as_iri()), + Object::Id(Id::Iri( + vocabulary.insert(vocab::Primitive::Boolean.as_iri()), + )), + )); + } +} + +impl Process for LexInteger { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if self.const_.is_some() { + log::warn!("integer `const` constraint not yet supported") + } + + if self.default.is_some() { + log::warn!("integer `default` constraint not yet supported") + } + + if self.enum_.is_some() { + log::warn!("integer `enum` constraint not yet supported") + } + + let primitive = self.best_primitive(); + match self.bounds_constraints(primitive) { + (None, None) => { + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Alias.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(primitive.as_iri()))), + )); + } + (min, max) => { + let constraits = min + .into_iter() + .map(|m| (vocab::TreeLdr::InclusiveMinimum, m)) + .chain( + max.into_iter() + .map(|m| (vocab::TreeLdr::InclusiveMaximum, m)), + ); + + let constraints_id = build_rdf_list( + vocabulary, + generator, + triples, + constraits, + |vocabulary, generator, triples, (prop, value)| { + let c_id = generator.next(vocabulary); + + triples.push(Triple( + c_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri( + vocabulary.insert(vocab::TreeLdr::LayoutRestriction.as_iri()), + )), + )); + + triples.push(Triple( + c_id.clone(), + vocabulary.insert(prop.as_iri()), + Object::Literal(Literal::TypedString( + value.to_string(), + vocabulary.insert(primitive.natural_type_term().unwrap().as_iri()), + )), + )); + + Object::Id(c_id) + }, + ); + + triples.push(Triple( + id.clone(), + 
vocabulary.insert(vocab::TreeLdr::DerivedFrom.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(primitive.as_iri()))), + )); + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::WithRestrictions.as_iri()), + Object::Id(constraints_id), + )); + } + } + } +} + +impl Process for LexString { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + if self.const_.is_some() { + log::warn!("string `const` constraint not yet supported") + } + + if self.default.is_some() { + log::warn!("string `default` constraint not yet supported") + } + + if self.enum_.is_some() { + log::warn!("string `enum` constraint not yet supported") + } + + if self.min_length.is_some() { + log::warn!("string `min_length` constraint not yet supported") + } + + if self.max_length.is_some() { + log::warn!("string `max_length` constraint not yet supported") + } + + if self.min_grapheme.is_some() { + log::warn!("string `min_grapheme` constraint not yet supported") + } + + if self.max_grapheme.is_some() { + log::warn!("string `max_grapheme` constraint not yet supported") + } + + if self.format.is_some() { + log::warn!("string `format` constraint not yet supported") + } + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Alias.as_iri()), + Object::Id(Id::Iri( + vocabulary.insert(vocab::Primitive::String.as_iri()), + )), + )); + } +} + +impl Process for LexUnknown { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + log::warn!("unknown user type {}", id.with(&*vocabulary)); + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + } +} diff --git a/modules/lexicon/src/import/record.rs b/modules/lexicon/src/import/record.rs new file mode 100644 index 00000000..0a136bd6 --- /dev/null +++ b/modules/lexicon/src/import/record.rs @@ -0,0 +1,63 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::{import::sub_id, LexRecord}; + +use super::{nsid_name, Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexRecord { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + 
vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + if self.key.is_some() { + log::warn!("records `key` constraint not yet supported"); + } + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::MapKey.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::Xsd::String.as_iri()))), + )); + + let record_id = sub_id(vocabulary, &id, "record"); + stack.push(Item::Object(record_id.clone(), self.record)); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::MapValue.as_iri()), + Object::Id(record_id), + )); + } +} diff --git a/modules/lexicon/src/import/reference.rs b/modules/lexicon/src/import/reference.rs new file mode 100644 index 00000000..18388938 --- /dev/null +++ b/modules/lexicon/src/import/reference.rs @@ -0,0 +1,144 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::{LexRef, LexRefUnion, LexRefVariant}; + +use super::{build_rdf_list, nsid_name, Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexRefVariant { + fn process( + self, + _vocabulary: &mut V, + _generator: &mut impl Generator, + stack: &mut Vec>, + _triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + match self { + LexRefVariant::Ref(r) => stack.push(Item::Ref(id, r)), + LexRefVariant::Union(u) => stack.push(Item::RefUnion(id, u)), + } + } +} + +impl Process for LexRef { + fn process( + self, + vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + let iri = context.resolve_reference(&self.ref_); + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Alias.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(iri.as_iri()))), + )); + } +} + +impl Process for LexRefUnion { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + _stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + 
Object::Literal(Literal::String(desc)), + )); + } + + if self.closed.is_some() { + log::warn!("ref union `closed` constraint not yet supported") + } + + let variants_id = build_rdf_list( + vocabulary, + generator, + triples, + self.refs, + |vocabulary, generator, triples, r| { + let v_id = generator.next(vocabulary); + + triples.push(Triple( + v_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Variant.as_iri()))), + )); + + triples.push(Triple( + v_id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String(nsid_name(&r).to_string())), + )); + + let format_iri = context.resolve_reference(&r); + let format_id = Id::Iri(vocabulary.insert(format_iri.as_iri())); + + triples.push(Triple( + v_id.clone(), + vocabulary.insert(vocab::TreeLdr::Format.as_iri()), + Object::Id(format_id), + )); + + Object::Id(v_id) + }, + ); + + triples.push(Triple( + id, + vocabulary.insert(vocab::TreeLdr::Enumeration.as_iri()), + Object::Id(variants_id), + )); + } +} diff --git a/modules/lexicon/src/import/token.rs b/modules/lexicon/src/import/token.rs new file mode 100644 index 00000000..7b9ed521 --- /dev/null +++ b/modules/lexicon/src/import/token.rs @@ -0,0 +1,22 @@ +use rdf_types::{Generator, VocabularyMut}; + +use crate::LexToken; + +use super::{Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexToken { + fn process( + self, + _vocabulary: &mut V, + _generator: &mut impl Generator, + _stack: &mut Vec>, + _triples: &mut Vec>, + _context: &Context, + _id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + log::warn!("tokens are not yet supported") + } +} diff --git a/modules/lexicon/src/import/xrpc.rs b/modules/lexicon/src/import/xrpc.rs new file mode 100644 index 00000000..839d6387 --- /dev/null +++ b/modules/lexicon/src/import/xrpc.rs @@ -0,0 +1,90 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::LexXrpcParameters; + +use super::{build_rdf_list, sub_id, Item, OutputSubject, OutputTriple}; + +mod body; +mod procedure; +mod query; +mod subscription; + +fn process_xrpc_parameters( + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + id: &OutputSubject, + parameters: Option, +) -> OutputSubject +where + V::Iri: Clone, + V::BlankId: Clone, +{ + match parameters { + Some(params) => build_rdf_list( + vocabulary, + generator, + triples, + params.properties, + |vocabulary, generator, triples, (name, p)| { + let f_id = generator.next(vocabulary); + + triples.push(Triple( + f_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Field.as_iri()))), + )); + + triples.push(Triple( + f_id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String(name.clone())), + )); + + if let Some(desc) = p.description() { + triples.push(Triple( + f_id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc.to_string())), + )); + } + + let item_id = sub_id(vocabulary, id, &name); + stack.push(Item::XrpcParametersProperty(item_id.clone(), p)); + + let t_id = generator.next(vocabulary); + triples.push(Triple( + t_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + if params.required.contains(&name) { + triples.push(Triple( + t_id.clone(), + 
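+// Reference resolution recap (see `Context::resolve_reference` in import.rs).
+// With a document base of lexicon:com.example.foo:
+//
+//   "#bar"                ->  lexicon:com.example.foo#bar
+//   "com.example.baz#qux" ->  lexicon:com.example.baz#qux
+//   "com.example.baz"     ->  lexicon:com.example.baz
+//
+// A `LexRef` becomes a `tldr:alias` of the resolved IRI, while a
+// `LexRefUnion` (above) becomes a `tldr:enumeration` with one named variant
+// per resolved ref.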
vocabulary.insert(vocab::TreeLdr::Required.as_iri()), + Object::Id(item_id), + )); + } else { + triples.push(Triple( + t_id.clone(), + vocabulary.insert(vocab::TreeLdr::Option.as_iri()), + Object::Id(item_id), + )); + }; + + triples.push(Triple( + f_id.clone(), + vocabulary.insert(vocab::TreeLdr::Format.as_iri()), + Object::Id(t_id), + )); + + Object::Id(f_id) + }, + ), + None => Id::Iri(vocabulary.insert(vocab::Rdf::Nil.as_iri())), + } +} diff --git a/modules/lexicon/src/import/xrpc/body.rs b/modules/lexicon/src/import/xrpc/body.rs new file mode 100644 index 00000000..e4692efc --- /dev/null +++ b/modules/lexicon/src/import/xrpc/body.rs @@ -0,0 +1,44 @@ +use rdf_types::{Generator, VocabularyMut}; + +use crate::{LexXrpcBody, LexXrpcBodySchema}; + +use super::super::{Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexXrpcBody { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + if let Some(schema) = self.schema { + schema.process(vocabulary, generator, stack, triples, context, id) + } + } +} + +impl Process for LexXrpcBodySchema { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + match self { + Self::Ref(r) => r.process(vocabulary, generator, stack, triples, context, id), + Self::Object(o) => o.process(vocabulary, generator, stack, triples, context, id), + } + } +} diff --git a/modules/lexicon/src/import/xrpc/procedure.rs b/modules/lexicon/src/import/xrpc/procedure.rs new file mode 100644 index 00000000..45722c47 --- /dev/null +++ b/modules/lexicon/src/import/xrpc/procedure.rs @@ -0,0 +1,34 @@ +use rdf_types::{Generator, VocabularyMut}; + +use crate::{import::sub_id, LexXrpcProcedure}; + +use super::super::{Context, Item, OutputSubject, OutputTriple, Process}; + +impl Process for LexXrpcProcedure { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + for (name, p) in self.parameters { + let p_id = sub_id(vocabulary, &id, &name); + stack.push(Item::Primitive(p_id, p)); + } + + if let Some(output) = self.output { + let output_id = sub_id(vocabulary, &id, "output"); + stack.push(Item::XrpcBody(output_id, output)); + } + + if let Some(input) = self.input { + input.process(vocabulary, generator, stack, triples, context, id) + } + } +} diff --git a/modules/lexicon/src/import/xrpc/query.rs b/modules/lexicon/src/import/xrpc/query.rs new file mode 100644 index 00000000..9da176a5 --- /dev/null +++ b/modules/lexicon/src/import/xrpc/query.rs @@ -0,0 +1,87 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::{ + import::sub_id, LexXrpcParametersNonPrimitiveProperty, LexXrpcParametersProperty, LexXrpcQuery, +}; + +use super::{ + super::{nsid_name, Context, Item, OutputSubject, OutputTriple, Process}, + process_xrpc_parameters, +}; + +impl Process for LexXrpcQuery { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + 
vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + let fields_id = + process_xrpc_parameters(vocabulary, generator, stack, triples, &id, self.parameters); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Fields.as_iri()), + Object::Id(fields_id), + )); + + if let Some(output) = self.output { + let o_id = sub_id(vocabulary, &id, "output"); + stack.push(Item::XrpcBody(o_id, output)) + } + } +} + +impl Process for LexXrpcParametersProperty { + fn process( + self, + _vocabulary: &mut V, + _generator: &mut impl Generator, + stack: &mut Vec>, + _triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + match self { + LexXrpcParametersProperty::Primitive(p) => stack.push(Item::Primitive(id, p)), + LexXrpcParametersProperty::NonPrimitive(n) => match n { + LexXrpcParametersNonPrimitiveProperty::Array(a) => { + stack.push(Item::PrimitiveArray(id, a)) + } + }, + } + } +} diff --git a/modules/lexicon/src/import/xrpc/subscription.rs b/modules/lexicon/src/import/xrpc/subscription.rs new file mode 100644 index 00000000..0ef566b7 --- /dev/null +++ b/modules/lexicon/src/import/xrpc/subscription.rs @@ -0,0 +1,189 @@ +use iref::AsIri; +use rdf_types::{Generator, Id, Literal, Object, Triple, VocabularyMut}; +use treeldr::vocab; + +use crate::{import::sub_id, LexXrpcSubscription, LexXrpcSubscriptionMessage}; + +use super::{ + super::{build_rdf_list, nsid_name, Context, Item, OutputSubject, OutputTriple, Process}, + process_xrpc_parameters, +}; + +impl Process for LexXrpcSubscription { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + _context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String( + nsid_name(vocabulary.iri(id.as_iri().unwrap()).unwrap().as_str()).to_string(), + )), + )); + + if let Some(desc) = self.description { + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(desc)), + )); + } + + if let Some(message) = self.message { + let msg_id = sub_id(vocabulary, &id, "message"); + stack.push(Item::XrpcSubscriptionMessage(msg_id, message)) + } + + if !self.errors.is_empty() { + let error_id = sub_id(vocabulary, &id, "error"); + + triples.push(Triple( + error_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + error_id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String("error".to_string())), + )); + + triples.push(Triple( + error_id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + 
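+// Naming note: for the resolveHandle example shipped with this module,
+// `sub_id` places the query's output schema at
+//
+//   lexicon:com.atproto.identity.resolveHandle/main/output
+//
+// (the parent IRI has no fragment and a single path segment, so a `/main/`
+// infix is inserted; compare the `sub_id_*` tests in import.rs).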
Object::Literal(Literal::String(format!( + "Errors of <{}>.", + vocabulary.iri(id.as_iri().unwrap()).unwrap() + ))), + )); + + let variants_id = build_rdf_list( + vocabulary, + generator, + triples, + self.errors, + |vocabulary, generator, triples, e| { + let v_id = generator.next(vocabulary); + + triples.push(Triple( + v_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Variant.as_iri()))), + )); + + triples.push(Triple( + v_id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String(e.name)), + )); + + Object::Id(v_id) + }, + ); + + triples.push(Triple( + error_id, + vocabulary.insert(vocab::TreeLdr::Enumeration.as_iri()), + Object::Id(variants_id), + )); + } + + if !self.infos.is_empty() { + let info_id = sub_id(vocabulary, &id, "info"); + + triples.push(Triple( + info_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Layout.as_iri()))), + )); + + triples.push(Triple( + info_id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String("info".to_string())), + )); + + triples.push(Triple( + info_id.clone(), + vocabulary.insert(vocab::Rdfs::Comment.as_iri()), + Object::Literal(Literal::String(format!( + "Infos of <{}>.", + vocabulary.iri(id.as_iri().unwrap()).unwrap() + ))), + )); + + let variants_id = build_rdf_list( + vocabulary, + generator, + triples, + self.infos, + |vocabulary, generator, triples, i| { + let v_id = generator.next(vocabulary); + + triples.push(Triple( + v_id.clone(), + vocabulary.insert(vocab::Rdf::Type.as_iri()), + Object::Id(Id::Iri(vocabulary.insert(vocab::TreeLdr::Variant.as_iri()))), + )); + + triples.push(Triple( + v_id.clone(), + vocabulary.insert(vocab::TreeLdr::Name.as_iri()), + Object::Literal(Literal::String(i.name)), + )); + + Object::Id(v_id) + }, + ); + + triples.push(Triple( + info_id, + vocabulary.insert(vocab::TreeLdr::Enumeration.as_iri()), + Object::Id(variants_id), + )); + } + + let fields_id = + process_xrpc_parameters(vocabulary, generator, stack, triples, &id, self.parameters); + + triples.push(Triple( + id.clone(), + vocabulary.insert(vocab::TreeLdr::Fields.as_iri()), + Object::Id(fields_id), + )); + } +} + +impl Process for LexXrpcSubscriptionMessage { + fn process( + self, + vocabulary: &mut V, + generator: &mut impl Generator, + stack: &mut Vec>, + triples: &mut Vec>, + context: &Context, + id: OutputSubject, + ) where + V::Iri: Clone, + V::BlankId: Clone, + { + self.schema + .process(vocabulary, generator, stack, triples, context, id) + } +} diff --git a/modules/lexicon/src/lib.rs b/modules/lexicon/src/lib.rs new file mode 100644 index 00000000..38537922 --- /dev/null +++ b/modules/lexicon/src/lib.rs @@ -0,0 +1,573 @@ +use std::collections::BTreeMap; + +use import::IntoTriples; +use rdf_types::Vocabulary; +use serde::{Deserialize, Serialize}; +use serde_repr::{Deserialize_repr, Serialize_repr}; + +pub mod export; +pub mod import; +mod nsid; + +pub use nsid::*; + +#[derive(Debug, Serialize_repr, Deserialize_repr)] +#[repr(u8)] +pub enum Version { + One = 1, +} + +/// A lexicon document. 
+#[derive(Debug, Serialize, Deserialize)] +pub struct LexiconDoc { + pub lexicon: Version, + pub id: NsidBuf, + + #[serde(skip_serializing_if = "Option::is_none")] + pub revision: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + + #[serde(rename = "defs")] + pub definitions: Definitions, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Definitions { + #[serde(skip_serializing_if = "Option::is_none")] + main: Option, + + #[serde(flatten)] + other: BTreeMap, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum LexAnyUserType { + Record(LexRecord), + Query(LexXrpcQuery), + Procedure(LexXrpcProcedure), + Subscription(LexXrpcSubscription), + Array(LexArray), + Token(LexToken), + Object(LexObject), + Boolean(LexBoolean), + Integer(LexInteger), + String(LexString), + Bytes(LexBytes), + CidLink(LexCidLink), + Unknown(LexUnknown), +} + +impl From for LexAnyUserType { + fn from(value: LexUserType) -> Self { + match value { + LexUserType::Array(a) => Self::Array(a), + LexUserType::Token(t) => Self::Token(t), + LexUserType::Object(o) => Self::Object(o), + LexUserType::Boolean(b) => Self::Boolean(b), + LexUserType::Integer(i) => Self::Integer(i), + LexUserType::String(s) => Self::String(s), + LexUserType::Bytes(b) => Self::Bytes(b), + LexUserType::CidLink(l) => Self::CidLink(l), + LexUserType::Unknown(u) => Self::Unknown(u), + } + } +} + +impl LexiconDoc { + pub fn into_triples( + self, + vocabulary: &mut V, + generator: G, + ) -> IntoTriples { + IntoTriples::new(self, vocabulary, generator) + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexRef { + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + + #[serde(rename = "ref")] + pub ref_: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexRefUnion { + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + + pub refs: Vec, + + #[serde(skip_serializing_if = "Option::is_none")] + pub closed: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum LexRefVariant { + Ref(LexRef), + Union(LexRefUnion), +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum LexUserType { + Array(LexArray), + Token(LexToken), + Object(LexObject), + Boolean(LexBoolean), + Integer(LexInteger), + String(LexString), + Bytes(LexBytes), + CidLink(LexCidLink), + Unknown(LexUnknown), +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexXrpcQuery { + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub parameters: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub output: Option, + + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub errors: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum LexXrpcParametersType { + Params, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexXrpcParameters { + #[serde(rename = "type")] + pub type_: LexXrpcParametersType, + + pub description: Option, + + #[serde(default)] + pub required: Vec, + + pub properties: BTreeMap, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum LexXrpcParametersProperty { + Primitive(LexPrimitive), + NonPrimitive(LexXrpcParametersNonPrimitiveProperty), +} + +impl LexXrpcParametersProperty { + 
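+// End-to-end sketch: `LexiconDoc` derives `Deserialize`, so any serde JSON
+// front-end can load a document; `serde_json`, the include path, and the
+// `IndexVocabulary`/`generator::Blank` pair from `rdf_types` are illustrative
+// stand-ins here, not requirements of this crate:
+//
+//     let doc: LexiconDoc = serde_json::from_str(include_str!(
+//         "../examples/atproto/resolveHandle.json"
+//     ))?;
+//     assert_eq!(doc.id.as_str(), "com.atproto.identity.resolveHandle");
+//
+//     // Lower the document to RDF triples via the import module:
+//     let mut vocabulary = rdf_types::IndexVocabulary::new();
+//     let generator = rdf_types::generator::Blank::new();
+//     let triples: Vec<_> = doc.into_triples(&mut vocabulary, generator).collect();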
pub fn description(&self) -> Option<&str> { + match self { + Self::Primitive(p) => p.description(), + Self::NonPrimitive(n) => n.description(), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum LexXrpcParametersNonPrimitiveProperty { + Array(LexPrimitiveArray), +} + +impl LexXrpcParametersNonPrimitiveProperty { + pub fn description(&self) -> Option<&str> { + match self { + Self::Array(a) => a.description.as_deref(), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexXrpcBody { + pub description: Option, + + pub encoding: LexXrpcBodyEncoding, + + pub schema: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum LexXrpcBodyEncoding { + One(String), + Many(Vec), +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum LexXrpcBodySchema { + Object(LexObject), + Ref(LexRefVariant), +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexXrpcSubscriptionMessage { + pub description: Option, + pub schema: LexXrpcBodySchema, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexXrpcError { + pub name: String, + pub description: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexXrpcProcedure { + pub description: Option, + + #[serde(default)] + pub parameters: BTreeMap, + + pub input: Option, + + pub output: Option, + + #[serde(default)] + pub errors: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexRecord { + pub description: Option, + pub key: Option, + pub record: LexObject, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexXrpcSubscription { + pub description: Option, + pub parameters: Option, + pub message: Option, + + #[serde(default)] + pub infos: Vec, + + #[serde(default)] + pub errors: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexToken { + pub description: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexObject { + pub description: Option, + + #[serde(default)] + pub required: Vec, + + #[serde(default)] + pub nullable: Vec, + + #[serde(default)] + pub properties: BTreeMap, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum ObjectProperty { + Ref(LexRefVariant), + Ipld(LexIpldType), + Primitive(LexPrimitive), + NonPrimitive(ObjectNonPrimitiveProperty), +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum ObjectNonPrimitiveProperty { + Array(LexArray), + Blob(LexBlob), +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum LexIpldType { + Bytes(LexBytes), + CidLink(LexCidLink), +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexCidLink { + pub description: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexBytes { + pub description: Option, + + pub min_size: Option, + + pub max_size: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexBlob { + pub description: Option, + + pub accept: Option>, + + pub max_size: Option, +} + +// #[derive(Debug, Serialize, Deserialize)] +// #[serde(rename_all = "camelCase")] +// pub struct LexImage { +// pub description: Option, +// pub accept: Option>, +// pub max_size: Option, +// pub max_width: Option, +// pub max_height: Option, +// } + +// #[derive(Debug, Serialize, Deserialize)] +// #[serde(rename_all = "camelCase")] +// pub struct LexVideo 
{ +// pub description: Option, +// pub accept: Option>, +// pub max_size: Option, +// pub max_width: Option, +// pub max_height: Option, +// pub max_length: Option, +// } + +// #[derive(Debug, Serialize, Deserialize)] +// #[serde(rename_all = "camelCase")] +// pub struct LexAudio { +// pub description: Option, +// pub accept: Option>, +// pub max_size: Option, +// pub max_length: Option, +// } + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexArray { + pub description: Option, + pub items: T, + pub min_length: Option, + pub max_length: Option, +} + +pub type LexPrimitiveArray = LexArray; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum ArrayItem { + Primitive(LexPrimitive), + Ipld(LexIpldType), + Ref(LexRefVariant), + NonPrimitive(ArrayNonPrimitiveItem), +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum ArrayNonPrimitiveItem { + Blob(LexBlob), +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "kebab-case")] +pub enum LexPrimitive { + Boolean(LexBoolean), + Integer(LexInteger), + String(LexString), + Unknown(LexUnknown), +} + +impl LexPrimitive { + pub fn description(&self) -> Option<&str> { + match self { + Self::Boolean(b) => b.description.as_deref(), + Self::Integer(i) => i.description.as_deref(), + Self::String(s) => s.description.as_deref(), + Self::Unknown(u) => u.description.as_deref(), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexBoolean { + pub description: Option, + pub default: Option, + pub const_: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexNumber { + pub default: Option, + pub minimum: Option, + pub maximum: Option, + pub enum_: Option>, + pub const_: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexInteger { + pub description: Option, + pub default: Option, + pub minimum: Option, + pub maximum: Option, + pub enum_: Option>, + pub const_: Option, +} + +impl LexInteger { + /// Find the best fitting TreeLDR primitive integer layout for this integer + /// type. 
+ pub fn best_primitive(&self) -> treeldr::vocab::Primitive { + match (self.minimum, self.maximum) { + (Some(min), Some(max)) if min >= u8::MIN as i64 && max <= u8::MAX as i64 => { + treeldr::vocab::Primitive::U8 + } + (Some(min), Some(max)) if min >= u16::MIN as i64 && max <= u16::MAX as i64 => { + treeldr::vocab::Primitive::U16 + } + (Some(min), Some(max)) if min >= u32::MIN as i64 && max <= u32::MAX as i64 => { + treeldr::vocab::Primitive::U32 + } + (Some(min), Some(_max)) if min >= u64::MIN as i64 => { + // && max <= u8::MAX as i64 + treeldr::vocab::Primitive::U64 + } + (Some(min), Some(max)) if min >= i8::MIN as i64 && max <= i8::MAX as i64 => { + treeldr::vocab::Primitive::I8 + } + (Some(min), Some(max)) if min >= i16::MIN as i64 && max <= i16::MAX as i64 => { + treeldr::vocab::Primitive::I16 + } + (Some(min), Some(max)) if min >= i32::MIN as i64 && max <= i32::MAX as i64 => { + treeldr::vocab::Primitive::I32 + } + (Some(_min), Some(_max)) => { + // if min >= i64::MIN && max <= i64::MAX => { + treeldr::vocab::Primitive::I64 + } + (Some(min), _) if min > 0 => treeldr::vocab::Primitive::PositiveInteger, + (Some(min), _) if min >= 0 => treeldr::vocab::Primitive::NonNegativeInteger, + (_, Some(max)) if max < 0 => treeldr::vocab::Primitive::NegativeInteger, + (_, Some(max)) if max <= 0 => treeldr::vocab::Primitive::NonPositiveInteger, + _ => treeldr::vocab::Primitive::Integer, + } + } + + pub fn bounds_constraints(&self, p: treeldr::vocab::Primitive) -> (Option, Option) { + match p { + treeldr::vocab::Primitive::U8 => ( + self.minimum.filter(|m| *m > u8::MIN as i64), + self.maximum.filter(|m| *m < u8::MAX as i64), + ), + treeldr::vocab::Primitive::U16 => ( + self.minimum.filter(|m| *m > u16::MIN as i64), + self.maximum.filter(|m| *m < u16::MAX as i64), + ), + treeldr::vocab::Primitive::U32 => ( + self.minimum.filter(|m| *m > u32::MIN as i64), + self.maximum.filter(|m| *m < u32::MAX as i64), + ), + treeldr::vocab::Primitive::U64 => { + (self.minimum.filter(|m| *m > u64::MIN as i64), self.maximum) + } + treeldr::vocab::Primitive::I8 => ( + self.minimum.filter(|m| *m > i8::MIN as i64), + self.maximum.filter(|m| *m < i8::MAX as i64), + ), + treeldr::vocab::Primitive::I16 => ( + self.minimum.filter(|m| *m > i16::MIN as i64), + self.maximum.filter(|m| *m < i16::MAX as i64), + ), + treeldr::vocab::Primitive::I32 => ( + self.minimum.filter(|m| *m > i32::MIN as i64), + self.maximum.filter(|m| *m < i32::MAX as i64), + ), + treeldr::vocab::Primitive::I64 => ( + self.minimum.filter(|m| *m > i64::MIN), + self.maximum.filter(|m| *m < i64::MAX), + ), + treeldr::vocab::Primitive::PositiveInteger => { + (self.minimum.filter(|m| *m > 1), self.maximum) + } + treeldr::vocab::Primitive::NonNegativeInteger => { + (self.minimum.filter(|m| *m > 0), self.maximum) + } + treeldr::vocab::Primitive::NegativeInteger => { + (self.minimum, self.maximum.filter(|m| *m < -1)) + } + treeldr::vocab::Primitive::NonPositiveInteger => { + (self.minimum, self.maximum.filter(|m| *m < 0)) + } + _ => (self.minimum, self.maximum), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexString { + pub description: Option, + pub default: Option, + pub format: Option, + pub min_length: Option, + pub max_length: Option, + pub min_grapheme: Option, + pub max_grapheme: Option, + pub enum_: Option>, + pub const_: Option, + + #[serde(default)] + pub known_values: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum LexStringFormat { + Datetime, + 
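+// Sanity examples for the integer mapping above, derived from the match arms:
+//
+//   minimum: Some(0), maximum: Some(255)  ->  U8,  surviving bounds (None, None)
+//   minimum: Some(1), maximum: Some(100)  ->  U8,  surviving bounds (Some(1), Some(100))
+//   minimum: Some(0), maximum: Some(300)  ->  U16, surviving bounds (None, Some(300))
+//   minimum: Some(1), maximum: None       ->  PositiveInteger, bounds (None, None)
+//   minimum: None,    maximum: None       ->  Integer (unbounded)
+//
+// When bounds survive, the emitted layout is `tldr:derivedFrom` the primitive
+// with `tldr:withRestrictions` carrying the remaining inclusive bounds (see
+// the `LexInteger` impl in import/primitive.rs).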
Uri, + AtUri, + Did, + Handle, + AtIdentifier, + Nsid, + Cid, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LexUnknown { + pub description: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LexParams { + #[serde(default)] + pub properties: BTreeMap, +} diff --git a/modules/lexicon/src/nsid.rs b/modules/lexicon/src/nsid.rs new file mode 100644 index 00000000..b0c25cc8 --- /dev/null +++ b/modules/lexicon/src/nsid.rs @@ -0,0 +1,131 @@ +use std::{fmt, ops::Deref}; + +use iref::IriBuf; +use serde::{Deserialize, Serialize}; + +#[derive(Debug)] +#[repr(transparent)] +pub struct Nsid(str); + +impl Nsid { + pub fn new(s: &str) -> Result<&Self, InvalidNsid> { + if check(s.bytes()) { + Ok(unsafe { std::mem::transmute(s) }) + } else { + Err(InvalidNsid(s.to_string())) + } + } + + /// Creates a new NSID from the given string. + /// + /// # Safety + /// + /// The input string must be a NSID. + pub unsafe fn new_unchecked(s: &str) -> &Self { + std::mem::transmute(s) + } + + pub fn as_str(&self) -> &str { + &self.0 + } + + pub fn as_iri(&self) -> IriBuf { + let mut iri = IriBuf::from_scheme("lexicon".try_into().unwrap()); + iri.path_mut().push(self.0.try_into().unwrap()); + iri + } +} + +#[derive(Debug, Serialize)] +pub struct NsidBuf(String); + +#[derive(Debug, thiserror::Error)] +#[error("invalid NSID `{0}`")] +pub struct InvalidNsid(pub String); + +impl NsidBuf { + pub fn new(s: String) -> Result { + if check(s.bytes()) { + Ok(Self(s)) + } else { + Err(InvalidNsid(s)) + } + } + + pub fn into_string(self) -> String { + self.0 + } +} + +impl Deref for NsidBuf { + type Target = Nsid; + + fn deref(&self) -> &Self::Target { + unsafe { Nsid::new_unchecked(&self.0) } + } +} + +impl fmt::Display for NsidBuf { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +fn check(mut bytes: impl Iterator) -> bool { + enum State { + SegmentAlpha(usize), + Segment(usize), + } + + let mut state = State::SegmentAlpha(0); + + loop { + state = match state { + State::SegmentAlpha(n) => match bytes.next() { + Some(b'a'..=b'z' | b'A'..=b'Z') => State::Segment(n), + _ => break false, + }, + State::Segment(n) => match bytes.next() { + Some(b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'-') => State::Segment(n), + Some(b'.') => State::SegmentAlpha(n + 1), + None if n >= 2 => break true, + _ => break false, + }, + } + } +} + +impl<'de> Deserialize<'de> for NsidBuf { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct Visitor; + + impl<'de> serde::de::Visitor<'de> for Visitor { + type Value = NsidBuf; + + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "a NSID") + } + + fn visit_str(self, v: &str) -> Result + where + E: serde::de::Error, + { + self.visit_string(v.to_string()) + } + + fn visit_string(self, v: String) -> Result + where + E: serde::de::Error, + { + NsidBuf::new(v).map_err(|InvalidNsid(value)| { + E::invalid_value(serde::de::Unexpected::Str(&value), &self) + }) + } + } + + deserializer.deserialize_string(Visitor) + } +} diff --git a/modules/lexicon/tests/t01.lexicon.json b/modules/lexicon/tests/t01.lexicon.json new file mode 100644 index 00000000..b21d4dcb --- /dev/null +++ b/modules/lexicon/tests/t01.lexicon.json @@ -0,0 +1,30 @@ +{ + "lexicon": 1, + "id": "com.atproto.identity.resolveHandle", + "defs": { + "main": { + "type": "query", + "description": "Provides the DID of a repo.", + "parameters": { + "type": "params", + "properties": { + 
"handle": { + "type": "string", + "format": "handle", + "description": "The handle to resolve. If not supplied, will resolve the host's own handle." + } + } + }, + "output": { + "encoding": "application/json", + "schema": { + "type": "object", + "required": ["did"], + "properties": { + "did": {"type": "string", "format": "did"} + } + } + } + } + } +} \ No newline at end of file diff --git a/modules/lexicon/tests/t01.rs b/modules/lexicon/tests/t01.rs new file mode 100644 index 00000000..0f22c119 --- /dev/null +++ b/modules/lexicon/tests/t01.rs @@ -0,0 +1,14 @@ +use treeldr_rust_macros::tldr; + +#[tldr("modules/lexicon/tests/t01.lexicon.json", no_rdf)] +pub mod schema { + #[prefix("lexicon:com.atproto")] + pub mod atproto {} +} + +#[test] +fn t01() { + let _query = schema::atproto::identity::ResolveHandle { + handle: Some("@hello".to_string()), + }; +} diff --git a/modules/rust/cli/src/main.rs b/modules/rust/cli/src/main.rs index 46e743e1..271a9358 100644 --- a/modules/rust/cli/src/main.rs +++ b/modules/rust/cli/src/main.rs @@ -9,28 +9,32 @@ use std::{collections::HashMap, fmt, path::PathBuf, str::FromStr}; use stderrlog::ColorChoice; use treeldr::{Id, TId}; use treeldr_load as load; -use treeldr_rust_gen::{module::Visibility, tr::TraitModules, Generate}; +use treeldr_rust_gen::{module::Visibility, tr::TraitModules, DedicatedSubModule, Generate}; #[derive(Parser)] #[clap(name="treeldr", author, version, about, long_about = None)] struct Args { /// Input files. - #[clap(short = 'i', multiple_occurrences = true)] + #[clap(short = 'i')] filenames: Vec, /// Sets the level of verbosity. - #[clap(short, long = "verbose", parse(from_occurrences))] - verbosity: usize, + #[clap(short, long = "verbose", action = clap::ArgAction::Count)] + verbosity: u8, /// Layouts to generate. layouts: Vec, - #[clap(short = 'm', multiple_occurrences = true)] + #[clap(short = 'm')] modules: Vec, + + #[clap(long)] + no_rdf: bool, } +#[derive(Debug, Clone)] pub struct ModuleBinding { - pub ident: Ident, + pub ident: String, pub iri: IriBuf, } @@ -43,7 +47,7 @@ impl FromStr for ModuleBinding { let iri = IriBuf::new(iri) .map_err(|e| InvalidPrefixBinding::InvalidIri(iri.to_string(), e))?; Ok(Self { - ident: Ident::new(prefix, Span::call_site()), + ident: prefix.to_string(), iri, }) } @@ -74,7 +78,10 @@ pub fn main() { let args = Args::parse(); // Init logger. 
diff --git a/modules/rust/cli/src/main.rs b/modules/rust/cli/src/main.rs
index 46e743e1..271a9358 100644
--- a/modules/rust/cli/src/main.rs
+++ b/modules/rust/cli/src/main.rs
@@ -9,28 +9,32 @@ use std::{collections::HashMap, fmt, path::PathBuf, str::FromStr};
 use stderrlog::ColorChoice;
 use treeldr::{Id, TId};
 use treeldr_load as load;
-use treeldr_rust_gen::{module::Visibility, tr::TraitModules, Generate};
+use treeldr_rust_gen::{module::Visibility, tr::TraitModules, DedicatedSubModule, Generate};
 
 #[derive(Parser)]
 #[clap(name="treeldr", author, version, about, long_about = None)]
 struct Args {
     /// Input files.
-    #[clap(short = 'i', multiple_occurrences = true)]
+    #[clap(short = 'i')]
     filenames: Vec<PathBuf>,
 
     /// Sets the level of verbosity.
-    #[clap(short, long = "verbose", parse(from_occurrences))]
-    verbosity: usize,
+    #[clap(short, long = "verbose", action = clap::ArgAction::Count)]
+    verbosity: u8,
 
     /// Layouts to generate.
     layouts: Vec<IriBuf>,
 
-    #[clap(short = 'm', multiple_occurrences = true)]
+    #[clap(short = 'm')]
     modules: Vec<ModuleBinding>,
+
+    #[clap(long)]
+    no_rdf: bool,
 }
 
+#[derive(Debug, Clone)]
 pub struct ModuleBinding {
-    pub ident: Ident,
+    pub ident: String,
     pub iri: IriBuf,
 }
@@ -43,7 +47,7 @@ impl FromStr for ModuleBinding {
         let iri = IriBuf::new(iri)
             .map_err(|e| InvalidPrefixBinding::InvalidIri(iri.to_string(), e))?;
         Ok(Self {
-            ident: Ident::new(prefix, Span::call_site()),
+            ident: prefix.to_string(),
             iri,
         })
     }
@@ -74,7 +78,10 @@ pub fn main() {
     let args = Args::parse();
 
     // Init logger.
-    stderrlog::new().verbosity(args.verbosity).init().unwrap();
+    stderrlog::new()
+        .verbosity(args.verbosity as usize)
+        .init()
+        .unwrap();
 
     let mut files = load::Files::<PathBuf>::new();
     let mut documents = Vec::new();
@@ -115,7 +122,11 @@ pub fn main() {
     // 	println!("{} .", quad.with(&vocabulary))
     // }
 
-    let mut gen_context = treeldr_rust_gen::Context::new(&model, &vocabulary);
+    let options = treeldr_rust_gen::Options {
+        impl_rdf: !args.no_rdf,
+    };
+
+    let mut gen_context = treeldr_rust_gen::Context::new(&model, &vocabulary, options);
 
     let root_ref = gen_context.add_module(None, None, format_ident!("example"), Visibility::Public);
@@ -124,44 +135,44 @@ pub fn main() {
     let mut type_map = HashMap::new();
 
     for prefix in args.modules {
-        let module_ref =
-            gen_context.add_module(Some(root_ref), None, prefix.ident, Visibility::Public);
-        let providers_module_ref = gen_context.add_module(
-            Some(module_ref),
-            None,
-            format_ident!("provider"),
-            Visibility::Public,
-        );
-        let trait_objects_module_ref = gen_context.add_module(
-            Some(module_ref),
-            None,
-            format_ident!("trait_object"),
-            Visibility::Public,
-        );
-        let layouts_module_ref = gen_context.add_module(
-            Some(module_ref),
-            None,
-            format_ident!("layout"),
-            Visibility::Public,
-        );
+        let module_ref = gen_context.add_module(
+            Some(root_ref),
+            None,
+            Ident::new(&prefix.ident, Span::call_site()),
+            Visibility::Public,
+        );
+
+        let mut sub_modules = treeldr_rust_gen::ModulePathBuilder::new(module_ref);
 
         for (id, node) in model.nodes() {
             if let treeldr::Id::Iri(term) = id {
                 let iri = vocabulary.iri(&term).unwrap();
-                if iri.as_str().strip_prefix(prefix.iri.as_str()).is_some() {
+                if let Some(suffix) = iri.as_str().strip_prefix(prefix.iri.as_str()) {
+                    let path =
+                        treeldr_rust_gen::ModulePathBuilder::split_iri_path(suffix).0;
+
                     if node.is_type() {
                         type_map.insert(
                             TId::new(id),
                             TraitModules {
                                 main: Some(treeldr_rust_gen::module::Parent::Ref(
-                                    module_ref,
+                                    sub_modules.get(&mut gen_context, path, None),
                                 )),
                                 provider: Some(treeldr_rust_gen::module::Parent::Ref(
-                                    providers_module_ref,
+                                    sub_modules.get(
+                                        &mut gen_context,
+                                        path,
+                                        Some(DedicatedSubModule::ClassProviders),
+                                    ),
                                 )),
                                 trait_object: Some(treeldr_rust_gen::module::Parent::Ref(
-                                    trait_objects_module_ref,
+                                    sub_modules.get(
+                                        &mut gen_context,
+                                        path,
+                                        Some(DedicatedSubModule::TraitObjects),
+                                    ),
                                 )),
                             },
                         );
@@ -170,7 +181,11 @@ pub fn main() {
                     if node.is_layout() {
                         layout_map.insert(
                             TId::new(id),
-                            treeldr_rust_gen::module::Parent::Ref(layouts_module_ref),
+                            treeldr_rust_gen::module::Parent::Ref(sub_modules.get(
+                                &mut gen_context,
+                                path,
+                                options.impl_rdf.then_some(DedicatedSubModule::Layouts),
+                            )),
                        );
                    }
                }
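The `Options` value built above is defined in `modules/rust/gen/src/context.rs` below. Its `Default` keeps the previous behavior, so only explicit opt-outs change anything; a small illustration of the semantics:

```rust
use treeldr_rust_gen::Options;

fn main() {
    // `Default` preserves the old behavior: RDF trait impls are generated.
    assert!(Options::default().impl_rdf);

    // `tldr --no-rdf` on the CLI (and `no_rdf` in the macro attribute)
    // translate to an explicit opt-out:
    let options = Options { impl_rdf: false };
    assert!(!options.impl_rdf);
}
```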
@@ -28,10 +39,12 @@ pub struct Context<'a, V, M> {
     types: BTreeMap<TId<treeldr::Type>, Trait>,
 
     anonymous_types: usize,
+
+    options: Options,
 }
 
 impl<'a, V, M> Context<'a, V, M> {
-    pub fn new(model: &'a treeldr::Model<M>, vocabulary: &'a V) -> Self {
+    pub fn new(model: &'a treeldr::Model<M>, vocabulary: &'a V, options: Options) -> Self {
         Self {
             model,
             vocabulary,
@@ -39,9 +52,14 @@ impl<'a, V, M> Context<'a, V, M> {
             layouts: BTreeMap::default(),
             types: BTreeMap::default(),
             anonymous_types: 0,
+            options,
         }
     }
 
+    pub fn options(&self) -> &Options {
+        &self.options
+    }
+
     pub fn next_anonymous_type_ident(&mut self) -> Ident {
         let i = self.anonymous_types;
         self.anonymous_types += 1;
@@ -238,3 +256,83 @@ impl<'a, V, M> Context<'a, V, M> {
         }
     }
 }
+
+pub struct ModulePathBuilder {
+    root: Ref<Module>,
+    by_path: HashMap<String, HashMap<Option<DedicatedSubModule>, Ref<Module>>>,
+}
+
+fn path_delimiter(c: char) -> bool {
+    matches!(c, '/' | ':' | '.')
+}
+
+impl ModulePathBuilder {
+    pub const DELIMITER: fn(char) -> bool = path_delimiter;
+
+    pub fn new(root: Ref<Module>) -> Self {
+        Self {
+            root,
+            by_path: HashMap::new(),
+        }
+    }
+
+    /// Splits an IRI suffix into a module path and a final name, preferring a
+    /// `#` fragment boundary and falling back to the last `/`, `:` or `.`.
+    pub fn split_iri_path(iri: &str) -> (&str, &str) {
+        iri.rsplit_once('#')
+            .or_else(|| iri.rsplit_once(Self::DELIMITER))
+            .unwrap_or(("", iri))
+    }
+
+    pub fn get<V, M>(
+        &mut self,
+        context: &mut Context<V, M>,
+        path: &str,
+        dedicated_submodule: Option<DedicatedSubModule>,
+    ) -> Ref<Module> {
+        if let Some(s) = self.by_path.get(path) {
+            if let Some(r) = s.get(&dedicated_submodule) {
+                return *r;
+            }
+        }
+
+        let (parent, name) = match dedicated_submodule {
+            Some(d) => (self.get(context, path, None), d.name()),
+            None => match path.rsplit_once(Self::DELIMITER) {
+                Some((prefix, name)) => (self.get(context, prefix, None), name),
+                None => (self.root, path),
+            },
+        };
+
+        let r = if name.is_empty() {
+            parent
+        } else {
+            let name = treeldr::Name::new(name).unwrap();
+            let ident =
+                proc_macro2::Ident::new(&name.to_snake_case(), proc_macro2::Span::call_site());
+
+            context.add_module(Some(parent), None, ident, module::Visibility::Public)
+        };
+
+        self.by_path
+            .entry(path.to_string())
+            .or_default()
+            .insert(dedicated_submodule, r);
+        r
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum DedicatedSubModule {
+    ClassProviders,
+    TraitObjects,
+    Layouts,
+}
+
+impl DedicatedSubModule {
+    fn name(&self) -> &'static str {
+        match self {
+            Self::ClassProviders => "provider",
+            Self::TraitObjects => "trait_object",
+            Self::Layouts => "layout",
+        }
+    }
+}
diff --git a/modules/rust/gen/src/error.rs b/modules/rust/gen/src/error.rs
index 0eb7bd00..26427994 100644
--- a/modules/rust/gen/src/error.rs
+++ b/modules/rust/gen/src/error.rs
@@ -29,7 +29,10 @@
             Self::MissingDefaultImpl => {
-                write!(f, "missing `Default` implementation")
+                write!(
+                    f,
+                    "missing `Default` implementation required for `FromRdf` trait implementation"
+                )
             }
             Self::BlankProperty(prop_ref) => {
                 write!(f, "blank property `{}`", prop_ref.id().with(vocabulary))
diff --git a/modules/rust/gen/src/lib.rs b/modules/rust/gen/src/lib.rs
index 719c4928..b775a60b 100644
--- a/modules/rust/gen/src/lib.rs
+++ b/modules/rust/gen/src/lib.rs
@@ -15,7 +15,7 @@ pub mod path;
 pub mod tr;
 pub mod ty;
 
-pub use context::Context;
+pub use context::{Context, DedicatedSubModule, ModulePathBuilder, Options};
 pub use error::Error;
 pub use module::Module;
 pub use path::Path;
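`ModulePathBuilder::split_iri_path` drives the new per-IRI module nesting used by both the CLI and the macro. Its behavior follows directly from the implementation above:

```rust
use treeldr_rust_gen::ModulePathBuilder;

fn main() {
    // A `#` fragment wins over any other delimiter:
    assert_eq!(
        ModulePathBuilder::split_iri_path("terms#Thing"),
        ("terms", "Thing")
    );
    // Otherwise the last '/', ':' or '.' separates path from name:
    assert_eq!(
        ModulePathBuilder::split_iri_path("com.atproto.identity.resolveHandle"),
        ("com.atproto.identity", "resolveHandle")
    );
    // With no delimiter at all, everything is the name:
    assert_eq!(ModulePathBuilder::split_iri_path("Thing"), ("", "Thing"));
}
```

The path half of the split is then turned into one nested module per segment, with `get` memoizing already-created modules per path and per dedicated sub-module kind.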
diff --git a/modules/rust/gen/src/ty.rs b/modules/rust/gen/src/ty.rs
index 1aa9b673..8f8e9960 100644
--- a/modules/rust/gen/src/ty.rs
+++ b/modules/rust/gen/src/ty.rs
@@ -17,7 +17,7 @@ pub mod structure;
 use alias::Alias;
 use enumeration::Enum;
-use params::{Parameters, ParametersValues};
+pub use params::{Parameter, Parameters, ParametersValues};
 use structure::Struct;
 
 #[derive(Debug)]
@@ -133,9 +133,13 @@ impl Type {
         if let Some(layout_ref) = layout {
             let layout = context.model().get(layout_ref).unwrap();
 
-            f(TraitId::FromRdf.impl_for(layout_ref));
-            f(TraitId::TriplesAndValues.impl_for(layout_ref));
-            f(TraitId::IntoJsonLd.impl_for(layout_ref));
+            if context.options().impl_rdf {
+                f(TraitId::FromRdf.impl_for(layout_ref));
+                f(TraitId::IntoJsonLd.impl_for(layout_ref));
+                f(TraitId::TriplesAndValues.impl_for(layout_ref));
+            }
+
+            f(TraitId::IntoJsonLdSyntax.impl_for(layout_ref));
 
             let mut stack: Vec<_> = layout.as_layout().ty().iter().map(|v| **v.value).collect();
diff --git a/modules/rust/gen/src/ty/generate.rs b/modules/rust/gen/src/ty/generate.rs
index 59ec22cf..8a1f459a 100644
--- a/modules/rust/gen/src/ty/generate.rs
+++ b/modules/rust/gen/src/ty/generate.rs
@@ -463,6 +463,8 @@ impl<M> Generate<M> for treeldr::layout::Primitive {
             Self::Url => quote! { ::treeldr_rust_prelude::iref::IriBuf },
             Self::Uri => quote! { ::treeldr_rust_prelude::iref::IriBuf },
             Self::Iri => quote! { ::treeldr_rust_prelude::iref::IriBuf },
+            Self::Bytes => quote! { ::treeldr_rust_prelude::ty::BytesBuf },
+            Self::Cid => quote! { ::treeldr_rust_prelude::ty::CidBuf },
         });
 
         Ok(())
@@ -508,6 +510,8 @@ impl<M> Generate<M> for Referenced<treeldr::layout::Primitive> {
             Primitive::Url => quote! { ::treeldr_rust_prelude::iref::Iri },
             Primitive::Uri => quote! { ::treeldr_rust_prelude::iref::Iri },
             Primitive::Iri => quote! { ::treeldr_rust_prelude::iref::Iri },
+            Primitive::Bytes => quote! { &::treeldr_rust_prelude::ty::Bytes },
+            Primitive::Cid => quote! { &::treeldr_rust_prelude::ty::Cid },
         });
 
         Ok(())
diff --git a/modules/rust/macros/src/lib.rs b/modules/rust/macros/src/lib.rs
index 09311f40..47988329 100644
--- a/modules/rust/macros/src/lib.rs
+++ b/modules/rust/macros/src/lib.rs
@@ -16,6 +16,10 @@ use module::Module;
 pub fn tldr(attr: TokenStream, item: TokenStream) -> TokenStream {
     match module::Inputs::from_stream(attr.into()) {
         Ok(inputs) => {
+            let options = treeldr_rust_gen::Options {
+                impl_rdf: !inputs.no_rdf(),
+            };
+
             let item = syn::parse_macro_input!(item as syn::Item);
             match Module::from_item(item) {
                 Ok(mut module) => {
@@ -47,7 +51,7 @@ pub fn tldr(attr: TokenStream, item: TokenStream) -> TokenStream {
                 ) {
                     Ok(model) => {
                         let mut gen_context =
-                            treeldr_rust_gen::Context::new(&model, &vocabulary);
+                            treeldr_rust_gen::Context::new(&model, &vocabulary, options);
 
                         module.bind(&vocabulary, &mut gen_context);
                         gen_context.run_pre_computations();
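The attribute grammar extended in `module.rs` below accepts comma-separated file names plus an optional bare `no_rdf` identifier, exactly as the t01 test exercises:

```rust
use treeldr_rust_macros::tldr;

// One or more string file names, optionally followed by the bare `no_rdf`
// flag, all comma-separated (this mirrors the t01 test above):
#[tldr("modules/lexicon/tests/t01.lexicon.json", no_rdf)]
pub mod schema {
    #[prefix("lexicon:com.atproto")]
    pub mod atproto {}
}
```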
diff --git a/modules/rust/macros/src/module.rs b/modules/rust/macros/src/module.rs
index a74effe2..28e0d001 100644
--- a/modules/rust/macros/src/module.rs
+++ b/modules/rust/macros/src/module.rs
@@ -1,13 +1,13 @@
 use iref::IriBuf;
 use litrs::Literal;
 use proc_macro2::{Span, TokenStream, TokenTree};
-use quote::{format_ident, quote};
+use quote::quote;
 use rdf_types::Vocabulary;
 use std::path::PathBuf;
 use syn::spanned::Spanned;
 use thiserror::Error;
 use treeldr::{BlankIdIndex, IriIndex, TId};
-use treeldr_rust_gen::tr::TraitModules;
+use treeldr_rust_gen::{tr::TraitModules, DedicatedSubModule, ModulePathBuilder};
 
 pub type GenContext<'a, V> = treeldr_rust_gen::Context<'a, V, treeldr_load::Metadata>;
@@ -38,28 +38,39 @@ pub type GenError = treeldr_rust_gen::Error<treeldr_load::Metadata>;
 
 pub struct Inputs {
     list: Vec<Input>,
+    no_rdf: bool,
 }
 
 impl Inputs {
     pub fn from_stream(tokens: TokenStream) -> Result<Self, (ParseError, Span)> {
         let mut list = Vec::new();
+        let mut no_rdf = false;
+
         let mut tokens = tokens.into_iter();
 
         while let Some(token) = tokens.next() {
-            let (s, span) = token_to_string(token)?;
-            list.push(Input {
-                filename: s.into(),
-                span,
-            });
-
-            match tokens.next() {
-                None => (),
-                Some(TokenTree::Punct(p)) if p.as_char() == ',' => (),
-                Some(token) => return Err((ParseError::UnexpectedToken, token.span())),
+            match token {
+                TokenTree::Ident(id) if id == "no_rdf" => {
+                    no_rdf = true;
+
+                    // Consume an optional separating comma after `no_rdf`.
+                    match tokens.next() {
+                        None => (),
+                        Some(TokenTree::Punct(p)) if p.as_char() == ',' => (),
+                        Some(token) => return Err((ParseError::UnexpectedToken, token.span())),
+                    }
+                }
+                token => {
+                    let (s, span) = token_to_string(token)?;
+                    list.push(Input {
+                        filename: s.into(),
+                        span,
+                    });
+
+                    match tokens.next() {
+                        None => (),
+                        Some(TokenTree::Punct(p)) if p.as_char() == ',' => (),
+                        Some(token) => return Err((ParseError::UnexpectedToken, token.span())),
+                    }
+                }
             }
         }
 
-        Ok(Self { list })
+        Ok(Self { list, no_rdf })
+    }
+
+    pub fn no_rdf(&self) -> bool {
+        self.no_rdf
     }
 
     fn iter(&self) -> std::slice::Iter<Input> {
@@ -144,24 +155,8 @@ impl Module {
                 prefix.ident.clone(),
                 prefix.vis.clone(),
             );
-            let providers_module_ref = context.add_module(
-                Some(module_ref),
-                None,
-                format_ident!("provider"),
-                treeldr_rust_gen::module::Visibility::Public,
-            );
-            let trait_objects_module_ref = context.add_module(
-                Some(module_ref),
-                None,
-                format_ident!("trait_object"),
-                treeldr_rust_gen::module::Visibility::Public,
-            );
-            let layouts_module_ref = context.add_module(
-                Some(module_ref),
-                None,
-                format_ident!("layout"),
-                treeldr_rust_gen::module::Visibility::Public,
-            );
+
+            let mut sub_modules = ModulePathBuilder::new(module_ref);
 
             prefix.module = Some(module_ref);
 
@@ -169,30 +164,47 @@ impl Module {
                 if let treeldr::Id::Iri(term) = id {
                     let iri = vocabulary.iri(&term).unwrap();
 
-                    if iri
+                    if let Some(suffix) = iri
                         .as_str()
                         .strip_prefix(prefix.prefix_attrs.iri.0.as_str())
-                        .is_some()
                     {
+                        let path = treeldr_rust_gen::ModulePathBuilder::split_iri_path(suffix).0;
+
                         if node.is_type() {
                             type_map.insert(
                                 TId::new(id),
                                 TraitModules {
-                                    main: Some(treeldr_rust_gen::module::Parent::Ref(module_ref)),
+                                    main: Some(treeldr_rust_gen::module::Parent::Ref(
+                                        sub_modules.get(context, path, None),
+                                    )),
                                     provider: Some(treeldr_rust_gen::module::Parent::Ref(
-                                        providers_module_ref,
+                                        sub_modules.get(
+                                            context,
+                                            path,
+                                            Some(DedicatedSubModule::ClassProviders),
+                                        ),
                                     )),
                                     trait_object: Some(treeldr_rust_gen::module::Parent::Ref(
-                                        trait_objects_module_ref,
+                                        sub_modules.get(
+                                            context,
+                                            path,
+                                            Some(DedicatedSubModule::TraitObjects),
+                                        ),
                                     )),
                                 },
                             );
                         }
 
                         if node.is_layout() {
+                            let sub_module = context
+                                .options()
+                                .impl_rdf
+                                .then_some(DedicatedSubModule::Layouts);
                             layout_map.insert(
                                 TId::new(id),
-                                treeldr_rust_gen::module::Parent::Ref(layouts_module_ref),
+                                treeldr_rust_gen::module::Parent::Ref(
+                                    sub_modules.get(context, path, sub_module),
+                                ),
                             );
                         }
                     }
diff --git a/modules/rust/prelude/src/ty.rs b/modules/rust/prelude/src/ty.rs
index 7a3d8396..0186e414 100644
--- a/modules/rust/prelude/src/ty.rs
+++ b/modules/rust/prelude/src/ty.rs
@@ -3,3 +3,9 @@ pub use xsd_types::{
     HexBinaryBuf as HexBytesBuf, Integer, NegativeInteger, NonNegativeInteger, NonPositiveInteger,
     PositiveInteger,
 };
+
+pub type Bytes = [u8];
+pub type BytesBuf = Vec<u8>;
+
+pub type Cid = str;
+pub type CidBuf = String;
diff --git a/schema/tldr.nq b/schema/tldr.nq
index 3fd9f1e4..13a582ef 100644
--- a/schema/tldr.nq
+++ b/schema/tldr.nq
@@ -42,4 +42,14 @@
  .
  .
  .
- .
\ No newline at end of file
+ .
+ .
+ .
+
+ .
+ .
+ .
+
+ .
+ .
+ .
\ No newline at end of file
diff --git a/tldrc/src/main.rs b/tldrc/src/main.rs
index 95dfeb73..b35efd93 100644
--- a/tldrc/src/main.rs
+++ b/tldrc/src/main.rs
@@ -15,12 +15,12 @@ type BuildContext = treeldr_build::Context;
 #[clap(name="treeldr", author, version, about, long_about = None)]
 struct Args {
     /// Input files.
-    #[clap(short = 'i', multiple_occurrences = true)]
+    #[clap(short = 'i')]
     filenames: Vec<PathBuf>,
 
     /// Sets the level of verbosity.
-    #[clap(short, long = "verbose", parse(from_occurrences))]
-    verbosity: usize,
+    #[clap(short, long = "verbose", action = clap::ArgAction::Count)]
+    verbosity: u8,
 
     #[clap(subcommand)]
     command: Option<Command>,
 
@@ -47,7 +47,10 @@ async fn main() {
     let args = Args::parse();
 
     // Init logger.
-    stderrlog::new().verbosity(args.verbosity).init().unwrap();
+    stderrlog::new()
+        .verbosity(args.verbosity as usize)
+        .init()
+        .unwrap();
 
     let mut files = load::Files::<PathBuf>::new();
     let mut documents = Vec::new();
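Finally, a sketch of how the new `bytes` and `cid-link` primitives surface in generated code through the `treeldr-rust-prelude` aliases added above: owned positions use the `*Buf` aliases, referenced positions the unsized forms. The `Blob` struct and its values here are hypothetical, not actual macro output:

```rust
use treeldr_rust_prelude::ty::{Bytes, BytesBuf, Cid, CidBuf};

// Hypothetical generated shape for a layout with bytes and cid-link fields.
struct Blob {
    data: BytesBuf, // = Vec<u8>
    link: CidBuf,   // = String
}

// Borrowed ("referenced") positions use the unsized aliases.
fn byte_len(data: &Bytes, _link: &Cid) -> usize {
    data.len()
}

fn main() {
    let blob = Blob {
        data: vec![0xde, 0xad],
        link: "bafyreib2rxk3rh6kzwq".to_string(), // made-up CID value
    };
    assert_eq!(byte_len(&blob.data, &blob.link), 2);
}
```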