jspg additional properties bug squashed

2025-09-30 19:56:34 -04:00
parent cc04f38c14
commit d6b34c99bb
26 changed files with 6340 additions and 6328 deletions


@@ -12,6 +12,7 @@ categories = ["web-programming"]
exclude = [ "tests", ".github", ".gitmodules" ]
[dependencies]
pgrx = "0.15.0"
serde = "1"
serde_json = "1"
regex = "1.10.3"
@@ -26,6 +27,7 @@ ahash = "0.8.3"
appendlist = "1.4"
[dev-dependencies]
pgrx-tests = "0.15.0"
serde = { version = "1.0", features = ["derive"] }
serde_yaml = "0.9"
ureq = "2.12"
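The only substantive change visible in this hunk is the new `serde` dependency (plain in `[dependencies]`, with the `derive` feature in `[dev-dependencies]`). A minimal sketch of what the derive feature enables in the test suite, assuming a hypothetical fixture struct that is not taken from this repository:

    use serde::Deserialize;

    // Hypothetical test fixture; the field names are illustrative only.
    #[derive(Debug, Deserialize)]
    struct Case {
        description: String,
        valid: bool,
    }

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // serde_yaml is already listed under [dev-dependencies]
        let case: Case = serde_yaml::from_str("description: additionalProperties check\nvalid: true\n")?;
        println!("{} => {}", case.description, case.valid);
        Ok(())
    }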

File diff suppressed because it is too large


@@ -10,28 +10,28 @@ use serde_json::Value;
/// Defines Decoder for `contentEncoding`.
#[derive(Clone, Copy)]
pub struct Decoder {
/// Name of the encoding
pub name: &'static str,
/// Decodes given string to bytes
#[allow(clippy::type_complexity)]
pub func: fn(s: &str) -> Result<Vec<u8>, Box<dyn Error>>,
}
pub(crate) static DECODERS: Lazy<HashMap<&'static str, Decoder>> = Lazy::new(|| {
let mut m = HashMap::<&'static str, Decoder>::new();
m.insert(
"base64",
Decoder {
name: "base64",
func: decode_base64,
},
);
m
});
fn decode_base64(s: &str) -> Result<Vec<u8>, Box<dyn Error>> {
Ok(base64::engine::general_purpose::STANDARD.decode(s)?)
}
// mediatypes --
@@ -39,44 +39,44 @@ fn decode_base64(s: &str) -> Result<Vec<u8>, Box<dyn Error>> {
/// Defines Mediatype for `contentMediaType`.
#[derive(Clone, Copy)]
pub struct MediaType {
/// Name of this media-type as defined in RFC 2046.
/// Example: `application/json`
pub name: &'static str,
/// whether this media type can be deserialized to json. If so it can
/// be validated by `contentSchema` keyword.
pub json_compatible: bool,
/**
Check whether `bytes` conforms to this media-type.
Should return `Ok(Some(Value))` if `deserialize` is `true`, otherwise it can return `Ok(None)`.
Ideally you could deserialize to `serde::de::IgnoredAny` if `deserialize` is `false` to gain
some performance.
`deserialize` is always `false` if `json_compatible` is `false`.
*/
#[allow(clippy::type_complexity)]
pub func: fn(bytes: &[u8], deserialize: bool) -> Result<Option<Value>, Box<dyn Error>>,
}
pub(crate) static MEDIA_TYPES: Lazy<HashMap<&'static str, MediaType>> = Lazy::new(|| {
let mut m = HashMap::<&'static str, MediaType>::new();
m.insert(
"application/json",
MediaType {
name: "application/json",
json_compatible: true,
func: check_json,
},
);
m
});
fn check_json(bytes: &[u8], deserialize: bool) -> Result<Option<Value>, Box<dyn Error>> {
if deserialize {
return Ok(Some(serde_json::from_slice(bytes)?));
}
serde_json::from_slice::<IgnoredAny>(bytes)?;
Ok(None)
}
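The `Decoder` and `MediaType` tables above are what `contentEncoding`/`contentMediaType` validation is built on: a decoder turns the string into bytes, and a media-type check optionally deserializes those bytes. A standalone sketch of that two-step pipeline, using the same `base64` and `serde_json` calls as the code above (illustrative only, not this crate's public API):

    use base64::Engine;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // `{"a":1}` encoded as base64, i.e. an instance value with `contentEncoding: base64`
        let encoded = "eyJhIjoxfQ==";
        // step 1: contentEncoding -> bytes (same call as decode_base64 above)
        let bytes = base64::engine::general_purpose::STANDARD.decode(encoded)?;
        // step 2: contentMediaType application/json -> parse (same as check_json above)
        let value: serde_json::Value = serde_json::from_slice(&bytes)?;
        assert_eq!(value["a"], serde_json::json!(1));
        Ok(())
    }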

File diff suppressed because it is too large


@@ -6,192 +6,192 @@ use regex_syntax::ast::{self, *};
// convert ecma regex to rust regex if possible
// see https://262.ecma-international.org/11.0/#sec-regexp-regular-expression-objects
pub(crate) fn convert(pattern: &str) -> Result<Cow<'_, str>, Box<dyn std::error::Error>> {
let mut pattern = Cow::Borrowed(pattern);
let mut ast = loop {
match Parser::new().parse(pattern.as_ref()) {
Ok(ast) => break ast,
Err(e) => {
if let Some(s) = fix_error(&e) {
pattern = Cow::Owned(s);
} else {
Err(e)?;
}
}
}
};
loop {
let translator = Translator {
pat: pattern.as_ref(),
out: None,
};
if let Some(updated_pattern) = ast::visit(&ast, translator)? {
match Parser::new().parse(&updated_pattern) {
Ok(updated_ast) => {
pattern = Cow::Owned(updated_pattern);
ast = updated_ast;
}
Err(e) => {
debug_assert!(
false,
"ecma::translate changed {:?} to {:?}: {e}",
pattern, updated_pattern
);
break;
}
}
} else {
break;
}
}
Ok(pattern)
}
fn fix_error(e: &Error) -> Option<String> {
if let ErrorKind::EscapeUnrecognized = e.kind() {
let (start, end) = (e.span().start.offset, e.span().end.offset);
let s = &e.pattern()[start..end];
if let r"\c" = s {
// handle \c{control_letter}
if let Some(control_letter) = e.pattern()[end..].chars().next() {
if control_letter.is_ascii_alphabetic() {
return Some(format!(
"{}{}{}",
&e.pattern()[..start],
((control_letter as u8) % 32) as char,
&e.pattern()[end + 1..],
));
}
}
}
}
None
}
/**
handles the following translations:
- \d should match ascii digits only, so replace with [0-9]
- \D should match everything but ascii digits, so replace with [^0-9]
- \w should match ascii word characters only, so replace with [a-zA-Z0-9_]
- \W should match everything but ascii word characters, so replace with [^a-zA-Z0-9_]
- \s and \S differences
- \a is not an ECMA 262 control escape
*/
struct Translator<'a> {
pat: &'a str,
out: Option<String>,
}
impl Translator<'_> {
fn replace(&mut self, span: &Span, with: &str) {
let (start, end) = (span.start.offset, span.end.offset);
self.out = Some(format!("{}{with}{}", &self.pat[..start], &self.pat[end..]));
}
fn replace_class_class(&mut self, perl: &ClassPerl) {
match perl.kind {
ClassPerlKind::Digit => {
self.replace(&perl.span, if perl.negated { "[^0-9]" } else { "[0-9]" });
}
ClassPerlKind::Word => {
let with = &if perl.negated {
"[^A-Za-z0-9_]"
} else {
"[A-Za-z0-9_]"
};
self.replace(&perl.span, with);
}
ClassPerlKind::Space => {
let with = &if perl.negated {
"[^ \t\n\r\u{000b}\u{000c}\u{00a0}\u{feff}\u{2003}\u{2029}]"
} else {
"[ \t\n\r\u{000b}\u{000c}\u{00a0}\u{feff}\u{2003}\u{2029}]"
};
self.replace(&perl.span, with);
}
}
}
}
impl Visitor for Translator<'_> {
type Output = Option<String>;
type Err = &'static str;
fn finish(self) -> Result<Self::Output, Self::Err> {
Ok(self.out)
}
fn visit_class_set_item_pre(&mut self, ast: &ast::ClassSetItem) -> Result<(), Self::Err> {
if let ClassSetItem::Perl(perl) = ast {
self.replace_class_class(perl);
}
Ok(())
}
fn visit_post(&mut self, ast: &Ast) -> Result<(), Self::Err> {
if self.out.is_some() {
return Ok(());
}
match ast {
Ast::ClassPerl(perl) => {
self.replace_class_class(perl);
}
Ast::Literal(ref literal) => {
if let Literal {
kind: LiteralKind::Special(SpecialLiteralKind::Bell),
..
} = literal.as_ref()
{
return Err("\\a is not an ECMA 262 control escape");
}
}
_ => (),
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ecma_compat_valid() {
// println!("{:#?}", Parser::new().parse(r#"a\a"#));
let tests = [
(r"ab\cAcde\cBfg", "ab\u{1}cde\u{2}fg"), // \c{control_letter}
(r"\\comment", r"\\comment"), // there is no \c
(r"ab\def", r#"ab[0-9]ef"#), // \d
(r"ab[a-z\d]ef", r#"ab[a-z[0-9]]ef"#), // \d inside classSet
(r"ab\Def", r#"ab[^0-9]ef"#), // \d
(r"ab[a-z\D]ef", r#"ab[a-z[^0-9]]ef"#), // \D inside classSet
];
for (input, want) in tests {
match convert(input) {
Ok(got) => {
if got.as_ref() != want {
panic!("convert({input:?}): got: {got:?}, want: {want:?}");
}
}
Err(e) => {
panic!("convert({input:?}) failed: {e}");
}
}
}
}
#[test]
fn test_ecma_compat_invalid() {
// println!("{:#?}", Parser::new().parse(r#"a\a"#));
let tests = [
r"\c\n", // \c{invalid_char}
r"abc\adef", // \a is not valid
];
for input in tests {
if convert(input).is_ok() {
panic!("convert({input:?}) mut fail");
}
}
}
}
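The point of `convert` above is that ECMA 262 character classes are ASCII-only, while Rust's `regex` classes are Unicode-aware by default. A small sketch (not part of this crate) that checks that difference for the `ab\def` case from the tests, using the `regex` crate already listed in Cargo.toml; the anchors are added here only for the demonstration:

    use regex::Regex;

    fn main() {
        let rust_default = Regex::new(r"^ab\def$").unwrap(); // \d here is Unicode-aware
        let translated = Regex::new(r"^ab[0-9]ef$").unwrap(); // ASCII class, as convert() rewrites \d per the tests
        let devanagari = "ab३ef"; // U+0969 DEVANAGARI DIGIT THREE
        assert!(rust_default.is_match(devanagari)); // Unicode \d matches it
        assert!(!translated.is_match(devanagari)); // the ECMA-style [0-9] does not
        assert!(translated.is_match("ab3ef"));
    }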

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,7 +1,7 @@
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
error::Error,
};
#[cfg(not(target_arch = "wasm32"))]
@@ -13,16 +13,16 @@ use serde_json::Value;
use url::Url;
use crate::{
compiler::CompileError,
draft::{latest, Draft},
util::split,
UrlPtr,
};
/// A trait for loading json from given `url`
pub trait UrlLoader {
/// Loads json from given absolute `url`.
fn load(&self, url: &str) -> Result<Value, Box<dyn Error>>;
}
// --
@@ -32,212 +32,212 @@ pub struct FileLoader;
#[cfg(not(target_arch = "wasm32"))]
impl UrlLoader for FileLoader {
fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
let url = Url::parse(url)?;
let path = url.to_file_path().map_err(|_| "invalid file path")?;
let file = File::open(path)?;
Ok(serde_json::from_reader(file)?)
}
}
// --
#[derive(Default)]
pub struct SchemeUrlLoader {
loaders: HashMap<&'static str, Box<dyn UrlLoader>>,
}
impl SchemeUrlLoader {
pub fn new() -> Self {
Self::default()
}
/// Registers [`UrlLoader`] for given url `scheme`
pub fn register(&mut self, scheme: &'static str, url_loader: Box<dyn UrlLoader>) {
self.loaders.insert(scheme, url_loader);
}
}
impl UrlLoader for SchemeUrlLoader {
fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
let url = Url::parse(url)?;
let Some(loader) = self.loaders.get(url.scheme()) else {
return Err(CompileError::UnsupportedUrlScheme {
url: url.as_str().to_owned(),
}
.into());
};
loader.load(url.as_str())
}
}
// --
pub(crate) struct DefaultUrlLoader {
doc_map: RefCell<HashMap<Url, usize>>,
doc_list: AppendList<Value>,
loader: Box<dyn UrlLoader>,
}
impl DefaultUrlLoader {
#[cfg_attr(target_arch = "wasm32", allow(unused_mut))]
pub fn new() -> Self {
let mut loader = SchemeUrlLoader::new();
#[cfg(not(target_arch = "wasm32"))]
loader.register("file", Box::new(FileLoader));
Self {
doc_map: Default::default(),
doc_list: AppendList::new(),
loader: Box::new(loader),
}
}
pub fn get_doc(&self, url: &Url) -> Option<&Value> {
self.doc_map
.borrow()
.get(url)
.and_then(|i| self.doc_list.get(*i))
}
pub fn add_doc(&self, url: Url, json: Value) {
if self.get_doc(&url).is_some() {
return;
}
self.doc_list.push(json);
self.doc_map
.borrow_mut()
.insert(url, self.doc_list.len() - 1);
}
pub fn use_loader(&mut self, loader: Box<dyn UrlLoader>) {
self.loader = loader;
}
pub(crate) fn load(&self, url: &Url) -> Result<&Value, CompileError> {
if let Some(doc) = self.get_doc(url) {
return Ok(doc);
}
// check in STD_METAFILES
let doc = if let Some(content) = load_std_meta(url.as_str()) {
serde_json::from_str::<Value>(content).map_err(|e| CompileError::LoadUrlError {
url: url.to_string(),
src: e.into(),
})?
} else {
self.loader
.load(url.as_str())
.map_err(|src| CompileError::LoadUrlError {
url: url.as_str().to_owned(),
src,
})?
};
self.add_doc(url.clone(), doc);
self.get_doc(url)
.ok_or(CompileError::Bug("doc must exist".into()))
}
pub(crate) fn get_draft(
&self,
up: &UrlPtr,
doc: &Value,
default_draft: &'static Draft,
mut cycle: HashSet<Url>,
) -> Result<&'static Draft, CompileError> {
let Value::Object(obj) = &doc else {
return Ok(default_draft);
};
let Some(Value::String(sch)) = obj.get("$schema") else {
return Ok(default_draft);
};
if let Some(draft) = Draft::from_url(sch) {
return Ok(draft);
}
let (sch, _) = split(sch);
let sch = Url::parse(sch).map_err(|e| CompileError::InvalidMetaSchemaUrl {
url: up.to_string(),
src: e.into(),
})?;
if up.ptr.is_empty() && sch == up.url {
return Err(CompileError::UnsupportedDraft { url: sch.into() });
}
if !cycle.insert(sch.clone()) {
return Err(CompileError::MetaSchemaCycle { url: sch.into() });
}
let doc = self.load(&sch)?;
let up = UrlPtr {
url: sch,
ptr: "".into(),
};
self.get_draft(&up, doc, default_draft, cycle)
}
pub(crate) fn get_meta_vocabs(
&self,
doc: &Value,
draft: &'static Draft,
) -> Result<Option<Vec<String>>, CompileError> {
let Value::Object(obj) = &doc else {
return Ok(None);
};
let Some(Value::String(sch)) = obj.get("$schema") else {
return Ok(None);
};
if Draft::from_url(sch).is_some() {
return Ok(None);
}
let (sch, _) = split(sch);
let sch = Url::parse(sch).map_err(|e| CompileError::ParseUrlError {
url: sch.to_string(),
src: e.into(),
})?;
let doc = self.load(&sch)?;
draft.get_vocabs(&sch, doc)
}
}
pub(crate) static STD_METAFILES: Lazy<HashMap<String, &str>> = Lazy::new(|| {
let mut files = HashMap::new();
macro_rules! add {
($path:expr) => {
files.insert(
$path["metaschemas/".len()..].to_owned(),
include_str!($path),
);
};
}
add!("metaschemas/draft-04/schema");
add!("metaschemas/draft-06/schema");
add!("metaschemas/draft-07/schema");
add!("metaschemas/draft/2019-09/schema");
add!("metaschemas/draft/2019-09/meta/core");
add!("metaschemas/draft/2019-09/meta/applicator");
add!("metaschemas/draft/2019-09/meta/validation");
add!("metaschemas/draft/2019-09/meta/meta-data");
add!("metaschemas/draft/2019-09/meta/format");
add!("metaschemas/draft/2019-09/meta/content");
add!("metaschemas/draft/2020-12/schema");
add!("metaschemas/draft/2020-12/meta/core");
add!("metaschemas/draft/2020-12/meta/applicator");
add!("metaschemas/draft/2020-12/meta/unevaluated");
add!("metaschemas/draft/2020-12/meta/validation");
add!("metaschemas/draft/2020-12/meta/meta-data");
add!("metaschemas/draft/2020-12/meta/content");
add!("metaschemas/draft/2020-12/meta/format-annotation");
add!("metaschemas/draft/2020-12/meta/format-assertion");
files
});
fn load_std_meta(url: &str) -> Option<&'static str> {
let meta = url
.strip_prefix("http://json-schema.org/")
.or_else(|| url.strip_prefix("https://json-schema.org/"));
if let Some(meta) = meta {
if meta == "schema" {
return load_std_meta(latest().url);
}
return STD_METAFILES.get(meta).cloned();
}
None
}
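`UrlLoader` plus `SchemeUrlLoader::register` above is the extension point for fetching remote schemas. A hedged sketch of a custom HTTP loader built on `ureq` (which this repo only lists under dev-dependencies), assuming the crate's `UrlLoader` and `SchemeUrlLoader` are in scope; it is an illustration of the trait, not shipped code:

    use std::error::Error;
    use serde_json::Value;

    struct HttpLoader;

    impl UrlLoader for HttpLoader {
        // Same signature as the trait defined above.
        fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
            let body = ureq::get(url).call()?.into_string()?;
            Ok(serde_json::from_str(&body)?)
        }
    }

    fn register_http(loader: &mut SchemeUrlLoader) {
        loader.register("http", Box::new(HttpLoader));
        loader.register("https", Box::new(HttpLoader));
    }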

File diff suppressed because it is too large


@@ -6,123 +6,123 @@ use serde_json::Value;
use url::Url;
pub(crate) struct Root {
pub(crate) draft: &'static Draft,
pub(crate) resources: HashMap<JsonPointer, Resource>, // ptr => _
pub(crate) url: Url,
pub(crate) meta_vocabs: Option<Vec<String>>,
}
impl Root {
pub(crate) fn has_vocab(&self, name: &str) -> bool {
if self.draft.version < 2019 || name == "core" {
return true;
}
if let Some(vocabs) = &self.meta_vocabs {
return vocabs.iter().any(|s| s == name);
}
self.draft.default_vocabs.contains(&name)
}
fn resolve_fragment_in(&self, frag: &Fragment, res: &Resource) -> Result<UrlPtr, CompileError> {
let ptr = match frag {
Fragment::Anchor(anchor) => {
let Some(ptr) = res.anchors.get(anchor) else {
return Err(CompileError::AnchorNotFound {
url: self.url.to_string(),
reference: UrlFrag::format(&res.id, frag.as_str()),
});
};
ptr.clone()
}
Fragment::JsonPointer(ptr) => res.ptr.concat(ptr),
};
Ok(UrlPtr {
url: self.url.clone(),
ptr,
})
}
pub(crate) fn resolve_fragment(&self, frag: &Fragment) -> Result<UrlPtr, CompileError> {
let res = self.resources.get("").ok_or(CompileError::Bug(
format!("no root resource found for {}", self.url).into(),
))?;
self.resolve_fragment_in(frag, res)
}
// resolves `UrlFrag` to `UrlPtr` from root.
// returns `None` if it is external.
pub(crate) fn resolve(&self, uf: &UrlFrag) -> Result<Option<UrlPtr>, CompileError> {
let res = {
if uf.url == self.url {
self.resources.get("").ok_or(CompileError::Bug(
format!("no root resource found for {}", self.url).into(),
))?
} else {
// look for resource with id==uf.url
let Some(res) = self.resources.values().find(|res| res.id == uf.url) else {
return Ok(None); // external url
};
res
}
};
self.resolve_fragment_in(&uf.frag, res).map(Some)
}
pub(crate) fn resource(&self, ptr: &JsonPointer) -> &Resource {
let mut ptr = ptr.as_str();
loop {
if let Some(res) = self.resources.get(ptr) {
return res;
}
let Some((prefix, _)) = ptr.rsplit_once('/') else {
break;
};
ptr = prefix;
}
self.resources.get("").expect("root resource should exist")
}
pub(crate) fn base_url(&self, ptr: &JsonPointer) -> &Url {
&self.resource(ptr).id
}
pub(crate) fn add_subschema(
&mut self,
doc: &Value,
ptr: &JsonPointer,
) -> Result<(), CompileError> {
let v = ptr.lookup(doc, &self.url)?;
let base_url = self.base_url(ptr).clone();
self.draft
.collect_resources(v, &base_url, ptr.clone(), &self.url, &mut self.resources)?;
// collect anchors
if !self.resources.contains_key(ptr) {
let res = self.resource(ptr);
if let Some(res) = self.resources.get_mut(&res.ptr.clone()) {
self.draft.collect_anchors(v, ptr, res, &self.url)?;
}
}
Ok(())
}
}
#[derive(Debug)]
pub(crate) struct Resource {
pub(crate) ptr: JsonPointer, // from root
pub(crate) id: Url,
pub(crate) anchors: HashMap<Anchor, JsonPointer>, // anchor => ptr
pub(crate) dynamic_anchors: HashSet<Anchor>,
}
impl Resource {
pub(crate) fn new(ptr: JsonPointer, id: Url) -> Self {
Self {
ptr,
id,
anchors: HashMap::new(),
dynamic_anchors: HashSet::new(),
}
}
}


@@ -8,100 +8,100 @@ use url::Url;
// --
pub(crate) struct Roots {
pub(crate) default_draft: &'static Draft,
map: HashMap<Url, Root>,
pub(crate) loader: DefaultUrlLoader,
}
impl Roots {
fn new() -> Self {
Self {
default_draft: latest(),
map: Default::default(),
loader: DefaultUrlLoader::new(),
}
}
}
impl Default for Roots {
fn default() -> Self {
Self::new()
}
}
impl Roots {
pub(crate) fn get(&self, url: &Url) -> Option<&Root> {
self.map.get(url)
}
pub(crate) fn resolve_fragment(&mut self, uf: UrlFrag) -> Result<UrlPtr, CompileError> {
self.or_load(uf.url.clone())?;
let Some(root) = self.map.get(&uf.url) else {
return Err(CompileError::Bug("or_load didn't add".into()));
};
root.resolve_fragment(&uf.frag)
}
pub(crate) fn ensure_subschema(&mut self, up: &UrlPtr) -> Result<(), CompileError> {
self.or_load(up.url.clone())?;
let Some(root) = self.map.get_mut(&up.url) else {
return Err(CompileError::Bug("or_load didn't add".into()));
};
if !root.draft.is_subschema(up.ptr.as_str()) {
let doc = self.loader.load(&root.url)?;
let v = up.ptr.lookup(doc, &up.url)?;
root.draft.validate(up, v)?;
root.add_subschema(doc, &up.ptr)?;
}
Ok(())
}
pub(crate) fn or_load(&mut self, url: Url) -> Result<(), CompileError> {
debug_assert!(url.fragment().is_none(), "trying to add root with fragment");
if self.map.contains_key(&url) {
return Ok(());
}
let doc = self.loader.load(&url)?;
let r = self.create_root(url.clone(), doc)?;
self.map.insert(url, r);
Ok(())
}
pub(crate) fn create_root(&self, url: Url, doc: &Value) -> Result<Root, CompileError> {
let draft = {
let up = UrlPtr {
url: url.clone(),
ptr: "".into(),
};
self.loader
.get_draft(&up, doc, self.default_draft, HashSet::new())?
};
let vocabs = self.loader.get_meta_vocabs(doc, draft)?;
let resources = {
let mut m = HashMap::default();
draft.collect_resources(doc, &url, "".into(), &url, &mut m)?;
m
};
if !matches!(url.host_str(), Some("json-schema.org")) {
draft.validate(
&UrlPtr {
url: url.clone(),
ptr: "".into(),
},
doc,
)?;
}
Ok(Root {
draft,
resources,
url: url.clone(),
meta_vocabs: vocabs,
})
}
pub(crate) fn insert(&mut self, roots: &mut HashMap<Url, Root>) {
self.map.extend(roots.drain());
}
}


@@ -1,8 +1,8 @@
use std::{
borrow::{Borrow, Cow},
fmt::Display,
hash::{Hash, Hasher},
str::FromStr,
};
use ahash::{AHashMap, AHasher};
@@ -19,112 +19,112 @@ pub(crate) struct JsonPointer(pub(crate) String);
impl JsonPointer {
pub(crate) fn escape(token: &str) -> Cow<'_, str> {
const SPECIAL: [char; 2] = ['~', '/'];
if token.contains(SPECIAL) {
token.replace('~', "~0").replace('/', "~1").into()
} else {
token.into()
}
}
pub(crate) fn unescape(mut tok: &str) -> Result<Cow<'_, str>, ()> {
let Some(mut tilde) = tok.find('~') else {
return Ok(Cow::Borrowed(tok));
};
let mut s = String::with_capacity(tok.len());
loop {
s.push_str(&tok[..tilde]);
tok = &tok[tilde + 1..];
match tok.chars().next() {
Some('1') => s.push('/'),
Some('0') => s.push('~'),
_ => return Err(()),
}
tok = &tok[1..];
let Some(i) = tok.find('~') else {
s.push_str(tok);
break;
};
tilde = i;
}
Ok(Cow::Owned(s))
}
pub(crate) fn lookup<'a>(
&self,
mut v: &'a Value,
v_url: &Url,
) -> Result<&'a Value, CompileError> {
for tok in self.0.split('/').skip(1) {
let Ok(tok) = Self::unescape(tok) else {
let loc = UrlFrag::format(v_url, self.as_str());
return Err(CompileError::InvalidJsonPointer(loc));
};
match v {
Value::Object(obj) => {
if let Some(pvalue) = obj.get(tok.as_ref()) {
v = pvalue;
continue;
}
}
Value::Array(arr) => {
if let Ok(i) = usize::from_str(tok.as_ref()) {
if let Some(item) = arr.get(i) {
v = item;
continue;
}
};
}
_ => {}
}
let loc = UrlFrag::format(v_url, self.as_str());
return Err(CompileError::JsonPointerNotFound(loc));
}
Ok(v)
}
pub(crate) fn as_str(&self) -> &str {
&self.0
}
pub(crate) fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub(crate) fn concat(&self, next: &Self) -> Self {
JsonPointer(format!("{}{}", self.0, next.0))
}
pub(crate) fn append(&self, tok: &str) -> Self {
Self(format!("{}/{}", self, Self::escape(tok)))
}
pub(crate) fn append2(&self, tok1: &str, tok2: &str) -> Self {
Self(format!(
"{}/{}/{}",
self,
Self::escape(tok1),
Self::escape(tok2)
))
}
}
impl Display for JsonPointer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl Borrow<str> for JsonPointer {
fn borrow(&self) -> &str {
&self.0
}
}
impl From<&str> for JsonPointer {
fn from(value: &str) -> Self {
Self(value.into())
}
}
// --
@@ -133,297 +133,297 @@ impl From<&str> for JsonPointer {
pub(crate) struct Anchor(pub(crate) String);
impl Display for Anchor {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl Borrow<str> for Anchor {
fn borrow(&self) -> &str {
&self.0
}
}
impl From<&str> for Anchor {
fn from(value: &str) -> Self {
Self(value.into())
}
}
// --
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) enum Fragment {
Anchor(Anchor),
JsonPointer(JsonPointer),
}
impl Fragment {
pub(crate) fn split(s: &str) -> Result<(&str, Fragment), CompileError> {
let (u, frag) = split(s);
let frag = percent_decode_str(frag)
.decode_utf8()
.map_err(|src| CompileError::ParseUrlError {
url: s.to_string(),
src: src.into(),
})?
.to_string();
let frag = if frag.is_empty() || frag.starts_with('/') {
Fragment::JsonPointer(JsonPointer(frag))
} else {
Fragment::Anchor(Anchor(frag))
};
Ok((u, frag))
}
pub(crate) fn encode(frag: &str) -> String {
// https://url.spec.whatwg.org/#fragment-percent-encode-set
const FRAGMENT: &AsciiSet = &CONTROLS
.add(b'%')
.add(b' ')
.add(b'"')
.add(b'<')
.add(b'>')
.add(b'`');
percent_encoding::utf8_percent_encode(frag, FRAGMENT).to_string()
}
pub(crate) fn as_str(&self) -> &str {
match self {
Fragment::Anchor(s) => &s.0,
Fragment::JsonPointer(s) => &s.0,
}
}
}
// --
#[derive(Clone)]
pub(crate) struct UrlFrag {
pub(crate) url: Url,
pub(crate) frag: Fragment,
}
impl UrlFrag {
pub(crate) fn absolute(input: &str) -> Result<UrlFrag, CompileError> {
let (u, frag) = Fragment::split(input)?;
// note: windows drive letter is treated as url scheme by url parser
#[cfg(not(target_arch = "wasm32"))]
if std::env::consts::OS == "windows" && starts_with_windows_drive(u) {
let url = Url::from_file_path(u)
.map_err(|_| CompileError::Bug(format!("failed to convert {u} into url").into()))?;
return Ok(UrlFrag { url, frag });
}
match Url::parse(u) {
Ok(url) => Ok(UrlFrag { url, frag }),
#[cfg(not(target_arch = "wasm32"))]
Err(url::ParseError::RelativeUrlWithoutBase) => {
let p = std::path::absolute(u).map_err(|e| CompileError::ParseUrlError {
url: u.to_owned(),
src: e.into(),
})?;
let url = Url::from_file_path(p).map_err(|_| {
CompileError::Bug(format!("failed to convert {u} into url").into())
})?;
Ok(UrlFrag { url, frag })
}
Err(e) => Err(CompileError::ParseUrlError {
url: u.to_owned(),
src: e.into(),
}),
}
}
pub(crate) fn join(url: &Url, input: &str) -> Result<UrlFrag, CompileError> {
let (input, frag) = Fragment::split(input)?;
if input.is_empty() {
return Ok(UrlFrag {
url: url.clone(),
frag,
});
}
let url = url.join(input).map_err(|e| CompileError::ParseUrlError {
url: input.to_string(),
src: e.into(),
})?;
Ok(UrlFrag { url, frag })
}
pub(crate) fn format(url: &Url, frag: &str) -> String {
if frag.is_empty() {
url.to_string()
} else {
format!("{}#{}", url, Fragment::encode(frag))
}
}
}
impl Display for UrlFrag {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}#{}", self.url, Fragment::encode(self.frag.as_str()))
}
}
// --
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(crate) struct UrlPtr {
pub(crate) url: Url,
pub(crate) ptr: JsonPointer,
}
impl UrlPtr {
pub(crate) fn lookup<'a>(&self, doc: &'a Value) -> Result<&'a Value, CompileError> {
self.ptr.lookup(doc, &self.url)
}
pub(crate) fn format(&self, tok: &str) -> String {
format!(
"{}#{}/{}",
self.url,
Fragment::encode(self.ptr.as_str()),
Fragment::encode(JsonPointer::escape(tok).as_ref()),
)
}
}
impl Display for UrlPtr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}#{}", self.url, Fragment::encode(self.ptr.as_str()))
}
}
// --
pub(crate) fn is_integer(v: &Value) -> bool {
match v {
Value::Number(n) => {
n.is_i64() || n.is_u64() || n.as_f64().filter(|n| n.fract() == 0.0).is_some()
}
_ => false,
}
}
#[cfg(not(target_arch = "wasm32"))]
fn starts_with_windows_drive(p: &str) -> bool {
p.chars().next().filter(char::is_ascii_uppercase).is_some() && p[1..].starts_with(":\\")
}
/// returns single-quoted string
pub(crate) fn quote<T>(s: &T) -> String
where
T: AsRef<str> + std::fmt::Debug + ?Sized,
{
let s = format!("{s:?}").replace(r#"\""#, "\"").replace('\'', r"\'");
format!("'{}'", &s[1..s.len() - 1])
}
pub(crate) fn join_iter<T>(iterable: T, sep: &str) -> String
where
T: IntoIterator,
T::Item: Display,
{
iterable
.into_iter()
.map(|e| e.to_string())
.collect::<Vec<_>>()
.join(sep)
}
pub(crate) fn escape(token: &str) -> Cow<'_, str> {
JsonPointer::escape(token)
}
pub(crate) fn split(url: &str) -> (&str, &str) {
if let Some(i) = url.find('#') {
(&url[..i], &url[i + 1..])
} else {
(url, "")
}
}
/// serde_json treats 0 and 0.0 not equal. so we cannot simply use v1==v2
pub(crate) fn equals(v1: &Value, v2: &Value) -> bool {
match (v1, v2) {
(Value::Null, Value::Null) => true,
(Value::Bool(b1), Value::Bool(b2)) => b1 == b2,
(Value::Number(n1), Value::Number(n2)) => {
if let (Some(n1), Some(n2)) = (n1.as_u64(), n2.as_u64()) {
return n1 == n2;
}
if let (Some(n1), Some(n2)) = (n1.as_i64(), n2.as_i64()) {
return n1 == n2;
}
if let (Some(n1), Some(n2)) = (n1.as_f64(), n2.as_f64()) {
return n1 == n2;
}
false
}
(Value::String(s1), Value::String(s2)) => s1 == s2,
(Value::Array(arr1), Value::Array(arr2)) => {
if arr1.len() != arr2.len() {
return false;
}
arr1.iter().zip(arr2).all(|(e1, e2)| equals(e1, e2))
}
(Value::Object(obj1), Value::Object(obj2)) => {
if obj1.len() != obj2.len() {
return false;
}
for (k1, v1) in obj1 {
if let Some(v2) = obj2.get(k1) {
if !equals(v1, v2) {
return false;
}
} else {
return false;
}
}
true
}
_ => false,
}
}
pub(crate) fn duplicates(arr: &Vec<Value>) -> Option<(usize, usize)> {
match arr.as_slice() {
[e0, e1] => {
if equals(e0, e1) {
return Some((0, 1));
}
}
[e0, e1, e2] => {
if equals(e0, e1) {
return Some((0, 1));
} else if equals(e0, e2) {
return Some((0, 2));
} else if equals(e1, e2) {
return Some((1, 2));
}
}
_ => {
let len = arr.len();
if len <= 20 {
for i in 0..len - 1 {
for j in i + 1..len {
if equals(&arr[i], &arr[j]) {
return Some((i, j));
}
}
}
} else {
let mut seen = AHashMap::with_capacity(len);
for (i, item) in arr.iter().enumerate() {
if let Some(j) = seen.insert(HashedValue(item), i) {
return Some((j, i));
}
}
}
}
}
None
}
// HashedValue --
@@ -433,113 +433,113 @@ pub(crate) fn duplicates(arr: &Vec<Value>) -> Option<(usize, usize)> {
pub(crate) struct HashedValue<'a>(pub(crate) &'a Value);
impl PartialEq for HashedValue<'_> {
fn eq(&self, other: &Self) -> bool {
equals(self.0, other.0)
}
}
impl Eq for HashedValue<'_> {}
impl Hash for HashedValue<'_> {
fn hash<H: Hasher>(&self, state: &mut H) {
match self.0 {
Value::Null => state.write_u32(3_221_225_473), // chosen randomly
Value::Bool(ref b) => b.hash(state),
Value::Number(ref num) => {
if let Some(num) = num.as_f64() {
num.to_bits().hash(state);
} else if let Some(num) = num.as_u64() {
num.hash(state);
} else if let Some(num) = num.as_i64() {
num.hash(state);
}
}
Value::String(ref str) => str.hash(state),
Value::Array(ref arr) => {
for item in arr {
HashedValue(item).hash(state);
}
}
Value::Object(ref obj) => {
let mut hash = 0;
for (pname, pvalue) in obj {
// We have no way of building a new hasher of type `H`, so we
// hardcode using the default hasher of a hash map.
let mut hasher = AHasher::default();
pname.hash(&mut hasher);
HashedValue(pvalue).hash(&mut hasher);
hash ^= hasher.finish();
}
state.write_u64(hash);
}
}
}
}
#[cfg(test)]
mod tests {
use ahash::AHashMap;
use serde_json::json;
use super::*;
#[test]
fn test_quote() {
assert_eq!(quote(r#"abc"def'ghi"#), r#"'abc"def\'ghi'"#);
}
#[test]
fn test_fragment_split() {
let tests = [
("#", Fragment::JsonPointer("".into())),
("#/a/b", Fragment::JsonPointer("/a/b".into())),
("#abcd", Fragment::Anchor("abcd".into())),
("#%61%62%63%64", Fragment::Anchor("abcd".into())),
(
"#%2F%61%62%63%64%2fef",
Fragment::JsonPointer("/abcd/ef".into()),
), // '/' is encoded
("#abcd+ef", Fragment::Anchor("abcd+ef".into())), // '+' should not translate to space
];
for test in tests {
let (_, got) = Fragment::split(test.0).unwrap();
assert_eq!(got, test.1, "Fragment::split({:?})", test.0);
}
}
#[test]
fn test_unescape() {
let tests = [
("bar~0", Some("bar~")),
("bar~1", Some("bar/")),
("bar~01", Some("bar~1")),
("bar~", None),
("bar~~", None),
];
for (tok, want) in tests {
let res = JsonPointer::unescape(tok).ok();
let got = res.as_ref().map(|c| c.as_ref());
assert_eq!(got, want, "unescape({:?})", tok)
}
}
#[test]
fn test_equals() {
let tests = [["1.0", "1"], ["-1.0", "-1"]];
for [a, b] in tests {
let a = serde_json::from_str(a).unwrap();
let b = serde_json::from_str(b).unwrap();
assert!(equals(&a, &b));
}
}
#[test]
fn test_hashed_value() {
let mut seen = AHashMap::with_capacity(10);
let (v1, v2) = (json!(2), json!(2.0));
assert!(equals(&v1, &v2));
assert!(seen.insert(HashedValue(&v1), 1).is_none());
assert!(seen.insert(HashedValue(&v2), 1).is_some());
}
}
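Two behaviors documented above are easy to miss: JSON Pointer tokens escape `~` as `~0` and `/` as `~1` (in that order), and `equals` exists because serde_json's own `PartialEq` does not treat `1` and `1.0` as equal. A standalone sketch (not this crate's code) demonstrating both:

    fn main() {
        // RFC 6901 escaping, mirroring escape() above: '~' first, then '/'
        let token = "a/b~c";
        let escaped = token.replace('~', "~0").replace('/', "~1");
        assert_eq!(escaped, "a~1b~0c");

        // Plain Value equality distinguishes integer 1 from float 1.0,
        // which is why the custom equals() above compares numerically.
        let a: serde_json::Value = serde_json::from_str("1").unwrap();
        let b: serde_json::Value = serde_json::from_str("1.0").unwrap();
        assert_ne!(a, b);
    }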

File diff suppressed because it is too large


@@ -5,83 +5,83 @@ use serde_json::json;
#[test]
fn test_metaschema_resource() -> Result<(), Box<dyn Error>> {
let main_schema = json!({
"$schema": "http://tmp.com/meta.json",
"type": "number"
});
let meta_schema = json!({
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$vocabulary": {
"https://json-schema.org/draft/2020-12/vocab/applicator": true,
"https://json-schema.org/draft/2020-12/vocab/core": true
},
"allOf": [
{ "$ref": "https://json-schema.org/draft/2020-12/meta/applicator" },
{ "$ref": "https://json-schema.org/draft/2020-12/meta/core" }
]
});
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.add_resource("schema.json", main_schema)?;
compiler.add_resource("http://tmp.com/meta.json", meta_schema)?;
compiler.compile("schema.json", &mut schemas)?;
Ok(())
}
#[test]
fn test_compile_anchor() -> Result<(), Box<dyn Error>> {
let schema = json!({
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$defs": {
"x": {
"$anchor": "a1",
"type": "number"
}
}
});
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.add_resource("schema.json", schema)?;
let sch_index1 = compiler.compile("schema.json#a1", &mut schemas)?;
let sch_index2 = compiler.compile("schema.json#/$defs/x", &mut schemas)?;
assert_eq!(sch_index1, sch_index2);
Ok(())
}
#[test]
fn test_compile_nonstd() -> Result<(), Box<dyn Error>> {
let schema = json!({
"components": {
"schemas": {
"foo" : {
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$defs": {
"x": {
"$anchor": "a",
"type": "number"
},
"y": {
"$id": "http://temp.com/y",
"type": "string"
}
},
"oneOf": [
{ "$ref": "#a" },
{ "$ref": "http://temp.com/y" }
]
}
}
}
});
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.add_resource("schema.json", schema)?;
compiler.compile("schema.json#/components/schemas/foo", &mut schemas)?;
Ok(())
}

View File

@ -5,37 +5,37 @@ use serde_json::{Map, Value};
#[test]
fn test_debug() -> Result<(), Box<dyn Error>> {
    let test: Value = serde_json::from_reader(File::open("tests/debug.json")?)?;

    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    compiler.enable_format_assertions();
    compiler.enable_content_assertions();
    let remotes = Remotes(test["remotes"].as_object().unwrap().clone());
    compiler.use_loader(Box::new(remotes));
    let url = "http://debug.com/schema.json";
    compiler.add_resource(url, test["schema"].clone())?;
    let sch = compiler.compile(url, &mut schemas)?;
    let result = schemas.validate(&test["data"], sch);
    if let Err(e) = &result {
        for line in format!("{e}").lines() {
            println!("    {line}");
        }
        for line in format!("{e:#}").lines() {
            println!("    {line}");
        }
        println!("{:#}", e.detailed_output());
    }
    assert_eq!(result.is_ok(), test["valid"].as_bool().unwrap());

    Ok(())
}
struct Remotes(Map<String, Value>);
impl UrlLoader for Remotes {
    fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
        if let Some(v) = self.0.get(url) {
            return Ok(v.clone());
        }
        Err("remote not found")?
}
}

View File

@ -7,16 +7,16 @@ use url::Url;
#[test]
fn example_from_files() -> Result<(), Box<dyn Error>> {
let schema_file = "tests/examples/schema.json";
let instance: Value = serde_json::from_reader(File::open("tests/examples/instance.json")?)?;
let schema_file = "tests/examples/schema.json";
let instance: Value = serde_json::from_reader(File::open("tests/examples/instance.json")?)?;
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
let sch_index = compiler.compile(schema_file, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
let sch_index = compiler.compile(schema_file, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
Ok(())
Ok(())
}
/**
@ -31,200 +31,200 @@ to local file.
*/
#[test]
fn example_from_strings() -> Result<(), Box<dyn Error>> {
    let cat_schema: Value = json!({
        "type": "object",
        "properties": {
            "speak": { "const": "meow" }
        },
        "required": ["speak"]
    });
    let pet_schema: Value = json!({
        "oneOf": [
            { "$ref": "dog.json" },
            { "$ref": "cat.json" }
        ]
    });
    let instance: Value = json!({"speak": "bow"});

    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    compiler.add_resource("tests/examples/pet.json", pet_schema)?;
    compiler.add_resource("tests/examples/cat.json", cat_schema)?;
    let sch_index = compiler.compile("tests/examples/pet.json", &mut schemas)?;
    let result = schemas.validate(&instance, sch_index);
    assert!(result.is_ok());

    Ok(())
}
#[test]
#[ignore]
fn example_from_https() -> Result<(), Box<dyn Error>> {
let schema_url = "https://json-schema.org/learn/examples/geographical-location.schema.json";
let instance: Value = json!({"latitude": 48.858093, "longitude": 2.294694});
let schema_url = "https://json-schema.org/learn/examples/geographical-location.schema.json";
let instance: Value = json!({"latitude": 48.858093, "longitude": 2.294694});
struct HttpUrlLoader;
impl UrlLoader for HttpUrlLoader {
fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
let reader = ureq::get(url).call()?.into_reader();
Ok(serde_json::from_reader(reader)?)
}
struct HttpUrlLoader;
impl UrlLoader for HttpUrlLoader {
fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
let reader = ureq::get(url).call()?.into_reader();
Ok(serde_json::from_reader(reader)?)
}
}
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
let mut loader = SchemeUrlLoader::new();
loader.register("file", Box::new(FileLoader));
loader.register("http", Box::new(HttpUrlLoader));
loader.register("https", Box::new(HttpUrlLoader));
compiler.use_loader(Box::new(loader));
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
let mut loader = SchemeUrlLoader::new();
loader.register("file", Box::new(FileLoader));
loader.register("http", Box::new(HttpUrlLoader));
loader.register("https", Box::new(HttpUrlLoader));
compiler.use_loader(Box::new(loader));
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
Ok(())
Ok(())
}
#[test]
fn example_from_yaml_files() -> Result<(), Box<dyn Error>> {
let schema_file = "tests/examples/schema.yml";
let instance: Value = serde_yaml::from_reader(File::open("tests/examples/instance.yml")?)?;
let schema_file = "tests/examples/schema.yml";
let instance: Value = serde_yaml::from_reader(File::open("tests/examples/instance.yml")?)?;
struct FileUrlLoader;
impl UrlLoader for FileUrlLoader {
fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
let url = Url::parse(url)?;
let path = url.to_file_path().map_err(|_| "invalid file path")?;
let file = File::open(&path)?;
if path
.extension()
.filter(|&ext| ext == "yaml" || ext == "yml")
.is_some()
{
Ok(serde_yaml::from_reader(file)?)
} else {
Ok(serde_json::from_reader(file)?)
}
}
struct FileUrlLoader;
impl UrlLoader for FileUrlLoader {
fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
let url = Url::parse(url)?;
let path = url.to_file_path().map_err(|_| "invalid file path")?;
let file = File::open(&path)?;
if path
.extension()
.filter(|&ext| ext == "yaml" || ext == "yml")
.is_some()
{
Ok(serde_yaml::from_reader(file)?)
} else {
Ok(serde_json::from_reader(file)?)
}
}
}
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
let mut loader = SchemeUrlLoader::new();
loader.register("file", Box::new(FileUrlLoader));
compiler.use_loader(Box::new(loader));
let sch_index = compiler.compile(schema_file, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
let mut loader = SchemeUrlLoader::new();
loader.register("file", Box::new(FileUrlLoader));
compiler.use_loader(Box::new(loader));
let sch_index = compiler.compile(schema_file, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
Ok(())
Ok(())
}
#[test]
fn example_custom_format() -> Result<(), Box<dyn Error>> {
let schema_url = "http://tmp/schema.json";
let schema: Value = json!({"type": "string", "format": "palindrome"});
let instance: Value = json!("step on no pets");
let schema_url = "http://tmp/schema.json";
let schema: Value = json!({"type": "string", "format": "palindrome"});
let instance: Value = json!("step on no pets");
fn is_palindrome(v: &Value) -> Result<(), Box<dyn Error>> {
let Value::String(s) = v else {
return Ok(()); // applicable only on strings
};
let mut chars = s.chars();
while let (Some(c1), Some(c2)) = (chars.next(), chars.next_back()) {
if c1 != c2 {
Err("char mismatch")?;
}
}
Ok(())
fn is_palindrome(v: &Value) -> Result<(), Box<dyn Error>> {
let Value::String(s) = v else {
return Ok(()); // applicable only on strings
};
let mut chars = s.chars();
while let (Some(c1), Some(c2)) = (chars.next(), chars.next_back()) {
if c1 != c2 {
Err("char mismatch")?;
}
}
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.enable_format_assertions(); // in draft2020-12 format assertions are not enabled by default
compiler.register_format(Format {
name: "palindrome",
func: is_palindrome,
});
compiler.add_resource(schema_url, schema)?;
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
Ok(())
}
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.enable_format_assertions(); // in draft2020-12 format assertions are not enabled by default
compiler.register_format(Format {
name: "palindrome",
func: is_palindrome,
});
compiler.add_resource(schema_url, schema)?;
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
Ok(())
}
#[test]
fn example_custom_content_encoding() -> Result<(), Box<dyn Error>> {
let schema_url = "http://tmp/schema.json";
let schema: Value = json!({"type": "string", "contentEncoding": "hex"});
let instance: Value = json!("aBcdxyz");
let schema_url = "http://tmp/schema.json";
let schema: Value = json!({"type": "string", "contentEncoding": "hex"});
let instance: Value = json!("aBcdxyz");
fn decode(b: u8) -> Result<u8, Box<dyn Error>> {
match b {
b'0'..=b'9' => Ok(b - b'0'),
b'a'..=b'f' => Ok(b - b'a' + 10),
b'A'..=b'F' => Ok(b - b'A' + 10),
_ => Err("decode_hex: non-hex char")?,
}
fn decode(b: u8) -> Result<u8, Box<dyn Error>> {
match b {
b'0'..=b'9' => Ok(b - b'0'),
b'a'..=b'f' => Ok(b - b'a' + 10),
b'A'..=b'F' => Ok(b - b'A' + 10),
_ => Err("decode_hex: non-hex char")?,
}
fn decode_hex(s: &str) -> Result<Vec<u8>, Box<dyn Error>> {
if s.len() % 2 != 0 {
Err("decode_hex: odd length")?;
}
let mut bytes = s.bytes();
let mut out = Vec::with_capacity(s.len() / 2);
for _ in 0..out.len() {
if let (Some(b1), Some(b2)) = (bytes.next(), bytes.next()) {
out.push(decode(b1)? << 4 | decode(b2)?);
} else {
Err("decode_hex: non-ascii char")?;
}
}
Ok(out)
}
fn decode_hex(s: &str) -> Result<Vec<u8>, Box<dyn Error>> {
if s.len() % 2 != 0 {
Err("decode_hex: odd length")?;
}
let mut bytes = s.bytes();
let mut out = Vec::with_capacity(s.len() / 2);
for _ in 0..out.len() {
if let (Some(b1), Some(b2)) = (bytes.next(), bytes.next()) {
out.push(decode(b1)? << 4 | decode(b2)?);
} else {
Err("decode_hex: non-ascii char")?;
}
}
Ok(out)
}
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.enable_content_assertions(); // content assertions are not enabled by default
compiler.register_content_encoding(Decoder {
name: "hex",
func: decode_hex,
});
compiler.add_resource(schema_url, schema)?;
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_err());
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.enable_content_assertions(); // content assertions are not enabled by default
compiler.register_content_encoding(Decoder {
name: "hex",
func: decode_hex,
});
compiler.add_resource(schema_url, schema)?;
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_err());
Ok(())
Ok(())
}
#[test]
fn example_custom_content_media_type() -> Result<(), Box<dyn Error>> {
let schema_url = "http://tmp/schema.json";
let schema: Value = json!({"type": "string", "contentMediaType": "application/yaml"});
let instance: Value = json!("name:foobar");
let schema_url = "http://tmp/schema.json";
let schema: Value = json!({"type": "string", "contentMediaType": "application/yaml"});
let instance: Value = json!("name:foobar");
fn check_yaml(bytes: &[u8], deserialize: bool) -> Result<Option<Value>, Box<dyn Error>> {
if deserialize {
return Ok(Some(serde_yaml::from_slice(bytes)?));
}
serde_yaml::from_slice::<IgnoredAny>(bytes)?;
Ok(None)
fn check_yaml(bytes: &[u8], deserialize: bool) -> Result<Option<Value>, Box<dyn Error>> {
if deserialize {
return Ok(Some(serde_yaml::from_slice(bytes)?));
}
serde_yaml::from_slice::<IgnoredAny>(bytes)?;
Ok(None)
}
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.enable_content_assertions(); // content assertions are not enabled by default
compiler.register_content_media_type(MediaType {
name: "application/yaml",
json_compatible: true,
func: check_yaml,
});
compiler.add_resource(schema_url, schema)?;
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
let mut schemas = Schemas::new();
let mut compiler = Compiler::new();
compiler.enable_content_assertions(); // content assertions are not enabled by default
compiler.register_content_media_type(MediaType {
name: "application/yaml",
json_compatible: true,
func: check_yaml,
});
compiler.add_resource(schema_url, schema)?;
let sch_index = compiler.compile(schema_url, &mut schemas)?;
let result = schemas.validate(&instance, sch_index);
assert!(result.is_ok());
Ok(())
Ok(())
}

View File

@ -3,42 +3,42 @@ use std::fs;
use boon::{CompileError, Compiler, Schemas};
fn test(path: &str) -> Result<(), CompileError> {
    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    compiler.compile(path, &mut schemas)?;
    Ok(())
}
#[test]
fn test_absolute() -> Result<(), CompileError> {
    let path = fs::canonicalize("tests/examples/schema.json").unwrap();
    test(path.to_string_lossy().as_ref())
}
#[test]
fn test_relative_slash() -> Result<(), CompileError> {
test("tests/examples/schema.json")
test("tests/examples/schema.json")
}
#[test]
#[cfg(windows)]
fn test_relative_backslash() -> Result<(), CompileError> {
test("tests\\examples\\schema.json")
test("tests\\examples\\schema.json")
}
#[test]
fn test_absolutei_space() -> Result<(), CompileError> {
    let path = fs::canonicalize("tests/examples/sample schema.json").unwrap();
    test(path.to_string_lossy().as_ref())
}
#[test]
fn test_relative_slash_space() -> Result<(), CompileError> {
test("tests/examples/sample schema.json")
test("tests/examples/sample schema.json")
}
#[test]
#[cfg(windows)]
fn test_relative_backslash_space() -> Result<(), CompileError> {
test("tests\\examples\\sample schema.json")
test("tests\\examples\\sample schema.json")
}

View File

@ -6,62 +6,62 @@ use serde_json::Value;
#[derive(Debug, Deserialize)]
struct Test {
    description: String,
    remotes: Option<HashMap<String, Value>>,
    schema: Value,
    errors: Option<Vec<String>>,
}
#[test]
fn test_invalid_schemas() -> Result<(), Box<dyn Error>> {
let file = File::open("tests/invalid-schemas.json")?;
let tests: Vec<Test> = serde_json::from_reader(file)?;
for test in tests {
println!("{}", test.description);
match compile(&test) {
Ok(_) => {
if test.errors.is_some() {
Err("want compilation to fail")?
}
}
Err(e) => {
println!(" {e}");
let error = format!("{e:?}");
let Some(errors) = &test.errors else {
Err("want compilation to succeed")?
};
for want in errors {
if !error.contains(want) {
println!(" got {error}");
println!(" want {want}");
panic!("error mismatch");
}
}
}
let file = File::open("tests/invalid-schemas.json")?;
let tests: Vec<Test> = serde_json::from_reader(file)?;
for test in tests {
println!("{}", test.description);
match compile(&test) {
Ok(_) => {
if test.errors.is_some() {
Err("want compilation to fail")?
}
}
Err(e) => {
println!(" {e}");
let error = format!("{e:?}");
let Some(errors) = &test.errors else {
Err("want compilation to succeed")?
};
for want in errors {
if !error.contains(want) {
println!(" got {error}");
println!(" want {want}");
panic!("error mismatch");
}
}
}
}
Ok(())
}
Ok(())
}
fn compile(test: &Test) -> Result<(), CompileError> {
    let mut schemas = Schemas::new();
    let mut compiler = Compiler::new();
    let url = "http://fake.com/schema.json";
    if let Some(remotes) = &test.remotes {
        compiler.use_loader(Box::new(Remotes(remotes.clone())));
    }
    compiler.add_resource(url, test.schema.clone())?;
    compiler.compile(url, &mut schemas)?;
    Ok(())
}
struct Remotes(HashMap<String, Value>);
impl UrlLoader for Remotes {
    fn load(&self, url: &str) -> Result<Value, Box<dyn Error>> {
        if let Some(v) = self.0.get(url) {
            return Ok(v.clone());
        }
        Err("remote not found")?
}
}

View File

@ -6,117 +6,117 @@ use serde_json::Value;
#[test]
fn test_suites() -> Result<(), Box<dyn Error>> {
    if let Ok(suite) = env::var("TEST_SUITE") {
        test_suite(&suite)?;
    } else {
        test_suite("tests/JSON-Schema-Test-Suite")?;
        test_suite("tests/Extra-Suite")?;
    }
    Ok(())
}
fn test_suite(suite: &str) -> Result<(), Box<dyn Error>> {
test_folder(suite, "draft2019-09", Draft::V2019_09)?;
test_folder(suite, "draft2020-12", Draft::V2020_12)?;
Ok(())
test_folder(suite, "draft2019-09", Draft::V2019_09)?;
test_folder(suite, "draft2020-12", Draft::V2020_12)?;
Ok(())
}
fn test_folder(suite: &str, folder: &str, draft: Draft) -> Result<(), Box<dyn Error>> {
    let output_schema_url = format!(
        "https://json-schema.org/draft/{}/output/schema",
        folder.strip_prefix("draft").unwrap()
    );
    let prefix = Path::new(suite).join("output-tests");
    let folder = prefix.join(folder);
    let content = folder.join("content");
    if !content.is_dir() {
        return Ok(());
    }
    let output_schema: Value =
        serde_json::from_reader(File::open(folder.join("output-schema.json"))?)?;
    for entry in content.read_dir()? {
        let entry = entry?;
        if !entry.file_type()?.is_file() {
            continue;
        };
        let entry_path = entry.path();
        println!("{}", entry_path.strip_prefix(&prefix)?.to_str().unwrap());
        let groups: Vec<Group> = serde_json::from_reader(File::open(entry_path)?)?;
        for group in groups {
            println!("    {}", group.description);
            let mut schemas = Schemas::new();
            let mut compiler = Compiler::new();
            compiler.set_default_draft(draft);
            let schema_url = "http://output-tests/schema";
            compiler.add_resource(schema_url, group.schema)?;
            let sch = compiler.compile(schema_url, &mut schemas)?;
            for test in group.tests {
                println!("        {}", test.description);
                match schemas.validate(&test.data, sch) {
                    Ok(_) => println!("            validation success"),
                    Err(e) => {
                        if let Some(sch) = test.output.basic {
                            let mut schemas = Schemas::new();
                            let mut compiler = Compiler::new();
                            compiler.set_default_draft(draft);
                            compiler.add_resource(&output_schema_url, output_schema.clone())?;
                            let schema_url = "http://output-tests/schema";
                            compiler.add_resource(schema_url, sch)?;
                            let sch = compiler.compile(schema_url, &mut schemas)?;
                            let basic: Value = serde_json::from_str(&e.basic_output().to_string())?;
                            let result = schemas.validate(&basic, sch);
                            if let Err(e) = result {
                                println!("{basic:#}\n");
                                for line in format!("{e}").lines() {
                                    println!("        {line}");
                                }
                                panic!("basic output did not match");
                            }
                        }
                        if let Some(sch) = test.output.detailed {
                            let mut schemas = Schemas::new();
                            let mut compiler = Compiler::new();
                            compiler.set_default_draft(draft);
                            compiler.add_resource(&output_schema_url, output_schema.clone())?;
                            let schema_url = "http://output-tests/schema";
                            compiler.add_resource(schema_url, sch)?;
                            let sch = compiler.compile(schema_url, &mut schemas)?;
                            let detailed: Value =
                                serde_json::from_str(&e.detailed_output().to_string())?;
                            let result = schemas.validate(&detailed, sch);
                            if let Err(e) = result {
                                println!("{detailed:#}\n");
                                for line in format!("{e}").lines() {
                                    println!("        {line}");
                                }
                                panic!("detailed output did not match");
                            }
                        }
                    }
                }
            }
        }
    }
    Ok(())
}
#[derive(Debug, Serialize, Deserialize)]
struct Group {
    description: String,
    schema: Value,
    tests: Vec<Test>,
}
#[derive(Debug, Serialize, Deserialize)]
struct Test {
    description: String,
    data: Value,
    output: Output,
}
#[derive(Debug, Serialize, Deserialize)]
struct Output {
    basic: Option<Value>,
    detailed: Option<Value>,
}

View File

@ -5,116 +5,116 @@ use serde::{Deserialize, Serialize};
use serde_json::Value;
static SKIP: [&str; 2] = [
"zeroTerminatedFloats.json", // only draft4: this behavior is changed in later drafts
"float-overflow.json",
"zeroTerminatedFloats.json", // only draft4: this behavior is changed in later drafts
"float-overflow.json",
];
#[derive(Debug, Serialize, Deserialize)]
struct Group {
    description: String,
    schema: Value,
    tests: Vec<Test>,
}
#[derive(Debug, Serialize, Deserialize)]
struct Test {
    description: String,
    data: Value,
    valid: bool,
}
#[test]
fn test_suites() -> Result<(), Box<dyn Error>> {
    if let Ok(suite) = env::var("TEST_SUITE") {
        test_suite(&suite)?;
    } else {
        test_suite("tests/JSON-Schema-Test-Suite")?;
        test_suite("tests/Extra-Test-Suite")?;
    }
    Ok(())
}
fn test_suite(suite: &str) -> Result<(), Box<dyn Error>> {
    if !Path::new(suite).exists() {
        Err(format!("test suite {suite} does not exist"))?;
    }
    test_dir(suite, "draft4", Draft::V4)?;
    test_dir(suite, "draft6", Draft::V6)?;
    test_dir(suite, "draft7", Draft::V7)?;
    test_dir(suite, "draft2019-09", Draft::V2019_09)?;
    test_dir(suite, "draft2020-12", Draft::V2020_12)?;
    Ok(())
}
fn test_dir(suite: &str, path: &str, draft: Draft) -> Result<(), Box<dyn Error>> {
let prefix = Path::new(suite).join("tests");
let dir = prefix.join(path);
if !dir.is_dir() {
return Ok(());
let prefix = Path::new(suite).join("tests");
let dir = prefix.join(path);
if !dir.is_dir() {
return Ok(());
}
for entry in dir.read_dir()? {
let entry = entry?;
let file_type = entry.file_type()?;
let tmp_entry_path = entry.path();
let entry_path = tmp_entry_path.strip_prefix(&prefix)?.to_str().unwrap();
if file_type.is_file() {
if !SKIP.iter().any(|n| OsStr::new(n) == entry.file_name()) {
test_file(suite, entry_path, draft)?;
}
} else if file_type.is_dir() {
test_dir(suite, entry_path, draft)?;
}
for entry in dir.read_dir()? {
let entry = entry?;
let file_type = entry.file_type()?;
let tmp_entry_path = entry.path();
let entry_path = tmp_entry_path.strip_prefix(&prefix)?.to_str().unwrap();
if file_type.is_file() {
if !SKIP.iter().any(|n| OsStr::new(n) == entry.file_name()) {
test_file(suite, entry_path, draft)?;
}
} else if file_type.is_dir() {
test_dir(suite, entry_path, draft)?;
}
}
Ok(())
}
Ok(())
}
fn test_file(suite: &str, path: &str, draft: Draft) -> Result<(), Box<dyn Error>> {
println!("FILE: {path}");
let path = Path::new(suite).join("tests").join(path);
let optional = path.components().any(|comp| comp.as_os_str() == "optional");
let file = File::open(path)?;
println!("FILE: {path}");
let path = Path::new(suite).join("tests").join(path);
let optional = path.components().any(|comp| comp.as_os_str() == "optional");
let file = File::open(path)?;
let url = "http://testsuite.com/schema.json";
let groups: Vec<Group> = serde_json::from_reader(file)?;
for group in groups {
println!("{}", group.description);
let mut schemas = Schemas::default();
let mut compiler = Compiler::default();
compiler.set_default_draft(draft);
if optional {
compiler.enable_format_assertions();
compiler.enable_content_assertions();
}
compiler.use_loader(Box::new(RemotesLoader(suite.to_owned())));
compiler.add_resource(url, group.schema)?;
let sch_index = compiler.compile(url, &mut schemas)?;
for test in group.tests {
println!(" {}", test.description);
let result = schemas.validate(&test.data, sch_index);
if let Err(e) = &result {
for line in format!("{e}").lines() {
println!(" {line}");
}
for line in format!("{e:#}").lines() {
println!(" {line}");
}
}
assert_eq!(result.is_ok(), test.valid);
}
let url = "http://testsuite.com/schema.json";
let groups: Vec<Group> = serde_json::from_reader(file)?;
for group in groups {
println!("{}", group.description);
let mut schemas = Schemas::default();
let mut compiler = Compiler::default();
compiler.set_default_draft(draft);
if optional {
compiler.enable_format_assertions();
compiler.enable_content_assertions();
}
Ok(())
compiler.use_loader(Box::new(RemotesLoader(suite.to_owned())));
compiler.add_resource(url, group.schema)?;
let sch_index = compiler.compile(url, &mut schemas)?;
for test in group.tests {
println!(" {}", test.description);
let result = schemas.validate(&test.data, sch_index);
if let Err(e) = &result {
for line in format!("{e}").lines() {
println!(" {line}");
}
for line in format!("{e:#}").lines() {
println!(" {line}");
}
}
assert_eq!(result.is_ok(), test.valid);
}
}
Ok(())
}
struct RemotesLoader(String);
impl UrlLoader for RemotesLoader {
    fn load(&self, url: &str) -> Result<Value, Box<dyn std::error::Error>> {
        // remotes folder --
        if let Some(path) = url.strip_prefix("http://localhost:1234/") {
            let path = Path::new(&self.0).join("remotes").join(path);
            let file = File::open(path)?;
            let json: Value = serde_json::from_reader(file)?;
            return Ok(json);
        }
        Err("no internet")?
}
}