diff --git a/autorust/codegen/src/autorust_toml.rs b/autorust/codegen/src/autorust_toml.rs index 6db82413..1e2333e7 100644 --- a/autorust/codegen/src/autorust_toml.rs +++ b/autorust/codegen/src/autorust_toml.rs @@ -116,7 +116,8 @@ impl<'a> PackageConfig { let deny: HashSet<&str> = self.tags.deny.iter().map(String::as_str).collect(); tags.retain(|tag| !deny.contains(tag.name())); } - let mut deny_contains: Vec<&str> = self.tags.deny_contains.iter().map(String::as_str).collect(); + let mut deny_contains: Vec<&str> = + self.tags.deny_contains.iter().map(String::as_str).collect(); if self.tags.deny_contains_preview.unwrap_or_default() { deny_contains.push("preview"); } @@ -325,7 +326,10 @@ mod tests { default = "package-resources-2021-04" "#, )?; - assert_eq!(Some("package-resources-2021-04".to_string()), config.tags.default); + assert_eq!( + Some("package-resources-2021-04".to_string()), + config.tags.default + ); Ok(()) } diff --git a/autorust/codegen/src/cargo_toml.rs b/autorust/codegen/src/cargo_toml.rs index deab7ca6..b3cf2a6d 100644 --- a/autorust/codegen/src/cargo_toml.rs +++ b/autorust/codegen/src/cargo_toml.rs @@ -2,7 +2,13 @@ use crate::Result; use crate::{config_parser::Tag, jinja::CargoToml}; use camino::Utf8Path; -pub fn create(package_name: &str, tags: &[&Tag], default_tag: &Tag, has_xml: bool, path: &Utf8Path) -> Result<()> { +pub fn create( + package_name: &str, + tags: &[&Tag], + default_tag: &Tag, + has_xml: bool, + path: &Utf8Path, +) -> Result<()> { let default_tag = &default_tag.rust_feature_name(); // https://docs.rs/about/metadata @@ -22,7 +28,9 @@ pub fn create(package_name: &str, tags: &[&Tag], default_tag: &Tag, has_xml: boo pub fn get_default_tag<'a>(tags: &[&'a Tag], default_tag: Option<&str>) -> &'a Tag { let default_tag = tags.iter().find(|tag| Some(tag.name()) == default_tag); - let is_preview = default_tag.map(|tag| tag.name().contains("preview")).unwrap_or_default(); + let is_preview = default_tag + .map(|tag| 
tag.name().contains("preview")) + .unwrap_or_default(); let stable_tag = tags.iter().find(|tag| !tag.name().contains("preview")); match (default_tag, is_preview, stable_tag) { (Some(default_tag), false, _) => default_tag, @@ -94,13 +102,20 @@ mod tests { ]; let tags: Vec<_> = tags.into_iter().map(Tag::new).collect(); let tags: Vec<_> = tags.iter().collect(); - assert_eq!("package-2020-04", get_default_tag(&tags, Some("package-2020-04")).name()); + assert_eq!( + "package-2020-04", + get_default_tag(&tags, Some("package-2020-04")).name() + ); Ok(()) } #[test] fn specified_preview() -> Result<()> { - let tags = vec!["package-preview-2022-05", "package-2019-06-preview", "package-2019-04-preview"]; + let tags = vec![ + "package-preview-2022-05", + "package-2019-06-preview", + "package-2019-04-preview", + ]; let tags: Vec<_> = tags.into_iter().map(Tag::new).collect(); let tags: Vec<_> = tags.iter().collect(); assert_eq!( diff --git a/autorust/codegen/src/codegen_operations/function_code.rs b/autorust/codegen/src/codegen_operations/function_code.rs index 401ee397..6b2245ef 100644 --- a/autorust/codegen/src/codegen_operations/function_code.rs +++ b/autorust/codegen/src/codegen_operations/function_code.rs @@ -17,7 +17,11 @@ pub(crate) struct ClientFunctionCode { } impl ClientFunctionCode { - pub fn new(operation: &WebOperationGen, parameters: &FunctionParams, in_operation_group: bool) -> Result { + pub fn new( + operation: &WebOperationGen, + parameters: &FunctionParams, + in_operation_group: bool, + ) -> Result { let fname = operation.function_name()?; let summary = operation.0.summary.clone(); let description = operation.0.description.clone(); @@ -41,7 +45,9 @@ impl ToTokens for ClientFunctionCode { } for param in self.parameters.required_params() { let FunctionParam { - variable_name, type_name, .. + variable_name, + type_name, + .. 
} = param; let mut type_name = type_name.clone(); let is_vec = type_name.is_vec(); @@ -54,7 +60,9 @@ impl ToTokens for ClientFunctionCode { } for param in self.parameters.optional_params() { let FunctionParam { - variable_name, type_name, .. + variable_name, + type_name, + .. } = param; if type_name.is_vec() { params.push(quote! { #variable_name: Vec::new() }); diff --git a/autorust/codegen/src/codegen_operations/operation_module.rs b/autorust/codegen/src/codegen_operations/operation_module.rs index 8c3a45e7..e3510758 100644 --- a/autorust/codegen/src/codegen_operations/operation_module.rs +++ b/autorust/codegen/src/codegen_operations/operation_module.rs @@ -2,8 +2,10 @@ use proc_macro2::{Ident, TokenStream}; use quote::{quote, ToTokens}; use super::{ - request_builder_into_future::RequestBuilderIntoFutureCode, request_builder_send::RequestBuilderSendCode, - request_builder_setter::RequestBuilderSettersCode, request_builder_struct::RequestBuilderStructCode, response_code::ResponseCode, + request_builder_into_future::RequestBuilderIntoFutureCode, + request_builder_send::RequestBuilderSendCode, + request_builder_setter::RequestBuilderSettersCode, + request_builder_struct::RequestBuilderStructCode, response_code::ResponseCode, }; pub struct OperationModuleCode { pub module_name: Ident, diff --git a/autorust/codegen/src/codegen_operations/operations.rs b/autorust/codegen/src/codegen_operations/operations.rs index 1379e77c..a80ba694 100644 --- a/autorust/codegen/src/codegen_operations/operations.rs +++ b/autorust/codegen/src/codegen_operations/operations.rs @@ -34,11 +34,19 @@ impl OperationCode { // get the content-types from the operation, else the spec, else default to json let consumes = operation .pick_consumes() - .unwrap_or_else(|| cg.spec.pick_consumes().unwrap_or(content_type::APPLICATION_JSON)) + .unwrap_or_else(|| { + cg.spec + .pick_consumes() + .unwrap_or(content_type::APPLICATION_JSON) + }) .to_string(); let produces = operation .pick_produces() - 
.unwrap_or_else(|| cg.spec.pick_produces().unwrap_or(content_type::APPLICATION_JSON)) + .unwrap_or_else(|| { + cg.spec + .pick_produces() + .unwrap_or(content_type::APPLICATION_JSON) + }) .to_string(); let lro = operation.0.long_running_operation; @@ -46,12 +54,16 @@ impl OperationCode { let request_builder = SetRequestCode::new(operation, parameters, consumes); let in_operation_group = operation.0.in_group(); - let client_function_code = ClientFunctionCode::new(operation, parameters, in_operation_group)?; - let request_builder_struct_code = RequestBuilderStructCode::new(parameters, in_operation_group, lro, lro_options.clone()); + let client_function_code = + ClientFunctionCode::new(operation, parameters, in_operation_group)?; + let request_builder_struct_code = + RequestBuilderStructCode::new(parameters, in_operation_group, lro, lro_options.clone()); let request_builder_setters_code = RequestBuilderSettersCode::new(parameters); let response_code = ResponseCode::new(cg, operation, produces)?; - let request_builder_send_code = RequestBuilderSendCode::new(new_request_code, request_builder, response_code.clone())?; - let request_builder_intofuture_code = RequestBuilderIntoFutureCode::new(response_code.clone(), lro, lro_options)?; + let request_builder_send_code = + RequestBuilderSendCode::new(new_request_code, request_builder, response_code.clone())?; + let request_builder_intofuture_code = + RequestBuilderIntoFutureCode::new(response_code.clone(), lro, lro_options)?; let module_code = OperationModuleCode { module_name: operation.function_name()?, diff --git a/autorust/codegen/src/codegen_operations/request_builder_send.rs b/autorust/codegen/src/codegen_operations/request_builder_send.rs index baff0b69..41848aa5 100644 --- a/autorust/codegen/src/codegen_operations/request_builder_send.rs +++ b/autorust/codegen/src/codegen_operations/request_builder_send.rs @@ -6,7 +6,9 @@ use crate::codegen::PARAM_RE; use crate::Result; use crate::{codegen::parse_path_params, 
identifier::SnakeCaseIdent}; -use super::{new_request_code::NewRequestCode, response_code::ResponseCode, set_request_code::SetRequestCode}; +use super::{ + new_request_code::NewRequestCode, response_code::ResponseCode, set_request_code::SetRequestCode, +}; /// The `send` function of the request builder. pub struct RequestBuilderSendCode { new_request_code: NewRequestCode, @@ -16,7 +18,11 @@ pub struct RequestBuilderSendCode { } impl RequestBuilderSendCode { - pub fn new(new_request_code: NewRequestCode, request_builder: SetRequestCode, response_code: ResponseCode) -> Result { + pub fn new( + new_request_code: NewRequestCode, + request_builder: SetRequestCode, + response_code: ResponseCode, + ) -> Result { let params = parse_path_params(&new_request_code.path); let url_args: Result> = params.iter().map(|s| s.to_snake_case_ident()).collect(); let url_args = url_args?; @@ -121,12 +127,17 @@ impl ToTokens for RequestBuilderSendCode { req.insert_header(azure_core::headers::VERSION, #api_version); }); } - let response_type = self.response_code.response_type().expect("pageable response has a body"); + let response_type = self + .response_code + .response_type() + .expect("pageable response has a body"); // some of the pageable requests specify the continuation token // as a parameter. In this case, use the basic request builder, // but insert the continuation parameter - if let Some(continuable_param) = get_continuable_param(next_link_name, request_builder) { + if let Some(continuable_param) = + get_continuable_param(next_link_name, request_builder) + { quote! 
{ pub fn into_stream(self) -> azure_core::Pageable<#response_type, azure_core::error::Error> { let make_request = move |continuation: Option| { diff --git a/autorust/codegen/src/codegen_operations/request_builder_setter.rs b/autorust/codegen/src/codegen_operations/request_builder_setter.rs index d47dbaf1..9053bbe8 100644 --- a/autorust/codegen/src/codegen_operations/request_builder_setter.rs +++ b/autorust/codegen/src/codegen_operations/request_builder_setter.rs @@ -21,7 +21,9 @@ impl ToTokens for RequestBuilderSettersCode { fn to_tokens(&self, tokens: &mut TokenStream) { for param in self.parameters.optional_params() { let FunctionParam { - variable_name, type_name, .. + variable_name, + type_name, + .. } = param; let is_vec = type_name.is_vec(); let mut type_name = type_name.clone(); diff --git a/autorust/codegen/src/codegen_operations/set_request_code.rs b/autorust/codegen/src/codegen_operations/set_request_code.rs index 05b9f92e..81cac03d 100644 --- a/autorust/codegen/src/codegen_operations/set_request_code.rs +++ b/autorust/codegen/src/codegen_operations/set_request_code.rs @@ -3,7 +3,10 @@ use quote::{quote, ToTokens}; use crate::spec::WebVerb; -use super::{function_params::FunctionParams, set_request_param_code::SetRequestParamsCode, web_operation_gen::WebOperationGen}; +use super::{ + function_params::FunctionParams, set_request_param_code::SetRequestParamsCode, + web_operation_gen::WebOperationGen, +}; /// Set all body and parameters for the request. 
pub struct SetRequestCode { pub has_param_api_version: bool, diff --git a/autorust/codegen/src/codegen_operations/set_request_param_code.rs b/autorust/codegen/src/codegen_operations/set_request_param_code.rs index 547fd2f1..0e2be49b 100644 --- a/autorust/codegen/src/codegen_operations/set_request_param_code.rs +++ b/autorust/codegen/src/codegen_operations/set_request_param_code.rs @@ -111,7 +111,9 @@ impl ToTokens for SetRequestParamsCode { // TODO: more work needs to be done to ensure we're using // the right encoder. - let encoder = if !self.params.has_content_type_header() && self.content_type.starts_with("application/xml") { + let encoder = if !self.params.has_content_type_header() + && self.content_type.starts_with("application/xml") + { quote! {azure_core::xml::to_xml} } else { quote! { azure_core::to_json } diff --git a/autorust/codegen/src/codegen_operations/web_operation_gen.rs b/autorust/codegen/src/codegen_operations/web_operation_gen.rs index 956dd0a7..9f7a2544 100644 --- a/autorust/codegen/src/codegen_operations/web_operation_gen.rs +++ b/autorust/codegen/src/codegen_operations/web_operation_gen.rs @@ -143,7 +143,10 @@ pub struct Pageable { /// Creating a function name from the path and verb when an operationId is not specified. /// All azure-rest-api-specs operations should have an operationId. 
fn create_function_name(verb: &WebVerb, path: &str) -> String { - let mut path = path.split('/').filter(|&x| !x.is_empty()).collect::>(); + let mut path = path + .split('/') + .filter(|&x| !x.is_empty()) + .collect::>(); path.insert(0, verb.as_str()); path.join("_") } @@ -165,7 +168,10 @@ mod tests { verb: WebVerb::Get, ..Default::default() }); - assert_eq!(Some("private_clouds".to_owned()), operation.rust_module_name()); + assert_eq!( + Some("private_clouds".to_owned()), + operation.rust_module_name() + ); assert_eq!("create_or_update", operation.rust_function_name()); } diff --git a/autorust/codegen/src/config_parser.rs b/autorust/codegen/src/config_parser.rs index c4b91198..9d47b95b 100644 --- a/autorust/codegen/src/config_parser.rs +++ b/autorust/codegen/src/config_parser.rs @@ -77,15 +77,21 @@ impl Tag { /// Receives the AutoRest configuration file and parses it to its various configurations (by tags/API versions), /// according to its extension. /// e.g. for "path/to/config.md", it will get parsed as CommonMark [Literate Tag](http://azure.github.io/autorest/user/literate-file-formats/configuration.html). 
-pub fn parse_configurations_from_autorest_config_file(config_file: &Utf8Path) -> Result { - let extension = config_file - .extension() - .ok_or_else(|| Error::with_message(ErrorKind::Parse, || format!("expected md extension {config_file}")))?; +pub fn parse_configurations_from_autorest_config_file( + config_file: &Utf8Path, +) -> Result { + let extension = config_file.extension().ok_or_else(|| { + Error::with_message(ErrorKind::Parse, || { + format!("expected md extension {config_file}") + }) + })?; match extension.to_lowercase().as_str() { "md" => { use literate_config::*; - let cmark_content = - std::fs::read_to_string(config_file).with_context(ErrorKind::Io, || format!("reading the md file {config_file}"))?; + let cmark_content = std::fs::read_to_string(config_file) + .with_context(ErrorKind::Io, || { + format!("reading the md file {config_file}") + })?; Ok(parse_configuration(&cmark_content)?) } _ => Err(Error::with_message(ErrorKind::Io, || { @@ -118,12 +124,13 @@ mod literate_config { let root = parse_document(&arena, cmark_content, &ComrakOptions::default()); // Get the AST node corresponding with "## Configuration". - let configuration_heading_node = get_configuration_section_heading_node(root).ok_or_else(|| { - Error::message( - ErrorKind::Parse, - "no `## Configuration` heading in the AutoRest literate configuration file", - ) - })?; + let configuration_heading_node = + get_configuration_section_heading_node(root).ok_or_else(|| { + Error::message( + ErrorKind::Parse, + "no `## Configuration` heading in the AutoRest literate configuration file", + ) + })?; let mut tags = Vec::new(); let mut basic_info = BasicInformation::default(); @@ -133,15 +140,27 @@ mod literate_config { let mut current_node = configuration_heading_node.next_sibling(); while let Some(node) = current_node { if is_basic_information(node) { - let yaml = extract_yaml(node)? 
- .ok_or_else(|| Error::message(ErrorKind::Parse, "expected configuration tag to contain a YAML code block"))?; - basic_info = serde_yaml::from_str(&yaml).context(ErrorKind::DataConversion, "reading basic information block yaml")?; + let yaml = extract_yaml(node)?.ok_or_else(|| { + Error::message( + ErrorKind::Parse, + "expected configuration tag to contain a YAML code block", + ) + })?; + basic_info = serde_yaml::from_str(&yaml).context( + ErrorKind::DataConversion, + "reading basic information block yaml", + )?; } else if let Some(tag_name) = get_tag_name(node) { // Extract the configuration from the first node inside the tag heading ("Tag: ..."), // by looking at the first YAML code block. - let yaml = extract_yaml(node)? - .ok_or_else(|| Error::message(ErrorKind::Parse, "Expected configuration tag to contain a YAML code block."))?; - let mut tag: Tag = serde_yaml::from_str(&yaml).context(ErrorKind::Parse, "reading configuration block yaml")?; + let yaml = extract_yaml(node)?.ok_or_else(|| { + Error::message( + ErrorKind::Parse, + "Expected configuration tag to contain a YAML code block.", + ) + })?; + let mut tag: Tag = serde_yaml::from_str(&yaml) + .context(ErrorKind::Parse, "reading configuration block yaml")?; for input_file in tag.input_files.iter_mut() { *input_file = input_file.replace('\\', "/"); } @@ -167,7 +186,9 @@ mod literate_config { // from https://github.com/kivikakk/comrak/blob/main/examples/headers.rs fn collect_text<'a>(node: &'a AstNode<'a>, output: &mut String) { match node.data.borrow().value { - NodeValue::Text(ref literal) | NodeValue::Code(NodeCode { ref literal, .. }) => output.push_str(literal), + NodeValue::Text(ref literal) | NodeValue::Code(NodeCode { ref literal, .. }) => { + output.push_str(literal) + } NodeValue::LineBreak | NodeValue::SoftBreak => output.push(' '), _ => { for n in node.children() { @@ -179,7 +200,9 @@ mod literate_config { /// Returns the first "## Configuration" AST Node. 
/// There should only be one per Literate Configuration file. - fn get_configuration_section_heading_node<'a>(root: &'a AstNode<'a>) -> Option<&'a AstNode<'a>> { + fn get_configuration_section_heading_node<'a>( + root: &'a AstNode<'a>, + ) -> Option<&'a AstNode<'a>> { root.children().find(|node| { if is_header_at_level(node, 2) { let mut text = String::new(); @@ -220,9 +243,20 @@ mod literate_config { fn extract_yaml<'a>(configuration_tag_heading_node: &'a AstNode<'a>) -> Result> { let mut current_node = configuration_tag_heading_node .next_sibling() - .ok_or_else(|| Error::message(ErrorKind::Parse, "markdown ended unexpectedly after configuration tag heading"))?; + .ok_or_else(|| { + Error::message( + ErrorKind::Parse, + "markdown ended unexpectedly after configuration tag heading", + ) + })?; loop { - if let NodeValue::CodeBlock(NodeCodeBlock { info, literal, fenced, .. }) = ¤t_node.data.borrow().value { + if let NodeValue::CodeBlock(NodeCodeBlock { + info, + literal, + fenced, + .. + }) = ¤t_node.data.borrow().value + { if !fenced { continue; } @@ -230,9 +264,12 @@ mod literate_config { return Ok(Some(literal.to_owned())); } } - current_node = current_node - .next_sibling() - .ok_or_else(|| Error::message(ErrorKind::Parse, "markdown ended unexpectedly after configuration tag heading"))?; + current_node = current_node.next_sibling().ok_or_else(|| { + Error::message( + ErrorKind::Parse, + "markdown ended unexpectedly after configuration tag heading", + ) + })?; } } } @@ -277,7 +314,9 @@ mod tests { fn test_get_input_file_api_version() { assert_eq!( Some("2019-05-05-preview".to_owned()), - get_input_file_api_version("Microsoft.AlertsManagement/preview/2019-05-05-preview/ActionRules.json") + get_input_file_api_version( + "Microsoft.AlertsManagement/preview/2019-05-05-preview/ActionRules.json" + ) ); } @@ -317,8 +356,14 @@ input-file: assert_eq!(1, tags.len()); assert_eq!("package-2019-06", tags[0].tag); assert_eq!(5, tags[0].input_files.len()); - 
assert_eq!("Microsoft.Storage/stable/2019-06-01/storage.json", tags[0].input_files[0]); - assert_eq!("Microsoft.Storage/stable/2019-06-01/blob.json", tags[0].input_files[1]); + assert_eq!( + "Microsoft.Storage/stable/2019-06-01/storage.json", + tags[0].input_files[0] + ); + assert_eq!( + "Microsoft.Storage/stable/2019-06-01/blob.json", + tags[0].input_files[1] + ); Ok(()) } @@ -354,12 +399,21 @@ input-file: assert_eq!(2, tags.len()); assert_eq!("package-2019-06", tags[0].tag); assert_eq!(5, tags[0].input_files.len()); - assert_eq!("Microsoft.Storage/stable/2019-06-01/storage.json", tags[0].input_files[0]); - assert_eq!("Microsoft.Storage/stable/2019-06-01/blob.json", tags[0].input_files[1]); + assert_eq!( + "Microsoft.Storage/stable/2019-06-01/storage.json", + tags[0].input_files[0] + ); + assert_eq!( + "Microsoft.Storage/stable/2019-06-01/blob.json", + tags[0].input_files[1] + ); assert_eq!("package-2015-05-preview", tags[1].tag); assert_eq!(1, tags[1].input_files.len()); - assert_eq!("Microsoft.Storage/preview/2015-05-01-preview/storage.json", tags[1].input_files[0]); + assert_eq!( + "Microsoft.Storage/preview/2015-05-01-preview/storage.json", + tags[1].input_files[0] + ); Ok(()) } diff --git a/autorust/codegen/src/crates.rs b/autorust/codegen/src/crates.rs index 43ce993d..a8458e51 100644 --- a/autorust/codegen/src/crates.rs +++ b/autorust/codegen/src/crates.rs @@ -31,7 +31,9 @@ pub fn list_crates(services_dir: &Path) -> Result> { if let Some(workspaces) = manifest.workspace { for member in workspaces.members { let member_path = services_dir.join(member).join("Cargo.toml"); - let Ok(manifest) = Manifest::from_path(member_path) else { continue }; + let Ok(manifest) = Manifest::from_path(member_path) else { + continue; + }; let Some(package) = manifest.package else { continue; }; @@ -49,7 +51,9 @@ pub fn list_dirs() -> Result> { } pub fn has_version(name: &str, version: &str) -> Result { - Ok(get_versions(name)?.iter().any(|v| v.vers.as_str() == version)) + 
Ok(get_versions(name)? + .iter() + .any(|v| v.vers.as_str() == version)) } /// Gets all the versions for a given crate diff --git a/autorust/codegen/src/error.rs b/autorust/codegen/src/error.rs index e67729c7..2a0c8eb7 100644 --- a/autorust/codegen/src/error.rs +++ b/autorust/codegen/src/error.rs @@ -51,7 +51,10 @@ impl Error { E: Into>, { Self { - context: Context::Custom(Custom { kind, error: error.into() }), + context: Context::Custom(Custom { + kind, + error: error.into(), + }), } } @@ -63,7 +66,13 @@ impl Error { C: Into>, { Self { - context: Context::Full(Custom { kind, error: error.into() }, message.into()), + context: Context::Full( + Custom { + kind, + error: error.into(), + }, + message.into(), + ), } } @@ -122,7 +131,10 @@ impl Error { return Err(self); } // Unwrapping is ok here since we already check above that the downcast will work - Ok(*self.into_inner()?.downcast().expect("failed to unwrap downcast")) + Ok(*self + .into_inner()? + .downcast() + .expect("failed to unwrap downcast")) } /// Returns a reference to the inner error wrapped by this error (if any). 
@@ -279,7 +291,13 @@ where C: Into>, { self.map_err(|e| Error { - context: Context::Full(Custom { error: Box::new(e), kind }, message.into()), + context: Context::Full( + Custom { + error: Box::new(e), + kind, + }, + message.into(), + ), }) } @@ -296,7 +314,10 @@ where #[derive(Debug)] enum Context { Simple(ErrorKind), - Message { kind: ErrorKind, message: Cow<'static, str> }, + Message { + kind: ErrorKind, + message: Cow<'static, str>, + }, Custom(Custom), Full(Custom, Cow<'static, str>), } @@ -351,7 +372,8 @@ mod tests { assert_eq!(errors.join(","), "second error,third error"); let inner = io::Error::new(io::ErrorKind::BrokenPipe, "third error"); - let error: Result<()> = std::result::Result::<(), std::io::Error>::Err(inner).context(ErrorKind::Io, "oh no broken pipe!"); + let error: Result<()> = std::result::Result::<(), std::io::Error>::Err(inner) + .context(ErrorKind::Io, "oh no broken pipe!"); assert_eq!(format!("{}", error.unwrap_err()), "oh no broken pipe!"); } @@ -359,7 +381,11 @@ mod tests { fn downcasting_works() { let error = &create_error() as &dyn std::error::Error; assert!(error.is::()); - let downcasted = error.source().unwrap().downcast_ref::().unwrap(); + let downcasted = error + .source() + .unwrap() + .downcast_ref::() + .unwrap(); assert_eq!(format!("{downcasted}"), "third error"); } diff --git a/autorust/codegen/src/gen.rs b/autorust/codegen/src/gen.rs index 3a0e7741..715912f9 100644 --- a/autorust/codegen/src/gen.rs +++ b/autorust/codegen/src/gen.rs @@ -11,7 +11,12 @@ pub fn package_name(spec: &SpecReadme, run_config: &RunConfig) -> String { format!("{}{}", &run_config.crate_name_prefix, &spec.service_name()) } -pub fn gen_crate(package_name: &str, spec: &SpecReadme, run_config: &RunConfig, output_folder: &str) -> Result> { +pub fn gen_crate( + package_name: &str, + spec: &SpecReadme, + run_config: &RunConfig, + output_folder: &str, +) -> Result> { let mut generated_tags = vec![]; let spec_config = spec.config()?; let service_name = 
&spec.service_name(); @@ -62,23 +67,33 @@ pub fn gen_crate(package_name: &str, spec: &SpecReadme, run_config: &RunConfig, .iter() .map(|input_file| io::join(spec.readme(), input_file).map_err(Error::from)) .collect(); - let input_files = input_files.with_context(ErrorKind::CodeGen, || format!("collecting input files for tag {name}"))?; + let input_files = input_files.with_context(ErrorKind::CodeGen, || { + format!("collecting input files for tag {name}") + })?; let crate_config = &CrateConfig { run_config, output_folder, input_files, }; - let cg = run(crate_config, &package_config).with_context(ErrorKind::CodeGen, || format!("gen_crate run for tag {name}"))?; - let operations = cg - .spec - .operations() - .with_context(ErrorKind::CodeGen, || format!("gen_crate operations for tag {name}"))?; + let cg = run(crate_config, &package_config).with_context(ErrorKind::CodeGen, || { + format!("gen_crate run for tag {name}") + })?; + let operations = cg.spec.operations().with_context(ErrorKind::CodeGen, || { + format!("gen_crate operations for tag {name}") + })?; operation_totals.insert(tag.name(), operations.len()); let mut versions = cg.spec.api_versions(); versions.sort_unstable(); api_version_totals.insert(tag.name(), versions.len()); - api_versions.insert(tag.name(), versions.iter().map(|v| format!("`{v}`")).collect::>().join(", ")); + api_versions.insert( + tag.name(), + versions + .iter() + .map(|v| format!("`{v}`")) + .collect::>() + .join(", "), + ); has_xml = cg.has_xml(); } @@ -89,8 +104,10 @@ pub fn gen_crate(package_name: &str, spec: &SpecReadme, run_config: &RunConfig, }; let default_tag = cargo_toml::get_default_tag(tags, default_tag_name); - cargo_toml::create(package_name, tags, default_tag, has_xml, &cargo_toml_path).context(ErrorKind::CodeGen, "cargo_toml::create")?; - lib_rs::create(tags, default_tag, lib_rs_path, false).context(ErrorKind::CodeGen, "lib_rs::create")?; + cargo_toml::create(package_name, tags, default_tag, has_xml, &cargo_toml_path) + 
.context(ErrorKind::CodeGen, "cargo_toml::create")?; + lib_rs::create(tags, default_tag, lib_rs_path, false) + .context(ErrorKind::CodeGen, "lib_rs::create")?; let readme = ReadmeMd { package_name, readme_url: readme_md::url(spec.readme().as_str()), @@ -100,7 +117,9 @@ pub fn gen_crate(package_name: &str, spec: &SpecReadme, run_config: &RunConfig, api_version_totals, api_versions, }; - readme.create(&readme_path).context(ErrorKind::CodeGen, "readme::create")?; + readme + .create(&readme_path) + .context(ErrorKind::CodeGen, "readme::create")?; Ok(generated_tags) } diff --git a/autorust/codegen/src/identifier.rs b/autorust/codegen/src/identifier.rs index 5e50a915..6eea5c9e 100644 --- a/autorust/codegen/src/identifier.rs +++ b/autorust/codegen/src/identifier.rs @@ -62,7 +62,8 @@ pub fn id(text: &str) -> String { } pub fn parse_ident(text: &str) -> Result { - syn::parse_str::(&id(text)).with_context(ErrorKind::Parse, || format!("parse ident {text}")) + syn::parse_str::(&id(text)) + .with_context(ErrorKind::Parse, || format!("parse ident {text}")) } pub fn raw_str_to_ident(text: &str) -> Result { @@ -234,7 +235,10 @@ mod tests { #[test] fn test_system_assigned_user_assigned() -> Result<()> { - assert_eq!("SystemAssigned, UserAssigned".to_camel_case_id(), "SystemAssignedUserAssigned"); + assert_eq!( + "SystemAssigned, UserAssigned".to_camel_case_id(), + "SystemAssignedUserAssigned" + ); Ok(()) } @@ -261,7 +265,10 @@ mod tests { #[test] fn test_microsoft_key_vault_vaults() -> Result<()> { - assert_eq!("Microsoft.KeyVault/vaults".to_camel_case_id(), "MicrosoftKeyVaultVaults"); + assert_eq!( + "Microsoft.KeyVault/vaults".to_camel_case_id(), + "MicrosoftKeyVaultVaults" + ); Ok(()) } @@ -288,7 +295,10 @@ mod tests { #[test] fn test_attr_qualified_name() -> Result<()> { - assert_eq!("attr:qualifiedName".to_snake_case_id(), "attr_qualified_name"); + assert_eq!( + "attr:qualifiedName".to_snake_case_id(), + "attr_qualified_name" + ); Ok(()) } diff --git 
a/autorust/codegen/src/io.rs b/autorust/codegen/src/io.rs index 1f7bb83c..4b721216 100644 --- a/autorust/codegen/src/io.rs +++ b/autorust/codegen/src/io.rs @@ -10,15 +10,20 @@ use std::path::PathBuf; pub fn join, P2: AsRef>(a: P1, b: P2) -> Result { let mut c = a.as_ref(); if c.extension().is_some() { - c = c - .parent() - .ok_or_else(|| Error::with_message(ErrorKind::Io, || "unable to get parent path of {c}"))?; + c = c.parent().ok_or_else(|| { + Error::with_message(ErrorKind::Io, || "unable to get parent path of {c}") + })?; // to directory } let mut c = PathBuf::from(c); let b = b.as_ref(); - c.append(b).with_context(ErrorKind::Io, || format!("append path {b} to {c:?}"))?; - Utf8PathBuf::from_path_buf(c).map_err(|path| Error::with_message(ErrorKind::Io, || format!("converting path to UTF-8: {path:?}"))) + c.append(b) + .with_context(ErrorKind::Io, || format!("append path {b} to {c:?}"))?; + Utf8PathBuf::from_path_buf(c).map_err(|path| { + Error::with_message(ErrorKind::Io, || { + format!("converting path to UTF-8: {path:?}") + }) + }) } pub fn join_several>(a: P1, b: &[Utf8PathBuf]) -> Result> { diff --git a/autorust/codegen/src/jinja.rs b/autorust/codegen/src/jinja.rs index 882ff76e..5fef45e9 100644 --- a/autorust/codegen/src/jinja.rs +++ b/autorust/codegen/src/jinja.rs @@ -10,7 +10,9 @@ pub struct PublishServicesYml<'a> { } pub fn render(template: &T, path: impl AsRef) -> Result<()> { - let rendered = template.render().with_context(ErrorKind::Io, || "render {path}")?; + let rendered = template + .render() + .with_context(ErrorKind::Io, || "render {path}")?; let mut file = File::create(path.as_ref())?; write!(file, "{rendered}")?; Ok(()) diff --git a/autorust/codegen/src/lib_rs.rs b/autorust/codegen/src/lib_rs.rs index 20f504d9..58839582 100644 --- a/autorust/codegen/src/lib_rs.rs +++ b/autorust/codegen/src/lib_rs.rs @@ -5,8 +5,17 @@ use proc_macro2::{Ident, TokenStream}; use quote::{quote, ToTokens}; use std::convert::{TryFrom, TryInto}; -pub fn create(tags: 
&[&Tag], default_tag: &Tag, path: &Utf8Path, print_writing_file: bool) -> Result<()> { - write_file(path, &create_body(tags, default_tag)?.into_token_stream(), print_writing_file) +pub fn create( + tags: &[&Tag], + default_tag: &Tag, + path: &Utf8Path, + print_writing_file: bool, +) -> Result<()> { + write_file( + path, + &create_body(tags, default_tag)?.into_token_stream(), + print_writing_file, + ) } struct Feature { @@ -19,7 +28,10 @@ impl TryFrom<&&Tag> for Feature { fn try_from(tag: &&Tag) -> Result { let feature_name = tag.rust_feature_name(); let mod_name = parse_ident(&tag.rust_mod_name()).context(ErrorKind::Parse, "mod name")?; - Ok(Feature { feature_name, mod_name }) + Ok(Feature { + feature_name, + mod_name, + }) } } @@ -29,7 +41,10 @@ struct BodyCode { } fn create_body(tags: &[&Tag], default_tag: &Tag) -> Result { - let features: Vec = tags.iter().map(|tag| tag.try_into()).collect::>()?; + let features: Vec = tags + .iter() + .map(|tag| tag.try_into()) + .collect::>()?; let default = (&default_tag).try_into()?; Ok(BodyCode { features, default }) @@ -40,7 +55,10 @@ impl ToTokens for BodyCode { let mut cfgs = TokenStream::new(); for feature in &self.features { - let Feature { feature_name, mod_name } = feature; + let Feature { + feature_name, + mod_name, + } = feature; cfgs.extend(quote! { #[cfg(feature = #feature_name)] pub mod #mod_name; @@ -48,7 +66,10 @@ impl ToTokens for BodyCode { } { - let Feature { feature_name, mod_name } = &self.default; + let Feature { + feature_name, + mod_name, + } = &self.default; cfgs.extend(quote! 
{ #[cfg(all(feature="default_tag", feature = #feature_name))] pub use #mod_name::*; diff --git a/autorust/codegen/src/readme_md.rs b/autorust/codegen/src/readme_md.rs index 065cff00..5a20f78d 100644 --- a/autorust/codegen/src/readme_md.rs +++ b/autorust/codegen/src/readme_md.rs @@ -26,7 +26,10 @@ impl<'a> ReadmeMd<'a> { self.api_version_totals.get(tag.name()).unwrap_or(&0) } pub fn api_versions(&self, tag: &'a Tag) -> &str { - self.api_versions.get(tag.name()).map(String::as_str).unwrap_or_default() + self.api_versions + .get(tag.name()) + .map(String::as_str) + .unwrap_or_default() } } diff --git a/autorust/codegen/src/spec.rs b/autorust/codegen/src/spec.rs index 30e6faa0..c37ceff5 100644 --- a/autorust/codegen/src/spec.rs +++ b/autorust/codegen/src/spec.rs @@ -1,8 +1,9 @@ use crate::io; use crate::{Error, ErrorKind, Result}; use autorust_openapi::{ - AdditionalProperties, CollectionFormat, DataType, MsExamples, MsLongRunningOperationOptions, MsPageable, OpenAPI, Operation, Parameter, - ParameterIn, PathItem, Reference, ReferenceOr, Response, Schema, SchemaCommon, StatusCode, + AdditionalProperties, CollectionFormat, DataType, MsExamples, MsLongRunningOperationOptions, + MsPageable, OpenAPI, Operation, Parameter, ParameterIn, PathItem, Reference, ReferenceOr, + Response, Schema, SchemaCommon, StatusCode, }; use camino::{Utf8Path, Utf8PathBuf}; use indexmap::{IndexMap, IndexSet}; @@ -56,12 +57,18 @@ impl Spec { docs, schemas, parameters, - input_files_paths: input_files_paths.iter().map(|f| f.as_ref().to_owned()).collect(), + input_files_paths: input_files_paths + .iter() + .map(|f| f.as_ref().to_owned()) + .collect(), }) } /// Read a file and references too, recursively into the map - fn read_file(docs: &mut IndexMap, file_path: impl AsRef) -> Result<()> { + fn read_file( + docs: &mut IndexMap, + file_path: impl AsRef, + ) -> Result<()> { let file_path = file_path.as_ref(); if !docs.contains_key(file_path) { let doc = openapi::parse(file_path)?; @@ -80,9 +87,9 @@ 
impl Spec { } pub fn doc(&self, doc_file: &Utf8Path) -> Result<&OpenAPI> { - self.docs - .get(doc_file) - .ok_or_else(|| Error::with_message(ErrorKind::Parse, || format!("key not found {doc_file}"))) + self.docs.get(doc_file).ok_or_else(|| { + Error::with_message(ErrorKind::Parse, || format!("key not found {doc_file}")) + }) } pub fn title(&self) -> Option<&str> { @@ -108,7 +115,11 @@ impl Spec { pub fn endpoint(&self) -> Option { let scheme = self.scheme(); match (self.host(), self.base_path()) { - (Some(host), Some(base_path)) => Some(format!("{scheme}://{host}{base_path}").trim_end_matches('/').to_owned()), + (Some(host), Some(base_path)) => Some( + format!("{scheme}://{host}{base_path}") + .trim_end_matches('/') + .to_owned(), + ), (Some(host), None) => Some(format!("{scheme}://{host}")), _ => None, } @@ -180,12 +191,20 @@ impl Spec { } /// Find the schema for a given doc path and reference - pub fn resolve_schema_ref(&self, doc_file: impl AsRef, reference: &Reference) -> Result { + pub fn resolve_schema_ref( + &self, + doc_file: impl AsRef, + reference: &Reference, + ) -> Result { let ref_key = self.ref_key(doc_file, reference)?; let schema = self .schemas .get(&ref_key) - .ok_or_else(|| Error::with_message(ErrorKind::Parse, || format!("parameter not found {ref_key:?}")))? + .ok_or_else(|| { + Error::with_message(ErrorKind::Parse, || { + format!("parameter not found {ref_key:?}") + }) + })? 
.clone(); Ok(ResolvedSchema { ref_key: Some(ref_key), @@ -194,13 +213,19 @@ impl Spec { } /// Find the parameter for a given doc path and reference - pub fn resolve_parameter_ref(&self, doc_file: impl AsRef, reference: Reference) -> Result { + pub fn resolve_parameter_ref( + &self, + doc_file: impl AsRef, + reference: Reference, + ) -> Result { let doc_file = doc_file.as_ref(); let full_path = match reference.file { None => doc_file.to_owned(), Some(file) => io::join(doc_file, file)?, }; - let name = reference.name.ok_or_else(|| Error::message(ErrorKind::Parse, "no name in ref"))?; + let name = reference + .name + .ok_or_else(|| Error::message(ErrorKind::Parse, "no name in ref"))?; let ref_key = RefKey { file_path: full_path, name, @@ -208,18 +233,28 @@ impl Spec { Ok(self .parameters .get(&ref_key) - .ok_or_else(|| Error::with_message(ErrorKind::Parse, || format!("parameter not found {ref_key:?}")))? + .ok_or_else(|| { + Error::with_message(ErrorKind::Parse, || { + format!("parameter not found {ref_key:?}") + }) + })? .clone()) } /// Resolve a reference or schema to a resolved schema - fn resolve_schema(&self, doc_file: impl AsRef, ref_or_schema: &ReferenceOr) -> Result { + fn resolve_schema( + &self, + doc_file: impl AsRef, + ref_or_schema: &ReferenceOr, + ) -> Result { match ref_or_schema { ReferenceOr::Item(schema) => Ok(ResolvedSchema { ref_key: None, schema: schema.clone(), }), - ReferenceOr::Reference { reference, .. } => self.resolve_schema_ref(doc_file, reference), + ReferenceOr::Reference { reference, .. } => { + self.resolve_schema_ref(doc_file, reference) + } } } @@ -237,10 +272,16 @@ impl Spec { Ok(resolved) } - pub fn resolve_path(&self, _doc_file: impl AsRef, path: &ReferenceOr) -> Result { + pub fn resolve_path( + &self, + _doc_file: impl AsRef, + path: &ReferenceOr, + ) -> Result { match path { ReferenceOr::Item(path) => Ok(path.clone()), - ReferenceOr::Reference { .. 
} => Err(Error::message(ErrorKind::Parse, "not implemented")), + ReferenceOr::Reference { .. } => { + Err(Error::message(ErrorKind::Parse, "not implemented")) + } } } @@ -256,14 +297,24 @@ impl Spec { Ok(resolved) } - fn resolve_parameter(&self, doc_file: &Utf8Path, parameter: &ReferenceOr) -> Result { + fn resolve_parameter( + &self, + doc_file: &Utf8Path, + parameter: &ReferenceOr, + ) -> Result { match parameter { ReferenceOr::Item(param) => Ok(param.clone()), - ReferenceOr::Reference { reference, .. } => self.resolve_parameter_ref(doc_file, reference.clone()), + ReferenceOr::Reference { reference, .. } => { + self.resolve_parameter_ref(doc_file, reference.clone()) + } } } - fn resolve_parameters(&self, doc_file: &Utf8Path, parameters: &[ReferenceOr]) -> Result> { + fn resolve_parameters( + &self, + doc_file: &Utf8Path, + parameters: &[ReferenceOr], + ) -> Result> { let mut resolved = Vec::new(); for param in parameters { resolved.push(WebParameter(self.resolve_parameter(doc_file, param)?)); @@ -345,7 +396,10 @@ pub mod openapi { } /// Returns a set of referenced relative file paths from an OpenAPI specficiation - pub fn get_reference_file_paths(doc_file: impl AsRef, api: &OpenAPI) -> IndexSet { + pub fn get_reference_file_paths( + doc_file: impl AsRef, + api: &OpenAPI, + ) -> IndexSet { get_references(doc_file, api) .into_iter() .filter_map(|reference| match reference { @@ -365,16 +419,24 @@ pub mod openapi { // paths and operations for (path, item) in api.paths() { match item { - ReferenceOr::Reference { reference, .. } => list.push(TypedReference::PathItem(reference.clone())), + ReferenceOr::Reference { reference, .. } => { + list.push(TypedReference::PathItem(reference.clone())) + } ReferenceOr::Item(item) => { for operation in path_operations_unresolved(&doc_file, &path, &item) { // parameters for param in &operation.parameters { match param { - ReferenceOr::Reference { reference, .. 
} => list.push(TypedReference::Parameter(reference.clone())), + ReferenceOr::Reference { reference, .. } => { + list.push(TypedReference::Parameter(reference.clone())) + } ReferenceOr::Item(parameter) => match ¶meter.schema { - Some(ReferenceOr::Reference { reference, .. }) => list.push(TypedReference::Schema(reference.clone())), - Some(ReferenceOr::Item(schema)) => add_references_for_schema(&mut list, schema), + Some(ReferenceOr::Reference { reference, .. }) => { + list.push(TypedReference::Schema(reference.clone())) + } + Some(ReferenceOr::Item(schema)) => { + add_references_for_schema(&mut list, schema) + } None => {} }, } @@ -383,8 +445,12 @@ pub mod openapi { // responses for (_code, rsp) in &operation.responses { match &rsp.schema { - Some(ReferenceOr::Reference { reference, .. }) => list.push(TypedReference::Schema(reference.clone())), - Some(ReferenceOr::Item(schema)) => add_references_for_schema(&mut list, schema), + Some(ReferenceOr::Reference { reference, .. }) => { + list.push(TypedReference::Schema(reference.clone())) + } + Some(ReferenceOr::Item(schema)) => { + add_references_for_schema(&mut list, schema) + } None => {} } } @@ -403,7 +469,9 @@ pub mod openapi { // definitions for (_name, schema) in &api.definitions { match schema { - ReferenceOr::Reference { reference, .. } => list.push(TypedReference::Schema(reference.clone())), + ReferenceOr::Reference { reference, .. 
} => { + list.push(TypedReference::Schema(reference.clone())) + } ReferenceOr::Item(schema) => add_references_for_schema(&mut list, schema), } } @@ -514,7 +582,10 @@ impl WebParameter { } pub fn collection_format(&self) -> &CollectionFormat { - self.0.collection_format.as_ref().unwrap_or(&CollectionFormat::Csv) + self.0 + .collection_format + .as_ref() + .unwrap_or(&CollectionFormat::Csv) } pub fn in_body(&self) -> bool { @@ -570,7 +641,10 @@ impl WebParameter { pub fn type_is_ref(&self) -> Result { Ok(if let Some(data_type) = self.data_type() { - matches!(data_type, DataType::String | DataType::Object | DataType::File) + matches!( + data_type, + DataType::String | DataType::Object | DataType::File + ) } else { true }) @@ -628,7 +702,11 @@ struct OperationVerb<'a> { pub verb: WebVerb, } -fn path_operations_unresolved(doc_file: impl AsRef, path: &str, item: &PathItem) -> Vec { +fn path_operations_unresolved( + doc_file: impl AsRef, + path: &str, + item: &PathItem, +) -> Vec { vec![ OperationVerb { operation: item.get.as_ref(), @@ -721,7 +799,9 @@ pub fn get_schema_schema_references(schema: &Schema) -> Vec { fn add_references_for_schema(list: &mut Vec, schema: &Schema) { for (_, schema) in &schema.properties { match schema { - ReferenceOr::Reference { reference, .. } => list.push(TypedReference::Schema(reference.clone())), + ReferenceOr::Reference { reference, .. } => { + list.push(TypedReference::Schema(reference.clone())) + } ReferenceOr::Item(schema) => add_references_for_schema(list, schema), } } @@ -730,20 +810,26 @@ fn add_references_for_schema(list: &mut Vec, schema: &Schema) { match ap { AdditionalProperties::Boolean(_) => {} AdditionalProperties::Schema(schema) => match schema { - ReferenceOr::Reference { reference, .. } => list.push(TypedReference::Schema(reference.clone())), + ReferenceOr::Reference { reference, .. 
} => { + list.push(TypedReference::Schema(reference.clone())) + } ReferenceOr::Item(schema) => add_references_for_schema(list, schema), }, } } if let Some(schema) = schema.common.items.as_ref() { match schema { - ReferenceOr::Reference { reference, .. } => list.push(TypedReference::Schema(reference.clone())), + ReferenceOr::Reference { reference, .. } => { + list.push(TypedReference::Schema(reference.clone())) + } ReferenceOr::Item(schema) => add_references_for_schema(list, schema), } } for schema in &schema.all_of { match schema { - ReferenceOr::Reference { reference, .. } => list.push(TypedReference::Schema(reference.clone())), + ReferenceOr::Reference { reference, .. } => { + list.push(TypedReference::Schema(reference.clone())) + } ReferenceOr::Item(schema) => add_references_for_schema(list, schema), } } diff --git a/autorust/codegen/src/status_codes.rs b/autorust/codegen/src/status_codes.rs index a9ab3121..242377cb 100644 --- a/autorust/codegen/src/status_codes.rs +++ b/autorust/codegen/src/status_codes.rs @@ -9,15 +9,20 @@ use proc_macro2::Ident; use std::convert::TryFrom; fn try_from_u16(status_code: u16) -> Result { - HttpStatusCode::try_from(status_code) - .map_err(|_| Error::with_message(ErrorKind::Parse, || format!("invalid status code '{status_code}'"))) + HttpStatusCode::try_from(status_code).map_err(|_| { + Error::with_message(ErrorKind::Parse, || { + format!("invalid status code '{status_code}'") + }) + }) } /// Get the status code canonical reason pub fn get_status_code_name(status_code: &StatusCode) -> Result<&'static str> { match status_code { StatusCode::Code(status_code) => Ok(try_from_u16(*status_code)?.canonical_reason()), - StatusCode::Default => Err(Error::with_message(ErrorKind::Parse, || "no status code name for default")), + StatusCode::Default => Err(Error::with_message(ErrorKind::Parse, || { + "no status code name for default" + })), } } @@ -63,7 +68,10 @@ mod tests { #[test] fn test_get_status_code_name() -> Result<()> { - 
assert_eq!("Loop Detected", get_status_code_name(&StatusCode::Code(508))?); + assert_eq!( + "Loop Detected", + get_status_code_name(&StatusCode::Code(508))? + ); Ok(()) } } diff --git a/autorust/codegen/tests/azure_rest_api_specs.rs b/autorust/codegen/tests/azure_rest_api_specs.rs index 41d5dc5c..05d2cb55 100644 --- a/autorust/codegen/tests/azure_rest_api_specs.rs +++ b/autorust/codegen/tests/azure_rest_api_specs.rs @@ -9,7 +9,8 @@ use spec::TypedReference; type Result = std::result::Result>; -const COMMON_TYPES_SPEC: &str = "../../../../azure-rest-api-specs/specification/security/resource-manager/common/v1/types.json"; +const COMMON_TYPES_SPEC: &str = + "../../../../azure-rest-api-specs/specification/security/resource-manager/common/v1/types.json"; const VMWARE_SPEC: &str = "../../../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json"; @@ -55,7 +56,10 @@ fn read_spec_avs() -> Result<()> { fn test_resolve_schema_ref() -> Result<()> { let file = Utf8PathBuf::from(VMWARE_SPEC); let spec = &Spec::read_files(&[&file])?; - spec.resolve_schema_ref(&file, &Reference::parse("#/definitions/OperationList").unwrap())?; + spec.resolve_schema_ref( + &file, + &Reference::parse("#/definitions/OperationList").unwrap(), + )?; spec.resolve_schema_ref( &file, &Reference::parse("../../../../../common-types/resource-management/v1/types.json#/definitions/ErrorResponse").unwrap(), diff --git a/autorust/openapi/src/header.rs b/autorust/openapi/src/header.rs index e28f9d35..5c667e7f 100644 --- a/autorust/openapi/src/header.rs +++ b/autorust/openapi/src/header.rs @@ -25,6 +25,9 @@ pub struct Header { pub enum_: Vec, /// https://github.com/Azure/autorest/blob/main/docs/extensions/readme.md#x-ms-header-collection-prefix - #[serde(rename = "x-ms-header-collection-prefix", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-header-collection-prefix", + skip_serializing_if = "Option::is_none" + )] pub 
x_ms_header_collection_prefix: Option, } diff --git a/autorust/openapi/src/info.rs b/autorust/openapi/src/info.rs index 646f0226..c69d00c8 100644 --- a/autorust/openapi/src/info.rs +++ b/autorust/openapi/src/info.rs @@ -25,6 +25,10 @@ pub struct Info { /// enables passing code generation settings via OpenAPI definition /// (deprecated! Please use configuration files instead.) /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-code-generation-settings - #[serde(rename = "x-ms-code-generation-settings", default, skip_serializing_if = "IndexMap::is_empty")] + #[serde( + rename = "x-ms-code-generation-settings", + default, + skip_serializing_if = "IndexMap::is_empty" + )] pub x_ms_code_generation_settings: IndexMap, } diff --git a/autorust/openapi/src/lib.rs b/autorust/openapi/src/lib.rs index d805f2ce..6cd6b65f 100644 --- a/autorust/openapi/src/lib.rs +++ b/autorust/openapi/src/lib.rs @@ -16,8 +16,9 @@ mod status_code; mod tag; pub use self::{ - autorest::*, contact::*, external_documentation::*, header::*, info::*, license::*, openapi::*, operation::*, parameter::*, paths::*, - reference::*, schema::*, security::*, status_code::*, tag::*, + autorest::*, contact::*, external_documentation::*, header::*, info::*, license::*, openapi::*, + operation::*, parameter::*, paths::*, reference::*, schema::*, security::*, status_code::*, + tag::*, }; #[derive(Debug, thiserror::Error)] diff --git a/autorust/openapi/src/openapi.rs b/autorust/openapi/src/openapi.rs index e012a4f9..3d81b37f 100644 --- a/autorust/openapi/src/openapi.rs +++ b/autorust/openapi/src/openapi.rs @@ -29,7 +29,11 @@ pub struct OpenAPI { // #[serde(default, skip_serializing_if = "IndexMap::is_empty")] // do not skip pub paths: IndexMap>, /// Relative paths to the individual endpoints. They must be relative to the 'basePath'. 
- #[serde(default, rename = "x-ms-paths", skip_serializing_if = "IndexMap::is_empty")] + #[serde( + default, + rename = "x-ms-paths", + skip_serializing_if = "IndexMap::is_empty" + )] pub x_ms_paths: IndexMap>, #[serde(default, skip_serializing_if = "IndexMap::is_empty")] pub definitions: IndexMap>, @@ -49,7 +53,10 @@ pub struct OpenAPI { /// replaces the fixed host with a host template that can be replaced with variable parameters /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-parameterized-host - #[serde(rename = "x-ms-parameterized-host", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-parameterized-host", + skip_serializing_if = "Option::is_none" + )] pub x_ms_parameterized_host: Option, } @@ -67,6 +74,11 @@ impl OpenAPI { result } pub fn version(&self) -> Result<&str, Error> { - Ok(self.info.version.as_ref().ok_or(Error::MissingApiVersion)?.as_str()) + Ok(self + .info + .version + .as_ref() + .ok_or(Error::MissingApiVersion)? + .as_str()) } } diff --git a/autorust/openapi/src/operation.rs b/autorust/openapi/src/operation.rs index 7b5e0b21..836e75d4 100644 --- a/autorust/openapi/src/operation.rs +++ b/autorust/openapi/src/operation.rs @@ -28,11 +28,21 @@ pub struct Operation { #[serde(rename = "x-ms-pageable", skip_serializing_if = "Option::is_none")] pub x_ms_pageable: Option, - #[serde(rename = "x-ms-examples", default, skip_serializing_if = "IndexMap::is_empty")] + #[serde( + rename = "x-ms-examples", + default, + skip_serializing_if = "IndexMap::is_empty" + )] pub x_ms_examples: MsExamples, - #[serde(rename = "x-ms-long-running-operation", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-long-running-operation", + skip_serializing_if = "Option::is_none" + )] pub x_ms_long_running_operation: Option, - #[serde(rename = "x-ms-long-running-operation-options", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-long-running-operation-options", + skip_serializing_if = 
"Option::is_none" + )] pub x_ms_long_running_operation_options: Option, #[serde(rename = "x-ms-request-id", skip_serializing_if = "Option::is_none")] pub x_ms_request_id: Option, diff --git a/autorust/openapi/src/parameter.rs b/autorust/openapi/src/parameter.rs index d475e254..882d0086 100644 --- a/autorust/openapi/src/parameter.rs +++ b/autorust/openapi/src/parameter.rs @@ -29,24 +29,39 @@ pub struct Parameter { /// provides a mechanism to specify that the global parameter is actually a parameter on the operation and not a client property /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-parameter-location - #[serde(rename = "x-ms-parameter-location", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-parameter-location", + skip_serializing_if = "Option::is_none" + )] pub x_ms_parameter_location: Option, /// skips URL encoding for path and query parameters /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-skip-url-encoding - #[serde(rename = "x-ms-skip-url-encoding", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-skip-url-encoding", + skip_serializing_if = "Option::is_none" + )] pub x_ms_skip_url_encoding: Option, /// groups method parameters in generated clients /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-parameter-grouping - #[serde(rename = "x-ms-parameter-grouping", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-parameter-grouping", + skip_serializing_if = "Option::is_none" + )] pub x_ms_parameter_grouping: Option, - #[serde(rename = "x-ms-client-request-id", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-client-request-id", + skip_serializing_if = "Option::is_none" + )] pub x_ms_client_request_id: Option, /// https://github.com/Azure/autorest/blob/main/docs/extensions/readme.md#x-ms-header-collection-prefix - #[serde(rename = "x-ms-header-collection-prefix", 
skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-header-collection-prefix", + skip_serializing_if = "Option::is_none" + )] pub x_ms_header_collection_prefix: Option, } diff --git a/autorust/openapi/src/reference.rs b/autorust/openapi/src/reference.rs index effc5c46..e9eca11f 100644 --- a/autorust/openapi/src/reference.rs +++ b/autorust/openapi/src/reference.rs @@ -24,7 +24,10 @@ pub enum ReferenceOr { /// flattens client model property or parameter /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-client-flatten - #[serde(rename = "x-ms-client-flatten", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-client-flatten", + skip_serializing_if = "Option::is_none" + )] x_ms_client_flatten: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -152,7 +155,9 @@ mod tests { assert_eq!( reference, Reference { - file: Some("../../../../../common-types/resource-management/v1/types.json".to_owned()), + file: Some( + "../../../../../common-types/resource-management/v1/types.json".to_owned() + ), path: vec!["parameters".to_owned()], name: Some("SubscriptionIdParameter".to_owned()), } @@ -201,7 +206,10 @@ mod tests { let json = r#"{"$ref":"foo/bar"}"#; assert_eq!( json, - serde_json::to_string(&ReferenceOr::::from_reference(Reference::from_file("foo/bar"))).unwrap() + serde_json::to_string(&ReferenceOr::::from_reference( + Reference::from_file("foo/bar") + )) + .unwrap() ); } } diff --git a/autorust/openapi/src/schema.rs b/autorust/openapi/src/schema.rs index dd527a26..aa5486f2 100644 --- a/autorust/openapi/src/schema.rs +++ b/autorust/openapi/src/schema.rs @@ -56,7 +56,10 @@ pub struct Response { #[serde(default, skip_serializing_if = "IndexMap::is_empty")] pub headers: IndexMap>, - #[serde(rename = "x-ms-error-response", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-error-response", + skip_serializing_if = "Option::is_none" + )] pub x_ms_error_response: Option, } @@ -172,12 
+175,19 @@ pub struct Schema { /// indicates that the Definition Schema Object is a resource as defined by the Resource Manager API /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-azure-resource - #[serde(rename = "x-ms-azure-resource", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-azure-resource", + skip_serializing_if = "Option::is_none" + )] pub x_ms_azure_resource: Option, /// provides insight to Autorest on how to generate code. It doesn't alter the modeling of what is actually sent on the wire /// https://github.com/Azure/autorest/blob/master/docs/extensions/readme.md#x-ms-mutability - #[serde(rename = "x-ms-mutability", default, skip_serializing_if = "Vec::is_empty")] + #[serde( + rename = "x-ms-mutability", + default, + skip_serializing_if = "Vec::is_empty" + )] pub x_ms_mutability: Vec, /// allows specific Definition Objects to be excluded from code generation @@ -188,7 +198,10 @@ pub struct Schema { #[serde(rename = "x-nullable", skip_serializing_if = "Option::is_none")] pub x_nullable: Option, - #[serde(rename = "x-ms-discriminator-value", skip_serializing_if = "Option::is_none")] + #[serde( + rename = "x-ms-discriminator-value", + skip_serializing_if = "Option::is_none" + )] pub x_ms_discriminator_value: Option, #[serde(skip_serializing_if = "Option::is_none")] diff --git a/autorust/openapi/src/security.rs b/autorust/openapi/src/security.rs index 49b8f316..a0f42b81 100644 --- a/autorust/openapi/src/security.rs +++ b/autorust/openapi/src/security.rs @@ -86,7 +86,10 @@ mod tests { #[test] fn basic_serializes() { let json = r#"{"type":"basic"}"#; - assert_eq!(json, serde_json::to_string(&Security::Basic { description: None }).unwrap()); + assert_eq!( + json, + serde_json::to_string(&Security::Basic { description: None }).unwrap() + ); } #[test] diff --git a/azure_devops_rust_api/Cargo.toml b/azure_devops_rust_api/Cargo.toml index f3f8245a..b819e571 100644 --- a/azure_devops_rust_api/Cargo.toml +++ 
b/azure_devops_rust_api/Cargo.toml @@ -48,6 +48,7 @@ no-default-tag = [] accounts = [] approvals_and_checks = [] artifacts = [] +artifacts_download = [] artifacts_package_types = [] audit = [] build = [] @@ -295,3 +296,7 @@ required-features = ["release"] [[example]] name = "member_entitlement_management" required-features = ["member_entitlement_management"] + +[[example]] +name = "download_artifact" +required-features = ["artifacts_download"] diff --git a/azure_devops_rust_api/examples/download_artifact.rs b/azure_devops_rust_api/examples/download_artifact.rs new file mode 100644 index 00000000..416f34a8 --- /dev/null +++ b/azure_devops_rust_api/examples/download_artifact.rs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +// Download a Universal Package from Azure Artifacts. +// +// Usage: +// export ADO_ORGANIZATION= +// export ADO_PROJECT= +// cargo run --example download_artifact --features="artifacts_download" -- \ +// --feed --name --version --path + +use anyhow::{Context, Result}; +use azure_devops_rust_api::artifacts_download; +use std::env; +use std::path::PathBuf; + +mod utils; + +// --- CLI argument parsing --- + +struct Args { + organization: String, + project: String, + feed: String, + name: String, + version: String, + path: PathBuf, +} + +fn parse_args() -> Result { + let organization = env::var("ADO_ORGANIZATION").context("Must define ADO_ORGANIZATION")?; + let project = env::var("ADO_PROJECT").context("Must define ADO_PROJECT")?; + + let args: Vec = env::args().collect(); + let mut feed = None; + let mut name = None; + let mut version = None; + let mut path = None; + + let mut i = 1; + while i < args.len() { + match args[i].as_str() { + "--feed" => { + feed = Some(args.get(i + 1).context("--feed requires a value")?.clone()); + i += 2; + } + "--name" => { + name = Some(args.get(i + 1).context("--name requires a value")?.clone()); + i += 2; + } + "--version" => { + version = Some( + args.get(i + 1) + 
.context("--version requires a value")? + .clone(), + ); + i += 2; + } + "--path" => { + path = Some(args.get(i + 1).context("--path requires a value")?.clone()); + i += 2; + } + _ => { + i += 1; + } + } + } + + Ok(Args { + organization, + project, + feed: feed.context("--feed is required")?, + name: name.context("--name is required")?, + version: version.context("--version is required")?, + path: PathBuf::from(path.context("--path is required")?), + }) +} + +#[tokio::main] +async fn main() -> Result<()> { + let args = parse_args()?; + let credential = utils::get_credential()?; + + println!( + "Downloading Universal Package: {}@{} from {}/{}", + args.name, args.version, args.organization, args.project + ); + + let client = artifacts_download::ClientBuilder::new(credential).build(); + + let metadata = client + .download_universal_package( + &args.organization, + &args.project, + &args.feed, + &args.name, + &args.version, + &args.path, + ) + .await?; + + println!( + "Downloaded {} v{} ({} bytes) to {:?}", + args.name, metadata.version, metadata.package_size, args.path + ); + + Ok(()) +} diff --git a/azure_devops_rust_api/src/artifacts_download/decompress.rs b/azure_devops_rust_api/src/artifacts_download/decompress.rs new file mode 100644 index 00000000..c53a83ec --- /dev/null +++ b/azure_devops_rust_api/src/artifacts_download/decompress.rs @@ -0,0 +1,284 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +//! Decompression for Azure DevOps blob store chunks. +//! +//! Azure DevOps blob store uses an LZ77 variant for content chunks. +//! This module implements the decompressor. + +use azure_core::error::{Error, ErrorKind, Result}; + +/// Decompress a chunk compressed with the LZ77 encoding used by +/// Azure DevOps blob store. 
+/// +/// The format uses: +/// - 32-bit flag indicators with a sentinel bit for tracking consumption +/// - 2-bit literal length encoding (1/2/3/4 byte batches) +/// - 16-bit match references: 13-bit offset + 3-bit length +/// - Nibble-based extended length encoding +pub fn decompress_chunk(compressed: &[u8]) -> Result> { + if compressed.len() < 5 { + return Err(Error::message( + ErrorKind::DataConversion, + format!("Compressed data too small: {} bytes", compressed.len()), + )); + } + + let mut output = Vec::with_capacity(compressed.len() * 4); + let mut ci = 0usize; + let mut indicator: i32; + let mut nibble_pos: Option<(usize, bool)> = None; + + // When true, the current indicator MSB is a literal-encoding bit (not a + // decision bit). This happens after reading a fresh indicator whose raw + // MSB was 0 (literal): that bit is consumed by the explicit check, and + // after *2+1 the MSB is the first encoding bit. + let mut fresh_literal; + + let raw = i32::from_le_bytes(compressed[ci..ci + 4].try_into().unwrap()); + ci += 4; + indicator = raw.wrapping_mul(2).wrapping_add(1); + fresh_literal = raw >= 0; + + // If the first decision is match, process it before the main loop. 
+ if raw < 0 { + if ci + 1 >= compressed.len() { + return Ok(output); + } + process_match(compressed, &mut ci, &mut output, &mut nibble_pos)?; + } + + loop { + if ci >= compressed.len() { + break; + } + + if fresh_literal { + fresh_literal = false; + } else if indicator >= 0 { + indicator = indicator.wrapping_mul(2); + } else { + indicator = indicator.wrapping_mul(2); + + if indicator == 0 { + if ci + 3 >= compressed.len() { + break; + } + let raw = i32::from_le_bytes(compressed[ci..ci + 4].try_into().unwrap()); + ci += 4; + indicator = raw.wrapping_mul(2).wrapping_add(1); + if raw >= 0 { + fresh_literal = true; + continue; + } + } + + if ci + 1 >= compressed.len() { + break; + } + process_match(compressed, &mut ci, &mut output, &mut nibble_pos)?; + continue; + } + + // Literal encoding: bits encode length as 1/2/3/4 bytes per batch + loop { + if indicator < 0 { + if ci >= compressed.len() { + return Ok(output); + } + output.push(compressed[ci]); + ci += 1; + break; + } + indicator = indicator.wrapping_mul(2); + if indicator < 0 { + if ci + 1 >= compressed.len() { + return Ok(output); + } + output.extend_from_slice(&compressed[ci..ci + 2]); + ci += 2; + break; + } + indicator = indicator.wrapping_mul(2); + if indicator < 0 { + if ci + 2 >= compressed.len() { + return Ok(output); + } + output.extend_from_slice(&compressed[ci..ci + 3]); + ci += 3; + break; + } + indicator = indicator.wrapping_mul(2); + if ci + 3 >= compressed.len() { + return Ok(output); + } + output.extend_from_slice(&compressed[ci..ci + 4]); + ci += 4; + if indicator < 0 { + break; + } + indicator = indicator.wrapping_mul(2); + } + + // Post-literal shift: consume the "1" bit that ended the literal group + indicator = indicator.wrapping_mul(2); + + if indicator == 0 { + if ci + 3 >= compressed.len() { + break; + } + let raw = i32::from_le_bytes(compressed[ci..ci + 4].try_into().unwrap()); + ci += 4; + indicator = raw.wrapping_mul(2).wrapping_add(1); + if raw >= 0 { + fresh_literal = true; + 
continue; + } + } + + // Match always follows a literal group + if ci + 1 >= compressed.len() { + break; + } + process_match(compressed, &mut ci, &mut output, &mut nibble_pos)?; + } + + Ok(output) +} + +/// Process a single LZ match: read the 16-bit match descriptor and optional +/// extended length, then copy `match_len` bytes from the output history. +fn process_match( + compressed: &[u8], + ci: &mut usize, + output: &mut Vec, + nibble_pos: &mut Option<(usize, bool)>, +) -> Result<()> { + let v = u16::from_le_bytes(compressed[*ci..*ci + 2].try_into().unwrap()); + *ci += 2; + + let mut match_len = (v & 7) as usize; + let offset = ((v >> 3) as usize) + 1; + + if match_len == 7 { + let nibble_val = if let Some((nib_idx, _)) = nibble_pos.take() { + (compressed[nib_idx] >> 4) as usize + } else { + if *ci >= compressed.len() { + return Err(Error::message( + ErrorKind::DataConversion, + "Unexpected end of compressed data in nibble read", + )); + } + let nib_idx = *ci; + *ci += 1; + *nibble_pos = Some((nib_idx, true)); + (compressed[nib_idx] & 0x0F) as usize + }; + + match_len = nibble_val; + if match_len == 15 { + if *ci >= compressed.len() { + return Err(Error::message( + ErrorKind::DataConversion, + "Unexpected end of compressed data in length extension", + )); + } + match_len = compressed[*ci] as usize; + *ci += 1; + if match_len == 255 { + if *ci + 1 >= compressed.len() { + return Err(Error::message( + ErrorKind::DataConversion, + "Unexpected end of compressed data in 16-bit length", + )); + } + match_len = + u16::from_le_bytes(compressed[*ci..*ci + 2].try_into().unwrap()) as usize; + *ci += 2; + if match_len == 0 { + if *ci + 3 >= compressed.len() { + return Err(Error::message( + ErrorKind::DataConversion, + "Unexpected end of compressed data in 32-bit length", + )); + } + match_len = + u32::from_le_bytes(compressed[*ci..*ci + 4].try_into().unwrap()) as usize; + *ci += 4; + } + if match_len < 22 { + return Err(Error::message( + ErrorKind::DataConversion, + 
format!("Invalid extended match length: {}", match_len), + )); + } + match_len -= 22; + } + match_len += 15; + } + match_len += 7; + } + match_len += 3; + + if offset > output.len() { + return Err(Error::message( + ErrorKind::DataConversion, + format!( + "Match offset {} exceeds output size {} at compressed pos {}", + offset, + output.len(), + ci + ), + )); + } + let src_start = output.len() - offset; + for i in 0..match_len { + let byte = output[src_start + (i % offset)]; + output.push(byte); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_too_small_input() { + assert!(decompress_chunk(&[]).is_err()); + assert!(decompress_chunk(&[0; 4]).is_err()); + } + + #[test] + fn test_single_literal() { + // Indicator 0x40000000: bit31=0 (literal), bit30=1 (1-byte literal) + let compressed: &[u8] = &[0x00, 0x00, 0x00, 0x40, 0x48]; // literal 'H' + let result = decompress_chunk(compressed).unwrap(); + assert_eq!(result, b"H"); + } + + #[test] + fn test_literal_then_match() { + // Indicator 0x40000000 + literal 'A' + match(offset=1, len=3) -> "AAAA" + let compressed: &[u8] = &[0x00, 0x00, 0x00, 0x40, 0x41, 0x00, 0x00]; + let result = decompress_chunk(compressed).unwrap(); + assert_eq!(result, b"AAAA"); + } + + #[test] + fn test_minimum_valid_input() { + // 5 bytes is the minimum accepted size + let compressed: &[u8] = &[0x00, 0x00, 0x00, 0x40, 0x58]; // literal 'X' + let result = decompress_chunk(compressed).unwrap(); + assert_eq!(result, b"X"); + } + + #[test] + fn test_match_offset_out_of_bounds() { + // First decision is match (bit31=1), match with offset > 0 when output is empty + // raw = 0x80000000, match v=0x0008 -> offset=2, but output is empty -> error + let compressed: &[u8] = &[0x00, 0x00, 0x00, 0x80, 0x08, 0x00]; + assert!(decompress_chunk(compressed).is_err()); + } +} diff --git a/azure_devops_rust_api/src/artifacts_download/mod.rs b/azure_devops_rust_api/src/artifacts_download/mod.rs new file mode 100644 index 00000000..6e68b084 
--- /dev/null
+++ b/azure_devops_rust_api/src/artifacts_download/mod.rs
@@ -0,0 +1,648 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+//! Download Universal Packages from Azure DevOps Artifacts.
+//!
+//! This module implements the dedup-based download protocol used by
+//! Azure DevOps Artifacts for universal packages.
+//!
+//! # Protocol overview
+//!
+//! 1. Discover service URLs via the ResourceAreas API
+//! 2. Get package metadata (manifestId, superRootId) from the packaging endpoint
+//! 3. Resolve blob IDs to download URLs via the dedup service
+//! 4. Download and parse the manifest to get the file/chunk structure
+//! 5. Download content chunks, decompress, and reassemble files
+
+mod decompress;
+
+use azure_core::error::{Error, ErrorKind, Result, ResultExt};
+use azure_core::headers::{self, HeaderValue};
+use azure_core::{Method, Request, Url};
+use serde::Deserialize;
+use std::collections::HashMap;
+use std::io::Write;
+use std::path::Path;
+
+pub use decompress::decompress_chunk;
+
+// --- Data structures ---
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ResourceArea {
+    #[allow(dead_code)]
+    id: String,
+    name: String,
+    location_url: String,
+}
+
+#[derive(Debug, Deserialize)]
+struct ResourceAreasResponse {
+    value: Vec<ResourceArea>,
+}
+
+/// Package metadata returned by the packaging endpoint.
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PackageMetadata {
+    /// Package version string.
+    pub version: String,
+    /// Blob ID of the dedup manifest.
+    pub manifest_id: String,
+    /// Blob ID of the super-root node.
+    pub super_root_id: String,
+    /// Total package size in bytes.
+    pub package_size: u64,
+}
+
+/// A file entry in the dedup manifest.
+#[derive(Debug, Deserialize)]
+pub struct ManifestItem {
+    /// File path within the package (e.g. "/myfile.bin").
+    pub path: String,
+    /// Reference to the dedup blob for this file.
+    pub blob: DedupBlobRef,
+}
+
+/// A reference to a dedup blob (hash ID + logical size).
+#[derive(Debug, Deserialize)]
+pub struct DedupBlobRef {
+    /// Hex-encoded blob ID with type suffix ("01" = content, "02" = node).
+    pub id: String,
+    /// Decompressed size in bytes.
+    pub size: u64,
+}
+
+/// Parsed manifest listing all files in a package.
+#[derive(Debug, Deserialize)]
+pub struct Manifest {
+    /// The files contained in the package.
+    pub items: Vec<ManifestItem>,
+}
+
+// --- Client ---
+
+/// Client for downloading Universal Packages from Azure Artifacts.
+#[derive(Clone)]
+pub struct Client {
+    credential: crate::Credential,
+    scopes: Vec<String>,
+    pipeline: azure_core::Pipeline,
+}
+
+/// Builder for creating an artifacts download [`Client`].
+#[derive(Clone)]
+pub struct ClientBuilder {
+    credential: crate::Credential,
+    scopes: Option<Vec<String>>,
+    options: azure_core::ClientOptions,
+}
+
+impl ClientBuilder {
+    /// Create a new `ClientBuilder`.
+    #[must_use]
+    pub fn new(credential: crate::Credential) -> Self {
+        Self {
+            credential,
+            scopes: None,
+            options: azure_core::ClientOptions::default(),
+        }
+    }
+
+    /// Set the authentication scopes.
+    #[must_use]
+    pub fn scopes(mut self, scopes: &[&str]) -> Self {
+        self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
+        self
+    }
+
+    /// Set the retry options.
+    #[must_use]
+    pub fn retry(mut self, retry: impl Into<azure_core::RetryOptions>) -> Self {
+        self.options = self.options.retry(retry);
+        self
+    }
+
+    /// Set the transport options.
+    #[must_use]
+    pub fn transport(mut self, transport: impl Into<azure_core::TransportOptions>) -> Self {
+        self.options = self.options.transport(transport);
+        self
+    }
+
+    /// Build the [`Client`].
+    pub fn build(self) -> Client {
+        let scopes = self
+            .scopes
+            .unwrap_or_else(|| vec![crate::ADO_SCOPE.to_string()]);
+        let pipeline = azure_core::Pipeline::new(
+            option_env!("CARGO_PKG_NAME"),
+            option_env!("CARGO_PKG_VERSION"),
+            self.options,
+            Vec::new(),
+            Vec::new(),
+        );
+        Client {
+            credential: self.credential,
+            scopes,
+            pipeline,
+        }
+    }
+}
+
+impl Client {
+    /// Create a new `ClientBuilder`.
+    #[must_use]
+    pub fn builder(credential: crate::Credential) -> ClientBuilder {
+        ClientBuilder::new(credential)
+    }
+
+    /// Get the authorization header value for the current credential.
+    async fn auth_header(&self) -> Result<String> {
+        let scopes: Vec<&str> = self.scopes.iter().map(String::as_str).collect();
+        self.credential
+            .http_authorization_header(&scopes)
+            .await?
+            .ok_or_else(|| Error::message(ErrorKind::Credential, "No credential configured"))
+    }
+
+    /// Send a request through the pipeline.
+    async fn send(&self, request: &mut Request) -> Result<azure_core::Response> {
+        let context = azure_core::Context::default();
+        self.pipeline.send(&context, request).await
+    }
+
+    /// Send an authenticated GET request and parse the JSON response.
+    async fn get_json<T: serde::de::DeserializeOwned>(&self, url: Url) -> Result<T> {
+        let mut req = Request::new(url, Method::Get);
+        let auth = self.auth_header().await?;
+        req.insert_header(headers::AUTHORIZATION, HeaderValue::from(auth));
+        req.insert_header(
+            headers::ACCEPT,
+            HeaderValue::from("application/json; api-version=7.1-preview.1"),
+        );
+        req.insert_header("x-tfs-fedauthredirect", HeaderValue::from("Suppress"));
+        req.set_body(azure_core::EMPTY_BODY);
+
+        let resp = self.send(&mut req).await?;
+        let bytes = resp.into_body().collect().await?;
+        serde_json::from_slice(&bytes).map_err(|e| {
+            Error::full(
+                ErrorKind::DataConversion,
+                e,
+                format!(
+                    "Failed to deserialize response:\n{}",
+                    String::from_utf8_lossy(&bytes)
+                ),
+            )
+        })
+    }
+
+    /// Send an unauthenticated GET request and return the raw bytes.
+    async fn get_bytes(&self, url: Url) -> Result<Vec<u8>> {
+        let mut req = Request::new(url, Method::Get);
+        req.set_body(azure_core::EMPTY_BODY);
+        let resp = self.send(&mut req).await?;
+        let bytes = resp.into_body().collect().await?;
+        Ok(bytes.to_vec())
+    }
+
+    // --- Service discovery ---
+
+    /// Discover Azure DevOps service URLs via the ResourceAreas API.
+    /// Returns a map of service name -> location URL.
+    pub async fn discover_services(&self, organization: &str) -> Result<HashMap<String, String>> {
+        let url = Url::parse(&format!(
+            "https://dev.azure.com/{}/_apis/ResourceAreas",
+            organization
+        ))
+        .context(ErrorKind::DataConversion, "invalid organization URL")?;
+
+        let areas: ResourceAreasResponse = self.get_json(url).await?;
+        let map: HashMap<String, String> = areas
+            .value
+            .into_iter()
+            .map(|a| (a.name.to_lowercase(), a.location_url))
+            .collect();
+        Ok(map)
+    }
+
+    /// Find the packages service URL from discovered services.
+    pub fn find_packages_url(services: &HashMap<String, String>, organization: &str) -> String {
+        services
+            .values()
+            .find(|url| url.contains("pkgs."))
+            .cloned()
+            .unwrap_or_else(|| format!("https://pkgs.dev.azure.com/{}", organization))
+    }
+
+    /// Find the blob/dedup service URL from discovered services.
+    pub fn find_blob_url(services: &HashMap<String, String>) -> Result<String> {
+        services.get("dedup").cloned().ok_or_else(|| {
+            Error::message(
+                ErrorKind::Other,
+                "Could not find 'dedup' service in ResourceAreas",
+            )
+        })
+    }
+
+    // --- Package metadata ---
+
+    /// Get package download metadata from the packaging endpoint.
+    pub async fn get_package_metadata(
+        &self,
+        packages_url: &str,
+        project: &str,
+        feed: &str,
+        name: &str,
+        version: &str,
+    ) -> Result<PackageMetadata> {
+        let mut url = Url::parse(&format!(
+            "{}/{}/_packaging/{}/upack/packages/{}/versions/{}",
+            packages_url.trim_end_matches('/'),
+            project,
+            feed,
+            name,
+            version,
+        ))
+        .context(ErrorKind::DataConversion, "invalid package metadata URL")?;
+
+        url.query_pairs_mut().append_pair("intent", "Download");
+        self.get_json(url).await
+    }
+
+    // --- Dedup blob operations ---
+
+    /// Resolve dedup blob IDs to download URLs via the dedup service.
+    pub async fn resolve_blob_urls(
+        &self,
+        blob_service_url: &str,
+        blob_ids: &[String],
+    ) -> Result<HashMap<String, String>> {
+        let mut url = Url::parse(&format!(
+            "{}/_apis/dedup/urls",
+            blob_service_url.trim_end_matches('/')
+        ))
+        .context(ErrorKind::DataConversion, "invalid dedup URL")?;
+
+        url.query_pairs_mut().append_pair("allowEdge", "true");
+
+        let mut req = Request::new(url, Method::Post);
+        let auth = self.auth_header().await?;
+        req.insert_header(headers::AUTHORIZATION, HeaderValue::from(auth));
+        req.insert_header(
+            headers::CONTENT_TYPE,
+            HeaderValue::from("application/json; charset=utf-8; api-version=1.0-preview"),
+        );
+        req.insert_header(
+            headers::ACCEPT,
+            HeaderValue::from("application/json; api-version=1.0"),
+        );
+        req.insert_header("x-tfs-fedauthredirect", HeaderValue::from("Suppress"));
+        let body = azure_core::to_json(blob_ids)?;
+        req.set_body(body);
+
+        let resp = self.send(&mut req).await?;
+        let bytes = resp.into_body().collect().await?;
+        serde_json::from_slice(&bytes).map_err(|e| {
+            Error::full(
+                ErrorKind::DataConversion,
+                e,
+                "Failed to parse blob URL response",
+            )
+        })
+    }
+
+    /// Download a blob from a SAS URL (no auth required).
+    pub async fn download_blob(&self, url: &str) -> Result<Vec<u8>> {
+        let parsed =
+            Url::parse(url).context(ErrorKind::DataConversion, "invalid blob download URL")?;
+        self.get_bytes(parsed).await
+    }
+
+    // --- Manifest parsing ---
+
+    /// Parse the dedup manifest blob (JSON) to extract file entries.
+    pub fn parse_manifest(data: &[u8]) -> Result<Manifest> {
+        serde_json::from_slice(data).map_err(|e| {
+            Error::full(
+                ErrorKind::DataConversion,
+                e,
+                "Failed to parse manifest JSON",
+            )
+        })
+    }
+
+    /// Parse a dedup node blob (binary format) to extract chunk references.
+    ///
+    /// A dedup node (ID ending in "02") contains references to child blobs.
+    /// The binary format is:
+    /// - 4-byte header
+    /// - N entries of: 4-byte metadata + 32-byte hash
+    ///
+    /// Content chunk IDs are formed by hex-encoding the 32-byte hash
+    /// and appending "01" (content type marker).
+    pub fn parse_dedup_node(data: &[u8]) -> Result<Vec<String>> {
+        const HEADER_SIZE: usize = 4;
+        const HASH_SIZE: usize = 32;
+        const METADATA_SIZE: usize = 4;
+        const ENTRY_SIZE: usize = METADATA_SIZE + HASH_SIZE;
+
+        if data.len() < HEADER_SIZE + ENTRY_SIZE {
+            return Err(Error::message(
+                ErrorKind::DataConversion,
+                format!(
+                    "Dedup node blob too small: {} bytes (minimum {})",
+                    data.len(),
+                    HEADER_SIZE + ENTRY_SIZE
+                ),
+            ));
+        }
+
+        let data_portion = data.len() - HEADER_SIZE;
+        if data_portion % ENTRY_SIZE != 0 {
+            return Err(Error::message(
+                ErrorKind::DataConversion,
+                format!(
+                    "Dedup node blob has unexpected size: {} bytes \
+                     (data portion {} is not a multiple of entry size {})",
+                    data.len(),
+                    data_portion,
+                    ENTRY_SIZE
+                ),
+            ));
+        }
+
+        let num_entries = data_portion / ENTRY_SIZE;
+        let mut chunk_ids = Vec::with_capacity(num_entries);
+
+        for i in 0..num_entries {
+            let offset = HEADER_SIZE + i * ENTRY_SIZE;
+            let hash_bytes = &data[offset + METADATA_SIZE..offset + METADATA_SIZE + HASH_SIZE];
+            let hex_hash: String = hash_bytes.iter().map(|b| format!("{:02X}", b)).collect();
+            chunk_ids.push(format!("{}01", hex_hash));
+        }
+
+        if chunk_ids.is_empty() {
+            return Err(Error::message(
+                ErrorKind::DataConversion,
+                format!(
+                    "No chunk references found in dedup node blob ({} bytes)",
+                    data.len()
+                ),
+            ));
+        }
+        Ok(chunk_ids)
+    }
+
+    // --- High-level download ---
+
+    /// Download a universal package to the specified output directory.
+    ///
+    /// Performs the full download protocol: service discovery, metadata fetch,
+    /// manifest download, chunk download with decompression, and file assembly.
+    pub async fn download_universal_package(
+        &self,
+        organization: &str,
+        project: &str,
+        feed: &str,
+        name: &str,
+        version: &str,
+        output_path: &Path,
+    ) -> Result<PackageMetadata> {
+        // Step 1: Discover service URLs
+        let services = self.discover_services(organization).await?;
+        let packages_url = Self::find_packages_url(&services, organization);
+        let blob_service_url = Self::find_blob_url(&services)?;
+
+        // Step 2: Get package metadata
+        let metadata = self
+            .get_package_metadata(&packages_url, project, feed, name, version)
+            .await?;
+
+        // Step 3: Download the manifest blob
+        let manifest_urls = self
+            .resolve_blob_urls(&blob_service_url, &[metadata.manifest_id.clone()])
+            .await?;
+        let manifest_url = manifest_urls.get(&metadata.manifest_id).ok_or_else(|| {
+            Error::message(ErrorKind::Other, "Manifest URL not found in response")
+        })?;
+        let manifest_data = self.download_blob(manifest_url).await?;
+        let manifest = Self::parse_manifest(&manifest_data)?;
+
+        // Step 4: Create output directory
+        std::fs::create_dir_all(output_path).map_err(|e| {
+            Error::full(
+                ErrorKind::Io,
+                e,
+                format!("Failed to create output directory: {:?}", output_path),
+            )
+        })?;
+
+        // Step 5: Download each file
+        for item in &manifest.items {
+            let file_root_urls = self
+                .resolve_blob_urls(&blob_service_url, &[item.blob.id.clone()])
+                .await?;
+            let file_root_url = file_root_urls
+                .get(&item.blob.id)
+                .ok_or_else(|| Error::message(ErrorKind::Other, "File root URL not found"))?;
+            let file_root_data = self.download_blob(file_root_url).await?;
+
+            let is_node = item.blob.id.ends_with("02");
+
+            let file_data = if is_node {
+                let chunk_ids = Self::parse_dedup_node(&file_root_data)?;
+                let chunk_urls = self
+                    .resolve_blob_urls(&blob_service_url, &chunk_ids)
+                    .await?;
+
+                let mut file_data = Vec::with_capacity(item.blob.size as usize);
+                for chunk_id in &chunk_ids {
+                    let chunk_url = chunk_urls.get(chunk_id).ok_or_else(|| {
+                        Error::message(
+                            ErrorKind::Other,
+                            format!("Chunk URL not found for {}", chunk_id),
+                        )
+                    })?;
+                    let chunk_data = self.download_blob(chunk_url).await?;
+                    let decompressed = decompress_chunk(&chunk_data)?;
+                    file_data.extend_from_slice(&decompressed);
+                }
+                file_data
+            } else {
+                file_root_data
+            };
+
+            let relative_path = item.path.trim_start_matches('/');
+            let file_path = output_path.join(relative_path);
+            if let Some(parent) = file_path.parent() {
+                std::fs::create_dir_all(parent).map_err(|e| {
+                    Error::full(
+                        ErrorKind::Io,
+                        e,
+                        format!("Failed to create directory: {:?}", parent),
+                    )
+                })?;
+            }
+            let mut file = std::fs::File::create(&file_path).map_err(|e| {
+                Error::full(
+                    ErrorKind::Io,
+                    e,
+                    format!("Failed to create file: {:?}", file_path),
+                )
+            })?;
+            file.write_all(&file_data)
+                .map_err(|e| Error::full(ErrorKind::Io, e, "Failed to write file data"))?;
+        }
+
+        Ok(metadata)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // --- find_packages_url ---
+
+    #[test]
+    fn test_find_packages_url_with_pkgs_service() {
+        let mut services = HashMap::new();
+        services.insert(
+            "packaging".to_string(),
+            "https://pkgs.dev.azure.com/myorg/".to_string(),
+        );
+        services.insert(
+            "dedup".to_string(),
+            "https://vsblob.dev.azure.com/myorg/".to_string(),
+        );
+
+        let url = Client::find_packages_url(&services, "myorg");
+        assert!(url.contains("pkgs."));
+    }
+
+    #[test]
+    fn test_find_packages_url_fallback() {
+        let services = HashMap::new();
+        let url = Client::find_packages_url(&services, "myorg");
+        assert_eq!(url, "https://pkgs.dev.azure.com/myorg");
+    }
+
+    // --- find_blob_url ---
+
+    #[test]
+    fn test_find_blob_url_found() {
+        let mut services = HashMap::new();
+        services.insert(
+            "dedup".to_string(),
+            "https://vsblob.dev.azure.com/myorg/".to_string(),
+        );
+
+        let url = Client::find_blob_url(&services).unwrap();
+        assert_eq!(url, "https://vsblob.dev.azure.com/myorg/");
+    }
+
+    #[test]
+    fn test_find_blob_url_missing() {
+        let services = HashMap::new();
+        assert!(Client::find_blob_url(&services).is_err());
+    }
+
+    // --- parse_manifest ---
+
+    #[test]
+    fn test_parse_manifest_valid() {
+        let json = br#"{"items":[{"path":"/file1.txt","blob":{"id":"ABC01","size":100}},{"path":"/dir/file2.bin","blob":{"id":"DEF02","size":200}}]}"#;
+        let manifest = Client::parse_manifest(json).unwrap();
+        assert_eq!(manifest.items.len(), 2);
+        assert_eq!(manifest.items[0].path, "/file1.txt");
+        assert_eq!(manifest.items[0].blob.id, "ABC01");
+        assert_eq!(manifest.items[0].blob.size, 100);
+        assert_eq!(manifest.items[1].path, "/dir/file2.bin");
+        assert_eq!(manifest.items[1].blob.id, "DEF02");
+        assert_eq!(manifest.items[1].blob.size, 200);
+    }
+
+    #[test]
+    fn test_parse_manifest_empty_items() {
+        let json = br#"{"items":[]}"#;
+        let manifest = Client::parse_manifest(json).unwrap();
+        assert!(manifest.items.is_empty());
+    }
+
+    #[test]
+    fn test_parse_manifest_invalid_json() {
+        assert!(Client::parse_manifest(b"not json").is_err());
+    }
+
+    #[test]
+    fn test_parse_manifest_missing_field() {
+        let json = br#"{"items":[{"path":"/f"}]}"#;
+        assert!(Client::parse_manifest(json).is_err());
+    }
+
+    // --- parse_dedup_node ---
+
+    #[test]
+    fn test_parse_dedup_node_single_entry() {
+        // 4-byte header + 1 entry (4-byte meta + 32-byte hash)
+        let mut data = vec![0x00, 0x01, 0x00, 0x00]; // header
+        data.extend_from_slice(&[0x00; 4]); // metadata
+        let hash: Vec<u8> = (0..32).collect();
+        data.extend_from_slice(&hash);
+
+        let ids = Client::parse_dedup_node(&data).unwrap();
+        assert_eq!(ids.len(), 1);
+        let expected: String = hash
+            .iter()
+            .map(|b| format!("{:02X}", b))
+            .collect::<String>()
+            + "01";
+        assert_eq!(ids[0], expected);
+    }
+
+    #[test]
+    fn test_parse_dedup_node_two_entries() {
+        let mut data = vec![0x00, 0x01, 0x00, 0x00]; // header
+        // Entry 1
+        data.extend_from_slice(&[0x00; 4]); // metadata
+        let hash1: Vec<u8> = (0..32).collect();
+        data.extend_from_slice(&hash1);
+        // Entry 2
+        data.extend_from_slice(&[0x01, 0x00, 0x00, 0x00]); // metadata
+        let hash2: Vec<u8> = (32..64).collect();
+        data.extend_from_slice(&hash2);
+
+        let ids = Client::parse_dedup_node(&data).unwrap();
+        assert_eq!(ids.len(), 2);
+        assert!(ids[0].ends_with("01"));
+        assert!(ids[1].ends_with("01"));
+    }
+
+    #[test]
+    fn test_parse_dedup_node_too_small() {
+        assert!(Client::parse_dedup_node(&[0; 10]).is_err());
+    }
+
+    #[test]
+    fn test_parse_dedup_node_invalid_size() {
+        // 4 header + 37 bytes (not a multiple of 36)
+        let data = vec![0u8; 4 + 37];
+        assert!(Client::parse_dedup_node(&data).is_err());
+    }
+
+    #[test]
+    fn test_parse_dedup_node_chunk_ids_are_content_type() {
+        let mut data = vec![0x00; 4]; // header
+        data.extend_from_slice(&[0x00; 4]); // metadata
+        data.extend_from_slice(&[0xFF; 32]); // all-FF hash
+
+        let ids = Client::parse_dedup_node(&data).unwrap();
+        assert_eq!(ids.len(), 1);
+        // Should end with "01" (content type), not "02" (node type)
+        assert!(ids[0].ends_with("01"));
+        assert_eq!(ids[0].len(), 66); // 64 hex chars + "01"
+    }
+}
diff --git a/azure_devops_rust_api/src/lib.rs b/azure_devops_rust_api/src/lib.rs
index afc47c04..a28e402d 100644
--- a/azure_devops_rust_api/src/lib.rs
+++ b/azure_devops_rust_api/src/lib.rs
@@ -22,6 +22,9 @@ pub mod approvals_and_checks;
 /// Artifacts
 #[cfg(feature = "artifacts")]
 pub mod artifacts;
+/// Artifacts download (Universal Packages)
+#[cfg(feature = "artifacts_download")]
+pub mod artifacts_download;
 /// Artifacts Package Types
 #[cfg(feature = "artifacts_package_types")]
 pub mod artifacts_package_types;