diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ce7a8c9..8b742317 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,9 +2,15 @@ ## 0.20.0 (unreleased) + - **file inclusion**. This is a long awaited feature that allows you to include the contents of one file in another. This is useful to factorize common parts of your website, such as the header, or the authentication logic. There is a new [`sqlpage.run_sql`](https://sql.ophir.dev/functions.sql?function=run_sql#function) function that runs a given SQL file and returns its result as a JSON array. Combined with the existing [`dynamic`](https://sql.ophir.dev/documentation.sql?component=dynamic#component) component, this allows you to include the content of a file in another, like this: + ```sql + select 'dynamic' as component, sqlpage.run_sql('header.sql') as properties; + ``` + - **more powerful *dynamic* component**: the [`dynamic`](https://sql.ophir.dev/documentation.sql?component=dynamic#component) component can now be used to generate the special *header* components too, such as the `redirect`, `cookie`, `authentication`, `http_header` and `json` components. The *shell* component used to be allowed in dynamic components, but only if they were not nested (a dynamic component inside another one). This limitation is now lifted. This is particularly useful in combination with the new file inclusion feature, to factorize common parts of your website. There used to be a limit on how deeply nested dynamic components could be, but this limitation is now lifted too. - Add an `id` attribute to form fields in the [form](https://sql.ophir.dev/documentation.sql?component=form#component) component. This allows you to easily reference form fields in custom javascript code. - New [`rss`](https://sql.ophir.dev/documentation.sql?component=rss#component) component to create RSS feeds, including **podcast feeds**. 
You can now create and manage your podcast feed entirely in SQL, and distribute it to all podcast directories such as Apple Podcasts, Spotify, and Google Podcasts. - Better error handling in template rendering. Many template helpers now display a more precise error message when they fail to execute. This makes it easier to debug errors when you [develop your own custom components](https://sql.ophir.dev/custom_components.sql). + - better error messages when an error occurs while defining a variable with `SET`. SQLPage now displays the query that caused the error, and the name of the variable that was being defined. - Updated SQL parser to [v0.44](https://github.com/sqlparser-rs/sqlparser-rs/blob/main/CHANGELOG.md#0440-2024-03-02) - support [EXECUTE ... USING](https://www.postgresql.org/docs/current/plpgsql-statements.html#PLPGSQL-STATEMENTS-EXECUTING-DYN) in PostgreSQL - support `INSERT INTO ... SELECT ... RETURNING`, which allows you to insert data into a table, and easily pass values from the inserted row to a SQLPage component. [postgres docs](https://www.postgresql.org/docs/current/dml-returning.html), [mysql docs](https://mariadb.com/kb/en/insertreturning/), [sqlite docs](https://sqlite.org/lang_returning.html) diff --git a/examples/official-site/sqlpage/migrations/38_run_sql.sql b/examples/official-site/sqlpage/migrations/38_run_sql.sql new file mode 100644 index 00000000..877c81a7 --- /dev/null +++ b/examples/official-site/sqlpage/migrations/38_run_sql.sql @@ -0,0 +1,48 @@ +INSERT INTO sqlpage_functions ( + "name", + "introduced_in_version", + "icon", + "description_md" + ) +VALUES ( + 'run_sql', + '0.20.0', + 'login', + 'Executes another SQL file and returns its result as a JSON array. + +### Example + +#### Include a common header in all your pages + +It is common to want to run the same SQL queries at the beginning of all your pages, +to check if a user is logged in, render a header, etc. 
+You can create a file called `common_header.sql`, and use the `dynamic` component with the `run_sql` function to include it in all your pages. + +```sql +select ''dynamic'' as component, sqlpage.run_sql(''common_header.sql'') as properties; +``` + +#### Notes + + - **recursion**: you can use `run_sql` to include a file that itself includes another file, and so on. However, be careful to avoid infinite loops. SQLPage will throw an error if the inclusion depth is greater than 8. + - **security**: be careful when using `run_sql` to include files. Never use `run_sql` with a user-provided parameter. Never run a file uploaded by a user, or a file that is not under your control. + - **variables**: the included file will have access to the same variables (URL parameters, POST variables, etc.) + as the calling file. It will not have access to uploaded files. + If the included file changes the value of a variable or creates a new variable, the change will not be visible in the calling file. +' + ); +INSERT INTO sqlpage_function_parameters ( + "function", + "index", + "name", + "description_md", + "type" + ) +VALUES ( + 'run_sql', + 1, + 'file', + 'Path to the SQL file to execute, can be absolute, or relative to the web root (the root folder of your website sql files). 
+ In-database files, from the sqlpage_files(path, contents, last_modified) table are supported.', + 'TEXT' + ); diff --git a/src/dynamic_component.rs b/src/dynamic_component.rs new file mode 100644 index 00000000..63186428 --- /dev/null +++ b/src/dynamic_component.rs @@ -0,0 +1,166 @@ +use anyhow::{self, Context as _}; +use serde_json::Value as JsonValue; + +use crate::webserver::database::DbItem; + +pub fn parse_dynamic_rows(row: DbItem) -> impl Iterator { + DynamicComponentIterator { + stack: vec![], + db_item: Some(row), + } +} + +struct DynamicComponentIterator { + stack: Vec>, + db_item: Option, +} + +impl Iterator for DynamicComponentIterator { + type Item = DbItem; + + fn next(&mut self) -> Option { + if let Some(db_item) = self.db_item.take() { + if let DbItem::Row(mut row) = db_item { + if let Some(properties) = extract_dynamic_properties(&mut row) { + self.stack = dynamic_properties_to_vec(properties); + } else { + // Most common case: just a regular row. We allocated nothing. 
+ return Some(DbItem::Row(row)); + } + } else { + return Some(db_item); + } + } + expand_dynamic_stack(&mut self.stack); + self.stack.pop().map(|result| match result { + Ok(row) => DbItem::Row(row), + Err(err) => DbItem::Error(err), + }) + } +} + +fn expand_dynamic_stack(stack: &mut Vec>) { + while let Some(Ok(mut next)) = stack.pop() { + if let Some(properties) = extract_dynamic_properties(&mut next) { + stack.extend(dynamic_properties_to_vec(properties)); + } else { + stack.push(Ok(next)); + return; + } + } +} + +/// if row.component == 'dynamic', return Some(row.properties), otherwise return None +#[inline] +fn extract_dynamic_properties(data: &mut JsonValue) -> Option { + let component = data.get("component").and_then(|v| v.as_str()); + if component == Some("dynamic") { + let properties = data.get_mut("properties").map(JsonValue::take); + Some(properties.unwrap_or_default()) + } else { + None + } +} + +/// reverse the order of the vec returned by `dynamic_properties_to_result_vec`, +/// and wrap each element in a Result +fn dynamic_properties_to_vec(properties_obj: JsonValue) -> Vec> { + dynamic_properties_to_result_vec(properties_obj).map_or_else( + |err| vec![Err(err)], + |vec| vec.into_iter().rev().map(Ok).collect::>(), + ) +} + +/// if properties is a string, parse it as JSON and return a vec with the parsed value +/// if properties is an array, return it as is +/// if properties is an object, return it as a single element vec +/// otherwise, return an error +fn dynamic_properties_to_result_vec( + mut properties_obj: JsonValue, +) -> anyhow::Result> { + if let JsonValue::String(s) = properties_obj { + properties_obj = serde_json::from_str::(&s).with_context(|| { + format!( + "Unable to parse the 'properties' property of the dynamic component as JSON.\n\ + Invalid json: {s}" + ) + })?; + } + match properties_obj { + obj @ JsonValue::Object(_) => Ok(vec![obj]), + JsonValue::Array(values) => Ok(values), + other => anyhow::bail!( + "Dynamic component expected 
properties of type array or object, got {other} instead." + ), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_dynamic_properties_to_result_vec() { + let mut properties = JsonValue::String(r#"{"a": 1}"#.to_string()); + assert_eq!( + dynamic_properties_to_result_vec(properties.clone()).unwrap(), + vec![JsonValue::Object( + serde_json::from_str(r#"{"a": 1}"#).unwrap() + )] + ); + + properties = JsonValue::Array(vec![JsonValue::String(r#"{"a": 1}"#.to_string())]); + assert_eq!( + dynamic_properties_to_result_vec(properties.clone()).unwrap(), + vec![JsonValue::String(r#"{"a": 1}"#.to_string())] + ); + + properties = JsonValue::Object(serde_json::from_str(r#"{"a": 1}"#).unwrap()); + assert_eq!( + dynamic_properties_to_result_vec(properties.clone()).unwrap(), + vec![JsonValue::Object( + serde_json::from_str(r#"{"a": 1}"#).unwrap() + )] + ); + + properties = JsonValue::Null; + assert!(dynamic_properties_to_result_vec(properties).is_err()); + } + + #[test] + fn test_dynamic_properties_to_vec() { + let properties = JsonValue::String(r#"{"a": 1}"#.to_string()); + assert_eq!( + dynamic_properties_to_vec(properties.clone()) + .first() + .unwrap() + .as_ref() + .unwrap(), + &serde_json::json!({"a": 1}) + ); + } + + #[test] + fn test_parse_dynamic_rows() { + let row = DbItem::Row(serde_json::json!({ + "component": "dynamic", + "properties": [ + {"a": 1}, + {"component": "dynamic", "properties": {"nested": 2}}, + ] + })); + let iter = parse_dynamic_rows(row) + .map(|item| match item { + DbItem::Row(row) => row, + x => panic!("Expected a row, got {x:?}"), + }) + .collect::>(); + assert_eq!( + iter, + vec![ + serde_json::json!({"a": 1}), + serde_json::json!({"nested": 2}), + ] + ); + } +} diff --git a/src/file_cache.rs b/src/file_cache.rs index 63992140..90bdcf4f 100644 --- a/src/file_cache.rs +++ b/src/file_cache.rs @@ -5,7 +5,7 @@ use anyhow::Context; use async_trait::async_trait; use chrono::{DateTime, TimeZone, Utc}; use std::collections::HashMap; -use 
std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::sync::atomic::{ AtomicU64, Ordering::{Acquire, Release}, @@ -97,7 +97,7 @@ impl FileCache { /// Gets a file from the cache, or loads it from the file system if it's not there /// This is a privileged operation; it should not be used for user-provided paths - pub async fn get(&self, app_state: &AppState, path: &PathBuf) -> anyhow::Result> { + pub async fn get(&self, app_state: &AppState, path: &Path) -> anyhow::Result> { self.get_with_privilege(app_state, path, true).await } @@ -107,7 +107,7 @@ impl FileCache { pub async fn get_with_privilege( &self, app_state: &AppState, - path: &PathBuf, + path: &Path, privileged: bool, ) -> anyhow::Result> { log::trace!("Attempting to get from cache {:?}", path); @@ -164,7 +164,7 @@ impl FileCache { Ok(value) => { let new_val = Arc::clone(&value.content); log::trace!("Writing to cache {:?}", path); - self.cache.write().await.insert(path.clone(), value); + self.cache.write().await.insert(PathBuf::from(path), value); log::trace!("Done writing to cache {:?}", path); log::trace!("{:?} loaded in cache", path); Ok(new_val) diff --git a/src/lib.rs b/src/lib.rs index 38845be9..9c3e5166 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -4,6 +4,7 @@ extern crate core; pub mod app_config; +pub mod dynamic_component; pub mod file_cache; pub mod filesystem; pub mod render; diff --git a/src/render.rs b/src/render.rs index 29fab508..5d733a20 100644 --- a/src/render.rs +++ b/src/render.rs @@ -274,15 +274,12 @@ pub struct RenderContext { pub writer: W, current_component: Option, shell_renderer: SplitTemplateRenderer, - recursion_depth: usize, current_statement: usize, } const DEFAULT_COMPONENT: &str = "table"; const PAGE_SHELL_COMPONENT: &str = "shell"; const FRAGMENT_SHELL_COMPONENT: &str = "shell-empty"; -const DYNAMIC_COMPONENT: &str = "dynamic"; -const MAX_RECURSION_DEPTH: usize = 256; impl RenderContext { pub async fn new( @@ -293,12 +290,7 @@ impl RenderContext { ) -> anyhow::Result> { 
log::debug!("Creating the shell component for the page"); - let mut initial_rows = - if get_object_str(&initial_row, "component") == Some(DYNAMIC_COMPONENT) { - Self::extract_dynamic_properties(&initial_row)? - } else { - vec![Cow::Borrowed(&initial_row)] - }; + let mut initial_rows = vec![Cow::Borrowed(&initial_row)]; if !initial_rows .first() @@ -336,7 +328,6 @@ impl RenderContext { writer, current_component: None, shell_renderer, - recursion_depth: 0, current_statement: 1, }; @@ -367,11 +358,6 @@ impl RenderContext { let new_component = get_object_str(data, "component"); let current_component = self.current_component().await?.name(); match (current_component, new_component) { - (_current_component, Some(DYNAMIC_COMPONENT)) => { - self.render_dynamic(data).await.with_context(|| { - format!("Unable to render dynamic component with properties {data}") - })?; - } ( _, Some( @@ -398,41 +384,6 @@ impl RenderContext { Ok(()) } - fn extract_dynamic_properties(data: &Value) -> anyhow::Result>> { - let properties_key = "properties"; - let properties_obj = data - .get(properties_key) - .with_context(|| format!("Missing '{properties_key}' key."))?; - Ok(match properties_obj { - Value::String(s) => match serde_json::from_str::(s) - .with_context(|| "parsing json properties")? - { - Value::Array(values) => values.into_iter().map(Cow::Owned).collect(), - obj @ Value::Object(_) => vec![Cow::Owned(obj)], - other => bail!( - "Expected properties string to parse as array or object, got {other} instead." - ), - }, - obj @ Value::Object(_) => vec![Cow::Borrowed(obj)], - Value::Array(values) => values.iter().map(Cow::Borrowed).collect(), - other => bail!("Expected properties of type array or object, got {other} instead."), - }) - } - - async fn render_dynamic(&mut self, data: &Value) -> anyhow::Result<()> { - anyhow::ensure!( - self.recursion_depth <= MAX_RECURSION_DEPTH, - "Maximum recursion depth exceeded in the dynamic component." 
- ); - for dynamic_row_obj in Self::extract_dynamic_properties(data)? { - self.recursion_depth += 1; - let res = self.handle_row(&dynamic_row_obj).await; - self.recursion_depth -= 1; - res?; - } - Ok(()) - } - #[allow(clippy::unused_async)] pub async fn finish_query(&mut self) -> anyhow::Result<()> { log::debug!("-> Query {} finished", self.current_statement); diff --git a/src/webserver/database/execute_queries.rs b/src/webserver/database/execute_queries.rs index 928a7b32..1a4df801 100644 --- a/src/webserver/database/execute_queries.rs +++ b/src/webserver/database/execute_queries.rs @@ -1,23 +1,24 @@ -use anyhow::anyhow; +use anyhow::{anyhow, Context}; use futures_util::stream::Stream; use futures_util::StreamExt; use std::borrow::Cow; use std::collections::HashMap; +use std::pin::Pin; use super::csv_import::run_csv_import; use super::sql::{ParsedSqlFile, ParsedStatement, StmtWithParams}; +use crate::dynamic_component::parse_dynamic_rows; use crate::webserver::database::sql_pseudofunctions::extract_req_param; use crate::webserver::database::sql_to_json::row_to_string; use crate::webserver::http::SingleOrVec; use crate::webserver::http_request_info::RequestInfo; +use super::sql_pseudofunctions::StmtParam; +use super::{highlight_sql_error, Database, DbItem}; use sqlx::any::{AnyArguments, AnyQueryResult, AnyRow, AnyStatement, AnyTypeInfo}; use sqlx::pool::PoolConnection; use sqlx::{Any, AnyConnection, Arguments, Either, Executor, Statement}; -use super::sql_pseudofunctions::StmtParam; -use super::{highlight_sql_error, Database, DbItem}; - impl Database { pub(crate) async fn prepare_with( &self, @@ -31,7 +32,6 @@ impl Database { .map_err(|e| highlight_sql_error("Failed to prepare SQL statement", query, e)) } } - pub fn stream_query_results<'a>( db: &'a Database, sql_file: &'a ParsedSqlFile, @@ -53,28 +53,24 @@ pub fn stream_query_results<'a>( let mut stream = connection.fetch_many(query); while let Some(elem) = stream.next().await { let is_err = elem.is_err(); - yield 
parse_single_sql_result(&stmt.query, elem); + for i in parse_dynamic_rows(parse_single_sql_result(&stmt.query, elem)) { + yield i; + } if is_err { break; } } }, ParsedStatement::SetVariable { variable, value} => { - let query = bind_parameters(value, request).await?; - let connection = take_connection(db, &mut connection_opt).await?; - log::debug!("Executing query to set the {variable:?} variable: {:?}", query.sql); - let value: Option = connection.fetch_optional(query).await?.as_ref().and_then(row_to_string); - let (vars, name) = vars_and_name(request, variable)?; - if let Some(value) = value { - log::debug!("Setting variable {name} to {value:?}"); - vars.insert(name.clone(), SingleOrVec::Single(value)); - } else { - log::debug!("Removing variable {name}"); - vars.remove(&name); - } + execute_set_variable_query(db, &mut connection_opt, request, variable, value).await + .with_context(|| + format!("Failed to set the {variable:?} variable to {value:?}") + )?; }, ParsedStatement::StaticSimpleSelect(value) => { - yield DbItem::Row(value.clone().into()) + for i in parse_dynamic_rows(DbItem::Row(value.clone().into())) { + yield i; + } } ParsedStatement::Error(e) => yield DbItem::Error(clone_anyhow_err(e)), } @@ -83,6 +79,45 @@ pub fn stream_query_results<'a>( .map(|res| res.unwrap_or_else(DbItem::Error)) } +/// This function is used to create a pinned boxed stream of query results. +/// This allows recursive calls. 
+pub fn stream_query_results_boxed<'a>( + db: &'a Database, + sql_file: &'a ParsedSqlFile, + request: &'a mut RequestInfo, +) -> Pin + 'a>> { + Box::pin(stream_query_results(db, sql_file, request)) +} + +async fn execute_set_variable_query<'a>( + db: &'a Database, + connection_opt: &mut Option>, + request: &'a mut RequestInfo, + variable: &StmtParam, + statement: &StmtWithParams, +) -> anyhow::Result<()> { + let query = bind_parameters(statement, request).await?; + let connection = take_connection(db, connection_opt).await?; + log::debug!( + "Executing query to set the {variable:?} variable: {:?}", + query.sql + ); + let value: Option = connection + .fetch_optional(query) + .await? + .as_ref() + .and_then(row_to_string); + let (vars, name) = vars_and_name(request, variable)?; + if let Some(value) = value { + log::debug!("Setting variable {name} to {value:?}"); + vars.insert(name.clone(), SingleOrVec::Single(value)); + } else { + log::debug!("Removing variable {name}"); + vars.remove(&name); + } + Ok(()) +} + fn vars_and_name<'a>( request: &'a mut RequestInfo, variable: &StmtParam, diff --git a/src/webserver/database/sql.rs b/src/webserver/database/sql.rs index d33fcc72..9f4f1f5e 100644 --- a/src/webserver/database/sql.rs +++ b/src/webserver/database/sql.rs @@ -72,6 +72,7 @@ fn parse_sql<'a>( dialect: &'a dyn Dialect, sql: &'a str, ) -> anyhow::Result + 'a> { + log::trace!("Parsing SQL: {sql}"); let tokens = Tokenizer::new(dialect, sql) .tokenize_with_location() .with_context(|| "SQLPage's SQL parser could not tokenize the sql file")?; @@ -90,6 +91,7 @@ fn parse_single_statement(parser: &mut Parser<'_>, db_kind: AnyKind) -> Option

stmt, Err(err) => return Some(syntax_error(err, parser)), }; + log::debug!("Parsed statement: {stmt}"); while parser.consume_token(&SemiColon) {} if let Some(static_statement) = extract_static_simple_select(&stmt) { log::debug!("Optimised a static simple select to avoid a trivial database query: {stmt} optimized to {static_statement:?}"); @@ -106,8 +108,10 @@ fn parse_single_statement(parser: &mut Parser<'_>, db_kind: AnyKind) -> Option

), ReadFileAsDataUrl(Box), + RunSql(Box), Path, Protocol, } @@ -107,6 +114,7 @@ pub(super) fn func_call_to_param(func_name: &str, arguments: &mut [FunctionArg]) "read_file_as_data_url" => StmtParam::ReadFileAsDataUrl(Box::new( extract_variable_argument("read_file_as_data_url", arguments), )), + "run_sql" => StmtParam::RunSql(Box::new(extract_variable_argument("run_sql", arguments))), unknown_name => StmtParam::Error(format!( "Unknown function {unknown_name}({})", FormatArguments(arguments) @@ -126,6 +134,7 @@ pub(super) async fn extract_req_param<'a>( StmtParam::UrlEncode(inner) => url_encode(inner, request)?, StmtParam::ReadFileAsText(inner) => read_file_as_text(inner, request).await?, StmtParam::ReadFileAsDataUrl(inner) => read_file_as_data_url(inner, request).await?, + StmtParam::RunSql(inner) => run_sql(inner, request).await?, _ => extract_req_param_non_nested(param, request)?, }) } @@ -248,6 +257,53 @@ async fn read_file_as_data_url<'a>( Ok(Some(Cow::Owned(data_url))) } +async fn run_sql<'a>( + param0: &StmtParam, + request: &'a RequestInfo, +) -> Result>, anyhow::Error> { + use serde::ser::{SerializeSeq, Serializer}; + let Some(sql_file_path) = extract_req_param_non_nested(param0, request)? else { + log::debug!("run_sql: first argument is NULL, returning NULL"); + return Ok(None); + }; + let sql_file = request + .app_state + .sql_file_cache + .get_with_privilege( + &request.app_state, + std::path::Path::new(sql_file_path.as_ref()), + true, + ) + .await + .with_context(|| format!("run_sql: invalid path {sql_file_path:?}"))?; + let mut tmp_req = request.clone(); + if tmp_req.clone_depth > 8 { + bail!("Too many nested inclusions. run_sql can include a file that includes another file, but the depth is limited to 8 levels. \n\ + Executing sqlpage.run_sql('{sql_file_path}') would exceed this limit. 
\n\ + This is to prevent infinite loops and stack overflows.\n\ + Make sure that your SQL file does not try to run itself, directly or through a chain of other files."); + } + let mut results_stream = + stream_query_results_boxed(&request.app_state.db, &sql_file, &mut tmp_req); + let mut json_results_bytes = Vec::new(); + let mut json_encoder = serde_json::Serializer::new(&mut json_results_bytes); + let mut seq = json_encoder.serialize_seq(None)?; + while let Some(db_item) = results_stream.next().await { + match db_item { + DbItem::Row(row) => { + log::debug!("run_sql: row: {:?}", row); + seq.serialize_element(&row)?; + } + DbItem::FinishedQuery => log::trace!("run_sql: Finished query"), + DbItem::Error(err) => { + return Err(err.context(format!("run_sql: unable to run {sql_file_path:?}"))) + } + } + } + seq.end()?; + Ok(Some(Cow::Owned(String::from_utf8(json_results_bytes)?))) +} + fn mime_from_upload<'a>(param0: &StmtParam, request: &'a RequestInfo) -> Option<&'a Mime> { if let StmtParam::UploadedFilePath(name) | StmtParam::UploadedFileMimeType(name) = param0 { request.uploaded_files.get(name)?.content_type.as_ref() @@ -308,8 +364,9 @@ pub(super) fn extract_req_param_non_nested<'a>( .map(|x| Cow::Borrowed(x.as_ref())), StmtParam::ReadFileAsText(_) => bail!("Nested read_file_as_text() function not allowed",), StmtParam::ReadFileAsDataUrl(_) => { - bail!("Nested read_file_as_data_url() function not allowed",) + bail!("Nested read_file_as_data_url() function not allowed") } + StmtParam::RunSql(_) => bail!("Nested run_sql() function not allowed"), }) } diff --git a/src/webserver/http.rs b/src/webserver/http.rs index fa746a9e..eb2affc0 100644 --- a/src/webserver/http.rs +++ b/src/webserver/http.rs @@ -271,7 +271,7 @@ fn send_anyhow_error( body.push_str("Contact the administrator for more information. 
A detailed error message has been logged."); } else { use std::fmt::Write; - write!(body, "{e:#}").unwrap(); + write!(body, "{e:?}").unwrap(); } resp = resp.set_body(BoxBody::new(body)); resp.headers_mut().insert( @@ -306,7 +306,7 @@ fn send_anyhow_error( .unwrap_or_else(|_| log::error!("could not send headers")); } -#[derive(Debug, serde::Serialize, serde::Deserialize, PartialEq)] +#[derive(Debug, serde::Serialize, serde::Deserialize, PartialEq, Clone)] #[serde(untagged)] pub enum SingleOrVec { Single(String), diff --git a/src/webserver/http_request_info.rs b/src/webserver/http_request_info.rs index b9a16768..7cf3be49 100644 --- a/src/webserver/http_request_info.rs +++ b/src/webserver/http_request_info.rs @@ -33,6 +33,26 @@ pub struct RequestInfo { pub cookies: ParamMap, pub basic_auth: Option, pub app_state: Arc, + pub clone_depth: u8, +} + +impl Clone for RequestInfo { + fn clone(&self) -> Self { + Self { + path: self.path.clone(), + protocol: self.protocol.clone(), + get_variables: self.get_variables.clone(), + post_variables: self.post_variables.clone(), + // uploaded_files is not cloned, as it contains file handles + uploaded_files: HashMap::new(), + headers: self.headers.clone(), + client_ip: self.client_ip, + cookies: self.cookies.clone(), + basic_auth: self.basic_auth.clone(), + app_state: self.app_state.clone(), + clone_depth: self.clone_depth + 1, + } + } } pub(crate) async fn extract_request_info( @@ -76,6 +96,7 @@ pub(crate) async fn extract_request_info( basic_auth, app_state, protocol, + clone_depth: 0, } } diff --git a/tests/sql_test_files/error_too_many_nested_inclusions.sql b/tests/sql_test_files/error_too_many_nested_inclusions.sql new file mode 100644 index 00000000..a021bada --- /dev/null +++ b/tests/sql_test_files/error_too_many_nested_inclusions.sql @@ -0,0 +1,4 @@ +select 'debug' as component, + sqlpage.run_sql( + 'tests/sql_test_files/error_too_many_nested_inclusions.sql' + ) as contents; \ No newline at end of file diff --git 
a/tests/sql_test_files/it_works_deeply_nested_dynamic_components.sql b/tests/sql_test_files/it_works_deeply_nested_dynamic_components.sql new file mode 100644 index 00000000..c0608b23 --- /dev/null +++ b/tests/sql_test_files/it_works_deeply_nested_dynamic_components.sql @@ -0,0 +1,129 @@ +select 'dynamic' as component,'{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": 
+{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": 
+{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"dynamic","properties": +{"component":"text", "contents": "It works ! 
(but it shouldn''t)"} +}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}} +' as properties; \ No newline at end of file diff --git a/tests/sql_test_files/it_works_dynamic_nested.sql b/tests/sql_test_files/it_works_dynamic_nested.sql new file mode 100644 index 00000000..4c3c0fb8 --- /dev/null +++ b/tests/sql_test_files/it_works_dynamic_nested.sql @@ -0,0 +1,8 @@ +-- Checks that we can have a page with a single dynamic component containing multiple children +select 'dynamic' as component, + '[ + {"component":"dynamic", "properties": [ + {"component":"text"}, + {"contents":"It works !", "bold":true} + ]} + ]' as properties; \ No newline at end of file diff --git a/tests/sql_test_files/it_works_dynamic_run_sql_include.sql b/tests/sql_test_files/it_works_dynamic_run_sql_include.sql new file mode 100644 index 00000000..d4c3679f --- /dev/null +++ b/tests/sql_test_files/it_works_dynamic_run_sql_include.sql @@ -0,0 +1 @@ +select 'dynamic' as component, sqlpage.run_sql('tests/sql_test_files/it_works_dynamic_shell.sql') as properties; diff --git a/tests/sql_test_files/it_works_run_sql.sql b/tests/sql_test_files/it_works_run_sql.sql new file mode 100644 index 00000000..d7c830d8 --- /dev/null +++ b/tests/sql_test_files/it_works_run_sql.sql @@ -0,0 +1 @@ +select 'dynamic' as component, sqlpage.run_sql('tests/sql_test_files/it_works_simple.sql') as properties; diff --git a/tests/sql_test_files/it_works_simple.sql b/tests/sql_test_files/it_works_simple.sql new file mode 100644 index 00000000..cef377f5 --- /dev/null +++ b/tests/sql_test_files/it_works_simple.sql @@ -0,0 +1,2 @@ +select 'shell' as component, 'Hello world !' AS title; +select 'text' as component, 'It works !' AS contents; \ No newline at end of file