Skip to content

Commit b2bfa73

Browse files
Author: bors-servo
Auto merge of #345 - Eijebong:syn, r=SimonSapin
Update syn to 0.14 and bump version
2 parents 0716de2 + 86cfd3d commit b2bfa73

File tree

2 files changed

+11
-11
lines changed

2 files changed

+11
-11
lines changed

html5ever/Cargo.toml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[package]
22

33
name = "html5ever"
4-
version = "0.22.3"
4+
version = "0.22.4"
55
authors = [ "The html5ever Project Developers" ]
66
license = "MIT / Apache-2.0"
77
repository = "https://github.com/servo/html5ever"
@@ -41,6 +41,6 @@ rustc-test = "0.3"
4141
typed-arena = "1.3.0"
4242

4343
[build-dependencies]
44-
quote = "0.5"
45-
syn = { version = "0.13", features = ["extra-traits", "full", "fold"] }
46-
proc-macro2 = "0.3"
44+
quote = "0.6"
45+
syn = { version = "0.14", features = ["extra-traits", "full", "fold"] }
46+
proc-macro2 = "0.4"

html5ever/macros/match_token.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ matching, by enforcing the following restrictions on its input:
9999
is common in the HTML5 syntax.
100100
*/
101101

102-
use quote::{ToTokens, Tokens};
102+
use quote::ToTokens;
103103
use std::collections::HashSet;
104104
use std::fs::File;
105105
use std::io::{Read, Write};
@@ -114,7 +114,7 @@ pub fn expand(from: &Path, to: &Path) {
114114
let ast = syn::parse_file(&source).expect("Parsing rules.rs module");
115115
let mut m = MatchTokenParser {};
116116
let ast = m.fold_file(ast);
117-
let code = ast.into_tokens().to_string().replace("{ ", "{\n").replace(" }", "\n}");
117+
let code = ast.into_token_stream().to_string().replace("{ ", "{\n").replace(" }", "\n}");
118118
File::create(to).unwrap().write_all(code.as_bytes()).unwrap();
119119
}
120120

@@ -238,7 +238,7 @@ pub fn expand_match_token(body: &TokenStream) -> syn::Expr {
238238
syn::parse2(ast.into()).unwrap()
239239
}
240240

241-
fn expand_match_token_macro(match_token: MatchToken) -> Tokens {
241+
fn expand_match_token_macro(match_token: MatchToken) -> TokenStream {
242242
let mut arms = match_token.arms;
243243
let to_be_matched = match_token.expr;
244244
// Handle the last arm specially at the end.
@@ -249,11 +249,11 @@ fn expand_match_token_macro(match_token: MatchToken) -> Tokens {
249249

250250
// Case arms for wildcard matching. We collect these and
251251
// emit them later.
252-
let mut wildcards_patterns: Vec<Tokens> = Vec::new();
252+
let mut wildcards_patterns: Vec<TokenStream> = Vec::new();
253253
let mut wildcards_expressions: Vec<syn::Expr> = Vec::new();
254254

255255
// Tags excluded (by an 'else' RHS) from wildcard matching.
256-
let mut wild_excluded_patterns: Vec<Tokens> = Vec::new();
256+
let mut wild_excluded_patterns: Vec<TokenStream> = Vec::new();
257257

258258
let mut arms_code = Vec::new();
259259

@@ -284,7 +284,7 @@ fn expand_match_token_macro(match_token: MatchToken) -> Tokens {
284284
if tag.name.is_none() {
285285
panic!("'else' may not appear with a wildcard tag");
286286
}
287-
wild_excluded_patterns.push(make_tag_pattern(&Tokens::new(), tag));
287+
wild_excluded_patterns.push(make_tag_pattern(&TokenStream::new(), tag));
288288
}
289289
}
290290

@@ -422,7 +422,7 @@ impl Fold for MatchTokenParser {
422422
}
423423
}
424424

425-
fn make_tag_pattern(binding: &Tokens, tag: Tag) -> Tokens {
425+
fn make_tag_pattern(binding: &TokenStream, tag: Tag) -> TokenStream {
426426
let kind = match tag.kind {
427427
TagKind::StartTag => quote!(::tokenizer::StartTag),
428428
TagKind::EndTag => quote!(::tokenizer::EndTag),

0 commit comments

Comments (0)