//! Tests for how doc comments (`///`, `//!`, `/** */`, `/*! */`) are
//! tokenized into `#[doc = "..."]` attribute token streams.
  1. use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
  2. // #[doc = "..."] -> "..."
  3. fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal {
  4. lit_of_doc_comment(tokens, false)
  5. }
  6. // #![doc = "..."] -> "..."
  7. fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal {
  8. lit_of_doc_comment(tokens, true)
  9. }
  10. fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal {
  11. let mut iter = tokens.clone().into_iter();
  12. match iter.next().unwrap() {
  13. TokenTree::Punct(punct) => {
  14. assert_eq!(punct.as_char(), '#');
  15. assert_eq!(punct.spacing(), Spacing::Alone);
  16. }
  17. _ => panic!("wrong token {:?}", tokens),
  18. }
  19. if inner {
  20. match iter.next().unwrap() {
  21. TokenTree::Punct(punct) => {
  22. assert_eq!(punct.as_char(), '!');
  23. assert_eq!(punct.spacing(), Spacing::Alone);
  24. }
  25. _ => panic!("wrong token {:?}", tokens),
  26. }
  27. }
  28. iter = match iter.next().unwrap() {
  29. TokenTree::Group(group) => {
  30. assert_eq!(group.delimiter(), Delimiter::Bracket);
  31. assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
  32. group.stream().into_iter()
  33. }
  34. _ => panic!("wrong token {:?}", tokens),
  35. };
  36. match iter.next().unwrap() {
  37. TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
  38. _ => panic!("wrong token {:?}", tokens),
  39. }
  40. match iter.next().unwrap() {
  41. TokenTree::Punct(punct) => {
  42. assert_eq!(punct.as_char(), '=');
  43. assert_eq!(punct.spacing(), Spacing::Alone);
  44. }
  45. _ => panic!("wrong token {:?}", tokens),
  46. }
  47. match iter.next().unwrap() {
  48. TokenTree::Literal(literal) => {
  49. assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
  50. literal
  51. }
  52. _ => panic!("wrong token {:?}", tokens),
  53. }
  54. }
  55. #[test]
  56. fn closed_immediately() {
  57. let stream = "/**/".parse::<TokenStream>().unwrap();
  58. let tokens = stream.into_iter().collect::<Vec<_>>();
  59. assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
  60. }
  61. #[test]
  62. fn incomplete() {
  63. assert!("/*/".parse::<TokenStream>().is_err());
  64. }
  65. #[test]
  66. fn lit() {
  67. let stream = "/// doc".parse::<TokenStream>().unwrap();
  68. let lit = lit_of_outer_doc_comment(&stream);
  69. assert_eq!(lit.to_string(), "\" doc\"");
  70. let stream = "//! doc".parse::<TokenStream>().unwrap();
  71. let lit = lit_of_inner_doc_comment(&stream);
  72. assert_eq!(lit.to_string(), "\" doc\"");
  73. let stream = "/** doc */".parse::<TokenStream>().unwrap();
  74. let lit = lit_of_outer_doc_comment(&stream);
  75. assert_eq!(lit.to_string(), "\" doc \"");
  76. let stream = "/*! doc */".parse::<TokenStream>().unwrap();
  77. let lit = lit_of_inner_doc_comment(&stream);
  78. assert_eq!(lit.to_string(), "\" doc \"");
  79. }
  80. #[test]
  81. fn carriage_return() {
  82. let stream = "///\r\n".parse::<TokenStream>().unwrap();
  83. let lit = lit_of_outer_doc_comment(&stream);
  84. assert_eq!(lit.to_string(), "\"\"");
  85. let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
  86. let lit = lit_of_outer_doc_comment(&stream);
  87. assert_eq!(lit.to_string(), "\"\\r\\n\"");
  88. "///\r".parse::<TokenStream>().unwrap_err();
  89. "///\r \n".parse::<TokenStream>().unwrap_err();
  90. "/**\r \n*/".parse::<TokenStream>().unwrap_err();
  91. }