# parser.py
  1. import warnings
  2. import bleach
  3. import markdown
  4. from bs4 import BeautifulSoup
  5. from django.http import Http404
  6. from django.urls import resolve
  7. from htmlmin.minify import html_minify
  8. from markdown.extensions.fenced_code import FencedCodeExtension
  9. from misago.conf import settings
  10. from .bbcode import blocks, inline
  11. from .md.shortimgs import ShortImagesExtension
  12. from .md.striketrough import StriketroughExtension
  13. from .mentions import add_mentions
  14. from .pipeline import pipeline
  15. MISAGO_ATTACHMENT_VIEWS = ("misago:attachment", "misago:attachment-thumbnail")
  16. def parse(
  17. text,
  18. request,
  19. poster,
  20. allow_mentions=True,
  21. allow_links=True,
  22. allow_images=True,
  23. allow_blocks=True,
  24. force_shva=False,
  25. minify=True,
  26. ):
  27. """
  28. Message parser
  29. Utility for flavours to call
  30. Breaks text into paragraphs, supports code, spoiler and quote blocks,
  31. headers, lists, images, spoilers, text styles
  32. Returns dict object
  33. """
  34. md = md_factory(
  35. allow_links=allow_links, allow_images=allow_images, allow_blocks=allow_blocks
  36. )
  37. parsing_result = {
  38. "original_text": text,
  39. "parsed_text": "",
  40. "markdown": md,
  41. "mentions": [],
  42. "images": [],
  43. "internal_links": [],
  44. "outgoing_links": [],
  45. }
  46. # Parse text
  47. parsed_text = md.convert(text)
  48. # Clean and store parsed text
  49. parsing_result["parsed_text"] = parsed_text.strip()
  50. if allow_links:
  51. linkify_paragraphs(parsing_result)
  52. parsing_result = pipeline.process_result(parsing_result)
  53. if allow_mentions:
  54. add_mentions(request, parsing_result)
  55. if allow_links or allow_images:
  56. clean_links(request, parsing_result, force_shva)
  57. if minify:
  58. minify_result(parsing_result)
  59. return parsing_result
  60. def md_factory(allow_links=True, allow_images=True, allow_blocks=True):
  61. """creates and configures markdown object"""
  62. md = markdown.Markdown(extensions=["markdown.extensions.nl2br"])
  63. # Remove HTML allowances
  64. del md.preprocessors["html_block"]
  65. del md.inlinePatterns["html"]
  66. # Remove references
  67. del md.preprocessors["reference"]
  68. del md.inlinePatterns["reference"]
  69. del md.inlinePatterns["image_reference"]
  70. del md.inlinePatterns["short_reference"]
  71. # Add [b], [i], [u]
  72. md.inlinePatterns.add("bb_b", inline.bold, "<strong")
  73. md.inlinePatterns.add("bb_i", inline.italics, "<emphasis")
  74. md.inlinePatterns.add("bb_u", inline.underline, "<emphasis2")
  75. # Add ~~deleted~~
  76. striketrough_md = StriketroughExtension()
  77. striketrough_md.extendMarkdown(md)
  78. if allow_links:
  79. # Add [url]
  80. md.inlinePatterns.add("bb_url", inline.url(md), "<link")
  81. else:
  82. # Remove links
  83. del md.inlinePatterns["link"]
  84. del md.inlinePatterns["autolink"]
  85. del md.inlinePatterns["automail"]
  86. if allow_images:
  87. # Add [img]
  88. md.inlinePatterns.add("bb_img", inline.image(md), "<image_link")
  89. short_images_md = ShortImagesExtension()
  90. short_images_md.extendMarkdown(md)
  91. else:
  92. # Remove images
  93. del md.inlinePatterns["image_link"]
  94. if allow_blocks:
  95. # Add [hr] and [quote] blocks
  96. md.parser.blockprocessors.add(
  97. "bb_hr", blocks.BBCodeHRProcessor(md.parser), ">hr"
  98. )
  99. fenced_code = FencedCodeExtension()
  100. fenced_code.extendMarkdown(md, None)
  101. code_bbcode = blocks.CodeBlockExtension()
  102. code_bbcode.extendMarkdown(md)
  103. quote_bbcode = blocks.QuoteExtension()
  104. quote_bbcode.extendMarkdown(md)
  105. else:
  106. # Remove blocks
  107. del md.parser.blockprocessors["hashheader"]
  108. del md.parser.blockprocessors["setextheader"]
  109. del md.parser.blockprocessors["code"]
  110. del md.parser.blockprocessors["quote"]
  111. del md.parser.blockprocessors["hr"]
  112. del md.parser.blockprocessors["olist"]
  113. del md.parser.blockprocessors["ulist"]
  114. return pipeline.extend_markdown(md)
  115. def linkify_paragraphs(result):
  116. result["parsed_text"] = bleach.linkify(
  117. result["parsed_text"],
  118. callbacks=settings.MISAGO_BLEACH_CALLBACKS,
  119. skip_tags=["a", "code", "pre"],
  120. parse_email=True,
  121. )
  122. def clean_links(request, result, force_shva=False):
  123. host = request.get_host()
  124. soup = BeautifulSoup(result["parsed_text"], "html5lib")
  125. for link in soup.find_all("a"):
  126. if is_internal_link(link["href"], host):
  127. link["href"] = clean_internal_link(link["href"], host)
  128. result["internal_links"].append(link["href"])
  129. link["href"] = clean_attachment_link(link["href"], force_shva)
  130. else:
  131. result["outgoing_links"].append(clean_link_prefix(link["href"]))
  132. link["href"] = assert_link_prefix(link["href"])
  133. link["rel"] = "nofollow noopener"
  134. if link.string:
  135. link.string = clean_link_prefix(link.string)
  136. for img in soup.find_all("img"):
  137. img["alt"] = clean_link_prefix(img["alt"])
  138. if is_internal_link(img["src"], host):
  139. img["src"] = clean_internal_link(img["src"], host)
  140. result["images"].append(img["src"])
  141. img["src"] = clean_attachment_link(img["src"], force_shva)
  142. else:
  143. result["images"].append(clean_link_prefix(img["src"]))
  144. img["src"] = assert_link_prefix(img["src"])
  145. # [6:-7] trims <body></body> wrap
  146. result["parsed_text"] = str(soup.body)[6:-7]
  147. def is_internal_link(link, host):
  148. if link.startswith("/") and not link.startswith("//"):
  149. return True
  150. link = clean_link_prefix(link).lstrip("www.").lower()
  151. return link.lower().startswith(host.lstrip("www."))
  152. def clean_link_prefix(link):
  153. if link.lower().startswith("https:"):
  154. link = link[6:]
  155. if link.lower().startswith("http:"):
  156. link = link[5:]
  157. if link.startswith("//"):
  158. link = link[2:]
  159. return link
  160. def assert_link_prefix(link):
  161. if link.lower().startswith("https:"):
  162. return link
  163. if link.lower().startswith("http:"):
  164. return link
  165. if link.startswith("//"):
  166. return "http:%s" % link
  167. return "http://%s" % link
  168. def clean_internal_link(link, host):
  169. link = clean_link_prefix(link)
  170. if link.lower().startswith("www."):
  171. link = link[4:]
  172. if host.lower().startswith("www."):
  173. host = host[4:]
  174. if link.lower().startswith(host):
  175. link = link[len(host) :]
  176. return link or "/"
  177. def clean_attachment_link(link, force_shva=False):
  178. try:
  179. resolution = resolve(link)
  180. url_name = ":".join(resolution.namespaces + [resolution.url_name])
  181. except (Http404, ValueError):
  182. return link
  183. if url_name in MISAGO_ATTACHMENT_VIEWS:
  184. if force_shva:
  185. link = "%s?shva=1" % link
  186. elif link.endswith("?shva=1"):
  187. link = link[:-7]
  188. return link
  189. def minify_result(result):
  190. result["parsed_text"] = html_minify(result["parsed_text"])
  191. result["parsed_text"] = strip_html_head_body(result["parsed_text"])
  192. def strip_html_head_body(parsed_text):
  193. # [25:-14] trims <html><head></head><body> and </body></html>
  194. return parsed_text[25:-14]