diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 91c100b..56f1dbb 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: elixir:1.7.2 +image: elixir:1.8.1 cache: key: ${CI_COMMIT_REF_SLUG} diff --git a/README.md b/README.md index c1b1dcd..a75eb16 100644 --- a/README.md +++ b/README.md @@ -1,25 +1,16 @@ -# AutoLinker +# Linkify -[![Build Status](https://travis-ci.org/smpallen99/auto_linker.png?branch=master)](https://travis-ci.org/smpallen99/auto_linker) [![Hex Version][hex-img]][hex] [![License][license-img]][license] - -[hex-img]: https://img.shields.io/hexpm/v/auto_linker.svg -[hex]: https://hex.pm/packages/auto_linker -[license-img]: http://img.shields.io/badge/license-MIT-brightgreen.svg -[license]: http://opensource.org/licenses/MIT - -AutoLinker is a basic package for turning website names, and phone numbers into links. +Linkify is a basic package for turning website names into links. Use this package in your web view to convert web references into click-able links. -This is a very early version. Some of the described options are not yet functional. - ## Installation -The package can be installed by adding `auto_linker` to your list of dependencies in `mix.exs`: +The package can be installed by adding `linkify` to your list of dependencies in `mix.exs`: ```elixir def deps do - [{:auto_linker, "~> 0.2"}] + [{:linkify, "~> 0.1"}] end ``` @@ -27,44 +18,31 @@ end The following examples illustrate some examples on how to use the auto linker. -```iex -iex> AutoLinker.link("google.com") -"google.com" - -iex> AutoLinker.link("google.com", new_window: false, rel: false) -"google.com" - -iex> AutoLinker.link("google.com", new_window: false, rel: false, class: false) -"google.com" - -iex> AutoLinker.link("call me at x9999", phone: true) -"call me at x9999" - -iex> AutoLinker.link("or at home on 555.555.5555", phone: true) -"or at home on 555.555.5555" - -iex> AutoLinker.link(", work (555) 555-5555", phone: true) -", work (555) 555-5555" - -iex> AutoLinker.link("[Google Search](http://google.com)", markdown: true) -"Google Search" -``` - -See the [Docs](https://hexdocs.pm/auto_linker/) for more examples - -## Configuration - -By default, link parsing is enabled and phone parsing is disabled. - ```elixir -# enable phone parsing, and disable link parsing -config :auto_linker, opts: [phone: true, url: false] + +iex> Linkify.link("google.com") +"google.com" + +iex> Linkify.link("google.com", class: "linkified") +"google.com" + +iex> Linkify.link("google.com", new_window: true) +"google.com" + +iex> Linkify.link("google.com", new_window: true, rel: "noopener noreferrer") +"google.com" ``` +See the [Docs](https://hexdocs.pm/linkify/) for more examples + +## Acknowledgements + +This is a fork of [auto_linker](https://github.com/smpallen99/auto_linker) by [Steve Pallen](https://github.com/smpallen99). ## License -`auto_linker` is Copyright (c) 2017 E-MetroTel +Copyright © 2017 E-MetroTel +Copyright © 2019 Pleroma Authors The source is released under the MIT License. diff --git a/config/config.exs b/config/config.exs deleted file mode 100644 index 78be4ed..0000000 --- a/config/config.exs +++ /dev/null @@ -1,30 +0,0 @@ -# This file is responsible for configuring your application -# and its dependencies with the aid of the Mix.Config module. -use Mix.Config - -# This configuration is loaded before any dependency and is restricted -# to this project. If another project depends on this project, this -# file won't be loaded nor affect the parent project. 
For this reason, -# if you want to provide default values for your application for -# 3rd-party users, it should be done in your "mix.exs" file. - -# You can configure for your application as: -# -# config :auto_linker, key: :value -# -# And access this configuration in your application as: -# -# Application.get_env(:auto_linker, :key) -# -# Or configure a 3rd-party app: -# -# config :logger, level: :info -# - -# It is also possible to import configuration files, relative to this -# directory. For example, you can emulate configuration per environment -# by uncommenting the line below and defining dev.exs, test.exs and such. -# Configuration from the imported file will override the ones defined -# here (which is why it is important to import them last). -# -# import_config "#{Mix.env}.exs" diff --git a/lib/auto_linker.ex b/lib/auto_linker.ex deleted file mode 100644 index 01688d8..0000000 --- a/lib/auto_linker.ex +++ /dev/null @@ -1,69 +0,0 @@ -defmodule AutoLinker do - @moduledoc """ - Create url links from text containing urls. - - Turns an input string like `"Check out google.com"` into - `Check out "google.com"` - - ## Examples - - iex> AutoLinker.link("google.com") - ~s(google.com) - - iex> AutoLinker.link("google.com", new_window: false, rel: false) - ~s(google.com) - - iex> AutoLinker.link("google.com", new_window: false, rel: false, class: false) - ~s(google.com) - - iex> AutoLinker.link("[Google](http://google.com)", markdown: true, new_window: false, rel: false, class: false) - ~s(Google) - - iex> AutoLinker.link("[Google Search](http://google.com)", markdown: true) - ~s(Google Search) - """ - - import AutoLinker.Parser - - @doc """ - Auto link a string. - - Options: - - * `class: "auto-linker"` - specify the class to be added to the generated link. false to clear - * `rel: "noopener noreferrer"` - override the rel attribute. false to clear - * `new_window: true` - set to false to remove `target='_blank'` attribute - * `scheme: false` - Set to true to link urls with schema `http://google` - * `truncate: false` - Set to a number to truncate urls longer then the number. Truncated urls will end in `..` - * `strip_prefix: true` - Strip the scheme prefix - * `exclude_class: false` - Set to a class name when you don't want urls auto linked in the html of the give class - * `exclude_id: false` - Set to an element id when you don't want urls auto linked in the html of the give element - * `exclude_patterns: ["```"]` - Don't link anything between the the pattern - * `markdown: false` - link markdown style links - * `email: false` - link email links - * `mention: false` - link @mentions (when `true`, requires `mention_prefix` or `mention_handler` options to be set) - * `mention_prefix: nil` - a prefix to build a link for a mention (example: `https://example.com/user/`) - * `mention_handler: nil` - a custom handler to validate and formart a mention - * `hashtag: false` - link #hashtags (when `true`, requires `hashtag_prefix` or `hashtag_handler` options to be set) - * `hashtag_prefix: nil` - a prefix to build a link for a hashtag (example: `https://example.com/tag/`) - * `hashtag_handler: nil` - a custom handler to validate and formart a hashtag - * `extra: false` - link urls with rarely used schemes (magnet, ipfs, irc, etc.) 
- * `validate_tld: true` - Set to false to disable TLD validation for urls/emails, also can be set to :no_scheme to validate TLDs only for urls without a scheme (e.g `example.com` will be validated, but `http://example.loki` won't) - - Each of the above options can be specified when calling `link(text, opts)` - or can be set in the `:auto_linker`'s configuration. For example: - - config :auto_linker, - class: false, - new_window: false - - Note that passing opts to `link/2` will override the configuration settings. - """ - def link(text, opts \\ []) do - parse(text, opts) - end - - def link_map(text, acc, opts \\ []) do - parse({text, acc}, opts) - end -end diff --git a/lib/linkify.ex b/lib/linkify.ex new file mode 100644 index 0000000..5a5e720 --- /dev/null +++ b/lib/linkify.ex @@ -0,0 +1,51 @@ +defmodule Linkify do + @moduledoc """ + Create url links from text containing urls. + + Turns an input string like `"Check out google.com"` into + `Check out "google.com"` + + ## Examples + + iex> Linkify.link("google.com") + ~s(google.com) + + iex> Linkify.link("google.com", new_window: true, rel: "noopener noreferrer") + ~s(google.com) + + iex> Linkify.link("google.com", class: "linkified") + ~s(google.com) + """ + + import Linkify.Parser + + @doc """ + Finds links and turns them into HTML `` tag. + + Options: + + * `class` - specify the class to be added to the generated link. + * `rel` - specify the rel attribute. + * `new_window` - set to `true` to add `target="_blank"` attribute + * `truncate` - Set to a number to truncate urls longer then the number. Truncated urls will end in `...` + * `strip_prefix` - Strip the scheme prefix (default: `false`) + * `exclude_class` - Set to a class name when you don't want urls auto linked in the html of the give class (default: `false`) + * `exclude_id` - Set to an element id when you don't want urls auto linked in the html of the give element (default: `false`) + * `email` - link email links (default: `false`) + * `mention` - link @mentions (when `true`, requires `mention_prefix` or `mention_handler` options to be set) (default: `false`) + * `mention_prefix` - a prefix to build a link for a mention (example: `https://example.com/user/`, default: `nil`) + * `mention_handler` - a custom handler to validate and formart a mention (default: `nil`) + * `hashtag: false` - link #hashtags (when `true`, requires `hashtag_prefix` or `hashtag_handler` options to be set) + * `hashtag_prefix: nil` - a prefix to build a link for a hashtag (example: `https://example.com/tag/`) + * `hashtag_handler: nil` - a custom handler to validate and formart a hashtag + * `extra: false` - link urls with rarely used schemes (magnet, ipfs, irc, etc.) + * `validate_tld: true` - Set to false to disable TLD validation for urls/emails, also can be set to :no_scheme to validate TLDs only for urls without a scheme (e.g `example.com` will be validated, but `http://example.loki` won't) + """ + def link(text, opts \\ []) do + parse(text, opts) + end + + def link_map(text, acc, opts \\ []) do + parse({text, acc}, opts) + end +end diff --git a/lib/auto_linker/builder.ex b/lib/linkify/builder.ex similarity index 64% rename from lib/auto_linker/builder.ex rename to lib/linkify/builder.ex index 888fd82..9b3f1b0 100644 --- a/lib/auto_linker/builder.ex +++ b/lib/linkify/builder.ex @@ -1,4 +1,4 @@ -defmodule AutoLinker.Builder do +defmodule Linkify.Builder do @moduledoc """ Module for building the auto generated link. 
""" @@ -17,14 +17,6 @@ defmodule AutoLinker.Builder do |> format_url(text, opts) end - def create_markdown_links(text, opts) do - [] - |> build_attrs(text, opts, :rel) - |> build_attrs(text, opts, :target) - |> build_attrs(text, opts, :class) - |> format_markdown(text, opts) - end - defp build_attrs(attrs, uri, %{rel: get_rel}, :rel) when is_function(get_rel, 1) do case get_rel.(uri) do nil -> attrs @@ -33,15 +25,21 @@ defmodule AutoLinker.Builder do end defp build_attrs(attrs, _, opts, :rel) do - if rel = Map.get(opts, :rel, "noopener noreferrer"), do: [{:rel, rel} | attrs], else: attrs + case Map.get(opts, :rel) do + rel when is_binary(rel) -> [{:rel, rel} | attrs] + _ -> attrs + end end defp build_attrs(attrs, _, opts, :target) do - if Map.get(opts, :new_window, true), do: [{:target, :_blank} | attrs], else: attrs + if Map.get(opts, :new_window), do: [{:target, :_blank} | attrs], else: attrs end defp build_attrs(attrs, _, opts, :class) do - if cls = Map.get(opts, :class, "auto-linker"), do: [{:class, cls} | attrs], else: attrs + case Map.get(opts, :class) do + cls when is_binary(cls) -> [{:class, cls} | attrs] + _ -> attrs + end end defp build_attrs(attrs, url, _opts, :href) do @@ -68,16 +66,6 @@ defmodule AutoLinker.Builder do |> Enum.join(" ") end - defp format_markdown(attrs, text, _opts) do - attrs = - case format_attrs(attrs) do - "" -> "" - attrs -> " " <> attrs - end - - Regex.replace(~r/\[(.+?)\]\((.+?)\)/, text, "\\1") - end - defp truncate(url, false), do: url defp truncate(url, len) when len < 3, do: url @@ -93,34 +81,6 @@ defmodule AutoLinker.Builder do defp strip_prefix(url, _), do: url - def create_phone_link([], buffer, _), do: buffer - - def create_phone_link([h | t], buffer, opts) do - create_phone_link(t, format_phone_link(h, buffer, opts), opts) - end - - def format_phone_link([h | _], buffer, opts) do - val = - h - |> String.replace(~r/[\.\+\- x\(\)]+/, "") - |> format_phone_link(h, opts) - - # val = ~s'#{h}' - String.replace(buffer, h, val) - end - - def format_phone_link(number, original, opts) do - tag = opts[:tag] || "a" - class = opts[:class] || "phone-number" - data_phone = opts[:data_phone] || "data-phone" - attrs = format_attributes(opts[:attributes] || []) - href = opts[:href] || "#" - - ~s'<#{tag} href="#{href}" class="#{class}" #{data_phone}="#{number}"#{attrs}>#{original}' - end - def create_mention_link("@" <> name, _buffer, opts) do mention_prefix = opts[:mention_prefix] @@ -182,10 +142,4 @@ defmodule AutoLinker.Builder do attrs = format_attrs(attrs) ~s(#{uri}) end - - defp format_attributes(attrs) do - Enum.reduce(attrs, "", fn {name, value}, acc -> - acc <> ~s' #{name}="#{value}"' - end) - end end diff --git a/lib/auto_linker/parser.ex b/lib/linkify/parser.ex similarity index 51% rename from lib/auto_linker/parser.ex rename to lib/linkify/parser.ex index b7b4b75..b86e2c4 100644 --- a/lib/auto_linker/parser.ex +++ b/lib/linkify/parser.ex @@ -1,19 +1,15 @@ -defmodule AutoLinker.Parser do +defmodule Linkify.Parser do @moduledoc """ Module to handle parsing the the input string. 
""" - alias AutoLinker.Builder + alias Linkify.Builder @invalid_url ~r/(\.\.+)|(^(\d+\.){1,2}\d+$)/ - @match_url ~r{^[\w\.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+$} + @match_url ~r{^(?:\W*)?(?(?:https?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:\/?#[\]@!\$&'\(\)\*\+,;=.]+$)}u - @match_scheme ~r{^(?:\W*)?(?(?:https?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:\/?#[\]@!\$&'\(\)\*\+,;=.]+$)}u - - @match_phone ~r"((?:x\d{2,7})|(?:(?:\+?1\s?(?:[.-]\s?)?)?(?:\(\s?(?:[2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9])\s?\)|(?:[2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9]))\s?(?:[.-]\s?)?)(?:[2-9]1[02-9]|[2-9][02-9]1|[2-9][02-9]{2})\s?(?:[.-]\s?)?(?:[0-9]{4}))" - - @match_hostname ~r{^(?:\W*https?:\/\/)?(?:[^@\n]+\\w@)?(?[^:#~\/\n?]+)}u + @match_hostname ~r{^\W*(?https?:\/\/)?(?:[^@\n]+\\w@)?(?[^:#~\/\n?]+)}u @match_ip ~r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$" @@ -42,228 +38,144 @@ defmodule AutoLinker.Parser do @tlds "./priv/tlds.txt" |> File.read!() |> String.split("\n", trim: true) |> MapSet.new() - @default_opts ~w(url validate_tld)a + @default_opts %{ + url: true, + validate_tld: true + } @doc """ Parse the given string, identifying items to link. - Parses the string, replacing the matching urls and phone numbers with an html link. + Parses the string, replacing the matching urls with an html link. ## Examples - iex> AutoLinker.Parser.parse("Check out google.com") - ~s{Check out google.com} - - iex> AutoLinker.Parser.parse("call me at x9999", phone: true) - ~s{call me at x9999} - - iex> AutoLinker.Parser.parse("or at home on 555.555.5555", phone: true) - ~s{or at home on 555.555.5555} - - iex> AutoLinker.Parser.parse(", work (555) 555-5555", phone: true) - ~s{, work (555) 555-5555} + iex> Linkify.Parser.parse("Check out google.com") + ~s{Check out google.com} """ + @types [:url, :email, :hashtag, :mention, :extra] + def parse(input, opts \\ %{}) - def parse(input, opts) when is_binary(input), do: {input, nil} |> parse(opts) |> elem(0) + def parse(input, opts) when is_binary(input), do: {input, %{}} |> parse(opts) |> elem(0) def parse(input, list) when is_list(list), do: parse(input, Enum.into(list, %{})) def parse(input, opts) do - config = - :auto_linker - |> Application.get_env(:opts, []) - |> Enum.into(%{}) - |> Map.put( - :attributes, - Application.get_env(:auto_linker, :attributes, []) - ) + opts = Map.merge(@default_opts, opts) - opts = - Enum.reduce(@default_opts, opts, fn opt, acc -> - if is_nil(opts[opt]) and is_nil(config[opt]) do - Map.put(acc, opt, true) - else - acc - end - end) + Enum.reduce(opts, input, fn + {type, true}, input when type in @types -> + do_parse(input, opts, {"", "", :parsing}, type) - do_parse(input, Map.merge(config, opts)) + _, input -> + input + end) end - defp do_parse(input, %{phone: false} = opts), do: do_parse(input, Map.delete(opts, :phone)) - defp do_parse(input, %{url: false} = opts), do: do_parse(input, Map.delete(opts, :url)) - - defp do_parse(input, %{phone: _} = opts) do - input - |> do_parse(opts, {"", "", :parsing}, &check_and_link_phone/3) - |> do_parse(Map.delete(opts, :phone)) - end - - defp do_parse(input, %{hashtag: true} = opts) do - input - |> do_parse(opts, {"", "", :parsing}, &check_and_link_hashtag/3) - |> do_parse(Map.delete(opts, :hashtag)) - end - - defp do_parse(input, %{extra: true} = opts) do - input - |> do_parse(opts, {"", "", :parsing}, &check_and_link_extra/3) - |> do_parse(Map.delete(opts, :extra)) - end - - defp do_parse({text, user_acc}, %{markdown: true} = 
opts) do - text - |> Builder.create_markdown_links(opts) - |> (&{&1, user_acc}).() - |> do_parse(Map.delete(opts, :markdown)) - end - - defp do_parse(input, %{email: true} = opts) do - input - |> do_parse(opts, {"", "", :parsing}, &check_and_link_email/3) - |> do_parse(Map.delete(opts, :email)) - end - - defp do_parse({text, user_acc}, %{url: _} = opts) do - input = - with exclude <- Map.get(opts, :exclude_patterns), - true <- is_list(exclude), - true <- String.starts_with?(text, exclude) do - {text, user_acc} - else - _ -> - do_parse( - {text, user_acc}, - opts, - {"", "", :parsing}, - &check_and_link/3 - ) - end - - do_parse(input, Map.delete(opts, :url)) - end - - defp do_parse(input, %{mention: true} = opts) do - input - |> do_parse(opts, {"", "", :parsing}, &check_and_link_mention/3) - |> do_parse(Map.delete(opts, :mention)) - end - - defp do_parse(input, _), do: input - defp do_parse({"", user_acc}, _opts, {"", acc, _}, _handler), do: {acc, user_acc} - defp do_parse({" text, user_acc}, opts, {buffer, acc, :parsing}, handler), - do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> " text, user_acc}, opts, {buffer, acc, :parsing}, type), + do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> " text, user_acc}, opts, {buffer, acc, :parsing}, handler), - do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> " text, user_acc}, opts, {buffer, acc, :parsing}, type), + do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> " text, user_acc}, opts, {buffer, acc, :parsing}, handler), - do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> " text, user_acc}, opts, {buffer, acc, :parsing}, type), + do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "" <> text, user_acc}, opts, {buffer, acc, :skip}, handler), - do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "", :parsing}, handler) + defp do_parse({"" <> text, user_acc}, opts, {buffer, acc, :skip}, type), + do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "", :parsing}, type) - defp do_parse({"" <> text, user_acc}, opts, {buffer, acc, :skip}, handler), - do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "", :parsing}, handler) + defp do_parse({"" <> text, user_acc}, opts, {buffer, acc, :skip}, type), + do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "", :parsing}, type) - defp do_parse({"" <> text, user_acc}, opts, {buffer, acc, :skip}, handler), - do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "", :parsing}, handler) + defp do_parse({"" <> text, user_acc}, opts, {buffer, acc, :skip}, type), + do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> "", :parsing}, type) - defp do_parse({"<" <> text, user_acc}, opts, {"", acc, :parsing}, handler), - do: do_parse({text, user_acc}, opts, {"<", acc, {:open, 1}}, handler) + defp do_parse({"<" <> text, user_acc}, opts, {"", acc, :parsing}, type), + do: do_parse({text, user_acc}, opts, {"<", acc, {:open, 1}}, type) - defp do_parse({"<" <> text, user_acc}, opts, {"", acc, {:html, level}}, handler) do - do_parse({text, user_acc}, opts, {"<", acc, {:open, level + 1}}, handler) + defp do_parse({"<" <> text, user_acc}, opts, {"", acc, {:html, level}}, type) do + do_parse({text, user_acc}, opts, {"<", acc, {:open, level + 1}}, type) end - defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:attrs, level}}, handler), + defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:attrs, level}}, type), do: do_parse( {text, user_acc}, opts, {"", acc <> buffer <> ">", {:html, level}}, - handler + type ) - defp 
do_parse({<> <> text, user_acc}, opts, {"", acc, {:attrs, level}}, handler) do - do_parse({text, user_acc}, opts, {"", acc <> <>, {:attrs, level}}, handler) + defp do_parse({<> <> text, user_acc}, opts, {"", acc, {:attrs, level}}, type) do + do_parse({text, user_acc}, opts, {"", acc <> <>, {:attrs, level}}, type) end - defp do_parse({" text, user_acc}, opts, {buffer, acc, {:html, level}}, handler) do - {buffer, user_acc} = run_handler(handler, buffer, opts, user_acc) + defp do_parse({" text, user_acc}, opts, {buffer, acc, {:html, level}}, type) do + {buffer, user_acc} = link(type, buffer, opts, user_acc) do_parse( {text, user_acc}, opts, {"", acc <> buffer <> "" <> text, user_acc}, opts, {buffer, acc, {:close, 1}}, handler), - do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> ">", :parsing}, handler) + defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:close, 1}}, type), + do: do_parse({text, user_acc}, opts, {"", acc <> buffer <> ">", :parsing}, type) - defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:close, level}}, handler), + defp do_parse({">" <> text, user_acc}, opts, {buffer, acc, {:close, level}}, type), do: do_parse( {text, user_acc}, opts, {"", acc <> buffer <> ">", {:html, level - 1}}, - handler + type ) - defp do_parse({text, user_acc}, opts, {buffer, acc, {:open, level}}, handler) do - do_parse({text, user_acc}, opts, {"", acc <> buffer, {:attrs, level}}, handler) + defp do_parse({text, user_acc}, opts, {buffer, acc, {:open, level}}, type) do + do_parse({text, user_acc}, opts, {"", acc <> buffer, {:attrs, level}}, type) end - # default cases where state is not important - defp do_parse( - {" " <> text, user_acc}, - %{phone: _} = opts, - {buffer, acc, state}, - handler - ), - do: do_parse({text, user_acc}, opts, {buffer <> " ", acc, state}, handler) - defp do_parse( {<>, user_acc}, opts, {buffer, acc, state}, - handler + type ) when char in [" ", "\r", "\n"] do - {buffer, user_acc} = run_handler(handler, buffer, opts, user_acc) + {buffer, user_acc} = link(type, buffer, opts, user_acc) do_parse( {text, user_acc}, opts, {"", acc <> buffer <> char, state}, - handler + type ) end - defp do_parse({<>, user_acc}, opts, {buffer, acc, state}, handler) do - {buffer, user_acc} = run_handler(handler, buffer <> <>, opts, user_acc) + defp do_parse({<>, user_acc}, opts, {buffer, acc, state}, type) do + {buffer, user_acc} = link(type, buffer <> <>, opts, user_acc) do_parse( {"", user_acc}, opts, {"", acc <> buffer, state}, - handler + type ) end - defp do_parse({<> <> text, user_acc}, opts, {buffer, acc, state}, handler), - do: do_parse({text, user_acc}, opts, {buffer <> <>, acc, state}, handler) + defp do_parse({<> <> text, user_acc}, opts, {buffer, acc, state}, type), + do: do_parse({text, user_acc}, opts, {buffer <> <>, acc, state}, type) - def check_and_link(buffer, opts, _user_acc) do + def check_and_link(:url, buffer, opts, _user_acc) do str = strip_parens(buffer) if url?(str, opts) do - case parse_link(str, opts) do + case @match_url |> Regex.run(str, capture: [:url]) |> hd() do ^buffer -> link_url(buffer, opts) url -> String.replace(buffer, url, link_url(url, opts)) end @@ -272,11 +184,29 @@ defmodule AutoLinker.Parser do end end - defp parse_link(str, %{scheme: true}) do - @match_scheme |> Regex.run(str, capture: [:url]) |> hd() + def check_and_link(:email, buffer, opts, _user_acc) do + if email?(buffer, opts), do: link_email(buffer, opts), else: buffer end - defp parse_link(str, _), do: str + def check_and_link(:mention, buffer, opts, user_acc) do + 
buffer + |> match_mention + |> link_mention(buffer, opts, user_acc) + end + + def check_and_link(:hashtag, buffer, opts, user_acc) do + buffer + |> match_hashtag + |> link_hashtag(buffer, opts, user_acc) + end + + def check_and_link(:extra, "xmpp:" <> handle, opts, _user_acc) do + if email?(handle, opts), do: link_extra("xmpp:" <> handle, opts), else: handle + end + + def check_and_link(:extra, buffer, opts, _user_acc) do + if String.starts_with?(buffer, @prefix_extra), do: link_extra(buffer, opts), else: buffer + end defp strip_parens("(" <> buffer) do ~r/[^\)]*/ |> Regex.run(buffer) |> hd() @@ -284,44 +214,8 @@ defmodule AutoLinker.Parser do defp strip_parens(buffer), do: buffer - def check_and_link_email(buffer, opts, _user_acc) do - if email?(buffer, opts), do: link_email(buffer, opts), else: buffer - end - - def check_and_link_phone(buffer, opts, _user_acc) do - buffer - |> match_phone - |> link_phone(buffer, opts) - end - - def check_and_link_mention(buffer, opts, user_acc) do - buffer - |> match_mention - |> link_mention(buffer, opts, user_acc) - end - - def check_and_link_hashtag(buffer, opts, user_acc) do - buffer - |> match_hashtag - |> link_hashtag(buffer, opts, user_acc) - end - - def check_and_link_extra("xmpp:" <> handle, opts, _user_acc) do - if email?(handle, opts), do: link_extra("xmpp:" <> handle, opts), else: handle - end - - def check_and_link_extra(buffer, opts, _user_acc) do - if String.starts_with?(buffer, @prefix_extra), do: link_extra(buffer, opts), else: buffer - end - - # @doc false - def url?(buffer, opts) do - if opts[:scheme] do - valid_url?(buffer) && Regex.match?(@match_scheme, buffer) && valid_tld?(buffer, opts) - else - valid_url?(buffer) && Regex.match?(@match_url, buffer) && valid_tld?(buffer, opts) - end + valid_url?(buffer) && Regex.match?(@match_url, buffer) && valid_tld?(buffer, opts) end def email?(buffer, opts) do @@ -330,38 +224,35 @@ defmodule AutoLinker.Parser do defp valid_url?(url), do: !Regex.match?(@invalid_url, url) - def valid_tld?(buffer, opts) do + @doc """ + Validates a URL's TLD. Returns a boolean. + + Will return `true` if `:validate_tld` option set to `false`. + + Will skip validation and return `true` if `:validate_tld` set to `:no_scheme` and the url has a scheme. 
+ """ + def valid_tld?(url, opts) do + [scheme, host] = Regex.run(@match_hostname, url, capture: [:scheme, :host]) + cond do opts[:validate_tld] == false -> true - opts[:validate_tld] == :no_scheme && opts[:scheme] -> + ip?(host) -> + true + + # don't validate if scheme is present + opts[:validate_tld] == :no_scheme and scheme != "" -> true true -> - with [host] <- Regex.run(@match_hostname, buffer, capture: [:host]) do - if ip?(host) do - true - else - tld = host |> String.split(".") |> List.last() - MapSet.member?(@tlds, tld) - end - else - _ -> false - end + tld = host |> String.split(".") |> List.last() + MapSet.member?(@tlds, tld) end end def ip?(buffer), do: Regex.match?(@match_ip, buffer) - @doc false - def match_phone(buffer) do - case Regex.scan(@match_phone, buffer) do - [] -> nil - other -> other - end - end - def match_mention(buffer) do case Regex.run(@match_mention, buffer) do [mention] -> mention @@ -416,12 +307,6 @@ defmodule AutoLinker.Parser do defp maybe_update_buffer(out, _match, _buffer), do: out - def link_phone(nil, buffer, _), do: buffer - - def link_phone(list, buffer, opts) do - Builder.create_phone_link(list, buffer, opts) - end - @doc false def link_url(buffer, opts) do Builder.create_link(buffer, opts) @@ -436,8 +321,8 @@ defmodule AutoLinker.Parser do Builder.create_extra_link(buffer, opts) end - defp run_handler(handler, buffer, opts, user_acc) do - case handler.(buffer, opts, user_acc) do + defp link(type, buffer, opts, user_acc) do + case check_and_link(type, buffer, opts, user_acc) do {buffer, user_acc} -> {buffer, user_acc} buffer -> {buffer, user_acc} end diff --git a/mix.exs b/mix.exs index d73175c..1d5959c 100644 --- a/mix.exs +++ b/mix.exs @@ -1,21 +1,21 @@ -defmodule AutoLinker.Mixfile do +defmodule Linkify.Mixfile do use Mix.Project - @version "0.2.2" + @version "0.1.0" def project do [ - app: :auto_linker, + app: :linkify, version: @version, - elixir: "~> 1.4", + elixir: "~> 1.8", build_embedded: Mix.env() == :prod, start_permanent: Mix.env() == :prod, deps: deps(), docs: [extras: ["README.md"]], package: package(), - name: "AutoLinker", + name: "Linkify", description: """ - AutoLinker is a basic package for turning website names into links. + Linkify is a basic package for turning website names into links. 
""" ] end @@ -29,18 +29,16 @@ defmodule AutoLinker.Mixfile do # Dependencies can be Hex packages: defp deps do [ - {:ex_doc, "~> 0.19", only: :dev, runtime: false}, - {:earmark, "~> 1.2", only: :dev, override: true}, - {:credo, "~> 1.0.0", only: [:dev, :test], runtime: false} + {:ex_doc, "~> 0.20", only: :dev, runtime: false}, + {:credo, "~> 1.1.0", only: [:dev, :test], runtime: false} ] end defp package do [ - maintainers: ["Stephen Pallen"], licenses: ["MIT"], - links: %{"Github" => "https://github.com/smpallen99/auto_linker"}, - files: ~w(lib README.md mix.exs LICENSE) + links: %{"GitLab" => "https://git.pleroma.social/pleroma/linkify"}, + files: ~w(lib priv README.md mix.exs LICENSE) ] end end diff --git a/mix.lock b/mix.lock index a265551..32e36c5 100644 --- a/mix.lock +++ b/mix.lock @@ -1,12 +1,10 @@ %{ "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"}, - "credo": {:hex, :credo, "1.0.2", "88bc918f215168bf6ce7070610a6173c45c82f32baa08bdfc80bf58df2d103b6", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"}, - "earmark": {:hex, :earmark, "1.3.1", "73812f447f7a42358d3ba79283cfa3075a7580a3a2ed457616d6517ac3738cb9", [:mix], [], "hexpm"}, - "ex_doc": {:hex, :ex_doc, "0.19.3", "3c7b0f02851f5fc13b040e8e925051452e41248f685e40250d7e40b07b9f8c10", [:mix], [{:earmark, "~> 1.2", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"}, - "file_system": {:hex, :file_system, "0.2.6", "fd4dc3af89b9ab1dc8ccbcc214a0e60c41f34be251d9307920748a14bf41f1d3", [:mix], [], "hexpm"}, + "credo": {:hex, :credo, "1.1.0", "e0c07b2fd7e2109495f582430a1bc96b2c71b7d94c59dfad120529f65f19872f", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"}, + "earmark": {:hex, :earmark, "1.3.2", "b840562ea3d67795ffbb5bd88940b1bed0ed9fa32834915125ea7d02e35888a5", [:mix], [], "hexpm"}, + "ex_doc": {:hex, :ex_doc, "0.20.2", "1bd0dfb0304bade58beb77f20f21ee3558cc3c753743ae0ddbb0fd7ba2912331", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"}, "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"}, "makeup": {:hex, :makeup, "0.8.0", "9cf32aea71c7fe0a4b2e9246c2c4978f9070257e5c9ce6d4a28ec450a839b55f", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"}, "makeup_elixir": {:hex, :makeup_elixir, "0.13.0", "be7a477997dcac2e48a9d695ec730b2d22418292675c75aa2d34ba0909dcdeda", [:mix], [{:makeup, "~> 0.8", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"}, - "mix_test_watch": {:hex, :mix_test_watch, "0.9.0", "c72132a6071261893518fa08e121e911c9358713f62794a90c95db59042af375", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm"}, "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"}, } diff --git a/test/auto_linker_test.exs b/test/auto_linker_test.exs deleted file mode 100644 index 0478c57..0000000 --- a/test/auto_linker_test.exs +++ /dev/null 
@@ -1,448 +0,0 @@ -defmodule AutoLinkerTest do - use ExUnit.Case, async: true - doctest AutoLinker - - test "phone number" do - assert AutoLinker.link(", work (555) 555-5555", phone: true) == - ~s{, work (555) 555-5555} - end - - test "default link" do - assert AutoLinker.link("google.com") == - "google.com" - end - - test "markdown" do - assert AutoLinker.link("[google.com](http://google.com)", markdown: true) == - "google.com" - end - - test "does on link existing links" do - assert AutoLinker.link("google.com") == - "google.com" - end - - test "phone number and markdown link" do - assert AutoLinker.link("888 888-8888 [ab](a.com)", phone: true, markdown: true) == - ~s(888 888-8888) <> - ~s( ab) - end - - test "all kinds of links" do - text = - "hello google.com https://ddg.com 888 888-8888 user@email.com [google.com](http://google.com) irc:///mIRC" - - expected = - "hello google.com ddg.com 888 888-8888 user@email.com google.com irc:///mIRC" - - assert AutoLinker.link(text, - phone: true, - markdown: true, - email: true, - scheme: true, - extra: true, - class: false, - new_window: false, - rel: false - ) == expected - end - - test "rel as function" do - text = "google.com" - - expected = "google.com" - - custom_rel = fn url -> - url |> String.split(".") |> List.last() - end - - assert AutoLinker.link(text, - class: false, - new_window: false, - rel: custom_rel - ) == expected - - text = "google.com" - - expected = "google.com" - - custom_rel = fn _ -> nil end - - assert AutoLinker.link(text, - class: false, - new_window: false, - rel: custom_rel - ) == expected - end - - test "link_map/2" do - assert AutoLinker.link_map("google.com", []) == - {"google.com", - []} - end - - describe "custom handlers" do - test "mentions handler" do - text = "hello @user, @valid_user and @invalid_user" - valid_users = ["user", "valid_user"] - - handler = fn "@" <> user = mention, buffer, _opts, acc -> - if Enum.member?(valid_users, user) do - link = ~s(#{mention}) - {link, %{acc | mentions: MapSet.put(acc.mentions, {mention, user})}} - else - {buffer, acc} - end - end - - {result_text, %{mentions: mentions}} = - AutoLinker.link_map(text, %{mentions: MapSet.new()}, - mention: true, - mention_handler: handler - ) - - assert result_text == - "hello @user, @valid_user and @invalid_user" - - assert mentions |> MapSet.to_list() |> Enum.map(&elem(&1, 1)) == valid_users - end - - test "hashtags handler" do - text = "#hello #world" - - handler = fn hashtag, buffer, opts, acc -> - link = AutoLinker.Builder.create_hashtag_link(hashtag, buffer, opts) - {link, %{acc | tags: MapSet.put(acc.tags, hashtag)}} - end - - {result_text, %{tags: tags}} = - AutoLinker.link_map(text, %{tags: MapSet.new()}, - hashtag: true, - hashtag_handler: handler, - hashtag_prefix: "https://example.com/user/", - class: false, - new_window: false, - rel: false - ) - - assert result_text == - "#hello #world" - - assert MapSet.to_list(tags) == ["#hello", "#world"] - end - - test "mention handler and hashtag prefix" do - text = - "Hello again, @user.<script></script>\nThis is on another :moominmamma: line. #2hu #epic #phantasmagoric" - - handler = fn "@" <> user = mention, _, _, _ -> - ~s(@#{mention}) - end - - expected = - "Hello again, @@user.<script></script>\nThis is on another :moominmamma: line. 
#2hu #epic #phantasmagoric" - - assert AutoLinker.link(text, - mention: true, - mention_handler: handler, - hashtag: true, - hashtag_prefix: "/tag/" - ) == expected - end - end - - describe "mentions" do - test "simple mentions" do - expected = - ~s{hello @user and @anotherUser.} - - assert AutoLinker.link("hello @user and @anotherUser.", - mention: true, - mention_prefix: "https://example.com/user/" - ) == expected - end - - test "mentions inside html tags" do - text = - "

hello world\n<`em>another @user__test and @user__test google.com paragraph
\n" - - expected = - "

hello world\n<`em>another @user__test and @user__test google.com paragraph
\n" - - assert AutoLinker.link(text, - mention: true, - mention_prefix: "u/", - class: false, - rel: false, - new_window: false - ) == expected - end - - test "metion @user@example.com" do - text = "hey @user@example.com" - - expected = - "hey @user@example.com" - - assert AutoLinker.link(text, - mention: true, - mention_prefix: "https://example.com/user/" - ) == expected - end - end - - describe "hashtag links" do - test "hashtag" do - expected = - " one #2two three #four." - - assert AutoLinker.link(" one #2two three #four.", - hashtag: true, - hashtag_prefix: "https://example.com/tag/" - ) == expected - end - - test "must have non-numbers" do - expected = "#1ok #42 #7" - - assert AutoLinker.link("#1ok #42 #7", - hashtag: true, - hashtag_prefix: "/t/", - class: false, - rel: false, - new_window: false - ) == expected - end - - test "support French" do - text = "#administrateur·rice·s #ingénieur·e·s" - - expected = - "#administrateur·rice·s #ingénieur·e·s" - - assert AutoLinker.link(text, - hashtag: true, - hashtag_prefix: "/t/", - class: false, - rel: false, - new_window: false - ) == expected - end - - test "support Telugu" do - text = "#చక్రం #కకకకక్ #కకకకాక #కకకక్రకకకక" - - expected = - "#చక్రం #కకకకక్ #కకకకాక #కకకక్రకకకక" - - assert AutoLinker.link(text, - hashtag: true, - hashtag_prefix: "/t/", - class: false, - rel: false, - new_window: false - ) == expected - end - - test "do not turn urls with hashes into hashtags" do - text = "google.com#test #test google.com/#test #tag" - - expected = - "google.com#test #test google.com/#test #tag" - - assert AutoLinker.link(text, - scheme: true, - hashtag: true, - class: false, - new_window: false, - rel: false, - hashtag_prefix: "https://example.com/tag/" - ) == expected - end - - test "works with non-latin characters" do - text = "#漢字 #は #тест #ทดสอบ" - - expected = - "#漢字 #は #тест #ทดสอบ" - - assert AutoLinker.link(text, - scheme: true, - class: false, - new_window: false, - rel: false, - hashtag: true, - hashtag_prefix: "https://example.com/tag/" - ) == expected - end - end - - describe "links" do - test "turning urls into links" do - text = "Hey, check out http://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ." - - expected = - "Hey, check out youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ." - - assert AutoLinker.link(text, scheme: true) == expected - - # no scheme - text = "Hey, check out www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ." 
- assert AutoLinker.link(text, scheme: true) == expected - end - - test "turn urls with schema into urls" do - text = "📌https://google.com" - expected = "📌google.com" - - assert AutoLinker.link(text, scheme: true, class: false, new_window: false, rel: false) == - expected - end - - test "hostname/@user" do - text = "https://example.com/@user" - - expected = - "example.com/@user" - - assert AutoLinker.link(text, scheme: true) == expected - - text = "https://example.com:4000/@user" - - expected = - "example.com:4000/@user" - - assert AutoLinker.link(text, scheme: true) == expected - - text = "https://example.com:4000/@user" - - expected = - "example.com:4000/@user" - - assert AutoLinker.link(text, scheme: true) == expected - - text = "@username" - expected = "@username" - assert AutoLinker.link(text, scheme: true) == expected - - text = "http://www.cs.vu.nl/~ast/intel/" - - expected = - "cs.vu.nl/~ast/intel/" - - assert AutoLinker.link(text, scheme: true) == expected - - text = "https://forum.zdoom.org/viewtopic.php?f=44&t=57087" - - expected = - "forum.zdoom.org/viewtopic.php?f=44&t=57087" - - assert AutoLinker.link(text, scheme: true) == expected - - text = "https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul" - - expected = - "en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul" - - assert AutoLinker.link(text, scheme: true) == expected - - text = "https://en.wikipedia.org/wiki/Duff's_device" - - expected = - "en.wikipedia.org/wiki/Duff's_device" - - assert AutoLinker.link(text, scheme: true) == expected - end - end - - describe "non http links" do - test "xmpp" do - text = "xmpp:user@example.com" - - expected = - "xmpp:user@example.com" - - assert AutoLinker.link(text, extra: true, new_window: false, rel: false) == expected - end - - test "email" do - text = "user@example.com" - expected = "user@example.com" - assert AutoLinker.link(text, email: true) == expected - end - - test "magnet" do - text = - "magnet:?xt=urn:btih:a4104a9d2f5615601c429fe8bab8177c47c05c84&dn=ubuntu-18.04.1.0-live-server-amd64.iso&tr=http%3A%2F%2Ftorrent.ubuntu.com%3A6969%2Fannounce&tr=http%3A%2F%2Fipv6.torrent.ubuntu.com%3A6969%2Fannounce" - - expected = - "magnet:?xt=urn:btih:a4104a9d2f5615601c429fe8bab8177c47c05c84&dn=ubuntu-18.04.1.0-live-server-amd64.iso&tr=http%3A%2F%2Ftorrent.ubuntu.com%3A6969%2Fannounce&tr=http%3A%2F%2Fipv6.torrent.ubuntu.com%3A6969%2Fannounce" - - assert AutoLinker.link(text, extra: true, new_window: false, rel: false) == expected - end - - test "dweb" do - text = - "dweb://584faa05d394190ab1a3f0240607f9bf2b7e2bd9968830a11cf77db0cea36a21+v1.0.0/path/to/file.txt" - - expected = - "dweb://584faa05d394190ab1a3f0240607f9bf2b7e2bd9968830a11cf77db0cea36a21+v1.0.0/path/to/file.txt" - - assert AutoLinker.link(text, extra: true, new_window: false, rel: false) == expected - end - end - - describe "TLDs" do - test "parse with scheme" do - text = "https://google.com" - - expected = - "google.com" - - assert AutoLinker.link(text, scheme: true) == expected - end - - test "only existing TLDs with scheme" do - text = "this url https://google.foobar.blah11blah/ has invalid TLD" - - expected = "this url https://google.foobar.blah11blah/ has invalid TLD" - assert AutoLinker.link(text, scheme: true) == expected - - text = "this url https://google.foobar.com/ has valid TLD" - - expected = - "this url google.foobar.com/ has valid TLD" - - assert AutoLinker.link(text, scheme: true) == expected - end - - test "only existing TLDs without scheme" do - text = "this url 
google.foobar.blah11blah/ has invalid TLD" - expected = "this url google.foobar.blah11blah/ has invalid TLD" - assert AutoLinker.link(text, scheme: false) == expected - - text = "this url google.foobar.com/ has valid TLD" - - expected = - "this url google.foobar.com/ has valid TLD" - - assert AutoLinker.link(text, scheme: false) == expected - end - - test "only existing TLDs with and without scheme" do - text = "this url http://google.foobar.com/ has valid TLD" - - expected = - "this url google.foobar.com/ has valid TLD" - - assert AutoLinker.link(text, scheme: true) == expected - - text = "this url google.foobar.com/ has valid TLD" - - expected = - "this url google.foobar.com/ has valid TLD" - - assert AutoLinker.link(text, scheme: true) == expected - end - end -end diff --git a/test/builder_test.exs b/test/builder_test.exs index e20f6ea..bfcd9bb 100644 --- a/test/builder_test.exs +++ b/test/builder_test.exs @@ -1,39 +1,34 @@ -defmodule AutoLinker.BuilderTest do +defmodule Linkify.BuilderTest do use ExUnit.Case, async: true - doctest AutoLinker.Builder + doctest Linkify.Builder - import AutoLinker.Builder + import Linkify.Builder test "create_link/2" do - expected = - "text" + expected = "text" assert create_link("text", %{}) == expected - expected = "text" - assert create_link("text", %{rel: nil}) == expected + expected = "text" - expected = - "text" + assert create_link("text", %{new_window: true}) == expected + + expected = "text" + assert create_link("text", %{class: "linkified"}) == expected + + expected = "text" assert create_link("text", %{rel: "me"}) == expected - expected = "t..." + expected = "t..." - assert create_link("text", %{truncate: 3, rel: false}) == expected + assert create_link("text", %{truncate: 3}) == expected - expected = "text" - assert create_link("text", %{truncate: 2, rel: false}) == expected + expected = "text" + assert create_link("text", %{truncate: 2}) == expected - expected = "http://text" - assert create_link("http://text", %{rel: false, strip_prefix: false}) == expected - end - - test "create_markdown_links/2" do - expected = - "text" - - assert create_markdown_links("[text](url)", %{}) == expected + expected = "http://text" + assert create_link("http://text", %{strip_prefix: false}) == expected end test "format_hashtag/3" do @@ -53,40 +48,14 @@ defmodule AutoLinker.BuilderTest do assert format_mention(%{href: "url"}, "user@host", nil) == expected end - describe "create_phone_link" do - test "finishes" do - assert create_phone_link([], "", []) == "" - end - - test "handles one link" do - phrase = "my exten is x888. Call me." - - expected = - ~s'my exten is x888. Call me.' 
- - assert create_phone_link([["x888", ""]], phrase, attributes: [test: "test"]) == expected - end - - test "handles multiple links" do - phrase = "555.555.5555 or (555) 888-8888" - - expected = - ~s'555.555.5555 or ' <> - ~s'(555) 888-8888' - - assert create_phone_link([["555.555.5555", ""], ["(555) 888-8888"]], phrase, []) == expected - end - end - test "create_mention_link/3" do - expected = - "@navi" + expected = "@navi" assert create_mention_link("@navi", "hello @navi", %{mention_prefix: "/u/"}) == expected end test "create_email_link/3" do - expected = "user@example.org" + expected = "user@example.org" assert create_email_link("user@example.org", %{}) == expected assert create_email_link("user@example.org", %{href: "mailto:user@example.org"}) == expected end diff --git a/test/linkify_test.exs b/test/linkify_test.exs new file mode 100644 index 0000000..c50aace --- /dev/null +++ b/test/linkify_test.exs @@ -0,0 +1,404 @@ +defmodule LinkifyTest do + use ExUnit.Case, async: true + doctest Linkify + + test "default link" do + assert Linkify.link("google.com") == + "google.com" + end + + test "does on link existing links" do + text = ~s(google.com) + assert Linkify.link(text) == text + end + + test "all kinds of links" do + text = "hello google.com https://ddg.com user@email.com irc:///mIRC" + + expected = + "hello google.com ddg.com user@email.com irc:///mIRC" + + assert Linkify.link(text, + email: true, + extra: true + ) == expected + end + + test "class attribute" do + assert Linkify.link("google.com", class: "linkified") == + "google.com" + end + + test "rel attribute" do + assert Linkify.link("google.com", rel: "noopener noreferrer") == + "google.com" + end + + test "rel as function" do + text = "google.com" + + expected = "google.com" + + custom_rel = fn url -> + url |> String.split(".") |> List.last() + end + + assert Linkify.link(text, rel: custom_rel) == expected + + text = "google.com" + + expected = "google.com" + + custom_rel = fn _ -> nil end + + assert Linkify.link(text, rel: custom_rel) == expected + end + + test "link_map/2" do + assert Linkify.link_map("google.com", []) == + {"google.com", []} + end + + describe "custom handlers" do + test "mentions handler" do + text = "hello @user, @valid_user and @invalid_user" + valid_users = ["user", "valid_user"] + + handler = fn "@" <> user = mention, buffer, _opts, acc -> + if Enum.member?(valid_users, user) do + link = ~s(#{mention}) + {link, %{acc | mentions: MapSet.put(acc.mentions, {mention, user})}} + else + {buffer, acc} + end + end + + {result_text, %{mentions: mentions}} = + Linkify.link_map(text, %{mentions: MapSet.new()}, + mention: true, + mention_handler: handler + ) + + assert result_text == + "hello @user, @valid_user and @invalid_user" + + assert mentions |> MapSet.to_list() |> Enum.map(&elem(&1, 1)) == valid_users + end + + test "hashtags handler" do + text = "#hello #world" + + handler = fn hashtag, buffer, opts, acc -> + link = Linkify.Builder.create_hashtag_link(hashtag, buffer, opts) + {link, %{acc | tags: MapSet.put(acc.tags, hashtag)}} + end + + {result_text, %{tags: tags}} = + Linkify.link_map(text, %{tags: MapSet.new()}, + hashtag: true, + hashtag_handler: handler, + hashtag_prefix: "https://example.com/user/", + rel: false + ) + + assert result_text == + "#hello #world" + + assert MapSet.to_list(tags) == ["#hello", "#world"] + end + + test "mention handler and hashtag prefix" do + text = + "Hello again, @user.<script></script>\nThis is on another :moominmamma: line. 
#2hu #epic #phantasmagoric" + + handler = fn "@" <> user = mention, _, _, _ -> + ~s(@#{mention}) + end + + expected = + ~s(Hello again, @@user.<script></script>\nThis is on another :moominmamma: line. #2hu #epic #phantasmagoric) + + assert Linkify.link(text, + mention: true, + mention_handler: handler, + hashtag: true, + hashtag_prefix: "/tag/", + new_window: true + ) == expected + end + end + + describe "mentions" do + test "simple mentions" do + expected = + ~s{hello @user and @anotherUser.} + + assert Linkify.link("hello @user and @anotherUser.", + mention: true, + mention_prefix: "https://example.com/user/", + new_window: true + ) == expected + end + + test "mentions inside html tags" do + text = + "

hello world\n<`em>another @user__test and @user__test google.com paragraph
\n" + + expected = + "

hello world\n<`em>another @user__test and @user__test google.com paragraph
\n" + + assert Linkify.link(text, mention: true, mention_prefix: "u/") == expected + end + + test "metion @user@example.com" do + text = "hey @user@example.com" + + expected = + "hey @user@example.com" + + assert Linkify.link(text, + mention: true, + mention_prefix: "https://example.com/user/", + new_window: true + ) == expected + end + end + + describe "hashtag links" do + test "hashtag" do + expected = + " one #2two three #four." + + assert Linkify.link(" one #2two three #four.", + hashtag: true, + hashtag_prefix: "https://example.com/tag/", + new_window: true + ) == expected + end + + test "must have non-numbers" do + expected = "#1ok #42 #7" + + assert Linkify.link("#1ok #42 #7", + hashtag: true, + hashtag_prefix: "/t/", + rel: false + ) == expected + end + + test "support French" do + text = "#administrateur·rice·s #ingénieur·e·s" + + expected = + "#administrateur·rice·s #ingénieur·e·s" + + assert Linkify.link(text, + hashtag: true, + hashtag_prefix: "/t/", + rel: false + ) == expected + end + + test "support Telugu" do + text = "#చక్రం #కకకకక్ #కకకకాక #కకకక్రకకకక" + + expected = + "#చక్రం #కకకకక్ #కకకకాక #కకకక్రకకకక" + + assert Linkify.link(text, + hashtag: true, + hashtag_prefix: "/t/", + rel: false + ) == expected + end + + test "do not turn urls with hashes into hashtags" do + text = "google.com#test #test google.com/#test #tag" + + expected = + "google.com#test #test google.com/#test #tag" + + assert Linkify.link(text, + hashtag: true, + rel: false, + hashtag_prefix: "https://example.com/tag/" + ) == expected + end + + test "works with non-latin characters" do + text = "#漢字 #は #тест #ทดสอบ" + + expected = + "#漢字 #は #тест #ทดสอบ" + + assert Linkify.link(text, + rel: false, + hashtag: true, + hashtag_prefix: "https://example.com/tag/" + ) == expected + end + end + + describe "links" do + test "turning urls into links" do + text = "Hey, check out http://www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ." + + expected = + "Hey, check out youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ." + + assert Linkify.link(text, new_window: true) == expected + + # no scheme + text = "Hey, check out www.youtube.com/watch?v=8Zg1-TufF%20zY?x=1&y=2#blabla ." 
+ assert Linkify.link(text, new_window: true) == expected + end + + test "turn urls with schema into urls" do + text = "📌https://google.com" + expected = "📌google.com" + + assert Linkify.link(text, rel: false) == expected + end + + test "hostname/@user" do + text = "https://example.com/@user" + + expected = "example.com/@user" + + assert Linkify.link(text, new_window: true) == expected + + text = "https://example.com:4000/@user" + + expected = + "example.com:4000/@user" + + assert Linkify.link(text, new_window: true) == expected + + text = "https://example.com:4000/@user" + + expected = + "example.com:4000/@user" + + assert Linkify.link(text, new_window: true) == expected + + text = "@username" + expected = "@username" + assert Linkify.link(text, new_window: true) == expected + + text = "http://www.cs.vu.nl/~ast/intel/" + + expected = "cs.vu.nl/~ast/intel/" + + assert Linkify.link(text) == expected + + text = "https://forum.zdoom.org/viewtopic.php?f=44&t=57087" + + expected = + "forum.zdoom.org/viewtopic.php?f=44&t=57087" + + assert Linkify.link(text) == expected + + text = "https://en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul" + + expected = + "en.wikipedia.org/wiki/Sophia_(Gnosticism)#Mythos_of_the_soul" + + assert Linkify.link(text) == expected + + text = "https://en.wikipedia.org/wiki/Duff's_device" + + expected = + "en.wikipedia.org/wiki/Duff's_device" + + assert Linkify.link(text) == expected + end + end + + describe "non http links" do + test "xmpp" do + text = "xmpp:user@example.com" + + expected = "xmpp:user@example.com" + + assert Linkify.link(text, extra: true) == expected + end + + test "email" do + text = "user@example.com" + expected = "user@example.com" + assert Linkify.link(text, email: true) == expected + end + + test "magnet" do + text = + "magnet:?xt=urn:btih:a4104a9d2f5615601c429fe8bab8177c47c05c84&dn=ubuntu-18.04.1.0-live-server-amd64.iso&tr=http%3A%2F%2Ftorrent.ubuntu.com%3A6969%2Fannounce&tr=http%3A%2F%2Fipv6.torrent.ubuntu.com%3A6969%2Fannounce" + + expected = + "magnet:?xt=urn:btih:a4104a9d2f5615601c429fe8bab8177c47c05c84&dn=ubuntu-18.04.1.0-live-server-amd64.iso&tr=http%3A%2F%2Ftorrent.ubuntu.com%3A6969%2Fannounce&tr=http%3A%2F%2Fipv6.torrent.ubuntu.com%3A6969%2Fannounce" + + assert Linkify.link(text, extra: true) == expected + end + + test "dweb" do + text = + "dweb://584faa05d394190ab1a3f0240607f9bf2b7e2bd9968830a11cf77db0cea36a21+v1.0.0/path/to/file.txt" + + expected = + "dweb://584faa05d394190ab1a3f0240607f9bf2b7e2bd9968830a11cf77db0cea36a21+v1.0.0/path/to/file.txt" + + assert Linkify.link(text, extra: true) == expected + end + end + + describe "TLDs" do + test "parse with scheme" do + text = "https://google.com" + + expected = "google.com" + + assert Linkify.link(text) == expected + end + + test "only existing TLDs with scheme" do + text = "this url https://google.foobar.blah11blah/ has invalid TLD" + + expected = "this url https://google.foobar.blah11blah/ has invalid TLD" + assert Linkify.link(text) == expected + + text = "this url https://google.foobar.com/ has valid TLD" + + expected = + "this url google.foobar.com/ has valid TLD" + + assert Linkify.link(text) == expected + end + + test "only existing TLDs without scheme" do + text = "this url google.foobar.blah11blah/ has invalid TLD" + assert Linkify.link(text) == text + + text = "this url google.foobar.com/ has valid TLD" + + expected = + "this url google.foobar.com/ has valid TLD" + + assert Linkify.link(text) == expected + end + + test "only existing TLDs with and without scheme" do + 
text = "this url http://google.foobar.com/ has valid TLD" + + expected = + "this url google.foobar.com/ has valid TLD" + + assert Linkify.link(text) == expected + + text = "this url google.foobar.com/ has valid TLD" + + expected = + "this url google.foobar.com/ has valid TLD" + + assert Linkify.link(text) == expected + end + end +end diff --git a/test/parser_test.exs b/test/parser_test.exs index cb4f8fb..9b74235 100644 --- a/test/parser_test.exs +++ b/test/parser_test.exs @@ -1,8 +1,8 @@ -defmodule AutoLinker.ParserTest do +defmodule Linkify.ParserTest do use ExUnit.Case, async: true - doctest AutoLinker.Parser + doctest Linkify.Parser - import AutoLinker.Parser + import Linkify.Parser describe "url?/2" do test "valid scheme true" do @@ -106,28 +106,10 @@ defmodule AutoLinker.ParserTest do end end - describe "match_phone" do - test "valid" do - valid_phone_nunbers() - |> Enum.each(fn number -> - assert number |> match_phone() |> valid_number?(number) - end) - end - - test "invalid" do - invalid_phone_numbers() - |> Enum.each(fn number -> - assert number |> match_phone() |> is_nil - end) - end - end - describe "parse" do test "handle line breakes" do text = "google.com\r\nssss" - - expected = - "google.com\r\nssss" + expected = "google.com\r\nssss" assert parse(text) == expected end @@ -157,25 +139,20 @@ defmodule AutoLinker.ParserTest do expected = "" - assert parse(text, class: false, rel: false, new_window: false, phone: false) == expected + assert parse(text, class: false, rel: false) == expected text = "Check out
google.com
" expected = "Check out " - assert parse(text, class: false, rel: false, new_window: false) == expected + assert parse(text, class: false, rel: false) == expected end test "links url inside nested html" do text = "

google.com
" expected = "

google.com
" - assert parse(text, class: false, rel: false, new_window: false) == expected - end - - test "excludes html with specified class" do - text = "```Check out
google.com
```" - assert parse(text, exclude_patterns: ["```"]) == text + assert parse(text, class: false, rel: false) == expected end test "do not link parens" do @@ -184,19 +161,19 @@ defmodule AutoLinker.ParserTest do expected = " foo (example.com/path/folder/), bar" - assert parse(text, class: false, rel: false, new_window: false, scheme: true) == expected + assert parse(text, class: false, rel: false, scheme: true) == expected text = " foo (example.com/path/folder/), bar" expected = " foo (example.com/path/folder/), bar" - assert parse(text, class: false, rel: false, new_window: false) == expected + assert parse(text, class: false, rel: false) == expected end test "do not link urls" do text = "google.com" - assert parse(text, url: false, phone: true) == text + assert parse(text, url: false) == text end test "do not link `:test.test`" do @@ -260,35 +237,6 @@ defmodule AutoLinker.ParserTest do "555.555.5555" ] - def valid_phone_nunbers, - do: [ - "x55", - "x555", - "x5555", - "x12345", - "+1 555 555-5555", - "555 555-5555", - "555.555.5555", - "613-555-5555", - "1 (555) 555-5555", - "(555) 555-5555", - "1.555.555.5555", - "800 555-5555", - "1.800.555.5555", - "1 (800) 555-5555", - "888 555-5555", - "887 555-5555", - "1-877-555-5555", - "1 800 710-5515" - ] - - def invalid_phone_numbers, - do: [ - "5555", - "x5", - "(555) 555-55" - ] - def custom_tld_scheme_urls, do: [ "http://whatever.null/",