sites

public wiki contents of suckless.org
git clone git://git.suckless.org/sites

commit 553aec962a7406e4a3407313e3372de3a38c4fa9
parent 59682d4e915ef5f85a622e8b7d6e0556c96e5e8b
Author: Miles Alan <m@milesalan.com>
Date:   Thu,  8 Aug 2019 05:29:13 -0500

[surf][patch] externalpipe: Improvements to surf_linkselect.sh script

Add various small improvements to the surf_linkselect.sh script, including:
- Preserve link ordering rather than sorting alphabetically (see the sketch below)
- Decode &lt;, &gt;, and &amp; entities in link titles
- Decode &amp; to & in link URLs, e.g. for Google links
- Set default SURF_WINDOW / DMENU_PROMPT values (allows usage with no arguments)
- Omit empty links
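
For reference, a minimal sketch of the order-preserving de-duplication idiom the
patch switches to; the printf input is illustrative only:

    printf 'b\na\nb\nc\na\n' | sort | uniq      # prints a, b, c  (input order lost)
    printf 'b\na\nb\nc\na\n' | awk '!x[$0]++'   # prints b, a, c  (first occurrences, original order)

The awk array x counts how often each line has been seen; a line is printed only
while its counter is still zero.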

Diffstat:
Msurf.suckless.org/patches/externalpipe/surf_linkselect.sh | 42+++++++++++++++++++++++-------------------
1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/surf.suckless.org/patches/externalpipe/surf_linkselect.sh b/surf.suckless.org/patches/externalpipe/surf_linkselect.sh
@@ -1,17 +1,14 @@
 #!/usr/bin/env sh
-#
-# surf_linkselect.sh:
-# Usage:
-#   curl somesite.com | surf_linkselect [SURFWINDOWID] [PROMPT]
-#
-# Description:
-#   Given an HTML body as STDIN, extracts links via xmllint & provides list
-#   to dmenu with each link paired with its associated content. Selected
-#   link is then normalized based on the passed surf window's URI and the
-#   result is printed to STDOUT.
-#
-# Dependencies:
-#   xmllint, awk, dmenu
+# Usage: curl somesite.com | surf_linkselect [SURFWINDOWID] [PROMPT]
+# Deps: xmllint, dmenu
+# Info:
+#   Designed to be used w/ surf externalpipe patch. Enables keyboard-only
+#   link selection via dmenu. Given HTML stdin, extracts links one per line
+#   Selected link is normalized based on current URI and printed to STDOUT.
+#   Pipe the result to a new surf or xprop _SURF_URI accordingly.
+
+SURF_WINDOW="${1:-$(xprop -root | sed -n '/^_NET_ACTIVE_WINDOW/ s/.* //p')}"
+DMENU_PROMPT="${2:-Link}"
 
 function dump_links_with_titles() {
   awk '{
@@ -20,22 +17,31 @@ function dump_links_with_titles() {
     $0 = input;
     gsub("<[^>]*>", "");
     gsub(/[ ]+/, " ");
+    gsub("&amp;", "\\&");
+    gsub("&lt;", "<");
+    gsub("&gt;", ">");
     $1 = $1;
     title = ($0 == "" ? "None" : $0);
 
     $0 = input;
     match($0, /\<[ ]*[aA][^>]* [hH][rR][eE][fF]=["]([^"]+)["]/, linkextract);
     $0 = linkextract[1];
+    gsub(/^[ \t]+/,"");
+    gsub(/[ \t]+$/,"");
     gsub("[ ]", "%20");
     link = $0;
 
-    print title ": " link;
+    if (link != "") {
+      print title ": " link;
+    }
   }'
 }
 
 function link_normalize() {
   URI=$1
   awk -v uri=$URI '{
+    gsub("&amp;", "\\&");
+
     if ($0 ~ /^https?:\/\// || $0 ~ /^\/\/.+$/) {
       print $0;
     } else if ($0 ~/^#/) {
@@ -57,16 +63,14 @@ function link_normalize() {
 }
 
 function link_select() {
-  SURF_WINDOW=$1
-  DMENU_PROMPT=$2
   tr -d '\n\r' |
     xmllint --html --xpath "//a" - |
     dump_links_with_titles |
-    sort |
-    uniq |
+    awk '!x[$0]++' |
+    # sort | uniq
     dmenu -p "$DMENU_PROMPT" -l 10 -i -w $SURF_WINDOW |
     awk -F' ' '{print $NF}' |
     link_normalize $(xprop -id $SURF_WINDOW _SURF_URI | cut -d '"' -f 2)
 }
 
-link_select "$1" "$2"
\ No newline at end of file
+link_select
\ No newline at end of file
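
A usage sketch under assumptions not stated in the commit: the script is saved as
an executable surf_linkselect.sh somewhere in $PATH, and https://example.com
stands in for a real page. With the new defaults both arguments can be omitted;
the selected link is printed to stdout and here handed to a fresh surf instance.

    curl -s https://example.com | surf_linkselect.sh | xargs surf

With GNU or busybox xargs, adding -r additionally skips launching surf when the
dmenu selection is cancelled and nothing is printed.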