sites

public wiki contents of suckless.org
git clone git://git.suckless.org/sites

commit 1f0a0aceb97de4d05842610c90539bd97caad7b0
parent bd31bc9128641ac9e34ff8d0e38b066de79fab79
Author: Miles Alan <m@milesalan.com>
Date:   Thu,  2 May 2019 16:24:39 -0500

Add surf externalpipe patch

Diffstat:
A surf.suckless.org/patches/externalpipe/edit_screen.sh | 5+++++
A surf.suckless.org/patches/externalpipe/index.md | 53+++++++++++++++++++++++++++++++++++++++++++++++++++++
A surf.suckless.org/patches/externalpipe/surf-2.0-externalpipe.diff | 93+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A surf.suckless.org/patches/externalpipe/surf_linkselect.sh | 73+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
4 files changed, 224 insertions(+), 0 deletions(-)
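
For anyone picking this patch up from the wiki, it follows the usual suckless workflow: apply the diff against a surf 2.0 source tree with patch(1), add the keybindings from index.md to config.h, rebuild, and put the two helper scripts somewhere on $PATH. A minimal sketch, assuming the surf sources live in ~/src/surf and that ~/.local/bin is on $PATH (both paths are only examples, not part of the patch):

    # apply the patch from the root of the surf 2.0 source tree (example path)
    cd ~/src/surf
    patch -p1 < surf-2.0-externalpipe.diff

    # copy the keybinding snippet from index.md into config.h, then rebuild
    make clean install

    # install the helper scripts somewhere surf can find them (example location)
    install -m 755 surf_linkselect.sh edit_screen.sh ~/.local/bin/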

diff --git a/surf.suckless.org/patches/externalpipe/edit_screen.sh b/surf.suckless.org/patches/externalpipe/edit_screen.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+tmpfile=$(mktemp /tmp/st-edit.XXXXXX)
+trap 'rm "$tmpfile"' 0 1 15
+cat > "$tmpfile"
+st -e "$EDITOR" "$tmpfile"
diff --git a/surf.suckless.org/patches/externalpipe/index.md b/surf.suckless.org/patches/externalpipe/index.md
@@ -0,0 +1,52 @@
+externalpipe
+============
+
+Description
+-----------
+
+Pipe the current page's source to an external program. This is particularly
+helpful for keyboard-based link following and also for viewing the source in an
+external editor. Works both when javascript is enabled and disabled.
+
+Example
+-------
+Install the below shell scripts into your `$PATH`:
+* [surf_linkselect.sh](surf_linkselect.sh) - extracts links via xmllint and
+  pipes to dmenu, converts selected link to valid URL.
+* [edit_screen.sh](edit_screen.sh) - open source in `$EDITOR` for copying text.
+
+
+Add to your `config.h`:
+    static char *linkselect_curwin [] = { "/bin/sh", "-c",
+        "surf_linkselect.sh $0 'Link' | xargs -r xprop -id $0 -f _SURF_GO 8s -set _SURF_GO",
+        winid, NULL
+    };
+    static char *linkselect_newwin [] = { "/bin/sh", "-c",
+        "surf_linkselect.sh $0 'Link (new window)' | xargs -r surf",
+        winid, NULL
+    };
+    static char *editscreen[] = { "/bin/sh", "-c", "edit_screen.sh", NULL };
+    ...
+    static Key keys[] = {
+        { MODKEY, GDK_KEY_d, externalpipe, { .v = linkselect_curwin } },
+        { GDK_SHIFT_MASK|MODKEY, GDK_KEY_d, externalpipe, { .v = linkselect_newwin } },
+        { MODKEY, GDK_KEY_o, externalpipe, { .v = editscreen } },
+        ...
+    }
+
+Now you have the new keybindings:
+- **Ctrl-d** - open dmenu with links, select to follow in current surf window
+- **Ctrl-Shift-d** - open dmenu with links, select to open in new surf window
+- **Ctrl-o** - view sourcecode for the current page in your editor
+
+
+Download
+--------
+
+* [surf-2.0-externalpipe.diff](surf-2.0-externalpipe.diff) (2368) (20190502)
+
+Author
+------
+
+* Miles Alan - m@milesalan.com
+* Rob Pilling - robpilling@gmail.com (author of st externalpipe, which pipe code is based on)
\ No newline at end of file
diff --git a/surf.suckless.org/patches/externalpipe/surf-2.0-externalpipe.diff b/surf.suckless.org/patches/externalpipe/surf-2.0-externalpipe.diff
@@ -0,0 +1,93 @@
+diff --git a/surf.c b/surf.c
+index 93a1629..ba53b94 100644
+--- a/surf.c
++++ b/surf.c
+@@ -217,6 +217,7 @@ static void togglefullscreen(Client *c, const Arg *a);
+ static void togglecookiepolicy(Client *c, const Arg *a);
+ static void toggleinspector(Client *c, const Arg *a);
+ static void find(Client *c, const Arg *a);
++static void externalpipe(Client *c, const Arg *a);
+
+ /* Buttons */
+ static void clicknavigate(Client *c, const Arg *a, WebKitHitTestResult *h);
+@@ -241,6 +242,80 @@ char *argv0;
+ /* configuration, allows nested code to access above variables */
+ #include "config.h"
+
++static void
++externalpipe_execute(char* buffer, Arg *arg) {
++	int to[2];
++	void (*oldsigpipe)(int);
++
++	if (pipe(to) == -1)
++		return;
++
++	switch (fork()) {
++	case -1:
++		close(to[0]);
++		close(to[1]);
++		return;
++	case 0:
++		dup2(to[0], STDIN_FILENO); close(to[0]); close(to[1]);
++		execvp(((char **)arg->v)[0], (char **)arg->v);
++		fprintf(stderr, "st: execvp %s\n", ((char **)arg->v)[0]);
++		perror("failed");
++		exit(0);
++	}
++
++	close(to[0]);
++	oldsigpipe = signal(SIGPIPE, SIG_IGN);
++	write(to[1], buffer, strlen(buffer));
++	close(to[1]);
++	signal(SIGPIPE, oldsigpipe);
++}
++
++static void
++externalpipe_resource_done(WebKitWebResource *r, GAsyncResult *s, Arg *arg)
++{
++	GError *gerr = NULL;
++	guchar *buffer = webkit_web_resource_get_data_finish(r, s, NULL, &gerr);
++	if (gerr == NULL) {
++		externalpipe_execute((char *) buffer, arg);
++	} else {
++		g_error_free(gerr);
++	}
++	g_free(buffer);
++}
++
++static void
++externalpipe_js_done(WebKitWebView *wv, GAsyncResult *s, Arg *arg)
++{
++	WebKitJavascriptResult *j = webkit_web_view_run_javascript_finish(
++		wv, s, NULL);
++	if (!j) {
++		return;
++	}
++	JSCValue *v = webkit_javascript_result_get_js_value(j);
++	if (jsc_value_is_string(v)) {
++		char *buffer = jsc_value_to_string(v);
++		externalpipe_execute(buffer, arg);
++		g_free(buffer);
++	}
++	webkit_javascript_result_unref(j);
++}
++
++void
++externalpipe(Client *c, const Arg *arg)
++{
++	if (curconfig[JavaScript].val.i) {
++		webkit_web_view_run_javascript(
++			c->view, "window.document.body.outerHTML",
++			NULL, externalpipe_js_done, arg);
++	} else {
++		WebKitWebResource *resource = webkit_web_view_get_main_resource(c->view);
++		if (resource != NULL) {
++			webkit_web_resource_get_data(
++				resource, NULL, externalpipe_resource_done, arg);
++		}
++	}
++}
++
+ void
+ usage(void)
+ {
diff --git a/surf.suckless.org/patches/externalpipe/surf_linkselect.sh b/surf.suckless.org/patches/externalpipe/surf_linkselect.sh
@@ -0,0 +1,72 @@
+#!/usr/bin/env sh
+#
+# surf_linkselect.sh:
+# Usage:
+#   curl somesite.com | surf_linkselect [SURFWINDOWID] [PROMPT]
+#
+# Description:
+#   Given an HTML body as STDIN, extracts links via xmllint & provides list
+#   to dmenu with each link paired with its associated content. Selected
+#   link is then normalized based on the passed surf window's URI and the
+#   result is printed to STDOUT.
+#
+# Dependencies:
+#   xmllint, awk, dmenu
+
+function dump_links_with_titles() {
+  awk '{
+    input = $0;
+
+    $0 = input;
+    gsub("<[^>]*>", "");
+    gsub(/[ ]+/, " ");
+    $1 = $1;
+    title = ($0 == "" ? "None" : $0);
+
+    $0 = input;
+    match($0, /\<[ ]*[aA][^>]* [hH][rR][eE][fF]=["]([^"]+)["]/, linkextract);
+    $0 = linkextract[1];
+    gsub("[ ]", "%20");
+    link = $0;
+
+    print title ": " link;
+  }'
+}
+
+function link_normalize() {
+  URI=$1
+  awk -v uri=$URI '{
+    if ($0 ~ /^https?:\/\// || $0 ~ /^\/\/.+$/) {
+      print $0;
+    } else if ($0 ~/^#/) {
+      gsub(/[#?][^#?]+/, "", uri);
+      print uri $0;
+    } else if ($0 ~/^\//) {
+      split(uri, uri_parts, "/");
+      print uri_parts[3] $0;
+    } else {
+      gsub(/[#][^#]+/, "", uri);
+      uri_parts_size = split(uri, uri_parts, "/");
+      delete uri_parts[uri_parts_size];
+      for (v in uri_parts) {
+        uri_pagestripped = uri_pagestripped uri_parts[v] "/"
+      }
+      print uri_pagestripped $0;
+    }
+  }'
+}
+
+function link_select() {
+  SURF_WINDOW=$1
+  DMENU_PROMPT=$2
+  tr -d '\n\r' |
+    xmllint --html --xpath "//a" - |
+    dump_links_with_titles |
+    sort |
+    uniq |
+    dmenu -p "$DMENU_PROMPT" -l 10 -i -w $SURF_WINDOW |
+    awk -F' ' '{print $NF}' |
+    link_normalize $(xprop -id $SURF_WINDOW _SURF_URI | cut -d '"' -f 2)
+}
+
+link_select "$1" "$2"
\ No newline at end of file
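
Because surf_linkselect.sh only reads HTML on stdin and takes a window id and a dmenu prompt as arguments, it can also be exercised by hand against a running surf window, as in the Usage comment above. A rough sketch; the xdotool lookup and the example.com URL are only illustrative stand-ins:

    # pick up the X window id of a running surf instance (one possible way)
    WINID=$(xdotool search --class Surf | head -n 1)

    # pipe a page's HTML through the selector; the normalized link is printed on stdout
    curl -s https://example.com | surf_linkselect.sh "$WINID" "Link"

Inside surf no manual plumbing is needed: the config.h entries pass winid to the command as $0, and externalpipe writes the page source to the command's stdin.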