sites

public wiki contents of suckless.org
git clone git://git.suckless.org/sites

commit b817d5f061360f4812cdeb760281863ff4ddb5b6
parent 4f5c20eb3d66fd4d8f65dfd1563bc3f4c7bf86c4
Author: Michael Buch <michaelbuch12@gmail.com>
Date:   Sun, 20 May 2018 01:43:25 +0100

Handle multiple URLs on single line scenario

When cycling through URLs, instead of picking the first
URL of each line, cycle through URLs on a single line
from back to front
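
The change boils down to a reverse substring search: each line is scanned from its end towards its start, so the rightmost URL on a line is matched first and the ones before it are picked up on later invocations. The snippet below is a minimal, self-contained sketch of that idea; findlast mirrors the helper added by the patch, while the example string and the main() driver are illustrative only and not part of the patch.

#include <stdio.h>
#include <string.h>

/* return the last occurrence of `find` in `str`, or NULL if there is none */
static char *
findlast(char *str, const char *find)
{
	char *p;

	for (p = str + strlen(str) - 1; p >= str; --p)
		if (strncmp(p, find, strlen(find)) == 0)
			return p;
	return NULL;
}

int
main(void)
{
	char line[] = "see http://first.example and http://second.example";
	char *m = findlast(line, "http://");

	if (m)
		puts(m);	/* prints the rightmost URL: http://second.example */
	return 0;
}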

Diffstat:
M st.suckless.org/patches/copyurl/index.md | 1 +
A st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff | 179 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
2 files changed, 180 insertions(+), 0 deletions(-)

diff --git a/st.suckless.org/patches/copyurl/index.md b/st.suckless.org/patches/copyurl/index.md
@@ -25,6 +25,7 @@ Download
 Following patches also highlight the selected urls:
 
  * [st-copyurl-20180514-a7bd977.diff](st-copyurl-20180514-a7bd977.diff)
+ * [st-copyurl-20180520-53d59ef.diff](st-copyurl-20180520-53d59ef.diff)
 
 Authors
 -------
diff --git a/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff b/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff
@@ -0,0 +1,179 @@
+From 53d59ef83576afface82889e21bf86ed9c288496 Mon Sep 17 00:00:00 2001
+From: Michael Buch <michaelbuch12@gmail.com>
+Date: Sun, 20 May 2018 01:35:03 +0100
+Subject: [PATCH] Handle multiple URLs on single line scenario
+
+When cycling through URLs, instead of picking the first
+URL of each line, cycle through URLs on a single line
+from back to front
+---
+ config.def.h |   1 +
+ st.c         | 116 +++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.h         |   1 +
+ 3 files changed, 118 insertions(+)
+
+diff --git a/config.def.h b/config.def.h
+index 82b1b09..cbe923e 100644
+--- a/config.def.h
++++ b/config.def.h
+@@ -178,6 +178,7 @@ static Shortcut shortcuts[] = {
+ 	{ TERMMOD,              XK_Y,           selpaste,       {.i =  0} },
+ 	{ TERMMOD,              XK_Num_Lock,    numlock,        {.i =  0} },
+ 	{ TERMMOD,              XK_I,           iso14755,       {.i =  0} },
++	{ MODKEY,               XK_l,           copyurl,        {.i =  0} },
+ };
+ 
+ /*
+diff --git a/st.c b/st.c
+index 0628707..309aa89 100644
+--- a/st.c
++++ b/st.c
+@@ -204,6 +204,7 @@ static void tdefutf8(char);
+ static int32_t tdefcolor(int *, int *, int);
+ static void tdeftran(char);
+ static void tstrsequence(uchar);
++static void tsetcolor(int, int, int, uint32_t, uint32_t);
+ 
+ static void drawregion(int, int, int, int);
+ 
+@@ -1600,6 +1601,17 @@ tsetmode(int priv, int set, int *args, int narg)
+ 	}
+ }
+ 
++void
++tsetcolor( int row, int start, int end, uint32_t fg, uint32_t bg )
++{
++	int i = start;
++	for( ; i < end; ++i )
++	{
++		term.line[row][i].fg = fg;
++		term.line[row][i].bg = bg;
++	}
++}
++
+ void
+ csihandle(void)
+ {
+@@ -2617,3 +2629,107 @@ redraw(void)
+ 	tfulldirt();
+ 	draw();
+ }
++
++char *
++findlast(char *str, const char* find)
++{
++	char* found = NULL;
++	for(found = str + strlen(str) - 1; found >= str; --found) {
++		if(strncmp(found, find, strlen(find)) == 0) {
++			return found;
++		}
++	}
++
++	return NULL;
++}
++
++/*
++** Select and copy the previous url on screen (do nothing if there's no url).
++**
++** FIXME: doesn't handle urls that span multiple lines; will need to add support
++** for multiline "getsel()" first
++*/
++void
++copyurl(const Arg *arg) {
++	/* () and [] can appear in urls, but excluding them here will reduce false
++	 * positives when figuring out where a given url ends.
++	 */
++	static char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++		"abcdefghijklmnopqrstuvwxyz"
++		"0123456789-._~:/?#@!$&'*+,;=%";
++
++	/* remove highlighting from previous selection if any */
++	if(sel.ob.x >= 0 && sel.oe.x >= 0)
++		tsetcolor(sel.nb.y, sel.ob.x, sel.oe.x + 1, defaultfg, defaultbg);
++
++	int i = 0,
++		row = 0,
++		col = 0,
++		startrow = 0,
++		colend = 0;
++
++	char *linestr = calloc(sizeof(char), term.col+1); /* assume ascii */
++	char *c = NULL,
++		 *match = NULL;
++
++	row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
++	LIMIT(row, term.top, term.bot);
++	startrow = row;
++
++	colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
++	LIMIT(colend, 0, term.col);
++
++	/*
++	** Scan from (term.bot,term.col) to (0,0) and find
++	** next occurrance of a URL
++	*/
++	do {
++		/* Read in each column of every row until
++		** we hit previous occurrence of URL
++		*/
++		for (col = 0, i = 0; col < colend; ++col,++i) {
++			/* assume ascii */
++			if (term.line[row][col].u > 127)
++				continue;
++			linestr[i] = term.line[row][col].u;
++		}
++		linestr[term.col] = '\0';
++
++		if ((match = findlast(linestr, "http://"))
++				|| (match = findlast(linestr, "https://")))
++			break;
++
++		if (--row < term.top)
++			row = term.bot;
++
++		colend = term.col;
++	} while (row != startrow);
++
++	if (match) {
++		/* must happen before trim */
++		selclear();
++		sel.ob.x = strlen(linestr) - strlen(match);
++
++		/* trim the rest of the line from the url match */
++		for (c = match; *c != '\0'; ++c)
++			if (!strchr(URLCHARS, *c)) {
++				*c = '\0';
++				break;
++			}
++
++		/* highlight selection by inverting terminal colors */
++		tsetcolor(row, sel.ob.x, sel.ob.x + strlen( match ), defaultbg, defaultfg);
++
++		/* select and copy */
++		sel.mode = 1;
++		sel.type = SEL_REGULAR;
++		sel.oe.x = sel.ob.x + strlen(match)-1;
++		sel.ob.y = sel.oe.y = row;
++		selnormalize();
++		tsetdirt(sel.nb.y, sel.ne.y);
++		xsetsel(getsel());
++		xclipcopy();
++	}
++
++	free(linestr);
++}
+diff --git a/st.h b/st.h
+index dac64d8..5a58f8f 100644
+--- a/st.h
++++ b/st.h
+@@ -85,6 +85,7 @@ void printscreen(const Arg *);
+ void printsel(const Arg *);
+ void sendbreak(const Arg *);
+ void toggleprinter(const Arg *);
++void copyurl(const Arg *);
+ 
+ int tattrset(int);
+ void tnew(int, int);
+-- 
+2.17.0
+
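
To see the cycling order in isolation, the following standalone sketch simulates the scan performed by copyurl above. The screen[] array and the prevrow/prevcol pair are simplified stand-ins for st's term and sel structures, findlast is repeated so the example compiles on its own, and the trimming of trailing non-URL characters done by the patch is omitted. Rows are searched bottom-up with wrap-around, and within a row only the text to the left of the previous match is considered, so successive calls visit a line's URLs from back to front.

#include <stdio.h>
#include <string.h>

#define ROWS 3

static const char *screen[ROWS] = {
	"one http://a.example and http://b.example",
	"nothing on this line",
	"bottom http://c.example",
};

static int prevrow = -1, prevcol = -1;	/* -1 means: no previous match yet */

/* same reverse search as in the patch */
static char *
findlast(char *str, const char *find)
{
	char *p;

	for (p = str + strlen(str) - 1; p >= str; --p)
		if (strncmp(p, find, strlen(find)) == 0)
			return p;
	return NULL;
}

static void
nexturl(void)
{
	char line[256];
	char *match = NULL;
	int row = (prevrow >= 0) ? prevrow : ROWS - 1;
	int colend = (prevrow >= 0) ? prevcol : (int)strlen(screen[row]);
	int startrow = row;

	do {
		/* search only the part of the row left of the previous match */
		snprintf(line, sizeof(line), "%.*s", colend, screen[row]);
		if ((match = findlast(line, "http://")))
			break;
		if (--row < 0)
			row = ROWS - 1;	/* wrap from the top row back to the bottom */
		colend = (int)strlen(screen[row]);
	} while (row != startrow);

	if (match) {
		prevrow = row;
		prevcol = (int)(match - line);
		printf("row %d, col %d: %s\n", row, prevcol, match);
	}
}

int
main(void)
{
	int i;

	/* finds c.example, then b.example, then a.example, then wraps to c.example */
	for (i = 0; i < 4; ++i)
		nexturl();
	return 0;
}

Built with any C compiler, the four calls locate http://c.example, http://b.example, http://a.example and then wrap around to http://c.example again, which is the order the commit message describes.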