sites

public wiki contents of suckless.org
git clone git://git.suckless.org/sites

commit 8b1563baffd317eade787e919746a2608e98e2dd
parent b817d5f061360f4812cdeb760281863ff4ddb5b6
Author: Michael Buch <michaelbuch12@gmail.com>
Date:   Sun, 20 May 2018 16:39:14 +0100

[copyurl patch] Cleanup URL determination logic

Separate the URL matching out into its own function and provide
the ability to add more URL prefixes to the list.
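
With this change the recognized URL schemes live in a single array in the
patched st.c, so adding another scheme is a one-line edit. A minimal sketch
against the patched copyurl() (the "ftp://" entry is only an example, not
part of this commit):

	/* st.c, inside copyurl(): the prefixes scanned by findlastany() */
	static const char* URLSTRINGS[] = {"http://", "https://", "ftp://"};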

Diffstat:
M st.suckless.org/patches/copyurl/index.md | 2 +-
D st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff | 179 -------------------------------------------------------------------------------
A st.suckless.org/patches/copyurl/st-copyurl-20180520-73f375a.diff | 185 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
3 files changed, 186 insertions(+), 180 deletions(-)

diff --git a/st.suckless.org/patches/copyurl/index.md b/st.suckless.org/patches/copyurl/index.md
@@ -25,7 +25,7 @@ Download
 Following patches also highlight the selected urls:
 
  * [st-copyurl-20180514-a7bd977.diff](st-copyurl-20180514-a7bd977.diff)
- * [st-copyurl-20180520-53d59ef.diff](st-copyurl-20180520-53d59ef.diff)
+ * [st-copyurl-20180520-0fe819f.diff](st-copyurl-20180520-0fe819f.diff)
 
 Authors
 -------
diff --git a/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff b/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff
@@ -1,179 +0,0 @@
-From 53d59ef83576afface82889e21bf86ed9c288496 Mon Sep 17 00:00:00 2001
-From: Michael Buch <michaelbuch12@gmail.com>
-Date: Sun, 20 May 2018 01:35:03 +0100
-Subject: [PATCH] Handle multiple URLs on single line scenario
-
-When cycling through URLs, instead of picking the first
-URL of each line, cycle through URLs on a single line
-from back to front
----
- config.def.h | 1 +
- st.c | 116 +++++++++++++++++++++++++++++++++++++++++++++++++++
- st.h | 1 +
- 3 files changed, 118 insertions(+)
-
-diff --git a/config.def.h b/config.def.h
-index 82b1b09..cbe923e 100644
---- a/config.def.h
-+++ b/config.def.h
-@@ -178,6 +178,7 @@ static Shortcut shortcuts[] = {
- 	{ TERMMOD, XK_Y, selpaste, {.i = 0} },
- 	{ TERMMOD, XK_Num_Lock, numlock, {.i = 0} },
- 	{ TERMMOD, XK_I, iso14755, {.i = 0} },
-+	{ MODKEY, XK_l, copyurl, {.i = 0} },
- };
- 
- /*
-diff --git a/st.c b/st.c
-index 0628707..309aa89 100644
---- a/st.c
-+++ b/st.c
-@@ -204,6 +204,7 @@ static void tdefutf8(char);
- static int32_t tdefcolor(int *, int *, int);
- static void tdeftran(char);
- static void tstrsequence(uchar);
-+static void tsetcolor(int, int, int, uint32_t, uint32_t);
- 
- static void drawregion(int, int, int, int);
- 
-@@ -1600,6 +1601,17 @@ tsetmode(int priv, int set, int *args, int narg)
- 	}
- }
- 
-+void
-+tsetcolor( int row, int start, int end, uint32_t fg, uint32_t bg )
-+{
-+	int i = start;
-+	for( ; i < end; ++i )
-+	{
-+		term.line[row][i].fg = fg;
-+		term.line[row][i].bg = bg;
-+	}
-+}
-+
- void
- csihandle(void)
- {
-@@ -2617,3 +2629,107 @@ redraw(void)
- 	tfulldirt();
- 	draw();
- }
-+
-+char *
-+findlast(char *str, const char* find)
-+{
-+	char* found = NULL;
-+	for(found = str + strlen(str) - 1; found >= str; --found) {
-+		if(strncmp(found, find, strlen(find)) == 0) {
-+			return found;
-+		}
-+	}
-+
-+	return NULL;
-+}
-+
-+/*
-+** Select and copy the previous url on screen (do nothing if there's no url).
-+**
-+** FIXME: doesn't handle urls that span multiple lines; will need to add support
-+** for multiline "getsel()" first
-+*/
-+void
-+copyurl(const Arg *arg) {
-+	/* () and [] can appear in urls, but excluding them here will reduce false
-+	 * positives when figuring out where a given url ends.
-+	 */
-+	static char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
-+		"abcdefghijklmnopqrstuvwxyz"
-+		"0123456789-._~:/?#@!$&'*+,;=%";
-+
-+	/* remove highlighting from previous selection if any */
-+	if(sel.ob.x >= 0 && sel.oe.x >= 0)
-+		tsetcolor(sel.nb.y, sel.ob.x, sel.oe.x + 1, defaultfg, defaultbg);
-+
-+	int i = 0,
-+		row = 0,
-+		col = 0,
-+		startrow = 0,
-+		colend = 0;
-+
-+	char *linestr = calloc(sizeof(char), term.col+1); /* assume ascii */
-+	char *c = NULL,
-+		*match = NULL;
-+
-+	row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
-+	LIMIT(row, term.top, term.bot);
-+	startrow = row;
-+
-+	colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
-+	LIMIT(colend, 0, term.col);
-+
-+	/*
-+	** Scan from (term.bot,term.col) to (0,0) and find
-+	** next occurrance of a URL
-+	*/
-+	do {
-+		/* Read in each column of every row until
-+		** we hit previous occurrence of URL
-+		*/
-+		for (col = 0, i = 0; col < colend; ++col,++i) {
-+			/* assume ascii */
-+			if (term.line[row][col].u > 127)
-+				continue;
-+			linestr[i] = term.line[row][col].u;
-+		}
-+		linestr[term.col] = '\0';
-+
-+		if ((match = findlast(linestr, "http://"))
-+				|| (match = findlast(linestr, "https://")))
-+			break;
-+
-+		if (--row < term.top)
-+			row = term.bot;
-+
-+		colend = term.col;
-+	} while (row != startrow);
-+
-+	if (match) {
-+		/* must happen before trim */
-+		selclear();
-+		sel.ob.x = strlen(linestr) - strlen(match);
-+
-+		/* trim the rest of the line from the url match */
-+		for (c = match; *c != '\0'; ++c)
-+			if (!strchr(URLCHARS, *c)) {
-+				*c = '\0';
-+				break;
-+			}
-+
-+		/* highlight selection by inverting terminal colors */
-+		tsetcolor(row, sel.ob.x, sel.ob.x + strlen( match ), defaultbg, defaultfg);
-+
-+		/* select and copy */
-+		sel.mode = 1;
-+		sel.type = SEL_REGULAR;
-+		sel.oe.x = sel.ob.x + strlen(match)-1;
-+		sel.ob.y = sel.oe.y = row;
-+		selnormalize();
-+		tsetdirt(sel.nb.y, sel.ne.y);
-+		xsetsel(getsel());
-+		xclipcopy();
-+	}
-+
-+	free(linestr);
-+}
-diff --git a/st.h b/st.h
-index dac64d8..5a58f8f 100644
---- a/st.h
-+++ b/st.h
-@@ -85,6 +85,7 @@ void printscreen(const Arg *);
- void printsel(const Arg *);
- void sendbreak(const Arg *);
- void toggleprinter(const Arg *);
-+void copyurl(const Arg *);
- 
- int tattrset(int);
- void tnew(int, int);
---
-2.17.0
-
diff --git a/st.suckless.org/patches/copyurl/st-copyurl-20180520-73f375a.diff b/st.suckless.org/patches/copyurl/st-copyurl-20180520-73f375a.diff
@@ -0,0 +1,185 @@
+From 73f375a7c3256d32c62bdb7d616207071e89f982 Mon Sep 17 00:00:00 2001
+From: Michael Buch <michaelbuch12@gmail.com>
+Date: Sun, 20 May 2018 16:14:14 +0100
+Subject: [PATCH] [copuryl patch] Cleanup URL determination logic
+
+Separate out into separate function and provide
+ability to add more URLs to the list.
+---
+ config.def.h | 1 +
+ st.c | 123 +++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.h | 1 +
+ 3 files changed, 125 insertions(+)
+
+diff --git a/config.def.h b/config.def.h
+index 82b1b09..cbe923e 100644
+--- a/config.def.h
++++ b/config.def.h
+@@ -178,6 +178,7 @@ static Shortcut shortcuts[] = {
+ 	{ TERMMOD, XK_Y, selpaste, {.i = 0} },
+ 	{ TERMMOD, XK_Num_Lock, numlock, {.i = 0} },
+ 	{ TERMMOD, XK_I, iso14755, {.i = 0} },
++	{ MODKEY, XK_l, copyurl, {.i = 0} },
+ };
+ 
+ /*
+diff --git a/st.c b/st.c
+index 0628707..b08f454 100644
+--- a/st.c
++++ b/st.c
+@@ -204,6 +204,7 @@ static void tdefutf8(char);
+ static int32_t tdefcolor(int *, int *, int);
+ static void tdeftran(char);
+ static void tstrsequence(uchar);
++static void tsetcolor(int, int, int, uint32_t, uint32_t);
+ 
+ static void drawregion(int, int, int, int);
+ 
+@@ -1600,6 +1601,17 @@ tsetmode(int priv, int set, int *args, int narg)
+ 	}
+ }
+ 
++void
++tsetcolor( int row, int start, int end, uint32_t fg, uint32_t bg )
++{
++	int i = start;
++	for( ; i < end; ++i )
++	{
++		term.line[row][i].fg = fg;
++		term.line[row][i].bg = bg;
++	}
++}
++
+ void
+ csihandle(void)
+ {
+@@ -2617,3 +2629,114 @@ redraw(void)
+ 	tfulldirt();
+ 	draw();
+ }
++
++char *
++findlastany(char *str, const char** find, size_t len)
++{
++	char* found = NULL;
++	int i = 0;
++	for(found = str + strlen(str) - 1; found >= str; --found) {
++		for(i = 0; i < len; i++) {
++			if(strncmp(found, find[i], strlen(find[i])) == 0) {
++				return found;
++			}
++		}
++	}
++
++	return NULL;
++}
++
++/*
++** Select and copy the previous url on screen (do nothing if there's no url).
++**
++** FIXME: doesn't handle urls that span multiple lines; will need to add support
++** for multiline "getsel()" first
++*/
++void
++copyurl(const Arg *arg) {
++	/* () and [] can appear in urls, but excluding them here will reduce false
++	 * positives when figuring out where a given url ends.
++	 */
++	static char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++		"abcdefghijklmnopqrstuvwxyz"
++		"0123456789-._~:/?#@!$&'*+,;=%";
++
++	static const char* URLSTRINGS[] = {"http://", "https://"};
++
++	/* remove highlighting from previous selection if any */
++	if(sel.ob.x >= 0 && sel.oe.x >= 0)
++		tsetcolor(sel.nb.y, sel.ob.x, sel.oe.x + 1, defaultfg, defaultbg);
++
++	int i = 0,
++		row = 0, /* row of current URL */
++		col = 0, /* column of current URL start */
++		startrow = 0, /* row of last occurrence */
++		colend = 0, /* column of last occurrence */
++		passes = 0; /* how many rows have been scanned */
++
++	char *linestr = calloc(sizeof(char), term.col+1); /* assume ascii */
++	char *c = NULL,
++		*match = NULL;
++
++	row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
++	LIMIT(row, term.top, term.bot);
++	startrow = row;
++
++	colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
++	LIMIT(colend, 0, term.col);
++
++	/*
++	** Scan from (term.bot,term.col) to (0,0) and find
++	** next occurrance of a URL
++	*/
++	while(passes !=term.bot + 2) {
++		/* Read in each column of every row until
++		** we hit previous occurrence of URL
++		*/
++		for (col = 0, i = 0; col < colend; ++col,++i) {
++			/* assume ascii */
++			if (term.line[row][col].u > 127)
++				continue;
++			linestr[i] = term.line[row][col].u;
++		}
++		linestr[term.col] = '\0';
++
++		if ((match = findlastany(linestr, URLSTRINGS,
++				sizeof(URLSTRINGS)/sizeof(URLSTRINGS[0]))))
++			break;
++
++		if (--row < term.top)
++			row = term.bot;
++
++		colend = term.col;
++		passes++;
++	};
++
++	if (match) {
++		/* must happen before trim */
++		selclear();
++		sel.ob.x = strlen(linestr) - strlen(match);
++
++		/* trim the rest of the line from the url match */
++		for (c = match; *c != '\0'; ++c)
++			if (!strchr(URLCHARS, *c)) {
++				*c = '\0';
++				break;
++			}
++
++		/* highlight selection by inverting terminal colors */
++		tsetcolor(row, sel.ob.x, sel.ob.x + strlen( match ), defaultbg, defaultfg);
++
++		/* select and copy */
++		sel.mode = 1;
++		sel.type = SEL_REGULAR;
++		sel.oe.x = sel.ob.x + strlen(match)-1;
++		sel.ob.y = sel.oe.y = row;
++		selnormalize();
++		tsetdirt(sel.nb.y, sel.ne.y);
++		xsetsel(getsel());
++		xclipcopy();
++	}
++
++	free(linestr);
++}
+diff --git a/st.h b/st.h
+index dac64d8..5a58f8f 100644
+--- a/st.h
++++ b/st.h
+@@ -85,6 +85,7 @@ void printscreen(const Arg *);
+ void printsel(const Arg *);
+ void sendbreak(const Arg *);
+ void toggleprinter(const Arg *);
++void copyurl(const Arg *);
+ 
+ int tattrset(int);
+ void tnew(int, int);
+--
+2.17.0
+
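
For readers who want to poke at the new helper outside of st, here is a minimal
standalone sketch of the search logic: findlastany() mirrors the function added
by this patch, while the main() driver, its sample line, and the size_t loop
index are illustrative additions, not part of the commit.

	#include <stdio.h>
	#include <string.h>

	/* return a pointer to the last (right-most) occurrence of any prefix
	 * in find[], or NULL if none is present; same idea as in the patch */
	static char *
	findlastany(char *str, const char **find, size_t len)
	{
		char *found;
		size_t i;
		for (found = str + strlen(str) - 1; found >= str; --found)
			for (i = 0; i < len; i++)
				if (strncmp(found, find[i], strlen(find[i])) == 0)
					return found;
		return NULL;
	}

	int
	main(void)
	{
		static const char *URLSTRINGS[] = { "http://", "https://" };
		char line[] = "see http://example.com and https://example.org";
		char *match = findlastany(line, URLSTRINGS,
		                          sizeof(URLSTRINGS) / sizeof(URLSTRINGS[0]));

		if (match)
			printf("last url on the line: %s\n", match); /* the https:// one */
		return 0;
	}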