sites

public wiki contents of suckless.org
git clone git://git.suckless.org/sites

st-copyurl-20161105-8c99915.diff (2545B)
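
As with other patches on this wiki, the diff below is applied from the root of the st source tree, typically with something along the lines of patch -p1 < st-copyurl-20161105-8c99915.diff (the exact invocation may vary with your patch tool and st revision; this diff was generated against st.c at index 4d44388), followed by a rebuild. It binds MODKEY+l to a new copyurl function that selects and copies the most recent url visible on screen.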


diff --git a/config.def.h b/config.def.h
index 7f465d1..faf6b54 100644
--- a/config.def.h
+++ b/config.def.h
@@ -171,6 +171,7 @@ static Shortcut shortcuts[] = {
 	{ MODKEY|ShiftMask,     XK_V,           clippaste,      {.i =  0} },
 	{ MODKEY,               XK_Num_Lock,    numlock,        {.i =  0} },
 	{ MODKEY,               XK_Control_L,   iso14755,       {.i =  0} },
+	{ MODKEY,               XK_l,           copyurl,        {.i =  0} },
 };
 
 /*
diff --git a/st.c b/st.c
index 4d44388..029c214 100644
--- a/st.c
+++ b/st.c
@@ -344,6 +344,7 @@ static void printscreen(const Arg *) ;
 static void iso14755(const Arg *);
 static void toggleprinter(const Arg *);
 static void sendbreak(const Arg *);
+static void copyurl(const Arg *);
 
 /* Config.h for applying patches and the configuration. */
 #include "config.h"
@@ -4521,3 +4522,63 @@ run:
 	return 0;
 }
 
+/* select and copy the previous url on screen (do nothing if there's no url).
+ * known bug: doesn't handle urls that span multiple lines (wontfix)
+ * known bug: only finds first url on line (mightfix)
+ */
+void
+copyurl(const Arg *arg) {
+	/* () and [] can appear in urls, but excluding them here will reduce false
+	 * positives when figuring out where a given url ends.
+	 */
+	static char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+		"abcdefghijklmnopqrstuvwxyz"
+		"0123456789-._~:/?#@!$&'*+,;=%";
+
+	int i, row, startrow;
+	char *linestr = calloc(sizeof(char), term.col+1); /* assume ascii */
+	char *c, *match = NULL;
+
+	row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y-1 : term.bot;
+	LIMIT(row, term.top, term.bot);
+	startrow = row;
+
+	/* find the start of the last url before selection */
+	do {
+		for (i = 0; i < term.col; ++i) {
+			if (term.line[row][i].u > 127) /* assume ascii */
+				continue;
+			linestr[i] = term.line[row][i].u;
+		}
+		linestr[term.col] = '\0';
+		if ((match = strstr(linestr, "http://"))
+				|| (match = strstr(linestr, "https://")))
+			break;
+		if (--row < term.top)
+			row = term.bot;
+	} while (row != startrow);
+
+	if (match) {
+		/* must happen before trim */
+		selclear(NULL);
+		sel.ob.x = strlen(linestr) - strlen(match);
+
+		/* trim the rest of the line from the url match */
+		for (c = match; *c != '\0'; ++c)
+			if (!strchr(URLCHARS, *c)) {
+				*c = '\0';
+				break;
+			}
+
+		/* select and copy */
+		sel.mode = 1;
+		sel.type = SEL_REGULAR;
+		sel.oe.x = sel.ob.x + strlen(match)-1;
+		sel.ob.y = sel.oe.y = row;
+		selnormalize();
+		tsetdirt(sel.nb.y, sel.ne.y);
+		selcopy(0);
+	}
+
+	free(linestr);
+}
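
The per-line part of the patch is a plain string scan: find the first "http://" or "https://" match in the text of a row, then cut the match off at the first character that is not in URLCHARS. The standalone sketch below reproduces just that string handling on ordinary C strings instead of st's term.line glyphs, so it can be compiled and run on its own; find_url, main and the sample strings are illustrative additions, not part of the patch, and strspn() stands in for the patch's strchr() trim loop.

#include <stdio.h>
#include <string.h>

/* Same character set the patch accepts inside a url; () and [] are
 * deliberately left out to reduce false positives at the end of a url. */
static const char URLCHARS[] =
	"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
	"abcdefghijklmnopqrstuvwxyz"
	"0123456789-._~:/?#@!$&'*+,;=%";

/* Find the first url in `line`, mirroring the strstr() search in copyurl().
 * On success, store its start in *start and its length in *len and return 1;
 * return 0 if the line contains no url. */
static int
find_url(const char *line, const char **start, size_t *len)
{
	const char *match;

	if (!(match = strstr(line, "http://")) &&
	    !(match = strstr(line, "https://")))
		return 0;

	*start = match;
	*len = strspn(match, URLCHARS); /* url ends at first byte not in URLCHARS */
	return 1;
}

int
main(void)
{
	const char *samples[] = {
		"see https://st.suckless.org/patches/ for details",
		"wrapped (http://example.com/a,b) in parentheses",
		"no url on this line",
	};
	const char *url;
	size_t i, len;

	for (i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
		if (find_url(samples[i], &url, &len))
			printf("%s\n  -> %.*s\n", samples[i], (int)len, url);
		else
			printf("%s\n  -> (no url)\n", samples[i]);
	}
	return 0;
}

Inside st itself, copyurl additionally walks the rows upward from the current selection (or from the bottom of the screen) and hands the resulting start column and length to the selection machinery (selclear, selnormalize, selcopy) as shown in the diff above.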