gcli

ref: a9109fa5763523520bda1c2bb189d9e6f1ccf085
author: Halfwit <michaelmisch1985@gmail.com>
date: Mon Jun 3 08:50:32 PDT 2019

Initial commit

--- /dev/null
+++ b/README.md
@@ -1,0 +1,34 @@
+# Search the web from the command line using a Google Custom Search engine
+Should work anywhere you have access to a Factotum (Plan 9, 9front, plan9port)
+
+## Installation
+
+go get github.com/halfwit/google
+go install github.com/halfwit/google
+
+## Usage 
+
+google [[-i] [-it type] [-is size] [-ic color] [-id scale]] [-m results] [-r url] [-a key] [-f type] [-e match] [-x exclude] [-u url] [-s safe] [-sn] [-t] query
+ -m  Number of results per query
+ -r  Search for sites related to [url]
+ -a  Use API key instead of factotum
+ -f  File type [bmp|gif|png|jpg|svg|pdf]
+ -e  Match string exactly
+ -x  Phrase to exclude
+ -u  Limit search to URL
+ -s  Safe search [active|high|medium|off]
+ -sn Include short description in results
+ -t  Include thumbnails
+ -i  Image search
+The following options require -i:
+ -it Image type [clipart|face|lineart|news|photo]
+ -is Image size [huge|icon|large|medium|small|xlarge|xxlarge]
+ -ic Image color [black|blue|brown|gray|green|orange|pink|purple|red|teal|white|yellow]
+ -id Image scale [color|gray|mono]
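+
+For example, an illustrative invocation (flag values and query are made up):
+
+google -i -it photo -is large -m 20 9front wallpaper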
+
+## Authentication
+
+To use this, you need an API key for the Google Custom Search engine.
+See https://developers.google.com/custom-search/v1/introduction and select "Get A Key".
+
+You also have to create the Custom Search Engine itself.
+Store both keys in your Factotum:
+`gcli` - your API key (AIza...)
+`gcse` - your CSE key
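+
+On Plan 9 or 9front the keys can be loaded into factotum with something like the
+following (the user name and key values here are placeholders):
+
+echo 'key proto=pass service=gcli user=glenda !password=AIza...' > /mnt/factotum/ctl
+echo 'key proto=pass service=gcse user=glenda !password=your-cse-id' > /mnt/factotum/ctl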
--- /dev/null
+++ b/gcli.go
@@ -1,0 +1,186 @@
+package main
+
+import (
+	"compress/gzip"
+	"encoding/json"
+	"flag"
+	"fmt"
+	"log"
+	"net/http"
+	"os"
+	"os/user"
+	"strings"
+	"sync"
+	"time"
+
+	"bitbucket.org/mischief/libauth"
+)
+
+type results struct {
+	Items []items `json:"items"`
+}
+
+type items struct {
+	Title   string
+	Link    string
+	Snippet string
+	Image   image `json:"image"`
+}
+
+type image struct {
+	ContextLink   string
+	ThumbnailLink string
+}
+
+var (
+	nmax    = flag.Int("m", 50, "Number of results per query")
+	related = flag.String("r", "", "Search for sites related to [url]")
+	isearch = flag.Bool("i", false, "Image search")
+	itype   = flag.String("it", "", "Image type [clipart|face|lineart|news|photo]")
+	isize   = flag.String("is", "", "Image size [huge|icon|large|medium|small|xlarge|xxlarge]")
+	icolor  = flag.String("ic", "", "Image color [black|blue|brown|gray|green|orange|pink|purple|red|teal|white|yellow]")
+	iscale  = flag.String("id", "", "Image scale [color|gray|mono]")
+	ftype   = flag.String("f", "", "Filetype [bmp|gif|png|jpg|svg|pdf]")
+	exact   = flag.String("e", "", "Match string exactly")
+	exclude = flag.String("x", "", "Phrase to exclude")
+	site    = flag.String("u", "", "Limit search to URL")
+	safe    = flag.String("s", "off", "Safe search [active|high|medium|off]")
+	snippet = flag.Bool("sn", false, "Include short description in results")
+	thumb   = flag.Bool("t", false, "Include thumbnails")
+)
+
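+// keys returns the API key (service=gcli) and custom search engine id
+// (service=gcse) for the current user from factotum.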
+func keys() (string, string, error) {
+	u, err := user.Current()
+	if err != nil {
+		return "", "", err
+	}
+	key, err := libauth.Getuserpasswd("proto=pass service=gcli user=%s", u.Username)
+	if err != nil {
+		return "", "", err
+	}
+	cx, err := libauth.Getuserpasswd("proto=pass service=gcse user=%s", u.Username)
+	if err != nil {
+		return "", "", err
+	}
+	return key.Password, cx.Password, nil
+}
+
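+// search fetches url and decodes the gzip-compressed JSON response into re.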
+func search(url string, re *results) error {
+	client := &http.Client{
+		Timeout: 10 * time.Second,
+	}
+	req, err := http.NewRequest("GET", url, nil)
+	if err != nil {
+		return err
+	}
+	req.Header.Set("Accept-Encoding", "gzip")
+	req.Header.Set("User-Agent", "gcli (gzip)")
+	response, err := client.Do(req)
+	if err != nil {
+		return err
+	}
+	defer response.Body.Close()
+	gr, err := gzip.NewReader(response.Body)
+	if err != nil {
+		return err
+	}
+	defer gr.Close()
+	return json.NewDecoder(gr).Decode(re)
+}
+
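+// buildurl assembles a Custom Search API request URL for the given result offset.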
+func buildurl(key, cx string, start int) string {
+	var opts strings.Builder
+
+	query := strings.Join(flag.Args(), "+")
+	url := fmt.Sprintf("https://www.googleapis.com/customsearch/v1?key=%s&start=%d&maxResults=10&cx=%s&q=%s", key, start, cx, query)
+
+	// The options are independent of one another, so test each flag
+	// rather than selecting a single switch case.
+	if *isearch {
+		opts.WriteString("&searchType=image")
+		if *itype != "" {
+			opts.WriteString("&imageType=")
+			opts.WriteString(*itype)
+		}
+	}
+	if *ftype != "" {
+		opts.WriteString("&fileType=")
+		opts.WriteString(*ftype)
+	}
+/*	TODO: remaining options, not yet implemented:
+	case *related != "":
+		opts.WriteString(
+	case *isize != "":
+		if *isearch {
+			search = search.ImgSize(*isize)
+		}
+	case *icolor != "":
+		if *isearch {
+			search = search.ImgDominantColor(*icolor)
+		}
+	case *iscale != "":
+		if *isearch {
+			search = search.ImgColorType(*iscale)
+		}
+
+	case *exact != "":
+		search = search.ExactTerms(*exact)
+	case *exclude != "":
+		search = search.ExcludeTerms(*exclude)
+	case *site != "":
+		search = search.SiteSearch(*site)
+*/
+	return url + opts.String()
+}
+
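+// handle formats each result item and sends it on the lines channel.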
+func handle(r results, lines chan string) {
+	for _, item := range r.Items {
+		var line strings.Builder
+		line.WriteString(fmt.Sprintf("%s %s", item.Title, item.Link))
+		if *snippet {
+			snip := strings.Replace(item.Snippet, "\n", " ", -1)
+			line.WriteString(" ")
+			line.WriteString(snip)
+		}
+		if *isearch {
+			line.WriteString(fmt.Sprintf(" %s %s",
+				item.Image.ContextLink,
+				item.Image.ThumbnailLink,
+			))
+		}
+		lines <- line.String()
+	}
+}
+
+func main() {
+	flag.Parse()
+	if flag.NArg() == 0 {
+		flag.Usage()
+		os.Exit(1)
+	}
+	key, cx, err := keys()
+	if err != nil {
+		log.Fatal(err)
+	}
+	// TODO: There's no determinism to the output,
+	// but running this in the main routine is very slow on many systems.
+
+	// The API only gives us 10 results at a time,
+	// so fetch each block of results concurrently
+	// until we reach the requested maximum.
+	var wg sync.WaitGroup
+	lines := make(chan string)
+	for i := 0; i <= *nmax; i += 10 {
+		wg.Add(1)
+		go func(start int) {
+			defer wg.Done()
+			url := buildurl(key, cx, start)
+			r := results{}
+			if err := search(url, &r); err != nil {
+				log.Println(err)
+				return
+			}
+			handle(r, lines)
+		}(i)
+	}
+	go func() {
+		wg.Wait()
+		close(lines)
+	}()
+	for line := range lines {
+		fmt.Println(line)
+	}
+}