ref: ab91f6b00015858a8d8e115279ec1a06c0617a7d
author: Halfwit <michaelmisch1985@gmail.com>
date: Thu Jun 6 06:59:12 PDT 2019
Initial commit
--- /dev/null
+++ b/README.md
@@ -1,0 +1,17 @@
+# wkcli
+
+Search MediaWiki sites from the command line
+
+## Usage
+```
+wkcli [ -lx ] [ -w wiki ] query
+```
+
+ - `-l` List the page's external links (references) as well
+ - `-x` Treat the query as an exact match, skipping the disambiguation list
+ - `-w wiki` Select which MediaWiki site to search (default: en.wikipedia.org)
+
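+For example (the queries below are illustrative; actual output depends on the live wiki):
+
+```
+# List candidate pages and their synopses
+wkcli mercury
+
+# Print the heading and external links for an exact title
+wkcli -x -l "Mercury (planet)"
+
+# Search a different MediaWiki site
+wkcli -w en.wiktionary.org serendipity
+```
+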
+## Bugs
+
+Issues with how the JSON parsing is done lead to fatal errors when bad results come in. This will be addressed in the future as I have time.
--- /dev/null
+++ b/list.go
@@ -1,0 +1,69 @@
+package main
+
+import (
+ "compress/gzip"
+ "fmt"
+ "io/ioutil"
+ "strings"
+
+ "github.com/tidwall/gjson"
+)
+
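+// fromInterface collects extraction errors while pulling typed slices out of
+// the loosely-typed opensearch reply.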
+type fromInterface []string
+
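+// listAmbiguities prints each candidate title alongside its synopsis when a
+// search matches more than one page.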
+func listAmbiguities(r []interface{}) error {
+ var iface fromInterface
+ if len(r) < 3 {
+ return fmt.Errorf("Received incomplete results, cannot continue")
+ }
+ names := iface.getslice(r[1], "Names")
+ syn := iface.getslice(r[2], "Synopsis")
+ if err := iface.errors(); err != nil {
+ return err
+ }
+ for n, m := range names {
+ if n >= len(syn) {
+ break
+ }
+ fmt.Printf("%s - %s\n", m, syn[n])
+ }
+ return nil
+}
+
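+// listLinks prints the matched page's heading and, when -l is set, the raw
+// extlinks portion of the query reply.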
+func listLinks(header []interface{}, results *gzip.Reader) error {
+ err := listHeading(header)
+ if err != nil || !*refs {
+ return err
+ }
+ body, err := ioutil.ReadAll(results)
+ if err != nil {
+ return err
+ }
+ links := gjson.GetBytes(body, "*.extlinks.url")
+ fmt.Println(links)
+ return nil
+}
+
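+// listHeading prints a "title - url" banner followed by the page synopsis.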
+func listHeading(r []interface{}) error {
+ var iface fromInterface
+ if len(r) < 4 {
+ return fmt.Errorf("unable to parse results, cannot continue")
+ }
+ name := iface.getslice(r[1], "Names")
+ syns := iface.getslice(r[2], "Synopsis")
+ url := iface.getslice(r[3], "Urls")
+ if err := iface.errors(); err != nil {
+ return err
+ }
+ fmt.Printf("%s - %s\n%s\n", name[0], url[0], syns[0])
+ return nil
+}
+
+func (e *fromInterface) getslice(i interface{}, name string) []interface{} {
+ if v, ok := i.([]interface{}); ok {
+ return v
+ }
+ *e = append(*e, fmt.Sprintf("unable to locate %s in results", name))
+ return nil
+}
+
+func (e *fromInterface) errors() error {
+ if len(*e) > 0 {
+ return fmt.Errorf("%s\n", *e)
+ }
+ return nil
+}
--- /dev/null
+++ b/searches.go
@@ -1,0 +1,60 @@
+package main
+
+import (
+ "bytes"
+ "compress/gzip"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "time"
+)
+
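+// initial hits the opensearch endpoint, whose reply is a four-element JSON
+// array: [query, titles, descriptions, urls]; links asks the query endpoint
+// for a page's external links (extlinks).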
+var initial = "https://%s/w/api.php?action=opensearch&search=%s&namespace=0&format=json"
+var links = "https://%s/w/api.php?action=query&titles=%s&prop=extlinks&ellimit=500&formatversion=2&format=json"
+
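+// getInitial runs the opensearch request and decodes its top-level JSON array.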
+func getInitial(query string) ([]interface{}, error) {
+ url := fmt.Sprintf(initial, *wiki, query)
+ var r []interface{}
+ gr, err := run(url)
+ if err != nil {
+ return nil, err
+ }
+ if err = json.NewDecoder(gr).Decode(&r); err != nil {
+ return nil, err
+ }
+ return r, nil
+}
+
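+// getLinks fetches the extlinks query for a page title.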
+func getLinks(query string) (*gzip.Reader, error) {
+ url := fmt.Sprintf(links, *wiki, query)
+ return run(url)
+}
+
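+// isAmbiguous reports whether the opensearch reply named more than one page.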
+func isAmbiguous(r []interface{}) bool {
+ // The codepath for true will immediately catch and properly print this error
+ if len(r) < 2 {
+ return true
+ }
+ names, ok := r[1].([]interface{})
+ if !ok {
+ return true
+ }
+ return len(names) > 1
+}
+
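+// run performs a gzip-encoded GET with a 15-second timeout and returns a
+// reader over the decompressed body.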
+func run(url string) (*gzip.Reader, error) {
+ client := &http.Client{
+ Timeout: 15 * time.Second,
+ }
+ req, err := http.NewRequest("GET", url, nil)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Accept-Encoding", "gzip")
+ req.Header.Set("User-Agent", "gcli (gzip)")
+ response, err := client.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ defer response.Body.Close()
+ if response.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("unexpected status: %s", response.Status)
+ }
+ // Drain the body before the deferred Close fires; returning a reader backed
+ // by a closed response body would fail on the first read.
+ body, err := ioutil.ReadAll(response.Body)
+ if err != nil {
+ return nil, err
+ }
+ return gzip.NewReader(bytes.NewReader(body))
+}
--- /dev/null
+++ b/wkcli.go
@@ -1,0 +1,41 @@
+package main
+
+import (
+ "flag"
+ "log"
+ "net/url"
+ "os"
+ "strings"
+)
+
+var (
+ refs = flag.Bool("l", false, "Return references from page")
+ ifl = flag.Bool("x", false, "Return exact match")
+ wiki = flag.String("w", "en.wikipedia.org", "Mediawiki to search")
+)
+
+func main() {
+ flag.Parse()
+ if flag.Lookup("h") != nil {
+ flag.Usage()
+ os.Exit(1)
+ }
+ query := strings.Join(flag.Args(), "+")
+ d, err := getInitial(query)
+ if err != nil {
+ log.Fatalf("Error in initial query: %v", err)
+ }
+ if isAmbiguous(d) && !*ifl {
+ // No unique result, print list and exit
+ if err := listAmbiguities(d); err != nil {
+ log.Fatal(err)
+ }
+ os.Exit(0)
+ }
+ d2, err := getLinks(query)
+ if err != nil {
+ log.Fatalf("%s\n%s\n", listHeading(d), err)
+ }
+ if err := listLinks(d, d2); err != nil {
+ log.Fatal(err)
+ }
+}