Mirror of https://github.com/chubin/wttr.in (synced 2025-01-26 02:34:59 +00:00)

Commit 88df26818f: Merge branch 'chubin/logging'
42 changed files with 3216 additions and 988 deletions

30  .golangci.yaml  Normal file
@@ -0,0 +1,30 @@
run:
  skip-dirs:
    - pkg/curlator
linters:
  enable-all: true
  disable:
    - wsl
    - wrapcheck
    - varnamelen
    - gci
    - exhaustivestruct
    - exhaustruct
    - gomnd
    - gofmt

    # to be fixed:
    - ireturn
    - gosec
    - noctx
    - interfacer

    # deprecated:
    - scopelint
    - deadcode
    - varcheck
    - maligned
    - ifshort
    - nosnakecase
    - structcheck
    - golint

9  Makefile  Normal file
@@ -0,0 +1,9 @@
srv: srv.go internal/*/*.go internal/*/*/*.go
	go build -o srv -ldflags '-w -linkmode external -extldflags "-static"' ./
	#go build -o srv ./

go-test:
	go test ./...

lint:
	golangci-lint run ./...

@@ -1,79 +0,0 @@
package main

import (
	"log"
	"net/http"
	"sync"
	"time"

	"github.com/robfig/cron"
)

var peakRequest30 sync.Map
var peakRequest60 sync.Map

func initPeakHandling() {
	c := cron.New()
	// cronTime := fmt.Sprintf("%d,%d * * * *", 30-prefetchInterval/60, 60-prefetchInterval/60)
	c.AddFunc("24 * * * *", prefetchPeakRequests30)
	c.AddFunc("54 * * * *", prefetchPeakRequests60)
	c.Start()
}

func savePeakRequest(cacheDigest string, r *http.Request) {
	_, min, _ := time.Now().Clock()
	if min == 30 {
		peakRequest30.Store(cacheDigest, *r)
	} else if min == 0 {
		peakRequest60.Store(cacheDigest, *r)
	}
}

func prefetchRequest(r *http.Request) {
	processRequest(r)
}

func syncMapLen(sm *sync.Map) int {
	count := 0

	f := func(key, value interface{}) bool {

		// Not really certain about this part, don't know for sure
		// if this is a good check for an entry's existence
		if key == "" {
			return false
		}
		count++

		return true
	}

	sm.Range(f)

	return count
}

func prefetchPeakRequests(peakRequestMap *sync.Map) {
	peakRequestLen := syncMapLen(peakRequestMap)
	log.Printf("PREFETCH: Prefetching %d requests\n", peakRequestLen)
	if peakRequestLen == 0 {
		return
	}
	sleepBetweenRequests := time.Duration(prefetchInterval*1000/peakRequestLen) * time.Millisecond
	peakRequestMap.Range(func(key interface{}, value interface{}) bool {
		go func(r http.Request) {
			prefetchRequest(&r)
		}(value.(http.Request))
		peakRequestMap.Delete(key)
		time.Sleep(sleepBetweenRequests)
		return true
	})
}

func prefetchPeakRequests30() {
	prefetchPeakRequests(&peakRequest30)
}

func prefetchPeakRequests60() {
	prefetchPeakRequests(&peakRequest60)
}

@@ -1,199 +0,0 @@
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"math/rand"
	"net"
	"net/http"
	"strings"
	"time"
)

func processRequest(r *http.Request) responseWithHeader {
	var response responseWithHeader

	if response, ok := redirectInsecure(r); ok {
		return *response
	}

	if dontCache(r) {
		return get(r)
	}

	cacheDigest := getCacheDigest(r)

	foundInCache := false

	savePeakRequest(cacheDigest, r)

	cacheBody, ok := lruCache.Get(cacheDigest)
	if ok {
		cacheEntry := cacheBody.(responseWithHeader)

		// if after all attempts we still have no answer,
		// we try to make the query on our own
		for attempts := 0; attempts < 300; attempts++ {
			if !ok || !cacheEntry.InProgress {
				break
			}
			time.Sleep(30 * time.Millisecond)
			cacheBody, ok = lruCache.Get(cacheDigest)
			cacheEntry = cacheBody.(responseWithHeader)
		}
		if cacheEntry.InProgress {
			log.Printf("TIMEOUT: %s\n", cacheDigest)
		}
		if ok && !cacheEntry.InProgress && cacheEntry.Expires.After(time.Now()) {
			response = cacheEntry
			foundInCache = true
		}
	}

	if !foundInCache {
		lruCache.Add(cacheDigest, responseWithHeader{InProgress: true})
		response = get(r)
		if response.StatusCode == 200 || response.StatusCode == 304 || response.StatusCode == 404 {
			lruCache.Add(cacheDigest, response)
		} else {
			log.Printf("REMOVE: %d response for %s from cache\n", response.StatusCode, cacheDigest)
			lruCache.Remove(cacheDigest)
		}
	}
	return response
}

func get(req *http.Request) responseWithHeader {

	client := &http.Client{}

	queryURL := fmt.Sprintf("http://%s%s", req.Host, req.RequestURI)

	proxyReq, err := http.NewRequest(req.Method, queryURL, req.Body)
	if err != nil {
		log.Printf("Request: %s\n", err)
	}

	// proxyReq.Header.Set("Host", req.Host)
	// proxyReq.Header.Set("X-Forwarded-For", req.RemoteAddr)

	for header, values := range req.Header {
		for _, value := range values {
			proxyReq.Header.Add(header, value)
		}
	}

	res, err := client.Do(proxyReq)

	if err != nil {
		panic(err)
	}

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		log.Println(err)
	}

	return responseWithHeader{
		InProgress: false,
		Expires:    time.Now().Add(time.Duration(randInt(1000, 1500)) * time.Second),
		Body:       body,
		Header:     res.Header,
		StatusCode: res.StatusCode,
	}
}

// implementation of the cache.get_signature of original wttr.in
func getCacheDigest(req *http.Request) string {

	userAgent := req.Header.Get("User-Agent")

	queryHost := req.Host
	queryString := req.RequestURI

	clientIPAddress := readUserIP(req)

	lang := req.Header.Get("Accept-Language")

	return fmt.Sprintf("%s:%s%s:%s:%s", userAgent, queryHost, queryString, clientIPAddress, lang)
}

// return true if request should not be cached
func dontCache(req *http.Request) bool {

	// dont cache cyclic requests
	loc := strings.Split(req.RequestURI, "?")[0]
	return strings.Contains(loc, ":")
}

// redirectInsecure returns redirection response, and bool value, if redirection was needed,
// if the query comes from a browser, and it is insecure.
//
// Insecure queries are marked by the frontend web server
// with X-Forwarded-Proto header:
//
//	proxy_set_header X-Forwarded-Proto $scheme;
//
func redirectInsecure(req *http.Request) (*responseWithHeader, bool) {
	if isPlainTextAgent(req.Header.Get("User-Agent")) {
		return nil, false
	}

	if strings.ToLower(req.Header.Get("X-Forwarded-Proto")) == "https" {
		return nil, false
	}

	target := "https://" + req.Host + req.URL.Path
	if len(req.URL.RawQuery) > 0 {
		target += "?" + req.URL.RawQuery
	}

	body := []byte(fmt.Sprintf(`<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
<TITLE>301 Moved</TITLE></HEAD><BODY>
<H1>301 Moved</H1>
The document has moved
<A HREF="%s">here</A>.
</BODY></HTML>
`, target))

	return &responseWithHeader{
		InProgress: false,
		Expires:    time.Now().Add(time.Duration(randInt(1000, 1500)) * time.Second),
		Body:       body,
		Header:     http.Header{"Location": []string{target}},
		StatusCode: 301,
	}, true
}

// isPlainTextAgent returns true if userAgent is a plain-text agent
func isPlainTextAgent(userAgent string) bool {
	userAgentLower := strings.ToLower(userAgent)
	for _, signature := range plainTextAgents {
		if strings.Contains(userAgentLower, signature) {
			return true
		}
	}
	return false
}

func readUserIP(r *http.Request) string {
	IPAddress := r.Header.Get("X-Real-Ip")
	if IPAddress == "" {
		IPAddress = r.Header.Get("X-Forwarded-For")
	}
	if IPAddress == "" {
		IPAddress = r.RemoteAddr
		var err error
		IPAddress, _, err = net.SplitHostPort(IPAddress)
		if err != nil {
			log.Printf("ERROR: userip: %q is not IP:port\n", IPAddress)
		}
	}
	return IPAddress
}

func randInt(min int, max int) int {
	return min + rand.Intn(max-min)
}

87  cmd/srv.go
@@ -1,87 +0,0 @@
package main

import (
	"context"
	"fmt"
	"log"
	"net"
	"net/http"
	"time"

	lru "github.com/hashicorp/golang-lru"
)

const serverPort = 8083
const uplinkSrvAddr = "127.0.0.1:9002"
const uplinkTimeout = 30
const prefetchInterval = 300
const lruCacheSize = 12800

// plainTextAgents contains signatures of the plain-text agents
var plainTextAgents = []string{
	"curl",
	"httpie",
	"lwp-request",
	"wget",
	"python-httpx",
	"python-requests",
	"openbsd ftp",
	"powershell",
	"fetch",
	"aiohttp",
	"http_get",
	"xh",
}

var lruCache *lru.Cache

type responseWithHeader struct {
	InProgress bool      // true if the request is being processed
	Expires    time.Time // expiration time of the cache entry

	Body       []byte
	Header     http.Header
	StatusCode int // e.g. 200
}

func init() {
	var err error
	lruCache, err = lru.New(lruCacheSize)
	if err != nil {
		panic(err)
	}

	dialer := &net.Dialer{
		Timeout:   uplinkTimeout * time.Second,
		KeepAlive: uplinkTimeout * time.Second,
		DualStack: true,
	}

	http.DefaultTransport.(*http.Transport).DialContext = func(ctx context.Context, network, _ string) (net.Conn, error) {
		return dialer.DialContext(ctx, network, uplinkSrvAddr)
	}

	initPeakHandling()
}

func copyHeader(dst, src http.Header) {
	for k, vv := range src {
		for _, v := range vv {
			dst.Add(k, v)
		}
	}
}

func main() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		// printStat()
		response := processRequest(r)

		copyHeader(w.Header(), response.Header)
		w.Header().Set("Access-Control-Allow-Origin", "*")
		w.WriteHeader(response.StatusCode)
		w.Write(response.Body)
	})

	log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", serverPort), nil))
}

40  cmd/stat.go
@@ -1,40 +0,0 @@
package main

// import (
// 	"log"
// 	"sync"
// 	"time"
// )
//
// type safeCounter struct {
// 	v   map[int]int
// 	mux sync.Mutex
// }
//
// func (c *safeCounter) inc(key int) {
// 	c.mux.Lock()
// 	c.v[key]++
// 	c.mux.Unlock()
// }
//
// // func (c *safeCounter) val(key int) int {
// // 	c.mux.Lock()
// // 	defer c.mux.Unlock()
// // 	return c.v[key]
// // }
// //
// // func (c *safeCounter) reset(key int) int {
// // 	c.mux.Lock()
// // 	defer c.mux.Unlock()
// // 	result := c.v[key]
// // 	c.v[key] = 0
// // 	return result
// // }
//
// var queriesPerMinute safeCounter
//
// func printStat() {
// 	_, min, _ := time.Now().Clock()
// 	queriesPerMinute.inc(min)
// 	log.Printf("Processed %d requests\n", min)
// }

26  go.mod  Normal file
@@ -0,0 +1,26 @@
module github.com/chubin/wttr.in

go 1.16

require (
	github.com/alecthomas/kong v0.7.1 // indirect
	github.com/denisenkom/go-mssqldb v0.0.0-20200910202707-1e08a3fab204 // indirect
	github.com/go-sql-driver/mysql v1.5.0 // indirect
	github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect
	github.com/hashicorp/golang-lru v0.6.0
	github.com/itchyny/gojq v0.12.11 // indirect
	github.com/klauspost/lctime v0.1.0 // indirect
	github.com/lib/pq v1.8.0 // indirect
	github.com/mattn/go-colorable v0.1.13 // indirect
	github.com/mattn/go-runewidth v0.0.14 // indirect
	github.com/mattn/go-sqlite3 v1.14.16 // indirect
	github.com/robfig/cron v1.2.0
	github.com/samonzeweb/godb v1.0.8 // indirect
	github.com/sirupsen/logrus v1.9.0 // indirect
	github.com/smartystreets/assertions v1.2.0 // indirect
	github.com/smartystreets/goconvey v1.6.4 // indirect
	github.com/stretchr/testify v1.8.1 // indirect
	github.com/zsefvlol/timezonemapper v1.0.0 // indirect
	golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)

80  go.sum  Normal file
@@ -0,0 +1,80 @@
github.com/alecthomas/assert/v2 v2.1.0/go.mod h1:b/+1DI2Q6NckYi+3mXyH3wFb8qG37K/DuK80n7WefXA=
github.com/alecthomas/kong v0.7.1 h1:azoTh0IOfwlAX3qN9sHWTxACE2oV8Bg2gAwBsMwDQY4=
github.com/alecthomas/kong v0.7.1/go.mod h1:n1iCIO2xS46oE8ZfYCNDqdR0b0wZNrXAIAqro/2132U=
github.com/alecthomas/repr v0.1.0/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denisenkom/go-mssqldb v0.0.0-20200910202707-1e08a3fab204/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/hashicorp/golang-lru v0.6.0 h1:uL2shRDx7RTrOrTCUZEGP/wJUFiUI8QT6E7z5o8jga4=
github.com/hashicorp/golang-lru v0.6.0/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/itchyny/gojq v0.12.11 h1:YhLueoHhHiN4mkfM+3AyJV6EPcCxKZsOnYf+aVSwaQw=
github.com/itchyny/gojq v0.12.11/go.mod h1:o3FT8Gkbg/geT4pLI0tF3hvip5F3Y/uskjRz9OYa38g=
github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE=
github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/klauspost/lctime v0.1.0 h1:nINsuFc860M9cyYhT6vfg6U1USh7kiVBj/s/2b04U70=
github.com/klauspost/lctime v0.1.0/go.mod h1:OwdMhr8tbQvusAsnilqkkgDQqivWlqyg0w5cfXkLiDk=
github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ=
github.com/robfig/cron v1.2.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k=
github.com/samonzeweb/godb v1.0.8 h1:WRn6nq0FChYOzh+w8SgpXHUkEhL7W6ZqkCf5Ninx7Uc=
github.com/samonzeweb/godb v1.0.8/go.mod h1:LNDt3CakfBwpRY4AD0y/QPTbj+jB6O17tSxQES0p47o=
github.com/samonzeweb/godb v1.0.15 h1:HyNb8o1w109as9KWE8ih1YIBe8jC4luJ22f1XNacW38=
github.com/samonzeweb/godb v1.0.15/go.mod h1:SxCHqyireDXNrZApknS9lGUEutA43x9eJF632ecbK5Q=
github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v1.2.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/zsefvlol/timezonemapper v1.0.0 h1:HXqkOzf01gXYh2nDQcDSROikFgMaximnhE8BY9SyF6E=
github.com/zsefvlol/timezonemapper v1.0.0/go.mod h1:cVUCOLEmc/VvOMusEhpd2G/UBtadL26ZVz2syODXDoQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 h1:0A+M6Uqn+Eje4kHMK80dtF3JCXC4ykBgQG4Fe06QRhQ=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab h1:2QkjZIsXupsJbJIdSjjUOgWK3aEtzyuh2mPt3l/CkeU=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

179  internal/config/config.go  Normal file
@@ -0,0 +1,179 @@
package config

import (
	"log"
	"os"

	"gopkg.in/yaml.v3"

	"github.com/chubin/wttr.in/internal/types"
	"github.com/chubin/wttr.in/internal/util"
)

// Config of the program.
type Config struct {
	Cache
	Geo
	Logging
	Server
	Uplink
}

// Logging configuration.
type Logging struct {
	// AccessLog path.
	AccessLog string `yaml:"accessLog,omitempty"`

	// ErrorsLog path.
	ErrorsLog string `yaml:"errorsLog,omitempty"`

	// Interval between access log flushes, in seconds.
	Interval int `yaml:"interval,omitempty"`
}

// Server configuration.
type Server struct {
	// PortHTTP is the port where the HTTP server must listen.
	// If 0, HTTP is disabled.
	PortHTTP int `yaml:"portHttp,omitempty"`

	// PortHTTPS is the port where the HTTPS server must listen.
	// If 0, HTTPS is disabled.
	PortHTTPS int `yaml:"portHttps,omitempty"`

	// TLSCertFile contains the path to the cert file for the TLS server.
	TLSCertFile string `yaml:"tlsCertFile,omitempty"`

	// TLSKeyFile contains the path to the key file for the TLS server.
	TLSKeyFile string `yaml:"tlsKeyFile,omitempty"`
}

// Uplink configuration.
type Uplink struct {
	// Address contains address of the uplink server in form IP:PORT.
	Address string `yaml:"address,omitempty"`

	// Timeout for upstream queries.
	Timeout int `yaml:"timeout,omitempty"`

	// PrefetchInterval contains time (in milliseconds) indicating
	// how long the prefetch procedure should take.
	PrefetchInterval int `yaml:"prefetchInterval,omitempty"`
}

// Cache configuration.
type Cache struct {
	// Size of the main cache.
	Size int `yaml:"size,omitempty"`
}

// Geo contains geolocation configuration.
type Geo struct {
	// IPCache contains the path to the IP Geodata cache.
	IPCache string `yaml:"ipCache,omitempty"`

	// IPCacheDB contains the path to the SQLite DB with the IP Geodata cache.
	IPCacheDB string `yaml:"ipCacheDb,omitempty"`

	IPCacheType types.CacheType `yaml:"ipCacheType,omitempty"`

	// LocationCache contains the path to the Location Geodata cache.
	LocationCache string `yaml:"locationCache,omitempty"`

	// LocationCacheDB contains the path to the SQLite DB with the Location Geodata cache.
	LocationCacheDB string `yaml:"locationCacheDb,omitempty"`

	LocationCacheType types.CacheType `yaml:"locationCacheType,omitempty"`

	Nominatim []Nominatim
}

type Nominatim struct {
	Name string

	// Type describes the type of the location service.
	// Supported types: iq.
	Type string

	URL string

	Token string
}

// Default contains the default configuration.
func Default() *Config {
	return &Config{
		Cache{
			Size: 12800,
		},
		Geo{
			IPCache:           "/wttr.in/cache/ip2l",
			IPCacheDB:         "/wttr.in/cache/geoip.db",
			IPCacheType:       types.CacheTypeDB,
			LocationCache:     "/wttr.in/cache/loc",
			LocationCacheDB:   "/wttr.in/cache/geoloc.db",
			LocationCacheType: types.CacheTypeDB,
			Nominatim: []Nominatim{
				{
					Name:  "locationiq",
					Type:  "iq",
					URL:   "https://eu1.locationiq.com/v1/search",
					Token: os.Getenv("NOMINATIM_LOCATIONIQ"),
				},
				{
					Name:  "opencage",
					Type:  "opencage",
					URL:   "https://api.opencagedata.com/geocode/v1/json",
					Token: os.Getenv("NOMINATIM_OPENCAGE"),
				},
			},
		},
		Logging{
			AccessLog: "/wttr.in/log/access.log",
			ErrorsLog: "/wttr.in/log/errors.log",
			Interval:  300,
		},
		Server{
			PortHTTP:    8083,
			PortHTTPS:   8084,
			TLSCertFile: "/wttr.in/etc/fullchain.pem",
			TLSKeyFile:  "/wttr.in/etc/privkey.pem",
		},
		Uplink{
			Address:          "127.0.0.1:9002",
			Timeout:          30,
			PrefetchInterval: 300,
		},
	}
}

// Load config from file.
func Load(filename string) (*Config, error) {
	var (
		config Config
		data   []byte
		err    error
	)

	data, err = os.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	err = util.YamlUnmarshalStrict(data, &config)
	if err != nil {
		return nil, err
	}

	return &config, nil
}

func (c *Config) Dump() []byte {
	data, err := yaml.Marshal(c)
	if err != nil {
		// should never happen.
		log.Fatalln("config.Dump():", err)
	}

	return data
}

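A minimal usage sketch for the new config package (not part of the diff); the config file path is a made-up example, and the code falls back to the built-in defaults when the file cannot be read:

package main

import (
	"fmt"
	"log"

	"github.com/chubin/wttr.in/internal/config"
)

func main() {
	// Hypothetical config path; any readable YAML file would do.
	cfg, err := config.Load("/wttr.in/etc/config.yaml")
	if err != nil {
		log.Println("falling back to default configuration:", err)
		cfg = config.Default()
	}

	// Dump serializes the effective configuration back to YAML.
	fmt.Println(string(cfg.Dump()))
}
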
88  internal/geo/ip/convert.go  Normal file
@@ -0,0 +1,88 @@
package ip

import (
	"fmt"
	"log"
	"path/filepath"

	"github.com/samonzeweb/godb"
	"github.com/samonzeweb/godb/adapters/sqlite"

	"github.com/chubin/wttr.in/internal/util"
)

//nolint:cyclop
func (c *Cache) ConvertCache() error {
	dbfile := c.config.Geo.IPCacheDB

	err := util.RemoveFileIfExists(dbfile)
	if err != nil {
		return err
	}

	db, err := godb.Open(sqlite.Adapter, dbfile)
	if err != nil {
		return err
	}

	err = createTable(db, "Address")
	if err != nil {
		return err
	}

	log.Println("listing cache entries...")
	files, err := filepath.Glob(filepath.Join(c.config.Geo.IPCache, "*"))
	if err != nil {
		return err
	}

	log.Printf("going to convert %d entries\n", len(files))

	block := []Address{}
	for i, file := range files {
		ip := filepath.Base(file)
		loc, err := c.Read(ip)
		if err != nil {
			log.Println("invalid entry for", ip)

			continue
		}

		block = append(block, *loc)

		if i%1000 != 0 || i == 0 {
			continue
		}

		err = db.BulkInsert(&block).Do()
		if err != nil {
			return err
		}
		block = []Address{}
		log.Println("converted", i+1, "entries")
	}

	// inserting the rest.
	err = db.BulkInsert(&block).Do()
	if err != nil {
		return err
	}

	log.Println("converted", len(files), "entries")

	return nil
}

func createTable(db *godb.DB, tableName string) error {
	createTable := fmt.Sprintf(
		`create table %s (
		name text not null primary key,
		fullName text not null,
		lat text not null,
		long text not null);
		`, tableName)

	_, err := db.CurrentDB().Exec(createTable)

	return err
}

244  internal/geo/ip/ip.go  Normal file
@@ -0,0 +1,244 @@
package ip

import (
	"fmt"
	"log"
	"net/http"
	"os"
	"path"
	"regexp"
	"strconv"
	"strings"

	"github.com/chubin/wttr.in/internal/config"
	"github.com/chubin/wttr.in/internal/routing"
	"github.com/chubin/wttr.in/internal/types"
	"github.com/chubin/wttr.in/internal/util"
	"github.com/samonzeweb/godb"
	"github.com/samonzeweb/godb/adapters/sqlite"
)

// Address information.
type Address struct {
	IP          string  `db:"ip,key"`
	CountryCode string  `db:"countryCode"`
	Country     string  `db:"country"`
	Region      string  `db:"region"`
	City        string  `db:"city"`
	Latitude    float64 `db:"latitude"`
	Longitude   float64 `db:"longitude"`
}

func (l *Address) String() string {
	if l.Latitude == -1000 {
		return fmt.Sprintf(
			"%s;%s;%s;%s",
			l.CountryCode, l.Country, l.Region, l.City)
	}

	return fmt.Sprintf(
		"%s;%s;%s;%s;%v;%v",
		l.CountryCode, l.Country, l.Region, l.City, l.Latitude, l.Longitude)
}

// Cache provides access to the IP Geodata cache.
type Cache struct {
	config *config.Config
	db     *godb.DB
}

// NewCache returns new cache reader for the specified config.
func NewCache(config *config.Config) (*Cache, error) {
	db, err := godb.Open(sqlite.Adapter, config.Geo.IPCacheDB)
	if err != nil {
		return nil, err
	}

	// Needed for "upsert" implementation in Put()
	db.UseErrorParser()

	return &Cache{
		config: config,
		db:     db,
	}, nil
}

// Read returns location information from the cache, if found,
// or types.ErrNotFound if not found. If the entry is found, but its format
// is invalid, types.ErrInvalidCacheEntry is returned.
//
// Format:
//
//	[CountryCode];Country;Region;City;[Latitude];[Longitude]
//
// Example:
//
//	DE;Germany;Free and Hanseatic City of Hamburg;Hamburg;53.5736;9.9782
//
func (c *Cache) Read(addr string) (*Address, error) {
	if c.config.Geo.IPCacheType == types.CacheTypeDB {
		return c.readFromCacheDB(addr)
	}

	return c.readFromCacheFile(addr)
}

func (c *Cache) readFromCacheFile(addr string) (*Address, error) {
	bytes, err := os.ReadFile(c.cacheFile(addr))
	if err != nil {
		return nil, types.ErrNotFound
	}

	return NewAddressFromString(addr, string(bytes))
}

func (c *Cache) readFromCacheDB(addr string) (*Address, error) {
	result := Address{}
	err := c.db.Select(&result).
		Where("IP = ?", addr).
		Do()
	if err != nil {
		return nil, err
	}

	return &result, nil
}

func (c *Cache) Put(addr string, loc *Address) error {
	if c.config.Geo.IPCacheType == types.CacheTypeDB {
		return c.putToCacheDB(loc)
	}

	return c.putToCacheFile(addr, loc)
}

func (c *Cache) putToCacheDB(loc *Address) error {
	err := c.db.Insert(loc).Do()
	// it should work like this:
	//
	//	target := dberror.UniqueConstraint{}
	//	if errors.As(err, &target) {
	//
	// See: https://github.com/samonzeweb/godb/pull/23
	//
	// But for some reason it does not work,
	// so the dirty hack is used:
	if strings.Contains(fmt.Sprint(err), "UNIQUE constraint failed") {
		return c.db.Update(loc).Do()
	}

	return err
}

func (c *Cache) putToCacheFile(addr string, loc fmt.Stringer) error {
	return os.WriteFile(c.cacheFile(addr), []byte(loc.String()), 0o600)
}

// cacheFile returns path to the cache entry for addr.
func (c *Cache) cacheFile(addr string) string {
	return path.Join(c.config.Geo.IPCache, addr)
}

// NewAddressFromString parses the location cache entry s,
// and returns the location, or an error, if the cache entry is invalid.
func NewAddressFromString(addr, s string) (*Address, error) {
	var (
		lat  float64 = -1000
		long float64 = -1000
		err  error
	)

	parts := strings.Split(s, ";")
	if len(parts) < 4 {
		return nil, types.ErrInvalidCacheEntry
	}

	if len(parts) >= 6 {
		lat, err = strconv.ParseFloat(parts[4], 64)
		if err != nil {
			return nil, types.ErrInvalidCacheEntry
		}

		long, err = strconv.ParseFloat(parts[5], 64)
		if err != nil {
			return nil, types.ErrInvalidCacheEntry
		}
	}

	return &Address{
		IP:          addr,
		CountryCode: parts[0],
		Country:     parts[1],
		Region:      parts[2],
		City:        parts[3],
		Latitude:    lat,
		Longitude:   long,
	}, nil
}

// Response provides routing interface to the geo cache.
//
// Temporary workaround to switch IP addresses handling to the Go server.
// Handles two queries:
//
//   - /:geo-ip-put?ip=IP&value=VALUE
//   - /:geo-ip-get?ip=IP
//
//nolint:cyclop
func (c *Cache) Response(r *http.Request) *routing.Cadre {
	var (
		respERR = &routing.Cadre{Body: []byte("ERR")}
		respOK  = &routing.Cadre{Body: []byte("OK")}
	)

	if ip := util.ReadUserIP(r); ip != "127.0.0.1" {
		log.Printf("geoIP access from %s rejected\n", ip)

		return nil
	}

	if r.URL.Path == "/:geo-ip-put" {
		ip := r.URL.Query().Get("ip")
		value := r.URL.Query().Get("value")
		if !validIP4(ip) || value == "" {
			log.Printf("invalid geoIP put query: ip='%s' value='%s'\n", ip, value)

			return respERR
		}

		location, err := NewAddressFromString(ip, value)
		if err != nil {
			return respERR
		}

		err = c.Put(ip, location)
		if err != nil {
			return respERR
		}

		return respOK
	}
	if r.URL.Path == "/:geo-ip-get" {
		ip := r.URL.Query().Get("ip")
		if !validIP4(ip) {
			return respERR
		}

		result, err := c.Read(ip)
		if result == nil || err != nil {
			return respERR
		}

		return &routing.Cadre{Body: []byte(result.String())}
	}

	return nil
}

func validIP4(ipAddress string) bool {
	re := regexp.MustCompile(
		`^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$`)

	return re.MatchString(strings.Trim(ipAddress, " "))
}

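The cache-entry format documented in the Read doc comment can be exercised directly with NewAddressFromString; a small illustrative sketch, not part of the diff:

package main

import (
	"fmt"
	"log"

	"github.com/chubin/wttr.in/internal/geo/ip"
)

func main() {
	// Six-field entry: country code, country, region, city, latitude, longitude.
	entry := "DE;Germany;Free and Hanseatic City of Hamburg;Hamburg;53.5736;9.9782"

	addr, err := ip.NewAddressFromString("1.2.3.4", entry)
	if err != nil {
		log.Fatalln(err)
	}

	// String re-serializes the entry in the same semicolon-separated form.
	fmt.Println(addr.String())
}
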
84  internal/geo/ip/ip_test.go  Normal file
@@ -0,0 +1,84 @@
package ip_test

import (
	"testing"

	"github.com/stretchr/testify/require"

	. "github.com/chubin/wttr.in/internal/geo/ip"
	"github.com/chubin/wttr.in/internal/types"
)

//nolint:funlen
func TestParseCacheEntry(t *testing.T) {
	t.Parallel()
	tests := []struct {
		addr     string
		input    string
		expected Address
		err      error
	}{
		{
			"1.2.3.4",
			"DE;Germany;Free and Hanseatic City of Hamburg;Hamburg;53.5736;9.9782",
			Address{
				IP:          "1.2.3.4",
				CountryCode: "DE",
				Country:     "Germany",
				Region:      "Free and Hanseatic City of Hamburg",
				City:        "Hamburg",
				Latitude:    53.5736,
				Longitude:   9.9782,
			},
			nil,
		},

		{
			"1.2.3.4",
			"ES;Spain;Madrid, Comunidad de;Madrid;40.4165;-3.70256;28223;Orange Espagne SA;orange.es",
			Address{
				IP:          "1.2.3.4",
				CountryCode: "ES",
				Country:     "Spain",
				Region:      "Madrid, Comunidad de",
				City:        "Madrid",
				Latitude:    40.4165,
				Longitude:   -3.70256,
			},
			nil,
		},

		{
			"1.2.3.4",
			"US;United States of America;California;Mountain View",
			Address{
				IP:          "1.2.3.4",
				CountryCode: "US",
				Country:     "United States of America",
				Region:      "California",
				City:        "Mountain View",
				Latitude:    -1000,
				Longitude:   -1000,
			},
			nil,
		},

		// Invalid entries
		{
			"1.2.3.4",
			"DE;Germany;Free and Hanseatic City of Hamburg;Hamburg;53.5736;XXX",
			Address{},
			types.ErrInvalidCacheEntry,
		},
	}

	for _, tt := range tests {
		result, err := NewAddressFromString(tt.addr, tt.input)
		if tt.err == nil {
			require.NoError(t, err)
			require.Equal(t, *result, tt.expected)
		} else {
			require.ErrorIs(t, err, tt.err)
		}
	}
}

218  internal/geo/location/cache.go  Normal file
@@ -0,0 +1,218 @@
package location

import (
	"encoding/json"
	"errors"
	"fmt"
	"os"
	"path"
	"strconv"
	"strings"

	"github.com/samonzeweb/godb"
	"github.com/samonzeweb/godb/adapters/sqlite"
	log "github.com/sirupsen/logrus"
	"github.com/zsefvlol/timezonemapper"

	"github.com/chubin/wttr.in/internal/config"
	"github.com/chubin/wttr.in/internal/types"
)

// Cache is an implementation of a DB/file-based cache.
//
// At the moment, it is an implementation for the location cache,
// but it should be generalized to cache everything.
type Cache struct {
	config        *config.Config
	db            *godb.DB
	searcher      *Searcher
	indexField    string
	filesCacheDir string
}

// NewCache returns new cache reader for the specified config.
func NewCache(config *config.Config) (*Cache, error) {
	var (
		db  *godb.DB
		err error
	)

	if config.Geo.LocationCacheType == types.CacheTypeDB {
		log.Debugln("using db for location cache")
		db, err = godb.Open(sqlite.Adapter, config.Geo.LocationCacheDB)
		if err != nil {
			return nil, err
		}

		log.Debugln("db file:", config.Geo.LocationCacheDB)

		// Needed for "upsert" implementation in Put()
		db.UseErrorParser()
	}

	return &Cache{
		config:        config,
		db:            db,
		indexField:    "name",
		filesCacheDir: config.Geo.LocationCache,
		searcher:      NewSearcher(config),
	}, nil
}

// Resolve returns location information for specified location.
// If the information is found in the cache, it is returned.
// If it is not found, the external service is queried,
// and the result is stored in the cache.
func (c *Cache) Resolve(location string) (*Location, error) {
	location = normalizeLocationName(location)

	loc, err := c.Read(location)
	if !errors.Is(err, types.ErrNotFound) {
		return loc, err
	}

	log.Debugln("geo/location: not found in cache:", location)
	loc, err = c.searcher.Search(location)
	if err != nil {
		return nil, err
	}

	loc.Name = location
	loc.Timezone = latLngToTimezoneString(loc.Lat, loc.Lon)

	err = c.Put(location, loc)
	if err != nil {
		return nil, err
	}

	return loc, nil
}

// Read returns location information from the cache, if found,
// or types.ErrNotFound if not found. If the entry is found, but its format
// is invalid, types.ErrInvalidCacheEntry is returned.
func (c *Cache) Read(addr string) (*Location, error) {
	if c.config.Geo.LocationCacheType == types.CacheTypeFiles {
		return c.readFromCacheFile(addr)
	}

	return c.readFromCacheDB(addr)
}

func (c *Cache) readFromCacheFile(name string) (*Location, error) {
	var (
		fileLoc = struct {
			Latitude  float64 `json:"latitude"`
			Longitude float64 `json:"longitude"`
			Timezone  string  `json:"timezone"`
			Address   string  `json:"address"`
		}{}
		location Location
	)

	bytes, err := os.ReadFile(c.cacheFile(name))
	if err != nil {
		return nil, types.ErrNotFound
	}
	err = json.Unmarshal(bytes, &fileLoc)
	if err != nil {
		return nil, err
	}

	// normalize name
	name = strings.TrimSpace(
		strings.TrimRight(
			strings.TrimLeft(name, `"`), `"`))

	timezone := fileLoc.Timezone
	if timezone == "" {
		timezone = timezonemapper.LatLngToTimezoneString(fileLoc.Latitude, fileLoc.Longitude)
	}

	location = Location{
		Name:     name,
		Lat:      fmt.Sprint(fileLoc.Latitude),
		Lon:      fmt.Sprint(fileLoc.Longitude),
		Timezone: timezone,
		Fullname: fileLoc.Address,
	}

	return &location, nil
}

func (c *Cache) readFromCacheDB(addr string) (*Location, error) {
	result := Location{}
	err := c.db.Select(&result).
		Where(c.indexField+" = ?", addr).
		Do()

	if strings.Contains(fmt.Sprint(err), "no rows in result set") {
		return nil, types.ErrNotFound
	}

	if err != nil {
		return nil, fmt.Errorf("readFromCacheDB: %w", err)
	}

	return &result, nil
}

func (c *Cache) Put(addr string, loc *Location) error {
	log.Infoln("geo/location: storing in cache:", loc)
	if c.config.Geo.IPCacheType == types.CacheTypeDB {
		return c.putToCacheDB(loc)
	}

	return c.putToCacheFile(addr, loc)
}

func (c *Cache) putToCacheDB(loc *Location) error {
	err := c.db.Insert(loc).Do()
	if strings.Contains(fmt.Sprint(err), "UNIQUE constraint failed") {
		return c.db.Update(loc).Do()
	}

	return err
}

func (c *Cache) putToCacheFile(addr string, loc fmt.Stringer) error {
	return os.WriteFile(c.cacheFile(addr), []byte(loc.String()), 0o600)
}

// cacheFile returns path to the cache entry for addr.
func (c *Cache) cacheFile(item string) string {
	return path.Join(c.filesCacheDir, item)
}

// normalizeLocationName converts name into the standard location form
// with the following steps:
//   - remove excessive spaces,
//   - remove quotes,
//   - convert to lower case.
func normalizeLocationName(name string) string {
	name = strings.ReplaceAll(name, `"`, " ")
	name = strings.ReplaceAll(name, `'`, " ")
	name = strings.TrimSpace(name)
	name = strings.Join(strings.Fields(name), " ")

	return strings.ToLower(name)
}

// latLngToTimezoneString returns timezone for lat, lon,
// or an empty string if they are invalid.
func latLngToTimezoneString(lat, lon string) string {
	latFloat, err := strconv.ParseFloat(lat, 64)
	if err != nil {
		log.Errorln("geoloc: latLngToTimezoneString:", err)

		return ""
	}
	lonFloat, err := strconv.ParseFloat(lon, 64)
	if err != nil {
		log.Errorln("geoloc: latLngToTimezoneString:", err)

		return ""
	}

	return timezonemapper.LatLngToTimezoneString(latFloat, lonFloat)
}

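A hedged sketch of how a caller might wire up the location cache (not part of the diff; it assumes a working configuration with at least one Nominatim provider token set and writable cache paths):

package main

import (
	"fmt"
	"log"

	"github.com/chubin/wttr.in/internal/config"
	"github.com/chubin/wttr.in/internal/geo/location"
)

func main() {
	cfg := config.Default()

	cache, err := location.NewCache(cfg)
	if err != nil {
		log.Fatalln(err)
	}

	// Resolve normalizes the name, checks the cache,
	// and queries the external geocoders only on a miss.
	loc, err := cache.Resolve("Hamburg")
	if err != nil {
		log.Fatalln(err)
	}

	fmt.Println(loc.Fullname, loc.Lat, loc.Lon, loc.Timezone)
}
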
145  internal/geo/location/convert.go  Normal file
@@ -0,0 +1,145 @@
package location

import (
	"database/sql"
	"errors"
	"fmt"
	"log"
	"os"
	"path/filepath"
	"strings"

	"github.com/samonzeweb/godb"
	"github.com/samonzeweb/godb/adapters/sqlite"
)

// ConvertCache converts files-based cache into the DB-based cache.
// If reset is true, the DB cache is created from scratch.
//
//nolint:funlen,cyclop
func (c *Cache) ConvertCache(reset bool) error {
	var (
		dbfile     = c.config.Geo.LocationCacheDB
		tableName  = "Location"
		cacheFiles = c.filesCacheDir
		known      = map[string]bool{}
	)

	if reset {
		err := removeDBIfExists(dbfile)
		if err != nil {
			return err
		}
	}

	db, err := godb.Open(sqlite.Adapter, dbfile)
	if err != nil {
		return err
	}

	if reset {
		err = createTable(db, tableName)
		if err != nil {
			return err
		}
	}

	log.Println("listing cache entries...")
	files, err := filepath.Glob(filepath.Join(cacheFiles, "*"))
	if err != nil {
		return err
	}

	log.Printf("going to convert %d entries\n", len(files))

	block := []Location{}
	for i, file := range files {
		ip := filepath.Base(file)
		loc, err := c.Read(ip)
		if err != nil {
			log.Println("invalid entry for", ip)

			continue
		}

		// Skip too long location names.
		if len(loc.Name) > 25 {
			continue
		}

		// Skip duplicates.
		if known[loc.Name] {
			log.Println("skipping", loc.Name)

			continue
		}

		singleLocation := Location{}
		err = db.Select(&singleLocation).
			Where("name = ?", loc.Name).
			Do()
		if !errors.Is(err, sql.ErrNoRows) {
			log.Println("found in db:", loc.Name)

			continue
		}

		known[loc.Name] = true

		// Skip some invalid names.
		if strings.Contains(loc.Name, "\n") {
			continue
		}

		block = append(block, *loc)
		if i%1000 != 0 || i == 0 {
			continue
		}

		log.Println("going to insert new entries")
		err = db.BulkInsert(&block).Do()
		if err != nil {
			return err
		}
		block = []Location{}
		log.Println("converted", i+1, "entries")
	}

	// inserting the rest.
	err = db.BulkInsert(&block).Do()
	if err != nil {
		return err
	}

	log.Println("converted", len(files), "entries")

	return nil
}

func createTable(db *godb.DB, tableName string) error {
	createTable := fmt.Sprintf(
		`create table %s (
		name text not null primary key,
		displayName text not null,
		lat text not null,
		lon text not null,
		timezone text not null);
		`, tableName)

	_, err := db.CurrentDB().Exec(createTable)

	return err
}

func removeDBIfExists(filename string) error {
	_, err := os.Stat(filename)
	if err != nil {
		if !os.IsNotExist(err) {
			return err
		}
		// no db file
		return nil
	}

	return os.Remove(filename)
}

25  internal/geo/location/location.go  Normal file
@@ -0,0 +1,25 @@
package location

import (
	"encoding/json"
	"log"
)

type Location struct {
	Name     string `db:"name,key" json:"name"`
	Lat      string `db:"lat" json:"latitude"`
	Lon      string `db:"lon" json:"longitude"`
	Timezone string `db:"timezone" json:"timezone"`
	Fullname string `db:"displayName" json:"address"`
}

// String returns string representation of location.
func (l *Location) String() string {
	bytes, err := json.Marshal(l)
	if err != nil {
		// should never happen
		log.Fatalln(err)
	}

	return string(bytes)
}

77  internal/geo/location/nominatim.go  Normal file
@@ -0,0 +1,77 @@
package location

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"

	"github.com/chubin/wttr.in/internal/types"
	log "github.com/sirupsen/logrus"
)

type Nominatim struct {
	name  string
	url   string
	token string
	typ   string
}

type locationQuerier interface {
	Query(*Nominatim, string) (*Location, error)
}

func NewNominatim(name, typ, url, token string) *Nominatim {
	return &Nominatim{
		name:  name,
		url:   url,
		token: token,
		typ:   typ,
	}
}

func (n *Nominatim) Query(location string) (*Location, error) {
	var data locationQuerier

	switch n.typ {
	case "iq":
		data = &locationIQ{}
	case "opencage":
		data = &locationOpenCage{}
	default:
		return nil, fmt.Errorf("%s: %w", n.name, types.ErrUnknownLocationService)
	}

	return data.Query(n, location)
}

func makeQuery(url string, result interface{}) error {
	var errResponse struct {
		Error string
	}

	log.Debugln("nominatim:", url)
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return err
	}

	err = json.Unmarshal(body, &errResponse)
	if err == nil && errResponse.Error != "" {
		return fmt.Errorf("%w: %s", types.ErrUpstream, errResponse.Error)
	}

	log.Debugln("nominatim: response: ", string(body))
	err = json.Unmarshal(body, &result)
	if err != nil {
		return err
	}

	return nil
}

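For illustration only (not part of the diff), a single geocoding provider could be queried directly; the URL and environment variable below are assumptions taken from the defaults in internal/config:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/chubin/wttr.in/internal/geo/location"
)

func main() {
	// "iq" selects the locationIQ response decoder; the token is read
	// from the same environment variable the default config uses.
	n := location.NewNominatim(
		"locationiq", "iq",
		"https://eu1.locationiq.com/v1/search",
		os.Getenv("NOMINATIM_LOCATIONIQ"))

	loc, err := n.Query("Hamburg")
	if err != nil {
		log.Fatalln(err)
	}

	fmt.Println(loc.Fullname, loc.Lat, loc.Lon)
}
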
39  internal/geo/location/nominatim_locationiq.go  Normal file
@@ -0,0 +1,39 @@
package location

import (
	"fmt"
	"net/url"

	"github.com/chubin/wttr.in/internal/types"
)

type locationIQ []struct {
	Name string `db:"name,key"`
	Lat  string `db:"lat"`
	Lon  string `db:"lon"`
	//nolint:tagliatelle
	Fullname string `db:"displayName" json:"display_name"`
}

func (data *locationIQ) Query(n *Nominatim, location string) (*Location, error) {
	url := fmt.Sprintf(
		"%s?q=%s&format=json&language=native&limit=1&key=%s",
		n.url, url.QueryEscape(location), n.token)

	err := makeQuery(url, data)
	if err != nil {
		return nil, fmt.Errorf("%s: %w", n.name, err)
	}

	if len(*data) != 1 {
		return nil, fmt.Errorf("%w: %s: invalid response", types.ErrUpstream, n.name)
	}

	nl := &(*data)[0]

	return &Location{
		Lat:      nl.Lat,
		Lon:      nl.Lon,
		Fullname: nl.Fullname,
	}, nil
}

42  internal/geo/location/nominatim_opencage.go  Normal file
@@ -0,0 +1,42 @@
package location

import (
	"fmt"
	"net/url"

	"github.com/chubin/wttr.in/internal/types"
)

type locationOpenCage struct {
	Results []struct {
		Name     string `db:"name,key"`
		Geometry struct {
			Lat float64 `db:"lat"`
			Lng float64 `db:"lng"`
		}
		Fullname string `json:"formatted"`
	} `json:"results"`
}

func (data *locationOpenCage) Query(n *Nominatim, location string) (*Location, error) {
	url := fmt.Sprintf(
		"%s?q=%s&language=native&limit=1&key=%s",
		n.url, url.QueryEscape(location), n.token)

	err := makeQuery(url, data)
	if err != nil {
		return nil, fmt.Errorf("%s: %w", n.name, err)
	}

	if len(data.Results) != 1 {
		return nil, fmt.Errorf("%w: %s: invalid response", types.ErrUpstream, n.name)
	}

	nl := data.Results[0]

	return &Location{
		Lat:      fmt.Sprint(nl.Geometry.Lat),
		Lon:      fmt.Sprint(nl.Geometry.Lng),
		Fullname: nl.Fullname,
	}, nil
}

44  internal/geo/location/response.go  Normal file
@@ -0,0 +1,44 @@
package location

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"

	"github.com/chubin/wttr.in/internal/routing"
)

// Response provides routing interface to the geo cache.
func (c *Cache) Response(r *http.Request) *routing.Cadre {
	var (
		locationName = r.URL.Query().Get("location")
		loc          *Location
		bytes        []byte
		err          error
	)

	if locationName == "" {
		return errorResponse("location is not specified")
	}

	loc, err = c.Resolve(locationName)
	if err != nil {
		log.Println("geo/location error:", locationName)

		return errorResponse(fmt.Sprint(err))
	}

	bytes, err = json.Marshal(loc)
	if err != nil {
		return errorResponse(fmt.Sprint(err))
	}

	return &routing.Cadre{Body: bytes}
}

func errorResponse(s string) *routing.Cadre {
	return &routing.Cadre{Body: []byte(
		fmt.Sprintf(`{"error": %q}`, s),
	)}
}

42  internal/geo/location/search.go  Normal file
@@ -0,0 +1,42 @@
package location

import "github.com/chubin/wttr.in/internal/config"

type Provider interface {
	Query(location string) (*Location, error)
}

type Searcher struct {
	providers []Provider
}

// NewSearcher returns a new Searcher for the specified config.
func NewSearcher(config *config.Config) *Searcher {
	providers := []Provider{}
	for _, p := range config.Geo.Nominatim {
		providers = append(providers, NewNominatim(p.Name, p.Type, p.URL, p.Token))
	}

	return &Searcher{
		providers: providers,
	}
}

// Search makes queries through all known providers,
// and returns a response as soon as it is not nil.
// If all responses were nil, the last response is returned.
func (s *Searcher) Search(location string) (*Location, error) {
	var (
		err    error
		result *Location
	)

	for _, p := range s.providers {
		result, err = p.Query(location)
		if result != nil && err == nil {
			return result, nil
		}
	}

	return result, err
}

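The fallback behaviour of Search (try each provider in order, return the first usable answer) can be seen in a purely illustrative sketch that reuses the default provider list; not part of the diff:

package main

import (
	"fmt"
	"log"

	"github.com/chubin/wttr.in/internal/config"
	"github.com/chubin/wttr.in/internal/geo/location"
)

func main() {
	// Providers are built from config.Geo.Nominatim, in order;
	// the first successful answer wins.
	s := location.NewSearcher(config.Default())

	loc, err := s.Search("Hamburg")
	if err != nil {
		log.Fatalln(err)
	}

	fmt.Println(loc.Fullname)
}
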
119 internal/logging/logging.go Normal file
@@ -0,0 +1,119 @@
package logging

import (
	"fmt"
	"net/http"
	"os"
	"sync"
	"time"

	"github.com/chubin/wttr.in/internal/util"
)

// Logging request.
//

// RequestLogger logs all incoming HTTP requests.
type RequestLogger struct {
	buf      map[logEntry]int
	filename string
	m        sync.Mutex

	period    time.Duration
	lastFlush time.Time
}

type logEntry struct {
	Proto     string
	IP        string
	URI       string
	UserAgent string
}

// NewRequestLogger returns a new RequestLogger for the specified log file.
// Logging entries are flushed after each period of time.
//
// If filename is empty, no log will be written, and all logging entries
// will be silently dropped.
func NewRequestLogger(filename string, period time.Duration) *RequestLogger {
	return &RequestLogger{
		buf:      map[logEntry]int{},
		filename: filename,
		m:        sync.Mutex{},
		period:   period,
	}
}

// Log logs information about an HTTP request.
func (rl *RequestLogger) Log(r *http.Request) error {
	le := logEntry{
		Proto:     "http",
		IP:        util.ReadUserIP(r),
		URI:       r.RequestURI,
		UserAgent: r.Header.Get("User-Agent"),
	}
	if r.TLS != nil {
		le.Proto = "https"
	}

	rl.m.Lock()
	rl.buf[le]++
	rl.m.Unlock()

	if time.Since(rl.lastFlush) > rl.period {
		return rl.flush()
	}

	return nil
}

// flush stores log data to disk, and flushes the buffer.
func (rl *RequestLogger) flush() error {
	rl.m.Lock()
	defer rl.m.Unlock()

	// It is possible that, while waiting for the mutex,
	// the buffer was already flushed.
	if time.Since(rl.lastFlush) <= rl.period {
		return nil
	}

	if rl.filename != "" {
		// Generate log output.
		output := ""
		for k, hitsNumber := range rl.buf {
			output += fmt.Sprintf("%s %3d %s\n", time.Now().Format(time.RFC3339), hitsNumber, k.String())
		}

		// Open log file.
		//nolint:nosnakecase
		f, err := os.OpenFile(rl.filename, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o600)
		if err != nil {
			return err
		}
		defer f.Close()

		// Save output to log file.
		_, err = f.Write([]byte(output))
		if err != nil {
			return err
		}
	}

	// Flush buffer.
	rl.buf = map[logEntry]int{}
	rl.lastFlush = time.Now()

	return nil
}

// String returns string representation of logEntry.
func (e *logEntry) String() string {
	return fmt.Sprintf(
		"%s %s %s %s",
		e.Proto,
		e.IP,
		e.URI,
		e.UserAgent,
	)
}
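
A sketch of how the RequestLogger above can be wired into an HTTP handler. The log file name and flush period are example values, not the server's real configuration:

package main

import (
	"log"
	"net/http"
	"time"

	"github.com/chubin/wttr.in/internal/logging"
)

func main() {
	// Aggregate identical requests in memory and flush them to disk
	// roughly once a minute.
	rl := logging.NewRequestLogger("/tmp/wttr-requests.log", 60*time.Second)

	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		if err := rl.Log(r); err != nil {
			log.Println("request logging:", err)
		}
		w.Write([]byte("ok\n"))
	})

	log.Fatal(http.ListenAndServe(":8080", nil))
}
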
84 internal/logging/suppress.go Normal file
@@ -0,0 +1,84 @@
package logging

import (
	"os"
	"strings"
	"sync"
)

// LogSuppressor provides io.Writer interface for logging
// with lines suppression. For usage with log.Logger.
type LogSuppressor struct {
	filename   string
	suppress   []string
	linePrefix string

	logFile *os.File
	m       sync.Mutex
}

// NewLogSuppressor creates a new LogSuppressor for specified
// filename and lines to be suppressed.
//
// If filename is empty, log entries will be printed to stderr.
func NewLogSuppressor(filename string, suppress []string, linePrefix string) *LogSuppressor {
	return &LogSuppressor{
		filename:   filename,
		suppress:   suppress,
		linePrefix: linePrefix,
	}
}

// Open opens log file.
func (ls *LogSuppressor) Open() error {
	var err error

	if ls.filename == "" {
		return nil
	}

	//nolint:nosnakecase
	ls.logFile, err = os.OpenFile(ls.filename, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o600)

	return err
}

// Close closes log file.
func (ls *LogSuppressor) Close() error {
	if ls.filename == "" {
		return nil
	}

	return ls.logFile.Close()
}

// Write writes p to the log, and returns the number of bytes written.
// Implements io.Writer interface.
func (ls *LogSuppressor) Write(p []byte) (int, error) {
	var output string

	if ls.filename == "" {
		// No log file configured: print entries to stderr,
		// as documented on NewLogSuppressor.
		return os.Stderr.Write(p)
	}

	ls.m.Lock()
	defer ls.m.Unlock()

	lines := strings.Split(string(p), ls.linePrefix)
	for _, line := range lines {
		if (func(line string) bool {
			for _, suppress := range ls.suppress {
				if strings.Contains(line, suppress) {
					return true
				}
			}

			return false
		})(line) {
			continue
		}
		output += line
	}

	return ls.logFile.Write([]byte(output))
}
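
LogSuppressor is meant to sit between a log.Logger and its log file. A minimal sketch, assuming made-up file name, prefix and suppressed substrings:

package main

import (
	"log"

	"github.com/chubin/wttr.in/internal/logging"
)

func main() {
	// Drop noisy TLS handshake errors; keep everything else.
	ls := logging.NewLogSuppressor(
		"/tmp/wttr-errors.log",
		[]string{"TLS handshake error"},
		"ERROR: ")

	if err := ls.Open(); err != nil {
		log.Fatal(err)
	}
	defer ls.Close()

	// Every line written by this logger starts with the prefix
	// that LogSuppressor splits on.
	errLog := log.New(ls, "ERROR: ", log.LstdFlags)
	errLog.Println("something went wrong")
}
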
98 internal/processor/peak.go Normal file
@@ -0,0 +1,98 @@
package processor

import (
	"log"
	"net/http"
	"sync"
	"time"

	"github.com/robfig/cron"
)

func (rp *RequestProcessor) startPeakHandling() error {
	var err error

	c := cron.New()
	// cronTime := fmt.Sprintf("%d,%d * * * *", 30-prefetchInterval/60, 60-prefetchInterval/60)
	err = c.AddFunc(
		"24 * * * *",
		func() { rp.prefetchPeakRequests(&rp.peakRequest30) },
	)
	if err != nil {
		return err
	}

	err = c.AddFunc(
		"54 * * * *",
		func() { rp.prefetchPeakRequests(&rp.peakRequest60) },
	)
	if err != nil {
		return err
	}

	c.Start()

	return nil
}

// savePeakRequest registers requests coming in at the peak time.
// Such requests can be prefetched afterwards, just before the peak time comes.
func (rp *RequestProcessor) savePeakRequest(cacheDigest string, r *http.Request) {
	if _, min, _ := time.Now().Clock(); min == 30 {
		rp.peakRequest30.Store(cacheDigest, *r)
	} else if min == 0 {
		rp.peakRequest60.Store(cacheDigest, *r)
	}
}

func (rp *RequestProcessor) prefetchRequest(r *http.Request) error {
	_, err := rp.ProcessRequest(r)

	return err
}

func syncMapLen(sm *sync.Map) int {
	count := 0
	f := func(key, value interface{}) bool {
		// Not really certain about this part, don't know for sure
		// if this is a good check for an entry's existence
		if key == "" {
			return false
		}
		count++

		return true
	}

	sm.Range(f)

	return count
}

func (rp *RequestProcessor) prefetchPeakRequests(peakRequestMap *sync.Map) {
	peakRequestLen := syncMapLen(peakRequestMap)
	if peakRequestLen == 0 {
		return
	}
	log.Printf("PREFETCH: Prefetching %d requests\n", peakRequestLen)
	sleepBetweenRequests := time.Duration(rp.config.Uplink.PrefetchInterval*1000/peakRequestLen) * time.Millisecond
	peakRequestMap.Range(func(key interface{}, value interface{}) bool {
		req, ok := value.(http.Request)
		if !ok {
			log.Println("missing value for:", key)

			return true
		}

		go func(r http.Request) {
			err := rp.prefetchRequest(&r)
			if err != nil {
				log.Println("prefetch request:", err)
			}
		}(req)
		peakRequestMap.Delete(key)
		time.Sleep(sleepBetweenRequests)

		return true
	})
}
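
To see how the prefetch pacing works: stored peak requests are spread evenly over the configured prefetch interval. Assuming, for illustration, that Uplink.PrefetchInterval is 300 (seconds) and 100 requests were registered at the half-hour peak, sleepBetweenRequests becomes 300 * 1000 / 100 = 3000 ms, i.e. one prefetched request every 3 seconds, so all of them finish just before the peak minute arrives.
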
380 internal/processor/processor.go Normal file
@@ -0,0 +1,380 @@
package processor

import (
	"context"
	"fmt"
	"io/ioutil"
	"log"
	"math/rand"
	"net"
	"net/http"
	"strings"
	"sync"
	"time"

	lru "github.com/hashicorp/golang-lru"

	"github.com/chubin/wttr.in/internal/config"
	geoip "github.com/chubin/wttr.in/internal/geo/ip"
	geoloc "github.com/chubin/wttr.in/internal/geo/location"
	"github.com/chubin/wttr.in/internal/routing"
	"github.com/chubin/wttr.in/internal/stats"
	"github.com/chubin/wttr.in/internal/util"
)

// plainTextAgents returns signatures of the plain-text agents.
func plainTextAgents() []string {
	return []string{
		"curl",
		"httpie",
		"lwp-request",
		"wget",
		"python-httpx",
		"python-requests",
		"openbsd ftp",
		"powershell",
		"fetch",
		"aiohttp",
		"http_get",
		"xh",
	}
}

type ResponseWithHeader struct {
	InProgress bool      // true if the request is being processed
	Expires    time.Time // expiration time of the cache entry

	Body       []byte
	Header     http.Header
	StatusCode int // e.g. 200
}

// RequestProcessor handles incoming requests.
type RequestProcessor struct {
	peakRequest30     sync.Map
	peakRequest60     sync.Map
	lruCache          *lru.Cache
	stats             *stats.Stats
	router            routing.Router
	upstreamTransport *http.Transport
	config            *config.Config
	geoIPCache        *geoip.Cache
	geoLocation       *geoloc.Cache
}

// NewRequestProcessor returns new RequestProcessor.
func NewRequestProcessor(config *config.Config) (*RequestProcessor, error) {
	lruCache, err := lru.New(config.Cache.Size)
	if err != nil {
		return nil, err
	}

	dialer := &net.Dialer{
		Timeout:   time.Duration(config.Uplink.Timeout) * time.Second,
		KeepAlive: time.Duration(config.Uplink.Timeout) * time.Second,
		DualStack: true,
	}

	transport := &http.Transport{
		DialContext: func(ctx context.Context, network, _ string) (net.Conn, error) {
			return dialer.DialContext(ctx, network, config.Uplink.Address)
		},
	}

	geoCache, err := geoip.NewCache(config)
	if err != nil {
		return nil, err
	}

	geoLocation, err := geoloc.NewCache(config)
	if err != nil {
		return nil, err
	}

	rp := &RequestProcessor{
		lruCache:          lruCache,
		stats:             stats.New(),
		upstreamTransport: transport,
		config:            config,
		geoIPCache:        geoCache,
		geoLocation:       geoLocation,
	}

	// Initialize routes.
	rp.router.AddPath("/:stats", rp.stats)
	rp.router.AddPath("/:geo-ip-get", rp.geoIPCache)
	rp.router.AddPath("/:geo-ip-put", rp.geoIPCache)
	rp.router.AddPath("/:geo-location", rp.geoLocation)

	return rp, nil
}

// Start starts async request processor jobs, such as peak handling.
func (rp *RequestProcessor) Start() error {
	return rp.startPeakHandling()
}

func (rp *RequestProcessor) ProcessRequest(r *http.Request) (*ResponseWithHeader, error) {
	var (
		response *ResponseWithHeader
		ip       = util.ReadUserIP(r)
	)

	if ip != "127.0.0.1" {
		rp.stats.Inc("total")
	}

	// Main routing logic.
	if rh := rp.router.Route(r); rh != nil {
		result := rh.Response(r)
		if result != nil {
			return fromCadre(result), nil
		}
	}

	if resp, ok := redirectInsecure(r); ok {
		rp.stats.Inc("redirects")

		return resp, nil
	}

	if dontCache(r) {
		rp.stats.Inc("uncached")

		return get(r, rp.upstreamTransport)
	}

	// Processing a cacheable request.
	cacheDigest := getCacheDigest(r)

	rp.savePeakRequest(cacheDigest, r)

	response = rp.processRequestFromCache(r)
	if response != nil {
		return response, nil
	}

	return rp.processUncachedRequest(r)
}

// processRequestFromCache processes requests using the cache.
// If no entry is found in the cache, nil is returned.
func (rp *RequestProcessor) processRequestFromCache(r *http.Request) *ResponseWithHeader {
	var (
		cacheEntry  ResponseWithHeader
		cacheDigest = getCacheDigest(r)
		ok          bool
	)

	cacheBody, _ := rp.lruCache.Get(cacheDigest)
	cacheEntry, ok = cacheBody.(ResponseWithHeader)
	if !ok {
		return nil
	}

	// If after all attempts we still have no answer,
	// we try to make the query on our own.
	for attempts := 0; attempts < 300; attempts++ {
		if !ok || !cacheEntry.InProgress {
			break
		}
		time.Sleep(30 * time.Millisecond)
		cacheBody, _ = rp.lruCache.Get(cacheDigest)
		v, ok := cacheBody.(ResponseWithHeader)
		if ok {
			cacheEntry = v
		}
	}
	if cacheEntry.InProgress {
		log.Printf("TIMEOUT: %s\n", cacheDigest)
	}
	if ok && !cacheEntry.InProgress && cacheEntry.Expires.After(time.Now()) {
		rp.stats.Inc("cache1")

		return &cacheEntry
	}

	return nil
}

// processUncachedRequest processes requests that were not found in the cache.
func (rp *RequestProcessor) processUncachedRequest(r *http.Request) (*ResponseWithHeader, error) {
	var (
		cacheDigest = getCacheDigest(r)
		ip          = util.ReadUserIP(r)
		response    *ResponseWithHeader
		err         error
	)

	// Response was not found in cache.
	// Starting real handling.
	format := r.URL.Query().Get("format")
	if len(format) != 0 {
		rp.stats.Inc("format")
		if format == "j1" {
			rp.stats.Inc("format=j1")
		}
	}

	// Count how many IP addresses are known.
	_, err = rp.geoIPCache.Read(ip)
	if err == nil {
		rp.stats.Inc("geoip")
	}

	// Indicate that the request is being handled.
	rp.lruCache.Add(cacheDigest, ResponseWithHeader{InProgress: true})

	response, err = get(r, rp.upstreamTransport)
	if err != nil {
		return nil, err
	}
	if response.StatusCode == 200 || response.StatusCode == 304 || response.StatusCode == 404 {
		rp.lruCache.Add(cacheDigest, *response)
	} else {
		log.Printf("REMOVE: %d response for %s from cache\n", response.StatusCode, cacheDigest)
		rp.lruCache.Remove(cacheDigest)
	}

	return response, nil
}

func get(req *http.Request, transport *http.Transport) (*ResponseWithHeader, error) {
	client := &http.Client{
		Transport: transport,
	}

	queryURL := fmt.Sprintf("http://%s%s", req.Host, req.RequestURI)

	proxyReq, err := http.NewRequest(req.Method, queryURL, req.Body)
	if err != nil {
		return nil, err
	}

	// proxyReq.Header.Set("Host", req.Host)
	// proxyReq.Header.Set("X-Forwarded-For", req.RemoteAddr)

	for header, values := range req.Header {
		for _, value := range values {
			proxyReq.Header.Add(header, value)
		}
	}

	if proxyReq.Header.Get("X-Forwarded-For") == "" {
		proxyReq.Header.Set("X-Forwarded-For", ipFromAddr(req.RemoteAddr))
	}

	res, err := client.Do(proxyReq)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}

	return &ResponseWithHeader{
		InProgress: false,
		Expires:    time.Now().Add(time.Duration(randInt(1000, 1500)) * time.Second),
		Body:       body,
		Header:     res.Header,
		StatusCode: res.StatusCode,
	}, nil
}

// getCacheDigest is an implementation of the cache.get_signature of original wttr.in.
func getCacheDigest(req *http.Request) string {
	userAgent := req.Header.Get("User-Agent")

	queryHost := req.Host
	queryString := req.RequestURI

	clientIPAddress := util.ReadUserIP(req)

	lang := req.Header.Get("Accept-Language")

	return fmt.Sprintf("%s:%s%s:%s:%s", userAgent, queryHost, queryString, clientIPAddress, lang)
}

// dontCache returns true if req should not be cached.
func dontCache(req *http.Request) bool {
	// Do not cache cyclic requests.
	loc := strings.Split(req.RequestURI, "?")[0]

	return strings.Contains(loc, ":")
}

// redirectInsecure returns a redirection response, and a bool value indicating
// whether redirection was needed, i.e. whether the query comes from a browser
// and is insecure.
//
// Insecure queries are marked by the frontend web server
// with X-Forwarded-Proto header:
// `proxy_set_header X-Forwarded-Proto $scheme;`.
func redirectInsecure(req *http.Request) (*ResponseWithHeader, bool) {
	if isPlainTextAgent(req.Header.Get("User-Agent")) {
		return nil, false
	}

	if req.TLS != nil || strings.ToLower(req.Header.Get("X-Forwarded-Proto")) == "https" {
		return nil, false
	}

	target := "https://" + req.Host + req.URL.Path
	if len(req.URL.RawQuery) > 0 {
		target += "?" + req.URL.RawQuery
	}

	body := []byte(fmt.Sprintf(`<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8">
<TITLE>301 Moved</TITLE></HEAD><BODY>
<H1>301 Moved</H1>
The document has moved
<A HREF="%s">here</A>.
</BODY></HTML>
`, target))

	return &ResponseWithHeader{
		InProgress: false,
		Expires:    time.Now().Add(time.Duration(randInt(1000, 1500)) * time.Second),
		Body:       body,
		Header:     http.Header{"Location": []string{target}},
		StatusCode: 301,
	}, true
}

// isPlainTextAgent returns true if userAgent is a plain-text agent.
func isPlainTextAgent(userAgent string) bool {
	userAgentLower := strings.ToLower(userAgent)
	for _, signature := range plainTextAgents() {
		if strings.Contains(userAgentLower, signature) {
			return true
		}
	}

	return false
}

func randInt(min int, max int) int {
	return min + rand.Intn(max-min)
}

// ipFromAddr returns the IP address from an ADDR:PORT pair.
func ipFromAddr(s string) string {
	pos := strings.LastIndex(s, ":")
	if pos == -1 {
		return s
	}

	return s[:pos]
}

// fromCadre converts a Cadre into a ResponseWithHeader.
func fromCadre(cadre *routing.Cadre) *ResponseWithHeader {
	return &ResponseWithHeader{
		Body:       cadre.Body,
		Expires:    cadre.Expires,
		StatusCode: 200,
		InProgress: false,
	}
}
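
A rough sketch of how RequestProcessor is intended to be driven from an HTTP server. The way the configuration is obtained here is only assumed (the real server builds it from its config file); error handling is minimal:

package main

import (
	"log"
	"net/http"

	"github.com/chubin/wttr.in/internal/config"
	"github.com/chubin/wttr.in/internal/processor"
)

func main() {
	// Assumption: a config.Config populated elsewhere
	// (cache size, uplink address/timeout, geo databases, ...).
	cfg := &config.Config{ /* populated elsewhere */ }

	rp, err := processor.NewRequestProcessor(cfg)
	if err != nil {
		log.Fatal(err)
	}
	if err := rp.Start(); err != nil { // start cron-based peak prefetching
		log.Fatal(err)
	}

	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		resp, err := rp.ProcessRequest(r)
		if err != nil || resp == nil {
			http.Error(w, "internal error", http.StatusInternalServerError)
			return
		}
		for k, vals := range resp.Header {
			for _, v := range vals {
				w.Header().Add(k, v)
			}
		}
		w.WriteHeader(resp.StatusCode)
		w.Write(resp.Body)
	})

	log.Fatal(http.ListenAndServe(":8083", nil))
}
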
72 internal/routing/routing.go Normal file
@@ -0,0 +1,72 @@
package routing

import (
	"net/http"
	"time"
)

// CadreFormat specifies how the shot data is formatted.
type CadreFormat int

const (
	// CadreFormatANSI represents Terminal ANSI format.
	CadreFormatANSI = iota

	// CadreFormatHTML represents HTML.
	CadreFormatHTML

	// CadreFormatPNG represents PNG.
	CadreFormatPNG
)

// Cadre contains the result of a query execution.
type Cadre struct {
	// Body contains the data of Cadre, formatted as Format.
	Body []byte

	// Format of the shot.
	Format CadreFormat

	// Expires contains the time of the Cadre expiration,
	// or 0 if it does not expire.
	Expires time.Time
}

// Handler can handle queries and return views.
type Handler interface {
	Response(*http.Request) *Cadre
}

type routeFunc func(*http.Request) bool

type route struct {
	routeFunc
	Handler
}

// Router keeps a routing table, and finds query handlers based on its rules.
type Router struct {
	rt []route
}

// Route returns a query handler based on its content.
func (r *Router) Route(req *http.Request) Handler {
	for _, re := range r.rt {
		if re.routeFunc(req) {
			return re.Handler
		}
	}

	return nil
}

// AddPath adds a route for a static path.
func (r *Router) AddPath(path string, handler Handler) {
	r.rt = append(r.rt, route{routePath(path), handler})
}

func routePath(path string) routeFunc {
	return routeFunc(func(req *http.Request) bool {
		return req.URL.Path == path
	})
}
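
The Router above matches on exact paths only. A small sketch of registering a custom Handler; the handler type and the path are invented for illustration:

package main

import (
	"fmt"
	"net/http"

	"github.com/chubin/wttr.in/internal/routing"
)

// pingHandler is a hypothetical Handler returning a static Cadre.
type pingHandler struct{}

func (pingHandler) Response(*http.Request) *routing.Cadre {
	return &routing.Cadre{Body: []byte("pong\n")}
}

func main() {
	var router routing.Router
	router.AddPath("/:ping", pingHandler{})

	req, _ := http.NewRequest(http.MethodGet, "http://localhost/:ping", nil)
	if h := router.Route(req); h != nil {
		fmt.Print(string(h.Response(req).Body))
	}
}
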
89 internal/stats/stats.go Normal file
@@ -0,0 +1,89 @@
package stats

import (
	"bytes"
	"fmt"
	"net/http"
	"sync"
	"time"

	"github.com/chubin/wttr.in/internal/routing"
)

// Stats holds processed requests statistics.
type Stats struct {
	m         sync.Mutex
	v         map[string]int
	startTime time.Time
}

// New returns new Stats.
func New() *Stats {
	return &Stats{
		v:         map[string]int{},
		startTime: time.Now(),
	}
}

// Inc increments the key counter by one.
func (c *Stats) Inc(key string) {
	c.m.Lock()
	c.v[key]++
	c.m.Unlock()
}

// Get returns the current key counter value.
func (c *Stats) Get(key string) int {
	c.m.Lock()
	defer c.m.Unlock()

	return c.v[key]
}

// Reset resets the key counter, and returns its previous value.
func (c *Stats) Reset(key string) int {
	c.m.Lock()
	defer c.m.Unlock()
	result := c.v[key]
	c.v[key] = 0

	return result
}

// Show returns current statistics formatted as []byte.
func (c *Stats) Show() []byte {
	var b bytes.Buffer

	c.m.Lock()
	defer c.m.Unlock()

	uptime := time.Since(c.startTime) / time.Second

	fmt.Fprintf(&b, "%-20s: %v\n", "Running since", c.startTime.Format(time.RFC3339))
	fmt.Fprintf(&b, "%-20s: %d\n", "Uptime (min)", uptime/60)

	fmt.Fprintf(&b, "%-20s: %d\n", "Total queries", c.v["total"])

	if uptime != 0 {
		fmt.Fprintf(&b, "%-20s: %d\n", "Throughput (QpM)", c.v["total"]*60/int(uptime))
	}

	fmt.Fprintf(&b, "%-20s: %d\n", "Cache L1 queries", c.v["cache1"])

	if c.v["total"] != 0 {
		fmt.Fprintf(&b, "%-20s: %d\n", "Cache L1 queries (%)", (100*c.v["cache1"])/c.v["total"])
	}

	fmt.Fprintf(&b, "%-20s: %d\n", "Upstream queries", c.v["total"]-c.v["cache1"])
	fmt.Fprintf(&b, "%-20s: %d\n", "Queries with format", c.v["format"])
	fmt.Fprintf(&b, "%-20s: %d\n", "Queries with format=j1", c.v["format=j1"])
	fmt.Fprintf(&b, "%-20s: %d\n", "Queries with known IP", c.v["geoip"])

	return b.Bytes()
}

func (c *Stats) Response(*http.Request) *routing.Cadre {
	return &routing.Cadre{
		Body: c.Show(),
	}
}
14 internal/types/errors.go Normal file
@@ -0,0 +1,14 @@
package types

import "errors"

var (
	ErrNotFound          = errors.New("cache entry not found")
	ErrInvalidCacheEntry = errors.New("invalid cache entry format")
	ErrUpstream          = errors.New("upstream error")

	// ErrNoServersConfigured means that there are no servers to run.
	ErrNoServersConfigured = errors.New("no servers configured")

	ErrUnknownLocationService = errors.New("unknown location service")
)
8 internal/types/types.go Normal file
@@ -0,0 +1,8 @@
package types

type CacheType string

const (
	CacheTypeDB    = "db"
	CacheTypeFiles = "files"
)
18 internal/util/files.go Normal file
@@ -0,0 +1,18 @@
package util

import "os"

// RemoveFileIfExists removes filename if it exists, or does nothing if the file
// is not there. Returns an error if one occurred during deletion.
func RemoveFileIfExists(filename string) error {
	_, err := os.Stat(filename)
	if err != nil {
		if !os.IsNotExist(err) {
			return err
		}
		// no db file
		return nil
	}

	return os.Remove(filename)
}
26 internal/util/http.go Normal file
@@ -0,0 +1,26 @@
package util

import (
	"log"
	"net"
	"net/http"
)

// ReadUserIP returns IP address of the client from http.Request,
// taking into account the HTTP headers.
func ReadUserIP(r *http.Request) string {
	IPAddress := r.Header.Get("X-Real-Ip")
	if IPAddress == "" {
		IPAddress = r.Header.Get("X-Forwarded-For")
	}
	if IPAddress == "" {
		IPAddress = r.RemoteAddr
		var err error
		IPAddress, _, err = net.SplitHostPort(IPAddress)
		if err != nil {
			log.Printf("ERROR: userip: %q is not IP:port\n", IPAddress)
		}
	}

	return IPAddress
}
15 internal/util/yaml.go Normal file
@@ -0,0 +1,15 @@
package util

import (
	"bytes"

	"gopkg.in/yaml.v3"
)

// YamlUnmarshalStrict unmarshals YAML data with an error when unknown fields are present.
func YamlUnmarshalStrict(in []byte, out interface{}) error {
	dec := yaml.NewDecoder(bytes.NewReader(in))
	dec.KnownFields(true)

	return dec.Decode(out)
}
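
YamlUnmarshalStrict rejects unknown keys, which is how configuration typos can be caught early. A tiny sketch with an invented struct:

package main

import (
	"fmt"

	"github.com/chubin/wttr.in/internal/util"
)

type listenConfig struct {
	Address string `yaml:"address"`
	Port    int    `yaml:"port"`
}

func main() {
	var c listenConfig

	// "adress" is misspelled on purpose: the strict decoder returns an error
	// instead of silently ignoring the unknown field.
	err := util.YamlUnmarshalStrict([]byte("adress: 127.0.0.1\nport: 8002\n"), &c)
	fmt.Println(err)
}
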
|
@ -1,4 +1,5 @@
|
|||
package main
|
||||
//nolint:forbidigo,funlen,nestif,goerr113,gocognit,cyclop
|
||||
package v1
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
|
@ -6,7 +7,6 @@ import (
|
|||
"flag"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
|
@ -14,6 +14,7 @@ import (
|
|||
"strings"
|
||||
)
|
||||
|
||||
//nolint:tagliatelle
|
||||
type cond struct {
|
||||
ChanceOfRain string `json:"chanceofrain"`
|
||||
FeelsLikeC int `json:",string"`
|
||||
|
@ -49,6 +50,7 @@ type loc struct {
|
|||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
//nolint:tagliatelle
|
||||
type resp struct {
|
||||
Data struct {
|
||||
Cur []cond `json:"current_condition"`
|
||||
|
@ -58,65 +60,78 @@ type resp struct {
|
|||
} `json:"data"`
|
||||
}
|
||||
|
||||
func getDataFromAPI() (ret resp) {
|
||||
var params []string
|
||||
func (g *global) getDataFromAPI() (*resp, error) {
|
||||
var (
|
||||
ret resp
|
||||
params []string
|
||||
)
|
||||
|
||||
if len(config.APIKey) == 0 {
|
||||
log.Fatal("No API key specified. Setup instructions are in the README.")
|
||||
if len(g.config.APIKey) == 0 {
|
||||
return nil, fmt.Errorf("no API key specified. Setup instructions are in the README")
|
||||
}
|
||||
params = append(params, "key="+config.APIKey)
|
||||
params = append(params, "key="+g.config.APIKey)
|
||||
|
||||
// non-flag shortcut arguments will overwrite possible flag arguments
|
||||
for _, arg := range flag.Args() {
|
||||
if v, err := strconv.Atoi(arg); err == nil && len(arg) == 1 {
|
||||
config.Numdays = v
|
||||
g.config.Numdays = v
|
||||
} else {
|
||||
config.City = arg
|
||||
g.config.City = arg
|
||||
}
|
||||
}
|
||||
|
||||
if len(config.City) > 0 {
|
||||
params = append(params, "q="+url.QueryEscape(config.City))
|
||||
if len(g.config.City) > 0 {
|
||||
params = append(params, "q="+url.QueryEscape(g.config.City))
|
||||
}
|
||||
params = append(params, "format=json", "num_of_days="+strconv.Itoa(config.Numdays), "tp=3")
|
||||
if config.Lang != "" {
|
||||
params = append(params, "lang="+config.Lang)
|
||||
params = append(params, "format=json", "num_of_days="+strconv.Itoa(g.config.Numdays), "tp=3")
|
||||
if g.config.Lang != "" {
|
||||
params = append(params, "lang="+g.config.Lang)
|
||||
}
|
||||
|
||||
if debug {
|
||||
if g.debug {
|
||||
fmt.Fprintln(os.Stderr, params)
|
||||
}
|
||||
|
||||
res, err := http.Get(wuri + strings.Join(params, "&"))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if debug {
|
||||
if g.debug {
|
||||
var out bytes.Buffer
|
||||
json.Indent(&out, body, "", " ")
|
||||
out.WriteTo(os.Stderr)
|
||||
|
||||
err := json.Indent(&out, body, "", " ")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, err = out.WriteTo(os.Stderr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fmt.Print("\n\n")
|
||||
}
|
||||
|
||||
if config.Lang == "" {
|
||||
if g.config.Lang == "" {
|
||||
if err = json.Unmarshal(body, &ret); err != nil {
|
||||
log.Println(err)
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
if err = unmarshalLang(body, &ret); err != nil {
|
||||
log.Println(err)
|
||||
if err = g.unmarshalLang(body, &ret); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return
|
||||
|
||||
return &ret, nil
|
||||
}
|
||||
|
||||
func unmarshalLang(body []byte, r *resp) error {
|
||||
func (g *global) unmarshalLang(body []byte, r *resp) error {
|
||||
var rv map[string]interface{}
|
||||
if err := json.Unmarshal(body, &rv); err != nil {
|
||||
return err
|
||||
|
@ -128,7 +143,7 @@ func unmarshalLang(body []byte, r *resp) error {
|
|||
if !ok {
|
||||
continue
|
||||
}
|
||||
langs, ok := cc["lang_"+config.Lang].([]interface{})
|
||||
langs, ok := cc["lang_"+g.config.Lang].([]interface{})
|
||||
if !ok || len(langs) == 0 {
|
||||
continue
|
||||
}
|
||||
|
@ -151,7 +166,7 @@ func unmarshalLang(body []byte, r *resp) error {
|
|||
if !ok {
|
||||
continue
|
||||
}
|
||||
langs, ok := h["lang_"+config.Lang].([]interface{})
|
||||
langs, ok := h["lang_"+g.config.Lang].([]interface{})
|
||||
if !ok || len(langs) == 0 {
|
||||
continue
|
||||
}
|
||||
|
@ -172,5 +187,6 @@ func unmarshalLang(body []byte, r *resp) error {
|
|||
if err := json.NewDecoder(&buf).Decode(r); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
172 internal/view/v1/cmd.go Normal file
@@ -0,0 +1,172 @@
// This code represents wttr.in view v1.
// It is based on wego (github.com/schachmat/wego) from which it diverged back in 2016.

//nolint:forbidigo,funlen,gocognit,cyclop
package v1

import (
	"encoding/json"
	"errors"
	"flag"
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"os/user"
	"path"
	"regexp"
	"strings"

	"github.com/mattn/go-colorable"
	"github.com/mattn/go-runewidth"
)

type Configuration struct {
	APIKey       string
	City         string
	Numdays      int
	Imperial     bool
	WindUnit     bool
	Inverse      bool
	Lang         string
	Narrow       bool
	LocationName string
	WindMS       bool
	RightToLeft  bool
}

type global struct {
	ansiEsc    *regexp.Regexp
	config     Configuration
	configpath string
	debug      bool
}

const (
	wuri      = "http://127.0.0.1:5001/premium/v1/weather.ashx?"
	suri      = "http://127.0.0.1:5001/premium/v1/search.ashx?"
	slotcount = 4
)

func (g *global) configload() error {
	b, err := ioutil.ReadFile(g.configpath)
	if err == nil {
		return json.Unmarshal(b, &g.config)
	}

	return err
}

func (g *global) configsave() error {
	j, err := json.MarshalIndent(g.config, "", "\t")
	if err == nil {
		return ioutil.WriteFile(g.configpath, j, 0o600)
	}

	return err
}

func (g *global) init() {
	flag.IntVar(&g.config.Numdays, "days", 3, "Number of days of weather forecast to be displayed")
	flag.StringVar(&g.config.Lang, "lang", "en", "Language of the report")
	flag.StringVar(&g.config.City, "city", "New York", "City to be queried")
	flag.BoolVar(&g.debug, "debug", false, "Print out raw json response for debugging purposes")
	flag.BoolVar(&g.config.Imperial, "imperial", false, "Use imperial units")
	flag.BoolVar(&g.config.Inverse, "inverse", false, "Use inverted colors")
	flag.BoolVar(&g.config.Narrow, "narrow", false, "Narrow output (two columns)")
	flag.StringVar(&g.config.LocationName, "location_name", "", "Location name (used in the caption)")
	flag.BoolVar(&g.config.WindMS, "wind_in_ms", false, "Show wind speed in m/s")
	flag.BoolVar(&g.config.RightToLeft, "right_to_left", false, "Right to left script")
	g.configpath = os.Getenv("WEGORC")
	if g.configpath == "" {
		usr, err := user.Current()
		if err != nil {
			log.Fatalf("%v\nYou can set the environment variable WEGORC to point to your config file as a workaround.", err)
		}
		g.configpath = path.Join(usr.HomeDir, ".wegorc")
	}
	g.config.APIKey = ""
	g.config.Imperial = false
	g.config.Lang = "en"
	err := g.configload()
	// A *os.PathError from configload means the config file does not exist
	// yet, so create it with the defaults.
	var pathError *os.PathError
	if errors.As(err, &pathError) {
		log.Printf("No config file found. Creating %s ...", g.configpath)
		if err2 := g.configsave(); err2 != nil {
			log.Fatal(err2)
		}
	} else if err != nil {
		log.Fatalf("could not parse %v: %v", g.configpath, err)
	}

	g.ansiEsc = regexp.MustCompile("\033.*?m")
}

func Cmd() error {
	g := global{}
	g.init()

	flag.Parse()

	r, err := g.getDataFromAPI()
	if err != nil {
		return err
	}

	if r.Data.Req == nil || len(r.Data.Req) < 1 {
		if r.Data.Err != nil && len(r.Data.Err) >= 1 {
			log.Fatal(r.Data.Err[0].Msg)
		}
		log.Fatal("Malformed response.")
	}
	locationName := r.Data.Req[0].Query
	if g.config.LocationName != "" {
		locationName = g.config.LocationName
	}
	if g.config.Lang == "he" || g.config.Lang == "ar" || g.config.Lang == "fa" {
		g.config.RightToLeft = true
	}
	if caption, ok := localizedCaption()[g.config.Lang]; !ok {
		fmt.Printf("Weather report: %s\n\n", locationName)
	} else {
		if g.config.RightToLeft {
			caption = locationName + " " + caption
			space := strings.Repeat(" ", 125-runewidth.StringWidth(caption))
			fmt.Printf("%s%s\n\n", space, caption)
		} else {
			fmt.Printf("%s %s\n\n", caption, locationName)
		}
	}
	stdout := colorable.NewColorableStdout()

	if r.Data.Cur == nil || len(r.Data.Cur) < 1 {
		log.Fatal("No weather data available.")
	}
	out := g.formatCond(make([]string, 5), r.Data.Cur[0], true)
	for _, val := range out {
		if g.config.RightToLeft {
			fmt.Fprint(stdout, strings.Repeat(" ", 94))
		} else {
			fmt.Fprint(stdout, " ")
		}
		fmt.Fprintln(stdout, val)
	}

	if g.config.Numdays == 0 {
		return nil
	}
	if r.Data.Weather == nil {
		log.Fatal("No detailed weather forecast available.")
	}
	for _, d := range r.Data.Weather {
		lines, err := g.printDay(d)
		if err != nil {
			return err
		}
		for _, val := range lines {
			fmt.Fprintln(stdout, val)
		}
	}

	return nil
}
|
@ -1,4 +1,5 @@
|
|||
package main
|
||||
//nolint:funlen,nestif,cyclop,gocognit,gocyclo
|
||||
package v1
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
@ -8,8 +9,8 @@ import (
|
|||
"github.com/mattn/go-runewidth"
|
||||
)
|
||||
|
||||
var (
|
||||
windDir = map[string]string{
|
||||
func windDir() map[string]string {
|
||||
return map[string]string{
|
||||
"N": "\033[1m↓\033[0m",
|
||||
"NNE": "\033[1m↓\033[0m",
|
||||
"NE": "\033[1m↙\033[0m",
|
||||
|
@ -27,13 +28,14 @@ var (
|
|||
"NW": "\033[1m↘\033[0m",
|
||||
"NNW": "\033[1m↘\033[0m",
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
func formatTemp(c cond) string {
|
||||
func (g *global) formatTemp(c cond) string {
|
||||
color := func(temp int, explicitPlus bool) string {
|
||||
var col = 0
|
||||
if !config.Inverse {
|
||||
// Extemely cold temperature must be shown with violet
|
||||
var col int
|
||||
//nolint:dupl
|
||||
if !g.config.Inverse {
|
||||
// Extremely cold temperature must be shown with violet
|
||||
// because dark blue is too dark
|
||||
col = 165
|
||||
switch temp {
|
||||
|
@ -127,12 +129,13 @@ func formatTemp(c cond) string {
|
|||
}
|
||||
}
|
||||
}
|
||||
if config.Imperial {
|
||||
if g.config.Imperial {
|
||||
temp = (temp*18 + 320) / 10
|
||||
}
|
||||
if explicitPlus {
|
||||
return fmt.Sprintf("\033[38;5;%03dm+%d\033[0m", col, temp)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("\033[38;5;%03dm%d\033[0m", col, temp)
|
||||
}
|
||||
t := c.TempC
|
||||
|
@ -160,135 +163,129 @@ func formatTemp(c cond) string {
|
|||
if explicitPlus1 {
|
||||
explicitPlus2 = false
|
||||
}
|
||||
return pad(
|
||||
|
||||
return g.pad(
|
||||
fmt.Sprintf("%s(%s) °%s",
|
||||
color(t, explicitPlus1),
|
||||
color(c.FeelsLikeC, explicitPlus2),
|
||||
unitTemp[config.Imperial]),
|
||||
unitTemp()[g.config.Imperial]),
|
||||
15)
|
||||
}
|
||||
// if c.FeelsLikeC < t {
|
||||
// if c.FeelsLikeC < 0 && t > 0 {
|
||||
// explicitPlus = true
|
||||
// }
|
||||
// return pad(fmt.Sprintf("%s%s%s °%s", color(c.FeelsLikeC, false), hyphen, color(t, explicitPlus), unitTemp[config.Imperial]), 15)
|
||||
// } else if c.FeelsLikeC > t {
|
||||
// if t < 0 && c.FeelsLikeC > 0 {
|
||||
// explicitPlus = true
|
||||
// }
|
||||
// return pad(fmt.Sprintf("%s%s%s °%s", color(t, false), hyphen, color(c.FeelsLikeC, explicitPlus), unitTemp[config.Imperial]), 15)
|
||||
// }
|
||||
return pad(fmt.Sprintf("%s °%s", color(c.FeelsLikeC, false), unitTemp[config.Imperial]), 15)
|
||||
|
||||
return g.pad(fmt.Sprintf("%s °%s", color(c.FeelsLikeC, false), unitTemp()[g.config.Imperial]), 15)
|
||||
}
|
||||
|
||||
func formatWind(c cond) string {
|
||||
windInRightUnits := func(spd int) int {
|
||||
if config.WindMS {
|
||||
spd = (spd * 1000) / 3600
|
||||
} else {
|
||||
if config.Imperial {
|
||||
spd = (spd * 1000) / 1609
|
||||
}
|
||||
}
|
||||
return spd
|
||||
}
|
||||
color := func(spd int) string {
|
||||
var col = 46
|
||||
switch spd {
|
||||
case 1, 2, 3:
|
||||
col = 82
|
||||
case 4, 5, 6:
|
||||
col = 118
|
||||
case 7, 8, 9:
|
||||
col = 154
|
||||
case 10, 11, 12:
|
||||
col = 190
|
||||
case 13, 14, 15:
|
||||
col = 226
|
||||
case 16, 17, 18, 19:
|
||||
col = 220
|
||||
case 20, 21, 22, 23:
|
||||
col = 214
|
||||
case 24, 25, 26, 27:
|
||||
col = 208
|
||||
case 28, 29, 30, 31:
|
||||
col = 202
|
||||
default:
|
||||
if spd > 0 {
|
||||
col = 196
|
||||
}
|
||||
}
|
||||
spd = windInRightUnits(spd)
|
||||
|
||||
return fmt.Sprintf("\033[38;5;%03dm%d\033[0m", col, spd)
|
||||
func (g *global) formatWind(c cond) string {
|
||||
unitWindString := unitWind(0, g.config.Lang)
|
||||
if g.config.WindMS {
|
||||
unitWindString = unitWind(2, g.config.Lang)
|
||||
} else if g.config.Imperial {
|
||||
unitWindString = unitWind(1, g.config.Lang)
|
||||
}
|
||||
|
||||
unitWindString := unitWind(0, config.Lang)
|
||||
if config.WindMS {
|
||||
unitWindString = unitWind(2, config.Lang)
|
||||
} else {
|
||||
if config.Imperial {
|
||||
unitWindString = unitWind(1, config.Lang)
|
||||
}
|
||||
hyphen := "-"
|
||||
|
||||
cWindGustKmph := speedToColor(c.WindGustKmph, windInRightUnits(c.WindGustKmph, g.config.WindMS, g.config.Imperial))
|
||||
cWindspeedKmph := speedToColor(c.WindspeedKmph, windInRightUnits(c.WindspeedKmph, g.config.WindMS, g.config.Imperial))
|
||||
if windInRightUnits(c.WindGustKmph, g.config.WindMS, g.config.Imperial) >
|
||||
windInRightUnits(c.WindspeedKmph, g.config.WindMS, g.config.Imperial) {
|
||||
return g.pad(
|
||||
fmt.Sprintf("%s %s%s%s %s", windDir()[c.Winddir16Point], cWindspeedKmph, hyphen, cWindGustKmph, unitWindString),
|
||||
15)
|
||||
}
|
||||
|
||||
hyphen := " - "
|
||||
// if (config.Lang == "sl") {
|
||||
// hyphen = "-"
|
||||
// }
|
||||
hyphen = "-"
|
||||
|
||||
cWindGustKmph := color(c.WindGustKmph)
|
||||
cWindspeedKmph := color(c.WindspeedKmph)
|
||||
if windInRightUnits(c.WindGustKmph) > windInRightUnits(c.WindspeedKmph) {
|
||||
return pad(fmt.Sprintf("%s %s%s%s %s", windDir[c.Winddir16Point], cWindspeedKmph, hyphen, cWindGustKmph, unitWindString), 15)
|
||||
}
|
||||
return pad(fmt.Sprintf("%s %s %s", windDir[c.Winddir16Point], cWindspeedKmph, unitWindString), 15)
|
||||
return g.pad(fmt.Sprintf("%s %s %s", windDir()[c.Winddir16Point], cWindspeedKmph, unitWindString), 15)
|
||||
}
|
||||
|
||||
func formatVisibility(c cond) string {
|
||||
if config.Imperial {
|
||||
func windInRightUnits(spd int, windMS, imperial bool) int {
|
||||
if windMS {
|
||||
spd = (spd * 1000) / 3600
|
||||
} else if imperial {
|
||||
spd = (spd * 1000) / 1609
|
||||
}
|
||||
|
||||
return spd
|
||||
}
|
||||
|
||||
func speedToColor(spd, spdConverted int) string {
|
||||
col := 46
|
||||
switch spd {
|
||||
case 1, 2, 3:
|
||||
col = 82
|
||||
case 4, 5, 6:
|
||||
col = 118
|
||||
case 7, 8, 9:
|
||||
col = 154
|
||||
case 10, 11, 12:
|
||||
col = 190
|
||||
case 13, 14, 15:
|
||||
col = 226
|
||||
case 16, 17, 18, 19:
|
||||
col = 220
|
||||
case 20, 21, 22, 23:
|
||||
col = 214
|
||||
case 24, 25, 26, 27:
|
||||
col = 208
|
||||
case 28, 29, 30, 31:
|
||||
col = 202
|
||||
default:
|
||||
if spd > 0 {
|
||||
col = 196
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Sprintf("\033[38;5;%03dm%d\033[0m", col, spdConverted)
|
||||
}
|
||||
|
||||
func (g *global) formatVisibility(c cond) string {
|
||||
if g.config.Imperial {
|
||||
c.VisibleDistKM = (c.VisibleDistKM * 621) / 1000
|
||||
}
|
||||
return pad(fmt.Sprintf("%d %s", c.VisibleDistKM, unitVis(config.Imperial, config.Lang)), 15)
|
||||
|
||||
return g.pad(fmt.Sprintf("%d %s", c.VisibleDistKM, unitVis(g.config.Imperial, g.config.Lang)), 15)
|
||||
}
|
||||
|
||||
func formatRain(c cond) string {
|
||||
rainUnit := float32(c.PrecipMM)
|
||||
if config.Imperial {
|
||||
rainUnit = float32(c.PrecipMM) * 0.039
|
||||
func (g *global) formatRain(c cond) string {
|
||||
rainUnit := c.PrecipMM
|
||||
if g.config.Imperial {
|
||||
rainUnit = c.PrecipMM * 0.039
|
||||
}
|
||||
if c.ChanceOfRain != "" {
|
||||
return pad(fmt.Sprintf(
|
||||
return g.pad(fmt.Sprintf(
|
||||
"%.1f %s | %s%%",
|
||||
rainUnit,
|
||||
unitRain(config.Imperial, config.Lang),
|
||||
unitRain(g.config.Imperial, g.config.Lang),
|
||||
c.ChanceOfRain), 15)
|
||||
}
|
||||
return pad(fmt.Sprintf("%.1f %s", rainUnit, unitRain(config.Imperial, config.Lang)), 15)
|
||||
|
||||
return g.pad(fmt.Sprintf("%.1f %s", rainUnit, unitRain(g.config.Imperial, g.config.Lang)), 15)
|
||||
}
|
||||
|
||||
func formatCond(cur []string, c cond, current bool) (ret []string) {
|
||||
var icon []string
|
||||
if i, ok := codes[c.WeatherCode]; !ok {
|
||||
icon = iconUnknown
|
||||
func (g *global) formatCond(cur []string, c cond, current bool) []string {
|
||||
var (
|
||||
ret []string
|
||||
icon []string
|
||||
)
|
||||
|
||||
if i, ok := codes()[c.WeatherCode]; !ok {
|
||||
icon = getIcon("iconUnknown")
|
||||
} else {
|
||||
icon = i
|
||||
}
|
||||
if config.Inverse {
|
||||
if g.config.Inverse {
|
||||
// inverting colors
|
||||
for i := range icon {
|
||||
icon[i] = strings.Replace(icon[i], "38;5;226", "38;5;94", -1)
|
||||
icon[i] = strings.Replace(icon[i], "38;5;250", "38;5;243", -1)
|
||||
icon[i] = strings.Replace(icon[i], "38;5;21", "38;5;18", -1)
|
||||
icon[i] = strings.Replace(icon[i], "38;5;255", "38;5;245", -1)
|
||||
icon[i] = strings.Replace(icon[i], "38;5;111", "38;5;63", -1)
|
||||
icon[i] = strings.Replace(icon[i], "38;5;251", "38;5;238", -1)
|
||||
icon[i] = strings.ReplaceAll(icon[i], "38;5;226", "38;5;94")
|
||||
icon[i] = strings.ReplaceAll(icon[i], "38;5;250", "38;5;243")
|
||||
icon[i] = strings.ReplaceAll(icon[i], "38;5;21", "38;5;18")
|
||||
icon[i] = strings.ReplaceAll(icon[i], "38;5;255", "38;5;245")
|
||||
icon[i] = strings.ReplaceAll(icon[i], "38;5;111", "38;5;63")
|
||||
icon[i] = strings.ReplaceAll(icon[i], "38;5;251", "38;5;238")
|
||||
}
|
||||
}
|
||||
//desc := fmt.Sprintf("%-15.15v", c.WeatherDesc[0].Value)
|
||||
// desc := fmt.Sprintf("%-15.15v", c.WeatherDesc[0].Value)
|
||||
desc := c.WeatherDesc[0].Value
|
||||
if config.RightToLeft {
|
||||
if g.config.RightToLeft {
|
||||
for runewidth.StringWidth(desc) < 15 {
|
||||
desc = " " + desc
|
||||
}
|
||||
|
@ -306,7 +303,7 @@ func formatCond(cur []string, c cond, current bool) (ret []string) {
|
|||
}
|
||||
}
|
||||
if current {
|
||||
if config.RightToLeft {
|
||||
if g.config.RightToLeft {
|
||||
desc = c.WeatherDesc[0].Value
|
||||
if runewidth.StringWidth(desc) < 15 {
|
||||
desc = strings.Repeat(" ", 15-runewidth.StringWidth(desc)) + desc
|
||||
|
@ -315,7 +312,7 @@ func formatCond(cur []string, c cond, current bool) (ret []string) {
|
|||
desc = c.WeatherDesc[0].Value
|
||||
}
|
||||
} else {
|
||||
if config.RightToLeft {
|
||||
if g.config.RightToLeft {
|
||||
if frstRune, size := utf8.DecodeRuneInString(desc); frstRune != ' ' {
|
||||
desc = "…" + desc[size:]
|
||||
for runewidth.StringWidth(desc) < 15 {
|
||||
|
@ -325,32 +322,46 @@ func formatCond(cur []string, c cond, current bool) (ret []string) {
|
|||
} else {
|
||||
if lastRune, size := utf8.DecodeLastRuneInString(desc); lastRune != ' ' {
|
||||
desc = desc[:len(desc)-size] + "…"
|
||||
//for numberOfSpaces < runewidth.StringWidth(fmt.Sprintf("%c", lastRune)) - 1 {
|
||||
// for numberOfSpaces < runewidth.StringWidth(fmt.Sprintf("%c", lastRune)) - 1 {
|
||||
for runewidth.StringWidth(desc) < 15 {
|
||||
desc = desc + " "
|
||||
desc += " "
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if config.RightToLeft {
|
||||
ret = append(ret, fmt.Sprintf("%v %v %v", cur[0], desc, icon[0]), fmt.Sprintf("%v %v %v", cur[1], formatTemp(c), icon[1]), fmt.Sprintf("%v %v %v", cur[2], formatWind(c), icon[2]), fmt.Sprintf("%v %v %v", cur[3], formatVisibility(c), icon[3]), fmt.Sprintf("%v %v %v", cur[4], formatRain(c), icon[4]))
|
||||
if g.config.RightToLeft {
|
||||
ret = append(
|
||||
ret,
|
||||
fmt.Sprintf("%v %v %v", cur[0], desc, icon[0]),
|
||||
fmt.Sprintf("%v %v %v", cur[1], g.formatTemp(c), icon[1]),
|
||||
fmt.Sprintf("%v %v %v", cur[2], g.formatWind(c), icon[2]),
|
||||
fmt.Sprintf("%v %v %v", cur[3], g.formatVisibility(c), icon[3]),
|
||||
fmt.Sprintf("%v %v %v", cur[4], g.formatRain(c), icon[4]))
|
||||
} else {
|
||||
ret = append(ret, fmt.Sprintf("%v %v %v", cur[0], icon[0], desc), fmt.Sprintf("%v %v %v", cur[1], icon[1], formatTemp(c)), fmt.Sprintf("%v %v %v", cur[2], icon[2], formatWind(c)), fmt.Sprintf("%v %v %v", cur[3], icon[3], formatVisibility(c)), fmt.Sprintf("%v %v %v", cur[4], icon[4], formatRain(c)))
|
||||
ret = append(
|
||||
ret,
|
||||
fmt.Sprintf("%v %v %v", cur[0], icon[0], desc),
|
||||
fmt.Sprintf("%v %v %v", cur[1], icon[1], g.formatTemp(c)),
|
||||
fmt.Sprintf("%v %v %v", cur[2], icon[2], g.formatWind(c)),
|
||||
fmt.Sprintf("%v %v %v", cur[3], icon[3], g.formatVisibility(c)),
|
||||
fmt.Sprintf("%v %v %v", cur[4], icon[4], g.formatRain(c)))
|
||||
}
|
||||
return
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func justifyCenter(s string, width int) string {
|
||||
appendSide := 0
|
||||
for runewidth.StringWidth(s) <= width {
|
||||
if appendSide == 1 {
|
||||
s = s + " "
|
||||
s += " "
|
||||
appendSide = 0
|
||||
} else {
|
||||
s = " " + s
|
||||
appendSide = 1
|
||||
}
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
|
@ -359,28 +370,31 @@ func reverse(s string) string {
|
|||
for i, j := 0, len(r)-1; i < len(r)/2; i, j = i+1, j-1 {
|
||||
r[i], r[j] = r[j], r[i]
|
||||
}
|
||||
|
||||
return string(r)
|
||||
}
|
||||
|
||||
func pad(s string, mustLen int) (ret string) {
|
||||
func (g *global) pad(s string, mustLen int) string {
|
||||
var ret string
|
||||
ret = s
|
||||
realLen := utf8.RuneCountInString(ansiEsc.ReplaceAllLiteralString(s, ""))
|
||||
realLen := utf8.RuneCountInString(g.ansiEsc.ReplaceAllLiteralString(s, ""))
|
||||
delta := mustLen - realLen
|
||||
if delta > 0 {
|
||||
if config.RightToLeft {
|
||||
if g.config.RightToLeft {
|
||||
ret = strings.Repeat(" ", delta) + ret + "\033[0m"
|
||||
} else {
|
||||
ret += "\033[0m" + strings.Repeat(" ", delta)
|
||||
}
|
||||
} else if delta < 0 {
|
||||
toks := ansiEsc.Split(s, 2)
|
||||
toks := g.ansiEsc.Split(s, 2)
|
||||
tokLen := utf8.RuneCountInString(toks[0])
|
||||
esc := ansiEsc.FindString(s)
|
||||
esc := g.ansiEsc.FindString(s)
|
||||
if tokLen > mustLen {
|
||||
ret = fmt.Sprintf("%.*s\033[0m", mustLen, toks[0])
|
||||
} else {
|
||||
ret = fmt.Sprintf("%s%s%s", toks[0], esc, pad(toks[1], mustLen-tokLen))
|
||||
ret = fmt.Sprintf("%s%s%s", toks[0], esc, g.pad(toks[1], mustLen-tokLen))
|
||||
}
|
||||
}
|
||||
return
|
||||
|
||||
return ret
|
||||
}
|
213
internal/view/v1/icons.go
Normal file
213
internal/view/v1/icons.go
Normal file
|
@ -0,0 +1,213 @@
|
|||
package v1
|
||||
|
||||
//nolint:funlen
|
||||
func getIcon(name string) []string {
|
||||
icon := map[string][]string{
|
||||
"iconUnknown": {
|
||||
" .-. ",
|
||||
" __) ",
|
||||
" ( ",
|
||||
" `-’ ",
|
||||
" • ",
|
||||
},
|
||||
|
||||
"iconSunny": {
|
||||
"\033[38;5;226m \\ / \033[0m",
|
||||
"\033[38;5;226m .-. \033[0m",
|
||||
"\033[38;5;226m ― ( ) ― \033[0m",
|
||||
"\033[38;5;226m `-’ \033[0m",
|
||||
"\033[38;5;226m / \\ \033[0m",
|
||||
},
|
||||
|
||||
"iconPartlyCloudy": {
|
||||
"\033[38;5;226m \\ /\033[0m ",
|
||||
"\033[38;5;226m _ /\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m \\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
" ",
|
||||
},
|
||||
|
||||
"iconCloudy": {
|
||||
" ",
|
||||
"\033[38;5;250m .--. \033[0m",
|
||||
"\033[38;5;250m .-( ). \033[0m",
|
||||
"\033[38;5;250m (___.__)__) \033[0m",
|
||||
" ",
|
||||
},
|
||||
|
||||
"iconVeryCloudy": {
|
||||
" ",
|
||||
"\033[38;5;240;1m .--. \033[0m",
|
||||
"\033[38;5;240;1m .-( ). \033[0m",
|
||||
"\033[38;5;240;1m (___.__)__) \033[0m",
|
||||
" ",
|
||||
},
|
||||
|
||||
"iconLightShowers": {
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m",
|
||||
},
|
||||
|
||||
"iconHeavyShowers": {
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;240;1m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;240;1m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;240;1m(___(__) \033[0m",
|
||||
"\033[38;5;21;1m ‚‘‚‘‚‘‚‘ \033[0m",
|
||||
"\033[38;5;21;1m ‚’‚’‚’‚’ \033[0m",
|
||||
},
|
||||
|
||||
"iconLightSnowShowers": {
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m",
|
||||
},
|
||||
|
||||
"iconHeavySnowShowers": {
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;240;1m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;240;1m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;240;1m(___(__) \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m",
|
||||
},
|
||||
|
||||
"iconLightSleetShowers": {
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[38;5;255m* \033[0m",
|
||||
"\033[38;5;255m *\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[0m",
|
||||
},
|
||||
|
||||
"iconThunderyShowers": {
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;228;5m ⚡\033[38;5;111;25m‘‘\033[38;5;228;5m⚡\033[38;5;111;25m‘‘ \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m",
|
||||
},
|
||||
|
||||
"iconThunderyHeavyRain": {
|
||||
"\033[38;5;240;1m .-. \033[0m",
|
||||
"\033[38;5;240;1m ( ). \033[0m",
|
||||
"\033[38;5;240;1m (___(__) \033[0m",
|
||||
"\033[38;5;21;1m ‚‘\033[38;5;228;5m⚡\033[38;5;21;25m‘‚\033[38;5;228;5m⚡\033[38;5;21;25m‚‘ \033[0m",
|
||||
"\033[38;5;21;1m ‚’‚’\033[38;5;228;5m⚡\033[38;5;21;25m’‚’ \033[0m",
|
||||
},
|
||||
|
||||
"iconThunderySnowShowers": {
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;255m *\033[38;5;228;5m⚡\033[38;5;255;25m*\033[38;5;228;5m⚡\033[38;5;255;25m* \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m",
|
||||
},
|
||||
|
||||
"iconLightRain": {
|
||||
"\033[38;5;250m .-. \033[0m",
|
||||
"\033[38;5;250m ( ). \033[0m",
|
||||
"\033[38;5;250m (___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m",
|
||||
},
|
||||
|
||||
"iconHeavyRain": {
|
||||
"\033[38;5;240;1m .-. \033[0m",
|
||||
"\033[38;5;240;1m ( ). \033[0m",
|
||||
"\033[38;5;240;1m (___(__) \033[0m",
|
||||
"\033[38;5;21;1m ‚‘‚‘‚‘‚‘ \033[0m",
|
||||
"\033[38;5;21;1m ‚’‚’‚’‚’ \033[0m",
|
||||
},
|
||||
|
||||
"iconLightSnow": {
|
||||
"\033[38;5;250m .-. \033[0m",
|
||||
"\033[38;5;250m ( ). \033[0m",
|
||||
"\033[38;5;250m (___(__) \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m",
|
||||
},
|
||||
|
||||
"iconHeavySnow": {
|
||||
"\033[38;5;240;1m .-. \033[0m",
|
||||
"\033[38;5;240;1m ( ). \033[0m",
|
||||
"\033[38;5;240;1m (___(__) \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m",
|
||||
},
|
||||
|
||||
"iconLightSleet": {
|
||||
"\033[38;5;250m .-. \033[0m",
|
||||
"\033[38;5;250m ( ). \033[0m",
|
||||
"\033[38;5;250m (___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[38;5;255m* \033[0m",
|
||||
"\033[38;5;255m *\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[0m",
|
||||
},
|
||||
|
||||
"iconFog": {
|
||||
" ",
|
||||
"\033[38;5;251m _ - _ - _ - \033[0m",
|
||||
"\033[38;5;251m _ - _ - _ \033[0m",
|
||||
"\033[38;5;251m _ - _ - _ - \033[0m",
|
||||
" ",
|
||||
},
|
||||
}
|
||||
|
||||
return icon[name]
|
||||
}
|
||||
|
||||
func codes() map[int][]string {
|
||||
return map[int][]string{
|
||||
113: getIcon("iconSunny"),
|
||||
116: getIcon("iconPartlyCloudy"),
|
||||
119: getIcon("iconCloudy"),
|
||||
122: getIcon("iconVeryCloudy"),
|
||||
143: getIcon("iconFog"),
|
||||
176: getIcon("iconLightShowers"),
|
||||
179: getIcon("iconLightSleetShowers"),
|
||||
182: getIcon("iconLightSleet"),
|
||||
185: getIcon("iconLightSleet"),
|
||||
200: getIcon("iconThunderyShowers"),
|
||||
227: getIcon("iconLightSnow"),
|
||||
230: getIcon("iconHeavySnow"),
|
||||
248: getIcon("iconFog"),
|
||||
260: getIcon("iconFog"),
|
||||
263: getIcon("iconLightShowers"),
|
||||
266: getIcon("iconLightRain"),
|
||||
281: getIcon("iconLightSleet"),
|
||||
284: getIcon("iconLightSleet"),
|
||||
293: getIcon("iconLightRain"),
|
||||
296: getIcon("iconLightRain"),
|
||||
299: getIcon("iconHeavyShowers"),
|
||||
302: getIcon("iconHeavyRain"),
|
||||
305: getIcon("iconHeavyShowers"),
|
||||
308: getIcon("iconHeavyRain"),
|
||||
311: getIcon("iconLightSleet"),
|
||||
314: getIcon("iconLightSleet"),
|
||||
317: getIcon("iconLightSleet"),
|
||||
320: getIcon("iconLightSnow"),
|
||||
323: getIcon("iconLightSnowShowers"),
|
||||
326: getIcon("iconLightSnowShowers"),
|
||||
329: getIcon("iconHeavySnow"),
|
||||
332: getIcon("iconHeavySnow"),
|
||||
335: getIcon("iconHeavySnowShowers"),
|
||||
338: getIcon("iconHeavySnow"),
|
||||
350: getIcon("iconLightSleet"),
|
||||
353: getIcon("iconLightShowers"),
|
||||
356: getIcon("iconHeavyShowers"),
|
||||
359: getIcon("iconHeavyRain"),
|
||||
362: getIcon("iconLightSleetShowers"),
|
||||
365: getIcon("iconLightSleetShowers"),
|
||||
368: getIcon("iconLightSnowShowers"),
|
||||
371: getIcon("iconHeavySnowShowers"),
|
||||
374: getIcon("iconLightSleetShowers"),
|
||||
377: getIcon("iconLightSleet"),
|
||||
386: getIcon("iconThunderyShowers"),
|
||||
389: getIcon("iconThunderyHeavyRain"),
|
||||
392: getIcon("iconThunderySnowShowers"),
|
||||
395: getIcon("iconHeavySnowShowers"),
|
||||
}
|
||||
}
|
|
@ -1,7 +1,8 @@
|
|||
package main
|
||||
package v1
|
||||
|
||||
var (
|
||||
locale = map[string]string{
|
||||
//nolint:funlen
|
||||
func locale() map[string]string {
|
||||
return map[string]string{
|
||||
"af": "af_ZA",
|
||||
"am": "am_ET",
|
||||
"ar": "ar_TN",
|
||||
|
@ -73,8 +74,11 @@ var (
|
|||
"zh": "zh_CN",
|
||||
"zu": "zu_ZA",
|
||||
}
|
||||
}
|
||||
|
||||
localizedCaption = map[string]string{
|
||||
//nolint:funlen
|
||||
func localizedCaption() map[string]string {
|
||||
return map[string]string{
|
||||
"af": "Weer verslag vir:",
|
||||
"am": "የአየር ሁኔታ ዘገባ ለ ፥",
|
||||
"ar": "تقرير حالة ألطقس",
|
||||
|
@ -147,8 +151,11 @@ var (
|
|||
"zh-tw": "天氣預報:",
|
||||
"mg": "Vinavina toetr'andro hoan'ny:",
|
||||
}
|
||||
}
|
||||
|
||||
daytimeTranslation = map[string][]string{
|
||||
//nolint:misspell,funlen
|
||||
func daytimeTranslation() map[string][]string {
|
||||
return map[string][]string{
|
||||
"af": {"Oggend", "Middag", "Vroegaand", "Laatnag"},
|
||||
"am": {"ጠዋት", "ከሰዓት በኋላ", "ምሽት", "ሌሊት"},
|
||||
"ar": {"ﺎﻠﻠﻴﻟ", "ﺎﻠﻤﺳﺍﺀ", "ﺎﻠﻈﻫﺭ", "ﺎﻠﺼﺑﺎﺣ"},
|
||||
|
@ -222,99 +229,110 @@ var (
|
|||
"zu": {"Morning", "Noon", "Evening", "Night"},
|
||||
"mg": {"Maraina", "Tolakandro", "Ariva", "Alina"},
|
||||
}
|
||||
}
|
||||
|
||||
unitTemp = map[bool]string{
|
||||
func unitTemp() map[bool]string {
|
||||
return map[bool]string{
|
||||
false: "C",
|
||||
true: "F",
|
||||
}
|
||||
}
|
||||
|
||||
localizedRain = map[string]map[bool]string{
|
||||
"en": map[bool]string{
|
||||
func localizedRain() map[string]map[bool]string {
|
||||
return map[string]map[bool]string{
|
||||
"en": {
|
||||
false: "mm",
|
||||
true: "in",
|
||||
},
|
||||
"be": map[bool]string{
|
||||
"be": {
|
||||
false: "мм",
|
||||
true: "in",
|
||||
},
|
||||
"ru": map[bool]string{
|
||||
"ru": {
|
||||
false: "мм",
|
||||
true: "in",
|
||||
},
|
||||
"uk": map[bool]string{
|
||||
"uk": {
|
||||
false: "мм",
|
||||
true: "in",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
localizedVis = map[string]map[bool]string{
|
||||
"en": map[bool]string{
|
||||
func localizedVis() map[string]map[bool]string {
|
||||
return map[string]map[bool]string{
|
||||
"en": {
|
||||
false: "km",
|
||||
true: "mi",
|
||||
},
|
||||
"be": map[bool]string{
|
||||
"be": {
|
||||
false: "км",
|
||||
true: "mi",
|
||||
},
|
||||
"ru": map[bool]string{
|
||||
"ru": {
|
||||
false: "км",
|
||||
true: "mi",
|
||||
},
|
||||
"uk": map[bool]string{
|
||||
"uk": {
|
||||
false: "км",
|
||||
true: "mi",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
localizedWind = map[string]map[int]string{
|
||||
"en": map[int]string{
|
||||
func localizedWind() map[string]map[int]string {
|
||||
return map[string]map[int]string{
|
||||
"en": {
|
||||
0: "km/h",
|
||||
1: "mph",
|
||||
2: "m/s",
|
||||
},
|
||||
"be": map[int]string{
|
||||
"be": {
|
||||
0: "км/г",
|
||||
1: "mph",
|
||||
2: "м/c",
|
||||
},
|
||||
"ru": map[int]string{
|
||||
"ru": {
|
||||
0: "км/ч",
|
||||
1: "mph",
|
||||
2: "м/c",
|
||||
},
|
||||
"tr": map[int]string{
|
||||
"tr": {
|
||||
0: "km/sa",
|
||||
1: "mph",
|
||||
2: "m/s",
|
||||
},
|
||||
"uk": map[int]string{
|
||||
"uk": {
|
||||
0: "км/год",
|
||||
1: "mph",
|
||||
2: "м/c",
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
func unitWind(unit int, lang string) string {
|
||||
translation, ok := localizedWind[lang]
|
||||
translation, ok := localizedWind()[lang]
|
||||
if !ok {
|
||||
translation = localizedWind["en"]
|
||||
translation = localizedWind()["en"]
|
||||
}
|
||||
|
||||
return translation[unit]
|
||||
}
|
||||
|
||||
func unitVis(unit bool, lang string) string {
|
||||
translation, ok := localizedVis[lang]
|
||||
translation, ok := localizedVis()[lang]
|
||||
if !ok {
|
||||
translation = localizedVis["en"]
|
||||
translation = localizedVis()["en"]
|
||||
}
|
||||
|
||||
return translation[unit]
|
||||
}
|
||||
|
||||
func unitRain(unit bool, lang string) string {
|
||||
translation, ok := localizedRain[lang]
|
||||
translation, ok := localizedRain()[lang]
|
||||
if !ok {
|
||||
translation = localizedRain["en"]
|
||||
translation = localizedRain()["en"]
|
||||
}
|
||||
|
||||
return translation[unit]
|
||||
}
|
|
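The hunks above replace the package-level maps (locale, localizedCaption, daytimeTranslation, unitTemp, localizedRain, localizedVis, localizedWind) with functions that build and return the map on each call; that keeps linters such as gochecknoglobals happy at the cost of a per-call allocation, so hot paths should call them once and keep the result. A minimal usage sketch of the unit helpers follows; formatWind and the speed value are made up for illustration, only unitWind comes from the diff.

// Sketch: format a wind speed with its localized unit.
// windUnit: 0 = km/h, 1 = mph, 2 = m/s (see localizedWind above).
func formatWind(speed float64, windUnit int, lang string) string {
    return fmt.Sprintf("%.0f %s", speed, unitWind(windUnit, lang))
}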
@ -1,4 +1,4 @@
|
|||
package main
|
||||
package v1
|
||||
|
||||
import (
|
||||
"math"
|
||||
|
@ -7,13 +7,19 @@ import (
|
|||
"github.com/klauspost/lctime"
|
||||
)
|
||||
|
||||
var (
|
||||
slotTimes = [slotcount]int{9 * 60, 12 * 60, 18 * 60, 22 * 60}
|
||||
)
|
||||
func slotTimes() []int {
|
||||
return []int{9 * 60, 12 * 60, 18 * 60, 22 * 60}
|
||||
}
|
||||
|
||||
//nolint:funlen,gocognit,cyclop
|
||||
func (g *global) printDay(w weather) ([]string, error) {
|
||||
var (
|
||||
ret = []string{}
|
||||
dateName string
|
||||
names string
|
||||
)
|
||||
|
||||
func printDay(w weather) (ret []string) {
|
||||
hourly := w.Hourly
|
||||
ret = make([]string, 5)
|
||||
for i := range ret {
|
||||
ret[i] = "│"
|
||||
}
|
||||
|
@ -23,73 +29,67 @@ func printDay(w weather) (ret []string) {
|
|||
for _, h := range hourly {
|
||||
c := int(math.Mod(float64(h.Time), 100)) + 60*(h.Time/100)
|
||||
for i, s := range slots {
|
||||
if math.Abs(float64(c-slotTimes[i])) < math.Abs(float64(s.Time-slotTimes[i])) {
|
||||
if math.Abs(float64(c-slotTimes()[i])) < math.Abs(float64(s.Time-slotTimes()[i])) {
|
||||
h.Time = c
|
||||
slots[i] = h
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if config.RightToLeft {
|
||||
if g.config.RightToLeft {
|
||||
slots[0], slots[3] = slots[3], slots[0]
|
||||
slots[1], slots[2] = slots[2], slots[1]
|
||||
}
|
||||
|
||||
for i, s := range slots {
|
||||
if config.Narrow {
|
||||
if g.config.Narrow {
|
||||
if i == 0 || i == 2 {
|
||||
continue
|
||||
}
|
||||
}
|
||||
ret = formatCond(ret, s, false)
|
||||
ret = g.formatCond(ret, s, false)
|
||||
for i := range ret {
|
||||
ret[i] = ret[i] + "│"
|
||||
ret[i] += "│"
|
||||
}
|
||||
}
|
||||
|
||||
d, _ := time.Parse("2006-01-02", w.Date)
|
||||
// dateFmt := "┤ " + d.Format("Mon 02. Jan") + " ├"
|
||||
|
||||
if val, ok := locale[config.Lang]; ok {
|
||||
lctime.SetLocale(val)
|
||||
if val, ok := locale()[g.config.Lang]; ok {
|
||||
err := lctime.SetLocale(val)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
lctime.SetLocale("en_US")
|
||||
err := lctime.SetLocale("en_US")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
dateName := ""
|
||||
if config.RightToLeft {
|
||||
|
||||
if g.config.RightToLeft {
|
||||
dow := lctime.Strftime("%a", d)
|
||||
day := lctime.Strftime("%d", d)
|
||||
month := lctime.Strftime("%b", d)
|
||||
dateName = reverse(month) + " " + day + " " + reverse(dow)
|
||||
} else {
|
||||
dateName = lctime.Strftime("%a %d %b", d)
|
||||
if config.Lang == "ko" {
|
||||
if g.config.Lang == "ko" {
|
||||
dateName = lctime.Strftime("%b %d일 %a", d)
|
||||
}
|
||||
if config.Lang == "zh" || config.Lang == "zh-tw" || config.Lang == "zh-cn" {
|
||||
if g.config.Lang == "zh" || g.config.Lang == "zh-tw" || g.config.Lang == "zh-cn" {
|
||||
dateName = lctime.Strftime("%b%d日%A", d)
|
||||
}
|
||||
}
|
||||
// appendSide := 0
|
||||
// // for utf8.RuneCountInString(dateName) <= dateWidth {
|
||||
// for runewidth.StringWidth(dateName) <= dateWidth {
|
||||
// if appendSide == 1 {
|
||||
// dateName = dateName + " "
|
||||
// appendSide = 0
|
||||
// } else {
|
||||
// dateName = " " + dateName
|
||||
// appendSide = 1
|
||||
// }
|
||||
// }
|
||||
|
||||
dateFmt := "┤" + justifyCenter(dateName, 12) + "├"
|
||||
|
||||
trans := daytimeTranslation["en"]
|
||||
if t, ok := daytimeTranslation[config.Lang]; ok {
|
||||
trans := daytimeTranslation()["en"]
|
||||
if t, ok := daytimeTranslation()[g.config.Lang]; ok {
|
||||
trans = t
|
||||
}
|
||||
if config.Narrow {
|
||||
|
||||
if g.config.Narrow {
|
||||
names := "│ " + justifyCenter(trans[1], 16) +
|
||||
"└──────┬──────┘" + justifyCenter(trans[3], 16) + " │"
|
||||
|
||||
|
@ -97,16 +97,16 @@ func printDay(w weather) (ret []string) {
|
|||
" ┌─────────────┐ ",
|
||||
"┌───────────────────────" + dateFmt + "───────────────────────┐",
|
||||
names,
|
||||
"├──────────────────────────────┼──────────────────────────────┤"},
|
||||
"├──────────────────────────────┼──────────────────────────────┤",
|
||||
},
|
||||
ret...)
|
||||
|
||||
return append(ret,
|
||||
"└──────────────────────────────┴──────────────────────────────┘")
|
||||
|
||||
"└──────────────────────────────┴──────────────────────────────┘"),
|
||||
nil
|
||||
}
|
||||
|
||||
names := ""
|
||||
if config.RightToLeft {
|
||||
if g.config.RightToLeft {
|
||||
names = "│" + justifyCenter(trans[3], 29) + "│ " + justifyCenter(trans[2], 16) +
|
||||
"└──────┬──────┘" + justifyCenter(trans[1], 16) + " │" + justifyCenter(trans[0], 29) + "│"
|
||||
} else {
|
||||
|
@ -114,13 +114,17 @@ func printDay(w weather) (ret []string) {
|
|||
"└──────┬──────┘" + justifyCenter(trans[2], 16) + " │" + justifyCenter(trans[3], 29) + "│"
|
||||
}
|
||||
|
||||
//nolint:lll
|
||||
ret = append([]string{
|
||||
" ┌─────────────┐ ",
|
||||
"┌──────────────────────────────┬───────────────────────" + dateFmt + "───────────────────────┬──────────────────────────────┐",
|
||||
names,
|
||||
"├──────────────────────────────┼──────────────────────────────┼──────────────────────────────┼──────────────────────────────┤"},
|
||||
"├──────────────────────────────┼──────────────────────────────┼──────────────────────────────┼──────────────────────────────┤",
|
||||
},
|
||||
ret...)
|
||||
|
||||
//nolint:lll
|
||||
return append(ret,
|
||||
"└──────────────────────────────┴──────────────────────────────┴──────────────────────────────┴──────────────────────────────┘")
|
||||
"└──────────────────────────────┴──────────────────────────────┴──────────────────────────────┴──────────────────────────────┘"),
|
||||
nil
|
||||
}
|
|
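printDay is now a method on *global, reads its settings from g.config instead of the package-level config, and returns an error instead of ignoring lctime.SetLocale failures. A minimal sketch of the new call site; renderForecast and its io.Writer parameter are hypothetical, only g.printDay and the weather type come from the diff.

// Sketch: render all forecast days with the error-returning printDay.
func (g *global) renderForecast(days []weather, out io.Writer) error {
    for _, d := range days {
        lines, err := g.printDay(d)
        if err != nil {
            return err
        }
        for _, line := range lines {
            fmt.Fprintln(out, line)
        }
    }

    return nil
}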
@ -1,9 +0,0 @@
|
|||
module github.com/chubin/wttr.in/v2
|
||||
|
||||
go 1.15
|
||||
|
||||
require (
|
||||
github.com/klauspost/lctime v0.1.0
|
||||
github.com/mattn/go-colorable v0.1.11
|
||||
github.com/mattn/go-runewidth v0.0.13
|
||||
)
|
|
@ -1,13 +0,0 @@
|
|||
github.com/klauspost/lctime v0.1.0 h1:nINsuFc860M9cyYhT6vfg6U1USh7kiVBj/s/2b04U70=
|
||||
github.com/klauspost/lctime v0.1.0/go.mod h1:OwdMhr8tbQvusAsnilqkkgDQqivWlqyg0w5cfXkLiDk=
|
||||
github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs=
|
||||
github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
|
||||
github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
|
||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
|
||||
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6 h1:foEbQz/B0Oz6YIqu/69kfXPYeFQAuuMYFkjaqXzl5Wo=
|
||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|
@ -1,187 +0,0 @@
|
|||
package main
|
||||
|
||||
var (
|
||||
iconUnknown = []string{
|
||||
" .-. ",
|
||||
" __) ",
|
||||
" ( ",
|
||||
" `-’ ",
|
||||
" • "}
|
||||
|
||||
iconSunny = []string{
|
||||
"\033[38;5;226m \\ / \033[0m",
|
||||
"\033[38;5;226m .-. \033[0m",
|
||||
"\033[38;5;226m ― ( ) ― \033[0m",
|
||||
"\033[38;5;226m `-’ \033[0m",
|
||||
"\033[38;5;226m / \\ \033[0m"}
|
||||
|
||||
iconPartlyCloudy = []string{
|
||||
"\033[38;5;226m \\ /\033[0m ",
|
||||
"\033[38;5;226m _ /\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m \\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
" "}
|
||||
|
||||
iconCloudy = []string{
|
||||
" ",
|
||||
"\033[38;5;250m .--. \033[0m",
|
||||
"\033[38;5;250m .-( ). \033[0m",
|
||||
"\033[38;5;250m (___.__)__) \033[0m",
|
||||
" "}
|
||||
|
||||
iconVeryCloudy = []string{
|
||||
" ",
|
||||
"\033[38;5;240;1m .--. \033[0m",
|
||||
"\033[38;5;240;1m .-( ). \033[0m",
|
||||
"\033[38;5;240;1m (___.__)__) \033[0m",
|
||||
" "}
|
||||
|
||||
iconLightShowers = []string{
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m"}
|
||||
|
||||
iconHeavyShowers = []string{
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;240;1m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;240;1m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;240;1m(___(__) \033[0m",
|
||||
"\033[38;5;21;1m ‚‘‚‘‚‘‚‘ \033[0m",
|
||||
"\033[38;5;21;1m ‚’‚’‚’‚’ \033[0m"}
|
||||
|
||||
iconLightSnowShowers = []string{
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m"}
|
||||
|
||||
iconHeavySnowShowers = []string{
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;240;1m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;240;1m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;240;1m(___(__) \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m"}
|
||||
|
||||
iconLightSleetShowers = []string{
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[38;5;255m* \033[0m",
|
||||
"\033[38;5;255m *\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[0m"}
|
||||
|
||||
iconThunderyShowers = []string{
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;228;5m ⚡\033[38;5;111;25m‘‘\033[38;5;228;5m⚡\033[38;5;111;25m‘‘ \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m"}
|
||||
|
||||
iconThunderyHeavyRain = []string{
|
||||
"\033[38;5;240;1m .-. \033[0m",
|
||||
"\033[38;5;240;1m ( ). \033[0m",
|
||||
"\033[38;5;240;1m (___(__) \033[0m",
|
||||
"\033[38;5;21;1m ‚‘\033[38;5;228;5m⚡\033[38;5;21;25m‘‚\033[38;5;228;5m⚡\033[38;5;21;25m‚‘ \033[0m",
|
||||
"\033[38;5;21;1m ‚’‚’\033[38;5;228;5m⚡\033[38;5;21;25m’‚’ \033[0m"}
|
||||
|
||||
iconThunderySnowShowers = []string{
|
||||
"\033[38;5;226m _`/\"\"\033[38;5;250m.-. \033[0m",
|
||||
"\033[38;5;226m ,\\_\033[38;5;250m( ). \033[0m",
|
||||
"\033[38;5;226m /\033[38;5;250m(___(__) \033[0m",
|
||||
"\033[38;5;255m *\033[38;5;228;5m⚡\033[38;5;255;25m*\033[38;5;228;5m⚡\033[38;5;255;25m* \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m"}
|
||||
|
||||
iconLightRain = []string{
|
||||
"\033[38;5;250m .-. \033[0m",
|
||||
"\033[38;5;250m ( ). \033[0m",
|
||||
"\033[38;5;250m (___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m",
|
||||
"\033[38;5;111m ‘ ‘ ‘ ‘ \033[0m"}
|
||||
|
||||
iconHeavyRain = []string{
|
||||
"\033[38;5;240;1m .-. \033[0m",
|
||||
"\033[38;5;240;1m ( ). \033[0m",
|
||||
"\033[38;5;240;1m (___(__) \033[0m",
|
||||
"\033[38;5;21;1m ‚‘‚‘‚‘‚‘ \033[0m",
|
||||
"\033[38;5;21;1m ‚’‚’‚’‚’ \033[0m"}
|
||||
|
||||
iconLightSnow = []string{
|
||||
"\033[38;5;250m .-. \033[0m",
|
||||
"\033[38;5;250m ( ). \033[0m",
|
||||
"\033[38;5;250m (___(__) \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m",
|
||||
"\033[38;5;255m * * * \033[0m"}
|
||||
|
||||
iconHeavySnow = []string{
|
||||
"\033[38;5;240;1m .-. \033[0m",
|
||||
"\033[38;5;240;1m ( ). \033[0m",
|
||||
"\033[38;5;240;1m (___(__) \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m",
|
||||
"\033[38;5;255;1m * * * * \033[0m"}
|
||||
|
||||
iconLightSleet = []string{
|
||||
"\033[38;5;250m .-. \033[0m",
|
||||
"\033[38;5;250m ( ). \033[0m",
|
||||
"\033[38;5;250m (___(__) \033[0m",
|
||||
"\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[38;5;255m* \033[0m",
|
||||
"\033[38;5;255m *\033[38;5;111m ‘ \033[38;5;255m*\033[38;5;111m ‘ \033[0m"}
|
||||
|
||||
iconFog = []string{
|
||||
" ",
|
||||
"\033[38;5;251m _ - _ - _ - \033[0m",
|
||||
"\033[38;5;251m _ - _ - _ \033[0m",
|
||||
"\033[38;5;251m _ - _ - _ - \033[0m",
|
||||
" "}
|
||||
|
||||
codes = map[int][]string{
|
||||
113: iconSunny,
|
||||
116: iconPartlyCloudy,
|
||||
119: iconCloudy,
|
||||
122: iconVeryCloudy,
|
||||
143: iconFog,
|
||||
176: iconLightShowers,
|
||||
179: iconLightSleetShowers,
|
||||
182: iconLightSleet,
|
||||
185: iconLightSleet,
|
||||
200: iconThunderyShowers,
|
||||
227: iconLightSnow,
|
||||
230: iconHeavySnow,
|
||||
248: iconFog,
|
||||
260: iconFog,
|
||||
263: iconLightShowers,
|
||||
266: iconLightRain,
|
||||
281: iconLightSleet,
|
||||
284: iconLightSleet,
|
||||
293: iconLightRain,
|
||||
296: iconLightRain,
|
||||
299: iconHeavyShowers,
|
||||
302: iconHeavyRain,
|
||||
305: iconHeavyShowers,
|
||||
308: iconHeavyRain,
|
||||
311: iconLightSleet,
|
||||
314: iconLightSleet,
|
||||
317: iconLightSleet,
|
||||
320: iconLightSnow,
|
||||
323: iconLightSnowShowers,
|
||||
326: iconLightSnowShowers,
|
||||
329: iconHeavySnow,
|
||||
332: iconHeavySnow,
|
||||
335: iconHeavySnowShowers,
|
||||
338: iconHeavySnow,
|
||||
350: iconLightSleet,
|
||||
353: iconLightShowers,
|
||||
356: iconHeavyShowers,
|
||||
359: iconHeavyRain,
|
||||
362: iconLightSleetShowers,
|
||||
365: iconLightSleetShowers,
|
||||
368: iconLightSnowShowers,
|
||||
371: iconHeavySnowShowers,
|
||||
374: iconLightSleetShowers,
|
||||
377: iconLightSleet,
|
||||
386: iconThunderyShowers,
|
||||
389: iconThunderyHeavyRain,
|
||||
392: iconThunderySnowShowers,
|
||||
395: iconHeavySnowShowers,
|
||||
}
|
||||
)
|
|
@ -1,156 +0,0 @@
|
|||
// This code represents wttr.in view v1.
|
||||
// It is based on wego (github.com/schachmat/wego) from which it diverged back in 2016.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
_ "crypto/sha512"
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"os/user"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/mattn/go-colorable"
|
||||
"github.com/mattn/go-runewidth"
|
||||
)
|
||||
|
||||
type configuration struct {
|
||||
APIKey string
|
||||
City string
|
||||
Numdays int
|
||||
Imperial bool
|
||||
WindUnit bool
|
||||
Inverse bool
|
||||
Lang string
|
||||
Narrow bool
|
||||
LocationName string
|
||||
WindMS bool
|
||||
RightToLeft bool
|
||||
}
|
||||
|
||||
var (
|
||||
ansiEsc *regexp.Regexp
|
||||
config configuration
|
||||
configpath string
|
||||
debug bool
|
||||
)
|
||||
|
||||
const (
|
||||
wuri = "http://127.0.0.1:5001/premium/v1/weather.ashx?"
|
||||
suri = "http://127.0.0.1:5001/premium/v1/search.ashx?"
|
||||
slotcount = 4
|
||||
)
|
||||
|
||||
func configload() error {
|
||||
b, err := ioutil.ReadFile(configpath)
|
||||
if err == nil {
|
||||
return json.Unmarshal(b, &config)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func configsave() error {
|
||||
j, err := json.MarshalIndent(config, "", "\t")
|
||||
if err == nil {
|
||||
return ioutil.WriteFile(configpath, j, 0600)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func init() {
|
||||
flag.IntVar(&config.Numdays, "days", 3, "Number of days of weather forecast to be displayed")
|
||||
flag.StringVar(&config.Lang, "lang", "en", "Language of the report")
|
||||
flag.StringVar(&config.City, "city", "New York", "City to be queried")
|
||||
flag.BoolVar(&debug, "debug", false, "Print out raw json response for debugging purposes")
|
||||
flag.BoolVar(&config.Imperial, "imperial", false, "Use imperial units")
|
||||
flag.BoolVar(&config.Inverse, "inverse", false, "Use inverted colors")
|
||||
flag.BoolVar(&config.Narrow, "narrow", false, "Narrow output (two columns)")
|
||||
flag.StringVar(&config.LocationName, "location_name", "", "Location name (used in the caption)")
|
||||
flag.BoolVar(&config.WindMS, "wind_in_ms", false, "Show wind speed in m/s")
|
||||
flag.BoolVar(&config.RightToLeft, "right_to_left", false, "Right to left script")
|
||||
configpath = os.Getenv("WEGORC")
|
||||
if configpath == "" {
|
||||
usr, err := user.Current()
|
||||
if err != nil {
|
||||
log.Fatalf("%v\nYou can set the environment variable WEGORC to point to your config file as a workaround.", err)
|
||||
}
|
||||
configpath = path.Join(usr.HomeDir, ".wegorc")
|
||||
}
|
||||
config.APIKey = ""
|
||||
config.Imperial = false
|
||||
config.Lang = "en"
|
||||
err := configload()
|
||||
if _, ok := err.(*os.PathError); ok {
|
||||
log.Printf("No config file found. Creating %s ...", configpath)
|
||||
if err2 := configsave(); err2 != nil {
|
||||
log.Fatal(err2)
|
||||
}
|
||||
} else if err != nil {
|
||||
log.Fatalf("could not parse %v: %v", configpath, err)
|
||||
}
|
||||
|
||||
ansiEsc = regexp.MustCompile("\033.*?m")
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
|
||||
r := getDataFromAPI()
|
||||
|
||||
if r.Data.Req == nil || len(r.Data.Req) < 1 {
|
||||
if r.Data.Err != nil && len(r.Data.Err) >= 1 {
|
||||
log.Fatal(r.Data.Err[0].Msg)
|
||||
}
|
||||
log.Fatal("Malformed response.")
|
||||
}
|
||||
locationName := r.Data.Req[0].Query
|
||||
if config.LocationName != "" {
|
||||
locationName = config.LocationName
|
||||
}
|
||||
if config.Lang == "he" || config.Lang == "ar" || config.Lang == "fa" {
|
||||
config.RightToLeft = true
|
||||
}
|
||||
if caption, ok := localizedCaption[config.Lang]; !ok {
|
||||
fmt.Printf("Weather report: %s\n\n", locationName)
|
||||
} else {
|
||||
if config.RightToLeft {
|
||||
caption = locationName + " " + caption
|
||||
space := strings.Repeat(" ", 125-runewidth.StringWidth(caption))
|
||||
fmt.Printf("%s%s\n\n", space, caption)
|
||||
} else {
|
||||
fmt.Printf("%s %s\n\n", caption, locationName)
|
||||
}
|
||||
}
|
||||
stdout := colorable.NewColorableStdout()
|
||||
|
||||
if r.Data.Cur == nil || len(r.Data.Cur) < 1 {
|
||||
log.Fatal("No weather data available.")
|
||||
}
|
||||
out := formatCond(make([]string, 5), r.Data.Cur[0], true)
|
||||
for _, val := range out {
|
||||
if config.RightToLeft {
|
||||
fmt.Fprint(stdout, strings.Repeat(" ", 94))
|
||||
} else {
|
||||
fmt.Fprint(stdout, " ")
|
||||
}
|
||||
fmt.Fprintln(stdout, val)
|
||||
}
|
||||
|
||||
if config.Numdays == 0 {
|
||||
return
|
||||
}
|
||||
if r.Data.Weather == nil {
|
||||
log.Fatal("No detailed weather forecast available.")
|
||||
}
|
||||
for _, d := range r.Data.Weather {
|
||||
for _, val := range printDay(d) {
|
||||
fmt.Fprintln(stdout, val)
|
||||
}
|
||||
}
|
||||
}
|
266
srv.go
Normal file
|
@ -0,0 +1,266 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"io"
|
||||
stdlog "log"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alecthomas/kong"
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"github.com/chubin/wttr.in/internal/config"
|
||||
geoip "github.com/chubin/wttr.in/internal/geo/ip"
|
||||
geoloc "github.com/chubin/wttr.in/internal/geo/location"
|
||||
"github.com/chubin/wttr.in/internal/logging"
|
||||
"github.com/chubin/wttr.in/internal/processor"
|
||||
"github.com/chubin/wttr.in/internal/types"
|
||||
// v1 "github.com/chubin/wttr.in/internal/view/v1"
|
||||
)
|
||||
|
||||
//nolint:gochecknoglobals
|
||||
var cli struct {
|
||||
ConfigFile string `name:"config-file" arg:"" optional:"" help:"Name of configuration file"`
|
||||
|
||||
ConfigCheck bool `name:"config-check" help:"Check configuration"`
|
||||
ConfigDump bool `name:"config-dump" help:"Dump configuration"`
|
||||
ConvertGeoIPCache bool `name:"convert-geo-ip-cache" help:"Convert Geo IP data cache to SQLite"`
|
||||
ConvertGeoLocationCache bool `name:"convert-geo-location-cache" help:"Convert Geo Location data cache to SQLite"`
|
||||
GeoResolve string `name:"geo-resolve" help:"Resolve location"`
|
||||
LogLevel string `name:"log-level" short:"l" help:"Minimum severity of log messages to show" default:"info"`
|
||||
|
||||
// V1 v1.Configuration
|
||||
}
|
||||
|
||||
const logLineStart = "LOG_LINE_START "
|
||||
|
||||
func suppressMessages() []string {
|
||||
return []string{
|
||||
"error reading preface from client",
|
||||
"TLS handshake error from",
|
||||
"URL query contains semicolon, which is no longer a supported separator",
|
||||
"connection error: PROTOCOL_ERROR",
|
||||
}
|
||||
}
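suppressMessages lists noisy net/http error-log fragments that the LogSuppressor built in serve() below filters out of the errors log. The real logging.LogSuppressor is not shown in this file; the following is only a sketch of the idea, an io.Writer that drops lines containing any suppressed substring.

// Sketch of the suppression idea only; the actual logging.LogSuppressor
// implementation may differ.
type suppressingWriter struct {
    dst      io.Writer
    suppress []string
}

func (w *suppressingWriter) Write(p []byte) (int, error) {
    for _, s := range w.suppress {
        if strings.Contains(string(p), s) {
            return len(p), nil // swallow the line, report it as written
        }
    }

    return w.dst.Write(p)
}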
|
||||
|
||||
func copyHeader(dst, src http.Header) {
|
||||
for k, vv := range src {
|
||||
for _, v := range vv {
|
||||
dst.Add(k, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func serveHTTP(mux *http.ServeMux, port int, logFile io.Writer, errs chan<- error) {
|
||||
srv := &http.Server{
|
||||
Addr: fmt.Sprintf(":%d", port),
|
||||
ErrorLog: stdlog.New(logFile, logLineStart, stdlog.LstdFlags),
|
||||
ReadTimeout: 5 * time.Second,
|
||||
WriteTimeout: 10 * time.Second,
|
||||
IdleTimeout: 1 * time.Second,
|
||||
Handler: mux,
|
||||
}
|
||||
errs <- srv.ListenAndServe()
|
||||
}
|
||||
|
||||
func serveHTTPS(mux *http.ServeMux, port int, certFile, keyFile string, logFile io.Writer, errs chan<- error) {
|
||||
tlsConfig := &tls.Config{
|
||||
|
||||
// CipherSuites: []uint16{
|
||||
// tls.TLS_CHACHA20_POLY1305_SHA256,
|
||||
// tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
|
||||
// tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
|
||||
// },
|
||||
// MinVersion: tls.VersionTLS13,
|
||||
}
|
||||
srv := &http.Server{
|
||||
Addr: fmt.Sprintf(":%d", port),
|
||||
ErrorLog: stdlog.New(logFile, logLineStart, stdlog.LstdFlags),
|
||||
ReadTimeout: 5 * time.Second,
|
||||
WriteTimeout: 20 * time.Second,
|
||||
IdleTimeout: 1 * time.Second,
|
||||
TLSConfig: tlsConfig,
|
||||
Handler: mux,
|
||||
}
|
||||
errs <- srv.ListenAndServeTLS(certFile, keyFile)
|
||||
}
|
||||
|
||||
func serve(conf *config.Config) error {
|
||||
var (
|
||||
// mux is the main HTTP/HTTPS request multiplexer.
|
||||
mux = http.NewServeMux()
|
||||
|
||||
// logger is the optimized request logger.
|
||||
logger = logging.NewRequestLogger(
|
||||
conf.Logging.AccessLog,
|
||||
time.Duration(conf.Logging.Interval)*time.Second)
|
||||
|
||||
rp *processor.RequestProcessor
|
||||
|
||||
// errs is the servers' error channel.
|
||||
errs = make(chan error, 1)
|
||||
|
||||
// numberOfServers counts the started servers. If 0, exit.
|
||||
numberOfServers int
|
||||
|
||||
errorsLog = logging.NewLogSuppressor(
|
||||
conf.Logging.ErrorsLog,
|
||||
suppressMessages(),
|
||||
logLineStart,
|
||||
)
|
||||
|
||||
err error
|
||||
)
|
||||
|
||||
rp, err = processor.NewRequestProcessor(conf)
|
||||
if err != nil {
|
||||
return fmt.Errorf("log processor initialization: %w", err)
|
||||
}
|
||||
|
||||
err = errorsLog.Open()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = rp.Start()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
mux.HandleFunc("/", mainHandler(rp, logger))
|
||||
|
||||
if conf.Server.PortHTTP != 0 {
|
||||
go serveHTTP(mux, conf.Server.PortHTTP, errorsLog, errs)
|
||||
numberOfServers++
|
||||
}
|
||||
if conf.Server.PortHTTPS != 0 {
|
||||
go serveHTTPS(mux, conf.Server.PortHTTPS, conf.Server.TLSCertFile, conf.Server.TLSKeyFile, errorsLog, errs)
|
||||
numberOfServers++
|
||||
}
|
||||
if numberOfServers == 0 {
|
||||
return types.ErrNoServersConfigured
|
||||
}
|
||||
|
||||
return <-errs // block until one of the servers writes an error
|
||||
}
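serve starts one goroutine per configured listener, each reporting into a single error channel, and then blocks until the first listener fails; with neither port configured it returns types.ErrNoServersConfigured immediately. The same fan-in pattern in isolation (runServers, its ports, and the handler are placeholders, not the real configuration):

// Sketch: N listeners, first error terminates the caller.
func runServers(handler http.Handler, ports ...int) error {
    errs := make(chan error, 1)

    for _, port := range ports {
        srv := &http.Server{
            Addr:    fmt.Sprintf(":%d", port),
            Handler: handler,
        }
        go func() { errs <- srv.ListenAndServe() }()
    }

    return <-errs // block until one of the servers writes an error
}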
|
||||
|
||||
func mainHandler(
|
||||
rp *processor.RequestProcessor,
|
||||
logger *logging.RequestLogger,
|
||||
) func(http.ResponseWriter, *http.Request) {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if err := logger.Log(r); err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
|
||||
if checkURLForPNG(r) {
|
||||
w.Write([]byte("PNG support temporary disabled"))
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
response, err := rp.ProcessRequest(r)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
|
||||
return
|
||||
}
|
||||
if response.StatusCode == 0 {
|
||||
log.Println("status code 0", response)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
copyHeader(w.Header(), response.Header)
|
||||
w.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
w.WriteHeader(response.StatusCode)
|
||||
_, err = w.Write(response.Body)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
}
|
||||
}
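Because mainHandler returns an ordinary handler closure over the request processor and the logger, it can be exercised without the real listeners. A minimal sketch using net/http/httptest; it relies only on constructors visible in this file (config.Default, processor.NewRequestProcessor, logging.NewRequestLogger) and assumes the default configuration is enough to start the processor.

// Sketch: drive mainHandler through an httptest server instead of serve().
func newTestServer() (*httptest.Server, error) {
    conf := config.Default()

    rp, err := processor.NewRequestProcessor(conf)
    if err != nil {
        return nil, err
    }
    if err := rp.Start(); err != nil {
        return nil, err
    }

    logger := logging.NewRequestLogger(
        conf.Logging.AccessLog,
        time.Duration(conf.Logging.Interval)*time.Second)

    return httptest.NewServer(http.HandlerFunc(mainHandler(rp, logger))), nil
}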
|
||||
|
||||
func main() {
|
||||
var (
|
||||
conf *config.Config
|
||||
err error
|
||||
)
|
||||
|
||||
ctx := kong.Parse(&cli)
|
||||
ctx.FatalIfErrorf(setLogLevel(cli.LogLevel))
|
||||
|
||||
if cli.ConfigFile != "" {
|
||||
conf, err = config.Load(cli.ConfigFile)
|
||||
if err != nil {
|
||||
log.Fatalf("reading config from %s: %s\n", cli.ConfigFile, err)
|
||||
}
|
||||
} else {
|
||||
conf = config.Default()
|
||||
}
|
||||
|
||||
if cli.ConfigDump {
|
||||
//nolint:forbidigo
|
||||
fmt.Print(string(conf.Dump()))
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if cli.ConfigCheck {
|
||||
return
|
||||
}
|
||||
|
||||
switch {
|
||||
case cli.ConvertGeoIPCache:
|
||||
ctx.FatalIfErrorf(convertGeoIPCache(conf))
|
||||
case cli.ConvertGeoLocationCache:
|
||||
ctx.FatalIfErrorf(convertGeoLocationCache(conf))
|
||||
case cli.GeoResolve != "":
|
||||
sr := geoloc.NewSearcher(conf)
|
||||
loc, err := sr.Search(cli.GeoResolve)
|
||||
ctx.FatalIfErrorf(err)
|
||||
if loc != nil {
|
||||
//nolint:forbidigo
|
||||
fmt.Println(*loc)
|
||||
}
|
||||
default:
|
||||
err = serve(conf)
|
||||
ctx.FatalIfErrorf(err)
|
||||
}
|
||||
}
|
||||
|
||||
func convertGeoIPCache(conf *config.Config) error {
|
||||
geoIPCache, err := geoip.NewCache(conf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return geoIPCache.ConvertCache()
|
||||
}
|
||||
|
||||
func convertGeoLocationCache(conf *config.Config) error {
|
||||
geoLocCache, err := geoloc.NewCache(conf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return geoLocCache.ConvertCache(false)
|
||||
}
|
||||
|
||||
func setLogLevel(logLevel string) error {
|
||||
parsedLevel, err := log.ParseLevel(logLevel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.SetLevel(parsedLevel)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func checkURLForPNG(r *http.Request) bool {
|
||||
url := r.URL.String()
|
||||
return strings.Contains(url, ".png")
|
||||
}
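checkURLForPNG matches ".png" anywhere in the full request URL, query string included. A stricter variant (a suggestion only, not what the handler above does) would look at the path suffix:

// Stricter alternative, matching only the request path.
func isPNGPath(r *http.Request) bool {
    return strings.HasSuffix(strings.ToLower(r.URL.Path), ".png")
}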
|