New project remote-scores-sync-zqds
This commit is contained in:
parent
fb53c9a4f1
commit
5fa94ecbed
7 changed files with 634 additions and 0 deletions
96
remote/scores-sync-zqds/main.go
Normal file
96
remote/scores-sync-zqds/main.go
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"log"
|
||||
"os"
|
||||
"os/signal"
|
||||
"path"
|
||||
"syscall"
|
||||
|
||||
"gopkg.in/fsnotify.v1"
|
||||
)
|
||||
|
||||
var (
	// TeamsDir is the directory watched for changes to teams.json
	// (set by the -teams flag, cleaned in main).
	TeamsDir string
	// skipInitialSync disables the full synchronization normally
	// performed at startup (set by the -skipinitialsync flag).
	skipInitialSync bool
)
|
||||
|
||||
func main() {
|
||||
if v, exists := os.LookupEnv("ZQDS_BASEURL"); exists {
|
||||
base_URL = v
|
||||
}
|
||||
if v, exists := os.LookupEnv("ZQDS_EVENTID"); exists {
|
||||
eventId = v
|
||||
}
|
||||
if v, exists := os.LookupEnv("ZQDS_ROUNDID"); exists {
|
||||
roundId = v
|
||||
}
|
||||
|
||||
if v, exists := os.LookupEnv("ZQDS_CLIENTID"); exists {
|
||||
clientId = v
|
||||
}
|
||||
if v, exists := os.LookupEnv("ZQDS_CLIENTSECRET"); exists {
|
||||
clientSecret = v
|
||||
}
|
||||
if v, exists := os.LookupEnv("ZQDS_TOKENURL"); exists {
|
||||
TokenURL = v
|
||||
}
|
||||
|
||||
flag.StringVar(&TeamsDir, "teams", "./TEAMS", "Base directory where save teams JSON files")
|
||||
var debugINotify = flag.Bool("debuginotify", false, "Show skipped inotofy events")
|
||||
flag.BoolVar(&skipInitialSync, "skipinitialsync", skipInitialSync, "Skip the initial synchronization")
|
||||
flag.Parse()
|
||||
|
||||
log.SetPrefix("[scores-sync-zqds] ")
|
||||
|
||||
TeamsDir = path.Clean(TeamsDir)
|
||||
|
||||
log.Println("Registering directory events...")
|
||||
watcher, err := fsnotify.NewWatcher()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
defer watcher.Close()
|
||||
|
||||
if err := watcher.Add(TeamsDir); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(path.Join(TeamsDir, "teams.json")); err == nil {
|
||||
treatAll(path.Join(TeamsDir, "teams.json"))
|
||||
}
|
||||
|
||||
// Register SIGUSR1, SIGUSR2
|
||||
interrupt := make(chan os.Signal, 1)
|
||||
signal.Notify(interrupt, syscall.SIGHUP)
|
||||
|
||||
watchedNotify := fsnotify.Create
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-interrupt:
|
||||
log.Println("SIGHUP received, resyncing all teams' score...")
|
||||
treatAll(path.Join(TeamsDir, "teams.json"))
|
||||
log.Println("SIGHUP treated.")
|
||||
case ev := <-watcher.Events:
|
||||
if ev.Name == "teams.json" {
|
||||
if ev.Op&watchedNotify == watchedNotify {
|
||||
if *debugINotify {
|
||||
log.Println("Treating event:", ev, "for", ev.Name)
|
||||
}
|
||||
go treatDiff(ev.Name)
|
||||
} else if ev.Op&fsnotify.Write == fsnotify.Write {
|
||||
log.Println("FSNOTIFY WRITE SEEN. Prefer looking at them, as it appears files are not atomically moved.")
|
||||
watchedNotify = fsnotify.Write
|
||||
} else if *debugINotify {
|
||||
log.Println("Skipped teams.json event:", ev)
|
||||
}
|
||||
} else if *debugINotify {
|
||||
log.Println("Skipped NON teams.json event:", ev, "for", ev.Name)
|
||||
}
|
||||
case err := <-watcher.Errors:
|
||||
log.Println("error:", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
111
remote/scores-sync-zqds/treat.go
Normal file
111
remote/scores-sync-zqds/treat.go
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"sync"
|
||||
|
||||
"golang.org/x/oauth2/clientcredentials"
|
||||
|
||||
"srs.epita.fr/fic-server/libfic"
|
||||
)
|
||||
|
||||
var (
	// teams_scores caches the last score pushed for each team id, so
	// treatDiff can skip teams whose score has not changed.
	teams_scores = map[string]float64{}

	// Remote ZQDS API endpoint and identifiers; overridable via
	// ZQDS_* environment variables (see main.go).
	base_URL = "https://api.well-played.gg"
	eventId  = ""
	roundId  = ""

	// OAuth2 client-credentials settings used to authenticate score
	// updates against the ZQDS identity provider.
	clientId     = ""
	clientSecret = ""
	TokenURL     = "https://idp.well-played.gg/oauth/token"

	// lock serializes treat() calls, which may run concurrently as
	// goroutines spawned from the watcher loop in main.go.
	lock sync.Mutex
)
|
||||
|
||||
func getTeams(pathname string) (teams map[string]fic.ExportedTeam, err error) {
|
||||
var cnt_raw []byte
|
||||
if cnt_raw, err = ioutil.ReadFile(pathname); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(cnt_raw, &teams); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func treatAll(pathname string) {
|
||||
teams, err := getTeams(pathname)
|
||||
if err != nil {
|
||||
log.Printf("[ERR] %s\n", err)
|
||||
}
|
||||
|
||||
for tid, team := range teams {
|
||||
treat(team, tid)
|
||||
}
|
||||
}
|
||||
|
||||
func treatDiff(pathname string) {
|
||||
teams, err := getTeams(pathname)
|
||||
if err != nil {
|
||||
log.Printf("[ERR] %s\n", err)
|
||||
}
|
||||
|
||||
for tid, team := range teams {
|
||||
if v, ok := teams_scores[tid]; !ok || v != team.Points {
|
||||
treat(team, tid)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ZQDSScore is the JSON payload sent to the ZQDS score-update endpoint.
type ZQDSScore struct {
	// Score is the team's score scaled by 10 and truncated to an
	// integer (see treat).
	Score int64 `json:"score"`
}
|
||||
|
||||
func treat(team fic.ExportedTeam, tid string) {
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
|
||||
log.Printf("Syncing score for %q: %d\n", team.Name, int64(team.Points))
|
||||
|
||||
// Save in memory what is the team's score
|
||||
teams_scores[tid] = team.Points
|
||||
|
||||
if u, err := url.Parse(base_URL); err != nil {
|
||||
log.Println(err.Error())
|
||||
} else {
|
||||
u.Path = path.Join(u.Path, "rank", "score", roundId, team.ExternalId)
|
||||
|
||||
body, err := json.Marshal(ZQDSScore{int64(team.Points * 10)})
|
||||
if err != nil {
|
||||
log.Println("[ERR] Unable to create JSON from Score")
|
||||
return
|
||||
}
|
||||
|
||||
config := clientcredentials.Config{
|
||||
ClientID: clientId,
|
||||
ClientSecret: clientSecret,
|
||||
Scopes: []string{"score:update"},
|
||||
TokenURL: TokenURL,
|
||||
}
|
||||
client := config.Client(context.Background())
|
||||
|
||||
if req, err := http.NewRequest("PUT", u.String(), bytes.NewReader(body)); err != nil {
|
||||
log.Println("[ERR] Unable to send request: ", err.Error())
|
||||
} else if resp, err := client.Do(req); err != nil {
|
||||
log.Println("[ERR] Error during request execution: ", err.Error())
|
||||
} else {
|
||||
log.Println(resp)
|
||||
log.Println(ioutil.ReadAll(resp.Body))
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in a new issue