package main

import (
	"context"
	"flag"
	"fmt"
	"io/fs"
	"log"
	"net/http"
	"net/url"
	"os"
	"os/signal"
	"path"
	"path/filepath"
	"strings"
	"syscall"

	"srs.epita.fr/fic-server/admin/api"
	"srs.epita.fr/fic-server/admin/pki"
	"srs.epita.fr/fic-server/admin/sync"
	"srs.epita.fr/fic-server/libfic"
	"srs.epita.fr/fic-server/settings"
)

// ResponseWriterPrefix wraps an http.ResponseWriter and prepends prefix to the
// Location header, so that redirects stay under the configured base URL.
type ResponseWriterPrefix struct {
	real   http.ResponseWriter
	prefix string
}

func (r ResponseWriterPrefix) Header() http.Header {
	return r.real.Header()
}

func (r ResponseWriterPrefix) WriteHeader(s int) {
	if v, exists := r.real.Header()["Location"]; exists {
		r.real.Header().Set("Location", r.prefix+v[0])
	}
	r.real.WriteHeader(s)
}

func (r ResponseWriterPrefix) Write(z []byte) (int, error) {
	return r.real.Write(z)
}

// StripPrefix behaves like http.StripPrefix, but it also redirects "/" to the
// prefixed root and re-adds the prefix to Location headers emitted by the
// wrapped handler (through ResponseWriterPrefix).
func StripPrefix(prefix string, h http.Handler) http.Handler {
	if prefix == "" {
		return h
	}
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if prefix != "/" && r.URL.Path == "/" {
			http.Redirect(w, r, prefix+"/", http.StatusFound)
		} else if p := strings.TrimPrefix(r.URL.Path, prefix); len(p) < len(r.URL.Path) {
			r2 := new(http.Request)
			*r2 = *r
			r2.URL = new(url.URL)
			*r2.URL = *r.URL
			r2.URL.Path = p
			h.ServeHTTP(ResponseWriterPrefix{w, prefix}, r2)
		} else {
			h.ServeHTTP(w, r)
		}
	})
}

func main() {
	cloudDAVBase := ""
	cloudUsername := "fic"
	cloudPassword := ""
	localImporterDirectory := ""
	gitImporterRemote := ""
	localImporterSymlink := false
	baseURL := "/"

	// Read parameters from the environment
	if v, exists := os.LookupEnv("FICOIDC_SECRET"); exists {
		api.OidcSecret = v
	}
	if v, exists := os.LookupEnv("FICCA_PASS"); exists {
		pki.SetCAPassword(v)
	} else {
		log.Println("WARNING: no password defined for the CA, an empty password will be used to secure the CA private key")
		log.Println("WARNING: PLEASE DEFINE ENVIRONMENT VARIABLE: FICCA_PASS")
	}
	if v, exists := os.LookupEnv("FICCLOUD_URL"); exists {
		cloudDAVBase = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_USER"); exists {
		cloudUsername = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_PASS"); exists {
		cloudPassword = v
	}
	if v, exists := os.LookupEnv("FIC_BASEURL"); exists {
		baseURL = v
	}
	if v, exists := os.LookupEnv("FIC_4REAL"); exists {
		api.IsProductionEnv = v == "true" || v == "on" || v == "TRUE" || v == "ON" || v == "1"
	}

	// Read parameters from the command line
	var bind = flag.String("bind", "127.0.0.1:8081", "Bind port/socket")
	var dsn = flag.String("dsn", fic.DSNGenerator(), "DSN to connect to the MySQL server")
	flag.StringVar(&baseURL, "baseurl", baseURL, "Base URL prepended to each URL")
	flag.StringVar(&api.TimestampCheck, "timestampCheck", api.TimestampCheck, "Path regularly touched by the frontend to check time synchronisation")
	flag.StringVar(&pki.PKIDir, "pki", "./PKI", "Base directory where PKI scripts are found")
	var staticDir = flag.String("static", "", "Directory containing static files (if not provided, embedded files are used)")
	flag.StringVar(&api.TeamsDir, "teams", "./TEAMS", "Base directory where team JSON files are saved")
	flag.StringVar(&api.DashboardDir, "dashbord", "./DASHBOARD", "Base directory where public JSON files are saved")
	flag.StringVar(&settings.SettingsDir, "settings", settings.SettingsDir, "Base directory where settings are loaded from and saved to")
	flag.StringVar(&fic.FilesDir, "files", fic.FilesDir, "Base directory where challenge files are found, local part")
	flag.StringVar(&localImporterDirectory, "localimport", localImporterDirectory, "Base directory where challenge files to import are found, local part")
	flag.BoolVar(&localImporterSymlink, "localimportsymlink", localImporterSymlink, "Create symlinks instead of copying files")
	flag.StringVar(&gitImporterRemote, "git-import-remote", gitImporterRemote, "Remote URL of the git repository to use as synchronization source")
	flag.StringVar(&cloudDAVBase, "clouddav", cloudDAVBase, "Base WebDAV URL where challenge files to import are found, cloud part")
	flag.StringVar(&cloudUsername, "clouduser", cloudUsername, "Username used to sync")
	flag.StringVar(&cloudPassword, "cloudpass", cloudPassword, "Password used to sync")
	flag.BoolVar(&fic.OptionalDigest, "optionaldigest", fic.OptionalDigest, "Make digests optional when importing files")
	flag.BoolVar(&fic.StrongDigest, "strongdigest", fic.StrongDigest, "Require BLAKE2b digests instead of accepting SHA-1")
	flag.BoolVar(&api.IsProductionEnv, "4real", api.IsProductionEnv, "Set this flag when running a real challenge (it disallows or avoids most mass deletions of user progression)")
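	// Hypothetical example invocation (the binary name, paths and DSN below
	// are purely illustrative and depend on the actual build and deployment):
	//
	//   ./admin -bind 127.0.0.1:8081 -baseurl /admin \
	//           -localimport ./challenges -files ./FILES \
	//           -dsn "fic:fic@tcp(127.0.0.1:3306)/fic"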
	flag.Parse()

	log.SetPrefix("[admin] ")

	// Instantiate the importer
	if localImporterDirectory != "" && cloudDAVBase != "" {
		log.Fatal("Cannot have both --clouddav and --localimport defined.")
	} else if gitImporterRemote != "" && cloudDAVBase != "" {
		log.Fatal("Cannot have both --clouddav and --git-import-remote defined.")
	} else if gitImporterRemote != "" {
		sync.GlobalImporter = sync.NewGitImporter(sync.LocalImporter{Base: localImporterDirectory, Symlink: localImporterSymlink}, gitImporterRemote)
	} else if localImporterDirectory != "" {
		sync.GlobalImporter = sync.LocalImporter{Base: localImporterDirectory, Symlink: localImporterSymlink}
	} else if cloudDAVBase != "" {
		var errImporter error
		if sync.GlobalImporter, errImporter = sync.NewCloudImporter(cloudDAVBase, cloudUsername, cloudPassword); errImporter != nil {
			log.Fatal("Unable to create the cloud importer:", errImporter)
		}
	}

	if sync.GlobalImporter != nil {
		if err := sync.GlobalImporter.Init(); err != nil {
			log.Fatal("Unable to initialize the importer:", err)
		}
		log.Println("Using", sync.GlobalImporter.Kind())
	}

	// Sanitize options
	var err error
	log.Println("Checking paths...")
	if staticDir != nil && *staticDir != "" {
		if sDir, err := filepath.Abs(*staticDir); err != nil {
			log.Fatal(err)
		} else {
			log.Println("Serving pages from", sDir)
			staticFS = http.Dir(sDir)
			sync.DeepReportPath = path.Join(sDir, sync.DeepReportPath)
		}
	} else {
		sub, err := fs.Sub(assets, "static")
		if err != nil {
			log.Fatal("Unable to cd to static/ directory:", err)
		}
		log.Println("Serving pages from memory.")
		staticFS = http.FS(sub)
		sync.DeepReportPath = path.Join("SYNC", sync.DeepReportPath)
	}
	if fic.FilesDir, err = filepath.Abs(fic.FilesDir); err != nil {
		log.Fatal(err)
	}
	if pki.PKIDir, err = filepath.Abs(pki.PKIDir); err != nil {
		log.Fatal(err)
	}
	if api.DashboardDir, err = filepath.Abs(api.DashboardDir); err != nil {
		log.Fatal(err)
	}
	if api.TeamsDir, err = filepath.Abs(api.TeamsDir); err != nil {
		log.Fatal(err)
	}
	if api.TimestampCheck, err = filepath.Abs(api.TimestampCheck); err != nil {
		log.Fatal(err)
	}
	if settings.SettingsDir, err = filepath.Abs(settings.SettingsDir); err != nil {
		log.Fatal(err)
	}

	if baseURL != "/" {
		baseURL = path.Clean(baseURL)
	} else {
		baseURL = ""
	}

	// Create the minimal directory structure
	os.MkdirAll(fic.FilesDir, 0777)
	os.MkdirAll(pki.PKIDir, 0711)
	os.MkdirAll(api.TeamsDir, 0777)
	os.MkdirAll(api.DashboardDir, 0777)
	os.MkdirAll(settings.SettingsDir, 0777)

	// Initialize settings and load them
	if !settings.ExistsSettings(path.Join(settings.SettingsDir, settings.SettingsFile)) {
		if err = api.ResetSettings(); err != nil {
			log.Fatal("Unable to initialize settings.json:", err)
		}
	} else {
		var config *settings.Settings
		if config, err = settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile)); err != nil {
			log.Fatal("Unable to read settings.json:", err)
		} else {
			api.ApplySettings(config)
		}
	}
	// Database connection
	log.Println("Opening database...")
	if err = fic.DBInit(*dsn); err != nil {
		log.Fatal("Cannot open the database: ", err)
	}
	defer fic.DBClose()

	log.Println("Creating database...")
	if err = fic.DBCreate(); err != nil {
		log.Fatal("Cannot create database: ", err)
	}

	// Update base URL on main page
	log.Println("Changing base URL to", baseURL+"/", "...")
	genIndex(baseURL)

	// Prepare graceful shutdown
	interrupt := make(chan os.Signal, 1)
	signal.Notify(interrupt, os.Interrupt, syscall.SIGTERM)

	srv := &http.Server{
		Addr:    *bind,
		Handler: StripPrefix(baseURL, api.Router()),
	}

	// Serve content
	go func() {
		log.Fatal(srv.ListenAndServe())
	}()
	log.Println(fmt.Sprintf("Ready, listening on %s", *bind))

	// Wait for the shutdown signal
	<-interrupt
	log.Print("The service is shutting down...")
	srv.Shutdown(context.Background())
	log.Println("done")
}