package main

import (
	"flag"
	"io/fs"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"os/signal"
	"path"
	"path/filepath"
	"strconv"
	"strings"
	"syscall"

	"srs.epita.fr/fic-server/admin/api"
	"srs.epita.fr/fic-server/admin/generation"
	"srs.epita.fr/fic-server/admin/pki"
	"srs.epita.fr/fic-server/admin/sync"
	"srs.epita.fr/fic-server/libfic"
	"srs.epita.fr/fic-server/settings"
)

func main() {
	var err error
	bind := "127.0.0.1:8081"
	cloudDAVBase := ""
	cloudUsername := "fic"
	cloudPassword := ""
	localImporterDirectory := ""
	gitImporterRemote := ""
	gitImporterBranch := ""
	localImporterSymlink := false
	baseURL := "/"
	checkplugins := sync.CheckPluginList{}

	// Read parameters from the environment
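	// Sensitive values can also be provided through a companion *_FILE
	// variable pointing to a file whose trimmed content is used instead,
	// which is convenient for file-mounted secrets.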
	if v, exists := os.LookupEnv("FICOIDC_ISSUER"); exists {
		api.OidcIssuer = v
	} else if v, exists := os.LookupEnv("FICOIDC_ISSUER_FILE"); exists {
		fd, err := os.Open(v)
		if err != nil {
			log.Fatal("Unable to open FICOIDC_ISSUER_FILE:", err)
		}
		b, _ := ioutil.ReadAll(fd)
		api.OidcIssuer = strings.TrimSpace(string(b))
		fd.Close()
	}
	if v, exists := os.LookupEnv("FICOIDC_SECRET"); exists {
		api.OidcSecret = v
	} else if v, exists := os.LookupEnv("FICOIDC_SECRET_FILE"); exists {
		fd, err := os.Open(v)
		if err != nil {
			log.Fatal("Unable to open FICOIDC_SECRET_FILE:", err)
		}
		b, _ := ioutil.ReadAll(fd)
		api.OidcSecret = strings.TrimSpace(string(b))
		fd.Close()
	}
	if v, exists := os.LookupEnv("FICCA_PASS"); exists {
		pki.SetCAPassword(v)
	} else if v, exists := os.LookupEnv("FICCA_PASS_FILE"); exists {
		fd, err := os.Open(v)
		if err != nil {
			log.Fatal("Unable to open FICCA_PASS_FILE:", err)
		}
		b, _ := ioutil.ReadAll(fd)
		pki.SetCAPassword(strings.TrimSpace(string(b)))
		fd.Close()
	} else {
		log.Println("WARNING: no password defined for the CA, will use an empty password to secure the CA private key")
		log.Println("WARNING: PLEASE DEFINE ENVIRONMENT VARIABLE: FICCA_PASS")
	}
	if v, exists := os.LookupEnv("FICCLOUD_URL"); exists {
		cloudDAVBase = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_USER"); exists {
		cloudUsername = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_PASS"); exists {
		cloudPassword = v
	} else if v, exists := os.LookupEnv("FICCLOUD_PASS_FILE"); exists {
		fd, err := os.Open(v)
		if err != nil {
			log.Fatal("Unable to open FICCLOUD_PASS_FILE:", err)
		}
		b, _ := ioutil.ReadAll(fd)
		cloudPassword = strings.TrimSpace(string(b))
		fd.Close()
	}
	if v, exists := os.LookupEnv("FIC_BASEURL"); exists {
		baseURL = v
	}
	if v, exists := os.LookupEnv("FIC_4REAL"); exists {
		api.IsProductionEnv, err = strconv.ParseBool(v)
		if err != nil {
			log.Fatal("Unable to parse FIC_4REAL variable:", err)
		}
	}
	if v, exists := os.LookupEnv("FIC_ADMIN_BIND"); exists {
		bind = v
	}
	if v, exists := os.LookupEnv("FIC_TIMESTAMPCHECK"); exists {
		api.TimestampCheck = v
	}
	if v, exists := os.LookupEnv("FIC_SETTINGS"); exists {
		settings.SettingsDir = v
	}
	if v, exists := os.LookupEnv("FIC_FILES"); exists {
		fic.FilesDir = v
	}
	if v, exists := os.LookupEnv("FIC_SYNC_LOCALIMPORT"); exists {
		localImporterDirectory = v
	}
	if v, exists := os.LookupEnv("FIC_SYNC_LOCALIMPORTSYMLINK"); exists {
		localImporterSymlink, err = strconv.ParseBool(v)
		if err != nil {
			log.Fatal("Unable to parse FIC_SYNC_LOCALIMPORTSYMLINK variable:", err)
		}
	}
	if v, exists := os.LookupEnv("FIC_SYNC_GIT_IMPORT_REMOTE"); exists {
		gitImporterRemote = v
	}
	if v, exists := os.LookupEnv("FIC_SYNC_GIT_BRANCH"); exists {
		gitImporterBranch = v
	}
	if v, exists := os.LookupEnv("FIC_OPTIONALDIGEST"); exists {
		fic.OptionalDigest, err = strconv.ParseBool(v)
		if err != nil {
			log.Fatal("Unable to parse FIC_OPTIONALDIGEST variable:", err)
		}
	}
	if v, exists := os.LookupEnv("FIC_STRONGDIGEST"); exists {
		fic.StrongDigest, err = strconv.ParseBool(v)
		if err != nil {
			log.Fatal("Unable to parse FIC_STRONGDIGEST variable:", err)
		}
	}

	// Read parameters from command line
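	// Flag defaults are seeded from the environment values read above, so
	// command-line flags take precedence over the environment.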
	flag.StringVar(&bind, "bind", bind, "Bind port/socket")
	var dsn = flag.String("dsn", fic.DSNGenerator(), "DSN to connect to the MySQL server")
	flag.StringVar(&baseURL, "baseurl", baseURL, "URL prepended to each URL")
	flag.StringVar(&api.TimestampCheck, "timestampCheck", api.TimestampCheck, "Path regularly touched by the frontend to check time synchronisation")
	flag.StringVar(&pki.PKIDir, "pki", "./PKI", "Base directory where the PKI scripts are found")
	var staticDir = flag.String("static", "", "Directory containing static files (default if not provided: use embedded files)")
	flag.StringVar(&api.TeamsDir, "teams", "./TEAMS", "Base directory where teams' JSON files are saved")
	flag.StringVar(&api.DashboardDir, "dashbord", "./DASHBOARD", "Base directory where public JSON files are saved")
	flag.StringVar(&settings.SettingsDir, "settings", settings.SettingsDir, "Base directory where settings are loaded and saved")
	flag.StringVar(&fic.FilesDir, "files", fic.FilesDir, "Base directory where challenge files are found, local part")
	flag.StringVar(&generation.GeneratorSocket, "generator", "./GENERATOR/generator.socket", "Path to the generator socket (used to trigger issues.json generation; use an empty string to generate locally)")
	flag.StringVar(&localImporterDirectory, "localimport", localImporterDirectory,
		"Base directory where challenge files to import are found, local part")
	flag.BoolVar(&localImporterSymlink, "localimportsymlink", localImporterSymlink,
		"Create symlinks instead of copying files?")
	flag.StringVar(&gitImporterRemote, "git-import-remote", gitImporterRemote,
		"Remote URL of the git repository to use as synchronization source")
	flag.StringVar(&gitImporterBranch, "git-branch", gitImporterBranch,
		"Branch to use in the git repository")
	flag.StringVar(&cloudDAVBase, "clouddav", cloudDAVBase,
		"Base directory where challenge files to import are found, cloud part")
	flag.StringVar(&cloudUsername, "clouduser", cloudUsername, "Username used to sync")
	flag.StringVar(&cloudPassword, "cloudpass", cloudPassword, "Password used to sync")
	flag.BoolVar(&fic.OptionalDigest, "optionaldigest", fic.OptionalDigest, "Is the digest required when importing files?")
	flag.BoolVar(&fic.StrongDigest, "strongdigest", fic.StrongDigest, "Are BLAKE2b digests required, or is SHA-1 good enough?")
	flag.BoolVar(&api.IsProductionEnv, "4real", api.IsProductionEnv, "Set this flag when running a real challenge (it disallows or avoids most mass deletions of user progression)")
	flag.Var(&checkplugins, "rules-plugins", "List of libraries containing additional rules to check")
	flag.Var(&sync.RemoteFileDomainWhitelist, "remote-file-domain-whitelist", "List of domains which are allowed to store remote files")
	flag.Parse()

	log.SetPrefix("[admin] ")

	// Instantiate importer
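	// Only one synchronization source can be active: a git remote (wrapped
	// around the local importer directory), a plain local directory, or a
	// WebDAV cloud share; combining --clouddav with the others is rejected.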
	if localImporterDirectory != "" && cloudDAVBase != "" {
		log.Fatal("Cannot have both --clouddav and --localimport defined.")
		return
	} else if gitImporterRemote != "" && cloudDAVBase != "" {
		log.Fatal("Cannot have both --clouddav and --git-import-remote defined.")
		return
	} else if gitImporterRemote != "" {
		sync.GlobalImporter = sync.NewGitImporter(sync.LocalImporter{Base: localImporterDirectory, Symlink: localImporterSymlink}, gitImporterRemote, gitImporterBranch)
	} else if localImporterDirectory != "" {
		sync.GlobalImporter = sync.LocalImporter{Base: localImporterDirectory, Symlink: localImporterSymlink}
	} else if cloudDAVBase != "" {
		sync.GlobalImporter, _ = sync.NewCloudImporter(cloudDAVBase, cloudUsername, cloudPassword)
	}
	if sync.GlobalImporter != nil {
		if err := sync.GlobalImporter.Init(); err != nil {
			log.Fatal("Unable to initialize the importer: ", err.Error())
		}
		log.Println("Using", sync.GlobalImporter.Kind())

		// Update distributed challenge.json
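		// The file is only fetched from the importer when it does not
		// already exist locally, so a hand-edited challenge.json is kept.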
		if _, err := os.Stat(path.Join(settings.SettingsDir, settings.ChallengeFile)); os.IsNotExist(err) {
			challengeinfo, err := sync.GetFileContent(sync.GlobalImporter, settings.ChallengeFile)
			if err == nil {
				if fd, err := os.Create(path.Join(settings.SettingsDir, settings.ChallengeFile)); err != nil {
					log.Fatal("Unable to open SETTINGS/challenge.json:", err)
				} else {
					fd.Write([]byte(challengeinfo))
					err = fd.Close()
					if err != nil {
						log.Fatal("Something went wrong during SETTINGS/challenge.json writing:", err)
					}
				}
			}
		}
	}

	// Sanitize options
	log.Println("Checking paths...")
	if staticDir != nil && *staticDir != "" {
		if sDir, err := filepath.Abs(*staticDir); err != nil {
			log.Fatal(err)
		} else {
			log.Println("Serving pages from", sDir)
			staticFS = http.Dir(sDir)
			sync.DeepReportPath = path.Join(sDir, sync.DeepReportPath)
		}
	} else {
		sub, err := fs.Sub(assets, "static")
		if err != nil {
			log.Fatal("Unable to cd to static/ directory:", err)
		}
		log.Println("Serving pages from memory.")
		staticFS = http.FS(sub)

		sync.DeepReportPath = path.Join("SYNC", sync.DeepReportPath)
		if _, err := os.Stat("SYNC"); os.IsNotExist(err) {
			os.MkdirAll("SYNC", 0751)
		}
	}
	if fic.FilesDir, err = filepath.Abs(fic.FilesDir); err != nil {
		log.Fatal(err)
	}
	if pki.PKIDir, err = filepath.Abs(pki.PKIDir); err != nil {
		log.Fatal(err)
	}
	if api.DashboardDir, err = filepath.Abs(api.DashboardDir); err != nil {
		log.Fatal(err)
	}
	if api.TeamsDir, err = filepath.Abs(api.TeamsDir); err != nil {
		log.Fatal(err)
	}
	if api.TimestampCheck, err = filepath.Abs(api.TimestampCheck); err != nil {
		log.Fatal(err)
	}
	if settings.SettingsDir, err = filepath.Abs(settings.SettingsDir); err != nil {
		log.Fatal(err)
	}

	if baseURL != "/" {
		baseURL = path.Clean(baseURL)
	} else {
		baseURL = ""
	}

	// Create the minimal directory structure
	os.MkdirAll(fic.FilesDir, 0751)
	os.MkdirAll(pki.PKIDir, 0711)
	os.MkdirAll(api.TeamsDir, 0751)
	os.MkdirAll(api.DashboardDir, 0751)
	os.MkdirAll(settings.SettingsDir, 0751)

	// Load rules plugins
	for _, p := range checkplugins {
		if err := sync.LoadChecksPlugin(p); err != nil {
			log.Fatalf("Unable to load rule plugin %q: %s", p, err.Error())
		} else {
			log.Printf("Rules plugin %q successfully loaded", p)
		}
	}

	// Initialize settings and load them
	if !settings.ExistsSettings(path.Join(settings.SettingsDir, settings.SettingsFile)) {
		if err = api.ResetSettings(); err != nil {
			log.Fatal("Unable to initialize settings.json:", err)
		}
	}

	var config *settings.Settings
	if config, err = settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile)); err != nil {
		log.Fatal("Unable to read settings.json:", err)
	} else {
		api.ApplySettings(config)
	}

	// Initialize dashboard presets
	if err = api.InitDashboardPresets(api.DashboardDir); err != nil {
		log.Println("Unable to initialize dashboard presets:", err)
	}

	// Database connection
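	// DBInit opens the MySQL connection using the DSN from -dsn (or the
	// generated default); DBCreate then appears to create any missing
	// tables, judging by its name and the surrounding log messages.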
	log.Println("Opening database...")
	if err = fic.DBInit(*dsn); err != nil {
		log.Fatal("Cannot open the database: ", err)
	}
	defer fic.DBClose()

	log.Println("Creating database...")
	if err = fic.DBCreate(); err != nil {
		log.Fatal("Cannot create database: ", err)
	}

	// Update base URL on main page
	log.Println("Changing base URL to", baseURL+"/", "...")
	genIndex(baseURL)

	// Prepare graceful shutdown
	interrupt := make(chan os.Signal, 1)
	signal.Notify(interrupt, os.Interrupt, syscall.SIGTERM)

	app := NewApp(config, baseURL, bind)
	go app.Start()

	// Wait for the shutdown signal
	<-interrupt
	log.Print("The service is shutting down...")
	app.Stop()
	log.Println("done")
}