Compare commits

..

95 commits

Author SHA1 Message Date
5652da5ade [WIP] admin: new file regrouping all teams stats 2017-04-05 02:08:03 +02:00
6765e6af32 frontend: improve home page 2017-04-05 02:06:44 +02:00
6fab208a23 backend: simplify condition 2017-04-05 02:06:44 +02:00
7c07cbb010 admin: improve design of settings page 2017-04-05 02:06:43 +02:00
77c6996ecc admin: manage team certificate from interface 2017-04-05 02:06:43 +02:00
cd0d6d7cfb admin: unify API to revoke certificates 2017-04-05 02:06:42 +02:00
c0433ce4ab frontend: new page that list videos 2017-04-05 02:06:42 +02:00
ce901fbfed admin: Add a page to list teams and members 2017-04-05 02:06:41 +02:00
f1fb33aa4d settings: add title and authors 2017-04-05 02:06:40 +02:00
f4696d0e75 admin: fix and generalize team stats 2017-04-05 02:06:40 +02:00
70093b98e7 admin: add danger alert in select 2017-04-05 02:06:39 +02:00
16d6813e9c Move PKI scripts at root 2017-04-05 02:06:39 +02:00
a2a99c6873 frontend: use ng-cloak and ng-if 2017-04-05 02:06:38 +02:00
103476eabe Add password paper generator 2017-04-05 02:06:37 +02:00
704cfeb22e Compute hint mime type in a variable and display it instead of the hint content 2017-04-05 02:06:37 +02:00
c6b237be46 admin: add a route to simulate time.json on backend machine 2017-04-05 02:06:36 +02:00
1a8e7066c3 db: add constraints to avoid multiple records of unique values 2017-04-05 02:06:36 +02:00
4793d0de4e admin: add button and route to reset some parts 2017-04-05 02:06:35 +02:00
89eaeef88e admin: interface to edit teams 2017-04-05 02:06:35 +02:00
012be2c69a frontend: improve 401 page thank to initial guide 2017-04-05 02:06:34 +02:00
b2ce6dfbdf backend: generate an event when a team open an hint 2017-04-05 02:06:34 +02:00
1c6a665a98 frontend: move file (on the same partition) instead of open, write, close the final file 2017-04-05 02:06:33 +02:00
a449131fa8 libfic: new function to retrieve exercices from a hint 2017-04-05 02:06:33 +02:00
610217f87c change the way themes are stored in stats 2017-04-05 02:06:32 +02:00
fa8c9caaf0 admin: can force page regeneration 2017-04-05 02:06:32 +02:00
95d60c69e0 Update openssl settings 2017-04-05 02:06:31 +02:00
37b3974d69 admin: new route /members/ 2017-04-05 02:06:30 +02:00
725e867702 admin: add public interface management 2017-04-05 02:06:30 +02:00
14277e525f public interface: rework 2017-04-05 02:06:29 +02:00
4bce3aa1c1 admin: allow import of remote hint and partials remote parts 2017-04-05 02:06:29 +02:00
dd4e207892 admin: restore function to add team and members 2017-04-05 02:06:28 +02:00
9bf91f819b admin: sanitize use of InitialName when needed 2017-04-05 02:06:28 +02:00
cbfea4444e frontend: move time in a separate package to be used elsewhere 2017-04-05 02:06:27 +02:00
1376be011b certificates: avoid error on noexec partition 2017-04-05 02:06:26 +02:00
8d7a291fd4 admin: Display time before start in UI 2017-04-05 02:06:26 +02:00
e3673c6b18 backend: don't regenerate files if config doesn't change 2017-04-05 02:06:25 +02:00
25e1dc5065 Force cd into PKI directory 2017-04-05 02:06:25 +02:00
edcb9a5256 frontend: fix partial solved flags display 2017-04-05 02:06:24 +02:00
0b965a36e3 settings: admin interface see default params 2017-04-05 02:06:23 +02:00
61bde6d31e admin: control settings 2017-04-05 02:06:23 +02:00
eab92973e8 Coefficients transit and display on UI 2017-04-05 02:06:22 +02:00
e428a43109 fixup! fixup! WIP esthetic changes 2017-04-05 02:06:22 +02:00
b7b56a0628 frontend: dedicate a field in JSON to file hint 2017-04-05 02:06:21 +02:00
c2dea2f985 Hints can something else than text 2017-04-05 02:06:21 +02:00
ee8bb97057 front: use ng-pluralize 2017-04-05 02:06:20 +02:00
c8297237ad WIP esthetic changes 2017-04-05 02:06:20 +02:00
d106a4766d libfic: refactor rank/points SQL query 2017-04-05 02:06:19 +02:00
2254ee7702 admin: Improve CA API 2017-04-05 02:06:18 +02:00
6d5ded2c3b squash! WIP: apply a coeff on given points 2017-04-05 02:06:18 +02:00
b30f3b18e6 frontend: improve rank rendering 2017-04-05 02:06:17 +02:00
e41b3acb7e fill_exercices: flags.txt files can use tabulation char as separator instead of : 2017-04-05 02:06:16 +02:00
f14a7940b3 frontend: use a common JS file to contain common features between challenger and public interface 2017-04-05 02:06:16 +02:00
cbb58bcefb WIP: apply a coeff on given points 2017-04-05 02:06:15 +02:00
7d26b172ea frontend: add /rules page 2017-04-05 02:06:15 +02:00
fae97e5411 Settings are now given through TEAMS/settings.json instead of been given through command line arguments 2017-04-05 02:06:14 +02:00
b1541d9a45 New rank and score calculation 2017-04-05 02:06:14 +02:00
8108125cb8 backend: log generation errors 2017-04-05 02:06:13 +02:00
56d43cc65b fill_exercice: define HINT_COST 2017-04-05 02:06:12 +02:00
d7d22fe471 Handle file import digest 2017-04-05 02:06:11 +02:00
65d40773cc admin: various fixes in fill_exercices 2017-04-05 02:06:11 +02:00
4ff0c0ac59 admin: can pass args to fill_exercices to limit the fill to a theme or an exercice 2017-04-05 02:06:10 +02:00
4cea4a4aa0 admin: new argument --rapidimport to speed up the import but don't ensure consistency 2017-04-05 02:06:10 +02:00
3636002549 Split team.go into multiple files 2017-04-05 02:06:09 +02:00
ca266c1709 [admin] Add new routes to manage hints, files and keys 2017-04-05 02:06:08 +02:00
f14e9e80c8 [admin] Add events 2017-04-05 02:06:08 +02:00
46d452c82b [admin] Add exercices related pages 2017-04-05 02:06:07 +02:00
c23a71912b [admin] Add page title 2017-04-05 02:06:07 +02:00
632e699fa8 [admin] Add ng-sanitize 2017-04-05 02:06:06 +02:00
5111143d2a Merge exercices API routes 2017-04-05 02:06:05 +02:00
f008aac04c Bump new version API 2017-04-05 02:06:05 +02:00
1bb978a9c6 Use github.com/julienschmidt/httprouter instead of gorilla 2017-04-05 02:06:04 +02:00
0d6e36798c Merge big splitted files before import 2017-04-05 02:06:04 +02:00
00dfbd92dd Use 2017 logos 2017-04-05 02:06:03 +02:00
937990fb48 frontend: interface can open hints 2017-04-05 02:06:02 +02:00
3d60896bdf frontend: able to receive opening hint 2017-04-05 02:06:01 +02:00
c669319e56 backend: can open hint 2017-04-05 02:06:01 +02:00
6e7e174713 frontend: refactor and dispatch in many routes 2017-04-05 02:06:00 +02:00
4fc4e34a4e WIP misc 2017-04-05 02:05:59 +02:00
ee5335e515 Partial resolution of exercices 2017-04-05 02:05:59 +02:00
ccbc787001 Multiple hints 2017-04-05 02:05:58 +02:00
9b35e78163 backend: use fsnotify instead of the deprecated inotify 2017-04-05 02:05:58 +02:00
10dc6c4d30 admin/api: use gorilla/mux instead of Go router 2017-04-05 02:05:57 +02:00
27ef7cb6c1 frontend: redesign download part 2017-04-05 02:05:57 +02:00
fa98b4bde3 frontend: add some glyphicons 2017-04-05 02:05:56 +02:00
8fab3aa85d frontend: move user box to the top of the page 2017-04-05 02:05:56 +02:00
003ceb8f98 backend: new option that unlock all challenges 2017-04-05 02:05:55 +02:00
07c1a22d75 themes: don't expect authors to be dirty 2017-04-05 02:05:55 +02:00
e04f94efbf frontend: fail if TEAMS directory doesn't exists 2017-04-05 02:05:54 +02:00
28054a3dd7 frontend: add resolution route 2017-04-05 02:05:53 +02:00
cbc0ad6a8d frontend: add link to frontend htdocs, like admin static pages 2017-04-05 02:05:53 +02:00
7bb7da5338 admin: can give the static dir location 2017-04-05 01:51:24 +02:00
7fb4b22a1f admin: can change the baseurl interface 2017-04-05 01:51:23 +02:00
ab4bf8f307 by default, only listen on localhost 2017-04-05 01:51:22 +02:00
a9da1fe059 fill_team: improve script reliability 2017-04-05 01:51:22 +02:00
1ccec4ab29 admin: add ability to add files from local storage 2017-04-05 01:51:21 +02:00
601 changed files with 7448 additions and 62180 deletions


@ -1,33 +0,0 @@
admin/admin
checker/checker
dashboard/dashboard
evdist/evdist
generator/generator
receiver/receiver
repochecker/repochecker
frontend/fic/build
frontend/fic/node_modules
qa/ui/build
qa/ui/node_modules
fickit-backend-initrd.img
fickit-backend-kernel
fickit-backend-squashfs.img
fickit-backend-state
fickit-frontend-initrd.img
fickit-frontend-kernel
fickit-frontend-squashfs.img
fickit-frontend-state
fickit-prepare-initrd.img
fickit-prepare-kernel
fickit-update-initrd.img
fickit-update-kernel
DASHBOARD
FILES
PKI
REMOTE
repochecker/*.so
SETTINGS
SETTINGSDIST
submissions
TEAMS
vendor


@ -1,22 +0,0 @@
image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,22 +0,0 @@
image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
- image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm
platform:
architecture: arm
os: linux
variant: v7


@ -1,816 +0,0 @@
---
kind: pipeline
type: docker
name: build-amd64
platform:
os: linux
arch: amd64
workspace:
base: /go
path: src/srs.epita.fr/fic-server
steps:
- name: get deps
image: golang:alpine
commands:
- apk --no-cache add git
- go get -v -d ./...
- mkdir deploy
- name: build qa ui
image: node:23-alpine
commands:
- cd qa/ui
- npm install --network-timeout=100000
- npm run build
- tar chjf ../../deploy/htdocs-qa.tar.bz2 build
- name: vet and tests
image: golang:alpine
commands:
- apk --no-cache add build-base
- go vet -buildvcs=false -tags gitgo ./...
- go vet -buildvcs=false ./...
- go test ./...
- name: build admin
image: golang:alpine
commands:
- go build -buildvcs=false -tags gitgo -o deploy/admin-gitgo-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/admin
- go build -buildvcs=false -o deploy/admin-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/admin
- tar chjf deploy/htdocs-admin.tar.bz2 htdocs-admin
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build checker
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/checker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/checker
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build evdist
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/evdist-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/evdist
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build generator
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/generator-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/generator
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build receiver
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/receiver-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/receiver
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build frontend fic ui
image: node:23-alpine
commands:
- cd frontend/fic
- npm install --network-timeout=100000
- npm run build
- tar chjf ../../deploy/htdocs-frontend-fic.tar.bz2 build
when:
branch:
exclude:
- master
- name: build dashboard
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/dashboard-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/dashboard
- tar chjf deploy/htdocs-dashboard.tar.bz2 htdocs-dashboard
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build repochecker
image: golang:alpine
commands:
- apk --no-cache add build-base
- go build -buildvcs=false --tags checkupdate -o deploy/repochecker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/repochecker
- go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-epita-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/epita
- go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-file-inspector-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/file-inspector
- go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-grammalecte-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/grammalecte
- go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-pcap-inspector-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/pcap-inspector
- go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-videos-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/videos
- grep "const version" repochecker/update.go | sed -r 's/^.*=\s*(\S.*)$/\1/' > deploy/repochecker.version
when:
branch:
exclude:
- master
- name: build qa
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/qa-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/qa
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: docker admin
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-admin
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-admin
when:
branch:
- master
- name: docker checker
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-checker
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-checker
when:
branch:
- master
- name: docker evdist
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-evdist
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-evdist
when:
branch:
- master
- name: docker generator
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-generator
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-generator
when:
branch:
- master
- name: docker receiver
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-receiver
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-receiver
when:
branch:
- master
- name: docker frontend nginx
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-nginx
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-nginx
when:
branch:
- master
- name: docker frontend ui
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-frontend-ui
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-frontend-ui
when:
branch:
- master
- name: docker dashboard
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-dashboard
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-dashboard
when:
branch:
- master
- name: docker qa
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-qa
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-qa
when:
branch:
- master
- name: docker repochecker
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-repochecker
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-repochecker
when:
branch:
- master
- name: docker remote-scores-sync-zqds
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-remote-scores-sync-zqds
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-remote-scores-sync-zqds
when:
branch:
- master
- name: docker remote-challenge-sync-airbus
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-remote-challenge-sync-airbus
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-remote-challenge-sync-airbus
when:
branch:
- master
- name: docker fic-get-remote-files
failure: ignore
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-get-remote-files
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-get-remote-files
when:
branch:
- master
- name: docker fickit-deploy
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fickit-deploy
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-deploy
when:
branch:
- master
trigger:
event:
- cron
- push
- tag
---
kind: pipeline
type: docker
name: build-arm64
platform:
os: linux
arch: arm64
workspace:
base: /go
path: src/srs.epita.fr/fic-server
steps:
- name: get deps
image: golang:alpine
commands:
- apk --no-cache add git
- go get -d ./...
- mkdir deploy
- name: build admin
image: golang:alpine
commands:
- apk --no-cache add build-base
- go build -buildvcs=false -o deploy/admin-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/admin
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build checker
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/checker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/checker
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build evdist
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/evdist-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/evdist
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build generator
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/generator-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/generator
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build receiver
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/receiver-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/receiver
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build frontend fic ui
image: node:23-alpine
commands:
- cd frontend/fic
- npm install --network-timeout=100000
- npm run build
when:
branch:
exclude:
- master
- name: build dashboard
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/dashboard-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/dashboard
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build repochecker
image: golang:alpine
commands:
- apk --no-cache add build-base
- go build -buildvcs=false --tags checkupdate -o deploy/repochecker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/repochecker
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: build repochecker for macOS
image: golang:alpine
commands:
- apk --no-cache add build-base
- go build -buildvcs=false --tags checkupdate -o deploy/repochecker-darwin-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/repochecker
environment:
CGO_ENABLED: 0
GOOS: darwin
GOARCH: arm64
when:
branch:
exclude:
- master
- name: build qa ui
image: node:23-alpine
commands:
- cd qa/ui
- npm install --network-timeout=100000
- npm run build
- tar chjf ../../deploy/htdocs-qa.tar.bz2 build
when:
branch:
exclude:
- master
- name: build qa
image: golang:alpine
commands:
- go build -buildvcs=false -o deploy/qa-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/qa
environment:
CGO_ENABLED: 0
when:
branch:
exclude:
- master
- name: docker admin
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-admin
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-admin
when:
branch:
- master
- name: docker checker
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-checker
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-checker
when:
branch:
- master
- name: docker evdist
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-evdist
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-evdist
when:
branch:
- master
- name: docker generator
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-generator
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-generator
when:
branch:
- master
- name: docker fic-get-remote-files
failure: ignore
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-get-remote-files
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-get-remote-files
when:
branch:
- master
- name: docker receiver
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-receiver
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-receiver
when:
branch:
- master
- name: docker frontend nginx
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-nginx
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-nginx
when:
branch:
- master
- name: docker dashboard
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-dashboard
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-dashboard
when:
branch:
- master
- name: docker qa
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-qa
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-qa
when:
branch:
- master
- name: docker repochecker
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-repochecker
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-repochecker
when:
branch:
- master
trigger:
event:
- cron
- push
- tag
---
kind: pipeline
name: docker-manifest
steps:
- name: publish admin
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-admin.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish checker
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-checker.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish evdist
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-evdist.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish generator
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-generator.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish receiver
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-receiver.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish frontend nginx
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-nginx.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish frontend ui
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-frontend-ui.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish dashboard
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-dashboard.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish repochecker
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-repochecker.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: publish qa
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fic-qa.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
- name: docker fic-get-remote-files
failure: ignore
image: plugins/docker
settings:
username:
from_secret: docker_username
password:
from_secret: docker_password
repo: nemunaire/fic-get-remote-files
auto_tag: true
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}
dockerfile: Dockerfile-get-remote-files
when:
branch:
- master
- name: publish fickit-deploy
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
spec: .drone-manifest-fickit-deploy.yml
username:
from_secret: docker_username
password:
from_secret: docker_password
trigger:
event:
- push
- tag
depends_on:
- build-amd64
- build-arm64

.gitignore vendored

@ -1,43 +0,0 @@
vendor/
DASHBOARD/
FILES/
PKI/
REMOTE/
SETTINGS/
SETTINGSDIST/
TEAMS/
submissions/
admin/sync/README.html
fickit-boot-cmdline
fickit-boot-initrd.img
fickit-boot-kernel
fickit-backend-cmdline
fickit-backend-initrd.img
fickit-backend-squashfs.img
fickit-backend-kernel
fickit-backend-state
fickit-frontend-cmdline
fickit-frontend-initrd.img
fickit-frontend-squashfs.img
fickit-frontend-kernel
fickit-frontend-state
fickit-prepare-bios.img
fickit-prepare-cmdline
fickit-prepare-initrd.img
fickit-prepare-kernel
fickit-prepare-state
fickit-update-cmdline
fickit-update-initrd.img
fickit-update-kernel
fickit-update-squashfs.img
result
started
# Standalone binaries
admin/get-remote-files/get-remote-files
fic-admin
fic-backend
fic-dashboard
fic-frontend
fic-qa
fic-repochecker


@ -1,122 +0,0 @@
---
stages:
- deps
- build
- fickit
- sast
- qa
- image
- container_scanning
cache:
paths:
- .go/pkg/mod/
- qa/ui/node_modules/
- frontend/ui/node_modules/
include:
- '.gitlab-ci/build.yml'
- '.gitlab-ci/image.yml'
- template: SAST.gitlab-ci.yml
- template: Security/Dependency-Scanning.gitlab-ci.yml
- template: Security/Secret-Detection.gitlab-ci.yml
- template: Security/Container-Scanning.gitlab-ci.yml
.scanners-matrix:
parallel:
matrix:
- IMAGE_NAME: [checker, admin, evdist, frontend-ui, nginx, dashboard, repochecker, qa, receiver, generator, remote-challenge-sync-airbus]
container_scanning:
stage: container_scanning
extends:
- .scanners-matrix
variables:
DOCKER_SERVICE: localhost
DOCKERFILE_PATH: Dockerfile-${IMAGE_NAME}
CI_APPLICATION_REPOSITORY: ${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/${IMAGE_NAME}
CI_APPLICATION_TAG: latest
GIT_STRATEGY: fetch
before_script:
- 'echo "Scanning: ${IMAGE_NAME}"'
rules:
- if: '$CI_COMMIT_BRANCH == "master"'
sast:
stage: sast
interruptible: true
needs: []
before_script:
- rm -rf .go/
secret_detection:
stage: sast
interruptible: true
needs: []
dependency_scanning:
stage: qa
interruptible: true
needs: []
get-deps:
stage: deps
image: golang:1-alpine
before_script:
- export GOPATH="$CI_PROJECT_DIR/.go"
- mkdir -p .go
script:
- apk --no-cache add git
- go get -v -d ./...
vet:
stage: sast
needs: ["build-qa-ui"]
dependencies:
- build-qa-ui
image: golang:1-alpine
before_script:
- export GOPATH="$CI_PROJECT_DIR/.go"
- mkdir -p .go
script:
- apk --no-cache add build-base
- go vet -v -buildvcs=false -tags gitgo ./...
- go vet -v -buildvcs=false ./...
fickit:
stage: fickit
interruptible: true
needs: ["build-admin","build-checker","build-dashboard","build-evdist","build-generator","build-qa","build-receiver","build-repochecker"]
image: nemunaire/linuxkit
tags: ['docker']
before_script:
- mkdir -p ~/.docker
- echo "{\"auths\":{\"${CI_REGISTRY}\":{\"username\":\"${CI_REGISTRY_USER}\",\"password\":\"${CI_REGISTRY_PASSWORD}\"}}}" > ~/.docker/config.json
script:
- dockerd & sleep 5
- linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/boot/
- linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/kexec/
- linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/mariadb-client/
- linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/mdadm/
- linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/rsync/
- linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/syslinux/
- linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/unbound/
- sed -i "s@nemunaire/fic-@${CI_REGISTRY_IMAGE}/master/@;s@nemunaire/@${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/@" fickit-backend.yml fickit-boot.yml fickit-frontend.yml fickit-prepare.yml fickit-update.yml
- linuxkit build -format kernel+squashfs fickit-backend.yml
- linuxkit build -format kernel+squashfs fickit-frontend.yml
- linuxkit build -format kernel+initrd fickit-boot.yml
- linuxkit build -format kernel+initrd fickit-prepare.yml
- linuxkit build -format kernel+initrd fickit-update.yml
artifacts:
expire_in: 8 hours
paths:
- fickit-backend-squashfs.img
- fickit-frontend-squashfs.img
- fickit-boot-kernel
- fickit-boot-initrd.img
- fickit-prepare-initrd.img
- fickit-update-initrd.img


@ -1,93 +0,0 @@
---
.build:
stage: build
image: golang:1-alpine
before_script:
- export GOPATH="$CI_PROJECT_DIR/.go"
- mkdir -p .go
variables:
CGO_ENABLED: 0
build-qa-ui:
stage: build
image: node:21-alpine
before_script:
script:
- cd qa/ui
- npm install --network-timeout=100000
- npm run build
artifacts:
paths:
- qa/ui/build/
when: on_success
build-checker:
extends:
- .build
script:
- go build -v -buildvcs=false -o deploy/checker srs.epita.fr/fic-server/checker
build-generator:
extends:
- .build
script:
- go build -v -buildvcs=false -o deploy/generator srs.epita.fr/fic-server/generator
build-receiver:
extends:
- .build
script:
- go build -v -buildvcs=false -o deploy/receiver srs.epita.fr/fic-server/receiver
build-admin:
extends:
- .build
script:
- go build -v -buildvcs=false -tags gitgo -o deploy/admin-gitgo srs.epita.fr/fic-server/admin
- go build -v -buildvcs=false -o deploy/admin srs.epita.fr/fic-server/admin
build-evdist:
extends:
- .build
script:
- go build -v -buildvcs=false -o deploy/evdist srs.epita.fr/fic-server/evdist
build-frontend-ui:
stage: build
image: node:21-alpine
before_script:
script:
- cd frontend/fic
- npm install --network-timeout=100000
- npm run build
build-dashboard:
extends:
- .build
script:
- go build -v -buildvcs=false -o deploy/dashboard srs.epita.fr/fic-server/dashboard
build-repochecker:
extends:
- .build
variables:
CGO_ENABLED: 1
script:
- apk --no-cache add build-base
- go build -buildvcs=false --tags checkupdate -v -o deploy/repochecker srs.epita.fr/fic-server/repochecker
- go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-epita-rules.so srs.epita.fr/fic-server/repochecker/epita
- go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-file-inspector-rules.so srs.epita.fr/fic-server/repochecker/file-inspector
- go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-grammalecte-rules.so srs.epita.fr/fic-server/repochecker/grammalecte
- go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-pcap-inspector-rules.so srs.epita.fr/fic-server/repochecker/pcap-inspector
- go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-videos-rules.so srs.epita.fr/fic-server/repochecker/videos
- grep "const version" repochecker/update.go | sed -r 's/^.*=\s*(\S.*)$/\1/' > deploy/repochecker.version
build-qa:
extends:
- .build
needs: ["build-qa-ui"]
dependencies:
- build-qa-ui
script:
- go build -v -buildvcs=false -o deploy/qa srs.epita.fr/fic-server/qa


@ -1,99 +0,0 @@
---
.push:
stage: image
interruptible: true
needs: []
image:
name: gcr.io/kaniko-project/executor:v1.9.0-debug
entrypoint: [""]
before_script:
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"${CI_REGISTRY}\":{\"username\":\"${CI_REGISTRY_USER}\",\"password\":\"${CI_REGISTRY_PASSWORD}\"}}}" > /kaniko/.docker/config.json
script:
- |
/kaniko/executor \
--context . \
--dockerfile "${DOCKERFILE}" \
--destination "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/${CI_JOB_NAME}:${CI_COMMIT_SHA}" \
--destination "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/${CI_JOB_NAME}:latest"
only:
- master
checker:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-checker
receiver:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-receiver
generator:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-generator
admin:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-admin
fickit-deploy:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-deploy
get-remote-files:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-get-remote-files
evdist:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-evdist
frontend-ui:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-frontend-ui
nginx:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-nginx
dashboard:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-dashboard
repochecker:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-repochecker
qa:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-qa
remote-challenge-sync-airbus:
extends:
- .push
variables:
DOCKERFILE: Dockerfile-remote-challenge-sync-airbus


@ -1,42 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
RUN apk add --no-cache binutils-gold build-base
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY admin ./admin/
COPY repochecker ./repochecker/
RUN go get -d -v ./admin && \
go build -v -o admin/admin ./admin && \
go build -v -buildmode=plugin -o repochecker/epita-rules.so ./repochecker/epita && \
go build -v -buildmode=plugin -o repochecker/file-inspector.so ./repochecker/file-inspector && \
go build -v -buildmode=plugin -o repochecker/grammalecte-rules.so ./repochecker/grammalecte && \
go build -v -buildmode=plugin -o repochecker/videos-rules.so ./repochecker/videos
FROM alpine:3.21
RUN apk add --no-cache \
ca-certificates \
git \
git-lfs \
openssh-client-default \
openssl
EXPOSE 8081
WORKDIR /srv
ENTRYPOINT ["/srv/admin", "-bind=:8081", "-baseurl=/admin/"]
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/admin/admin /srv/admin
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/epita-rules.so /srv/epita-rules.so
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/file-inspector.so /usr/lib/file-inspector.so
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/grammalecte-rules.so /usr/lib/grammalecte-rules.so
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/videos-rules.so /usr/lib/videos-rules.so


@ -1,22 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY checker ./checker/
RUN go get -d -v ./checker && \
go build -v -buildvcs=false -o checker/checker ./checker
FROM alpine:3.21
WORKDIR /srv
ENTRYPOINT ["/srv/checker"]
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/checker/checker /srv/checker


@ -1,32 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY dashboard ./dashboard/
RUN go get -d -v ./dashboard && \
go build -v -buildvcs=false -o dashboard/dashboard ./dashboard
FROM alpine:3.21
EXPOSE 8082
WORKDIR /srv
ENTRYPOINT ["/srv/dashboard", "--bind=:8082"]
VOLUME /srv/htdocs-dashboard/
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/dashboard/dashboard /srv/dashboard
COPY dashboard/static/index.html /srv/htdocs-dashboard/
COPY admin/static/css/bootstrap.min.css dashboard/static/css/fic.css admin/static/css/glyphicon.css /srv/htdocs-dashboard/css/
COPY admin/static/fonts /srv/htdocs-dashboard/fonts
COPY dashboard/static/img/srs.png /srv/htdocs-dashboard/img/
COPY dashboard/static/js/dashboard.js admin/static/js/angular.min.js dashboard/static/js/angular-animate.min.js admin/static/js/angular-route.min.js admin/static/js/angular-sanitize.min.js admin/static/js/bootstrap.min.js admin/static/js/common.js admin/static/js/d3.v3.min.js admin/static/js/jquery.min.js /srv/htdocs-dashboard/js/
COPY admin/static/js/i18n/* /srv/htdocs-dashboard/js/i18n/


@ -1,24 +0,0 @@
FROM alpine:3.21
EXPOSE 67/udp
EXPOSE 69/udp
EXPOSE 80/tcp
ENTRYPOINT ["/usr/sbin/initial-config.sh"]
CMD ["/usr/bin/supervisord", "-c", "/etc/supervisord.conf"]
WORKDIR /srv/s
RUN apk add --no-cache \
busybox-extras \
supervisor \
syslinux \
tftp-hpa
RUN touch /var/lib/udhcpd/udhcpd.leases && \
mv /usr/share/syslinux/* /srv
COPY configs/deploy-initial-config.sh /usr/sbin/initial-config.sh
COPY configs/deploy-supervisord.conf /etc/supervisord.conf
COPY configs/udhcpd-sample.conf /etc/udhcpd.conf
COPY configs/pxelinux.cfg /srv/pxelinux.cfg/default


@ -1,21 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY settings settings/
COPY evdist ./evdist/
RUN go get -d -v ./evdist && \
go build -v -buildvcs=false -o evdist/evdist ./evdist
FROM alpine:3.21
WORKDIR /srv
ENTRYPOINT ["/srv/evdist"]
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/evdist/evdist /srv/evdist


@ -1,13 +0,0 @@
FROM node:23-alpine AS nodebuild
WORKDIR /ui
COPY frontend/fic/ .
RUN npm install --network-timeout=100000 && \
npm run build
FROM scratch
COPY --from=nodebuild /ui/build/ /www/htdocs-frontend


@ -1,22 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY generator ./generator/
RUN go get -d -v ./generator && \
go build -v -buildvcs=false -o generator/generator ./generator
FROM alpine:3.21
WORKDIR /srv
ENTRYPOINT ["/srv/generator"]
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/generator/generator /srv/generator


@ -1,27 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
RUN apk add --no-cache build-base
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY admin ./admin/
RUN go get -d -v ./admin && \
go build -v -o get-remote-files ./admin/get-remote-files
FROM alpine:3.21
RUN apk add --no-cache \
ca-certificates
WORKDIR /srv
ENTRYPOINT ["/srv/get-remote-files", "/mnt/fic/"]
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/get-remote-files /srv/get-remote-files


@ -1,32 +0,0 @@
FROM node:23-alpine AS nodebuild
WORKDIR /ui
COPY frontend/fic/ .
RUN npm install --network-timeout=100000 && \
npm run build
FROM nginx:stable-alpine-slim
ENV FIC_BASEURL /
ENV HOST_RECEIVER receiver:8080
ENV HOST_ADMIN admin:8081
ENV HOST_DASHBOARD dashboard:8082
ENV HOST_QA qa:8083
ENV PATH_FILES /srv/FILES
ENV PATH_STARTINGBLOCK /srv/STARTINGBLOCK
ENV PATH_STATIC /srv/htdocs-frontend
ENV PATH_SETTINGS /srv/SETTINGSDIST
ENV PATH_TEAMS /srv/TEAMS
EXPOSE 80
COPY configs/nginx-chbase.sh /docker-entrypoint.d/40-update-baseurl.sh
COPY configs/nginx/get-team/upstream.conf /etc/nginx/fic-get-team.conf
COPY configs/nginx/auth/none.conf /etc/nginx/fic-auth.conf
COPY configs/nginx/base/docker.conf /etc/nginx/templates/default.conf.template
COPY --from=nodebuild /ui/build/ /srv/htdocs-frontend


@ -1,38 +0,0 @@
FROM node:23-alpine AS nodebuild
WORKDIR /ui
COPY qa/ui/ .
RUN npm install --network-timeout=100000 && \
npm run build
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY --from=nodebuild /ui ./qa/ui
COPY qa ./qa/
COPY admin ./admin/
RUN go get -d -v ./qa && \
go build -v -buildvcs=false -o qa/qa ./qa
FROM alpine:3.21
EXPOSE 8083
WORKDIR /srv
ENTRYPOINT ["/srv/qa", "--bind=:8083"]
VOLUME /srv/htdocs-qa/
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/qa/qa /srv/qa


@ -1,27 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY receiver ./receiver/
RUN go get -d -v ./receiver && \
go build -v -buildvcs=false -o ./receiver/receiver ./receiver
FROM alpine:3.21
EXPOSE 8080
WORKDIR /srv
ENTRYPOINT ["/usr/sbin/entrypoint.sh"]
CMD ["--bind=:8080"]
COPY entrypoint-receiver.sh /usr/sbin/entrypoint.sh
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/receiver/receiver /srv/receiver


@ -1,24 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY libfic ./libfic/
COPY settings ./settings/
COPY remote/challenge-sync-airbus ./remote/challenge-sync-airbus/
RUN go get -d -v ./remote/challenge-sync-airbus && \
go build -v -buildvcs=false -o ./challenge-sync-airbus ./remote/challenge-sync-airbus
FROM alpine:3.21
RUN apk add --no-cache openssl ca-certificates
WORKDIR /srv
ENTRYPOINT ["/srv/challenge-sync-airbus"]
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/challenge-sync-airbus /srv/challenge-sync-airbus


@ -1,24 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
COPY go.mod go.sum ./
COPY libfic ./libfic/
COPY settings ./settings/
COPY remote/scores-sync-zqds ./remote/scores-sync-zqds/
RUN go get -d -v ./remote/scores-sync-zqds && \
go build -v -buildvcs=false -o ./scores-sync-zqds ./remote/scores-sync-zqds
FROM alpine:3.21
RUN apk add --no-cache openssl ca-certificates
WORKDIR /srv
ENTRYPOINT ["/srv/scores-sync-zqds"]
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/scores-sync-zqds /srv/scores-sync-zqds


@ -1,42 +0,0 @@
FROM golang:1-alpine AS gobuild
RUN apk add --no-cache git
WORKDIR /go/src/srs.epita.fr/fic-server/
RUN apk add --no-cache binutils-gold build-base
COPY go.mod go.sum ./
COPY settings settings/
COPY libfic ./libfic/
COPY admin ./admin/
COPY repochecker ./repochecker/
RUN go get -d -v ./repochecker && \
go build -v -o repochecker/repochecker ./repochecker && \
go build -v -buildmode=plugin -o repochecker/epita-rules.so ./repochecker/epita && \
go build -v -buildmode=plugin -o repochecker/file-inspector.so ./repochecker/file-inspector && \
go build -v -buildmode=plugin -o repochecker/grammalecte-rules.so ./repochecker/grammalecte && \
go build -v -buildmode=plugin -o repochecker/pcap-inspector.so ./repochecker/pcap-inspector && \
go build -v -buildmode=plugin -o repochecker/videos-rules.so ./repochecker/videos
ENV GRAMMALECTE_VERSION 2.1.1
ADD https://web.archive.org/web/20240926154729if_/https://grammalecte.net/zip/Grammalecte-fr-v$GRAMMALECTE_VERSION.zip /srv/grammalecte.zip
RUN mkdir /srv/grammalecte && cd /srv/grammalecte && unzip /srv/grammalecte.zip && sed -i 's/if sys.version_info.major < (3, 7):/if False:/' /srv/grammalecte/grammalecte-server.py
FROM alpine:3.19
ENTRYPOINT ["/usr/bin/repochecker", "--rules-plugins=/usr/lib/epita-rules.so", "--rules-plugins=/usr/lib/file-inspector.so", "--rules-plugins=/usr/lib/grammalecte-rules.so", "--rules-plugins=/usr/lib/pcap-inspector.so", "--rules-plugins=/usr/lib/videos-rules.so"]
RUN apk add --no-cache git python3 ffmpeg
COPY --from=gobuild /srv/grammalecte /srv/grammalecte
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/repochecker /usr/bin/repochecker
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/epita-rules.so /usr/lib/epita-rules.so
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/file-inspector.so /usr/lib/file-inspector.so
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/grammalecte-rules.so /usr/lib/grammalecte-rules.so
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/pcap-inspector.so /usr/lib/pcap-inspector.so
COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/videos-rules.so /usr/lib/videos-rules.so

LICENSE

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2016-2018 Pierre-Olivier Mercier <nemunaire@nemunai.re>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md

@ -1,238 +0,0 @@
FIC Forensic CTF Platform
=========================
This is a CTF server for distributing and validating challenges. It is designed
to be robust: it relies on some uncommon techniques, such as client-certificate
authentication and plenty of state-of-the-art cryptography, and it is meant to
be deployed in a DMZ network architecture.
## Features
- **Collaborative Challenge Design and Review:** Facilitates large team collaboration for challenge creation and review.
- **Versatile Flag Formats:** Supports flags as strings, numbers, multiple-choice questions, unique-choice questions, selects, multiline inputs, and strings with capture regexp.
- **Engaging Challenge Interface:** A visually appealing interface that incorporates images to illustrate exercises.
- **Public Dashboard:** Allows spectators to follow the competition alongside the players.
- **Archival Mode:** Preserves past challenges and data in a static form, with no code; the archive can live on an S3 bucket.
- **Export Capabilities:** Exports challenges to other CTF platforms.
- **Security-Focused:** Designed with security as a top priority. Each service is isolated with strict restrictions, answers are not stored in the database, ...
- **Choose your Authentication:** Authentication is not part of this project: integrate your own authentication methods.
- **Extensible:** Easily extend and customize the platform. The main Go codebase is heavily documented, and each frontend part can easily be recreated in another language.
- **Comprehensive Settings:** A wide range of settings for challenge customization: first blood or not, dynamic exercise gain, event-based bonuses, ...
- **Git Integration:** Seamless verification and integration with Git.
- **Infrastructure as Code (IaC):** Ensure read-only and reproducible infrastructure.
- **Last-Minute Checks:** Ensure your challenge is ready with a comprehensive set of checks that can be performed anytime, verifying that downloadable files are as expected by the challenge creators.
- **Lightweight:** Optimized for minimal resource consumption, supporting features like serving gzipped files directly to browsers without CPU usage.
- **Scalable:** Designed to handle large-scale competitions with multiple receivers and frontend servers, smoothly queuing activity peaks on the backend.
- **Offline Capability:** Run your challenges offline.
- **Integrated Exercise Issue Ticketing System:** Manage and track exercise-related issues directly with the teams during the competition. During the design phase, this turns into a complete, dedicated QA platform.
- **Detailed Statistics:** Provide administrators with insights into exercise preferences and complexity.
- **Change Planning:** Schedule events in advance, such as new exercise availability or ephemeral bonuses, with second-by-second precision.
- **Frontend Time Synchronization:** Ensure accurate remaining time and event synchronization between servers and players.
## Overview
This is a [monorepo](https://danluu.com/monorepo/), containing several
micro-services:
- `admin` is the web interface and API used to control the challenge
  and to perform synchronization.
- `checker` is an inotify-reacting service that handles submission
  checking (a minimal sketch of this pattern follows the overview
  diagram below).
- `dashboard` is a public interface to explain and follow the
  conquest; it aims to animate the challenge for visitors.
- `evdist` is an inotify-reacting service that handles settings
  changes during the challenge (e.g. a 30-minute event where hints
  are free).
- `generator` takes care of generating global and per-team files.
- `qa` is an interface dedicated to challenge development; it stores
  reports to be handled by challenge creators.
- `receiver` is only responsible for receiving submissions. It is the
  only dynamic part accessible to players, so its codebase is reduced
  to the minimum. It does not parse or try to understand player
  submissions; it just writes them down to files on the file
  system. Parsing and processing are done by the `checker`.
- `remote/challenge-sync-airbus` is an inotify-reacting service that
  synchronizes scores and exercise validations with the Airbus
  scoring platform.
- `remote/scores-sync-zqds` is an inotify-reacting service that
  synchronizes scores with the ZQDS scoring platform.
- `repochecker` is a side project to check offline for synchronization
  issues.
Here is how those services talk to each other:
![Overview of the micro-services](doc/micro-services.png)
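Several of these services (`checker`, `evdist`, the `remote/*` synchronizers) share the same inotify-reaction pattern: block until a file appears or changes in a watched directory, then process it. The snippet below is only a minimal sketch of that pattern, written with the `github.com/fsnotify/fsnotify` library; the watched path and the `handleSubmission` hook are illustrative placeholders, not the actual implementation.

    package main

    import (
        "log"

        "github.com/fsnotify/fsnotify"
    )

    func main() {
        watcher, err := fsnotify.NewWatcher()
        if err != nil {
            log.Fatal(err)
        }
        defer watcher.Close()

        // Illustrative path: each real service watches its own directory.
        if err := watcher.Add("./submissions"); err != nil {
            log.Fatal(err)
        }

        for {
            select {
            case event, ok := <-watcher.Events:
                if !ok {
                    return
                }
                // React to newly created files, as a checker-like service would.
                if event.Op&fsnotify.Create == fsnotify.Create {
                    log.Println("new file to process:", event.Name)
                    // handleSubmission(event.Name) // placeholder for the real processing
                }
            case err, ok := <-watcher.Errors:
                if !ok {
                    return
                }
                log.Println("watch error:", err)
            }
        }
    }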
In the production setup, each micro-service runs in a dedicated
container, isolated from each other. Moreover, two physical machines
should be used:
- `phobos` communicates with players: it displays the web interface,
  authenticates teams and players, stores contest files and receives
  submissions without trying to understand them. It can't access
  `deimos`, so its job stops after writing requests to the filesystem.
- `deimos` is hidden from players and isolated from the network. It can
  only access `phobos` via a restricted ssh connection, used to retrieve
  requests from the `phobos` filesystem and to push newly generated
  static files back to it.
Concretely, the layer 2 network looks like this:
![Layer 2 connections](doc/l2.png)
So, the general filesystem is organized this way:
- `DASHBOARD` contains files structuring the content of the dashboard
screen(s).
- `FILES` stores the contest files to be downloaded by players. To be
  accessible without authentication while avoiding brute-force
  enumeration, each file is placed in a directory with a hashed name
  (the original file name is preserved). It's rsynced as-is to `deimos`.
- `GENERATOR` contains a socket to allow other services to communicate
with the `generator`.
- `PKI` takes care of the PKI used for the client certificate
  authorization process and, more generally, all authentication-related
  files (htpasswd, dexidp config, ...). Only the `shared` subdirectory
  is shared with `deimos`; the private key and team P12 files never go out.
- `SETTINGS` stores the challenge config as wanted by admins. It's not
  always the config in use: its application can be delayed, waiting for
  a trigger.
- `SETTINGSDIST` is the challenge configuration in use. It is the one
shared with players.
- `startingblock` keeps the `started` state of the challenge. This
  lets `nginx` know when it can start distributing exercice-related
  files.
- `TEAMS` stores the static files generated by the `generator`: there is
  one subdirectory per team (named after the team id), plus some files
  at the root, which are common to all teams. There are also symlinks
  pointing to team directories; each symlink represents an
  authentication association (certificate ID, OpenID username, htpasswd
  user, ...).
- `submissions` is the directory where the `receiver` writes
  requests. It creates subdirectories named after the authentication
  association, as seen in `TEAMS`; the `checker` then resolves the
  association against the `TEAMS` directory (see the sketch below).
  There is also a special directory to handle team registration.
Here is a diagram showing how each micro-service uses directories it has access to (blue for read access, red for write access):
![Usage of directories by each micro-service](doc/directories.png)
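To make the association mechanism concrete, here is a minimal sketch of both sides of it, modeled on the symlink handling visible in the admin API diff further down this page: an association symlink is created in `TEAMS` pointing to the team id, and a checker-like process resolves an association name back to that id by reading the symlink. The function names and the example association are illustrative only.

    package main

    import (
        "fmt"
        "os"
        "path"
        "strconv"
    )

    // associateTeam records an authentication association (certificate ID,
    // OpenID username, htpasswd user, ...) as TEAMS/<assoc> -> <team id>.
    func associateTeam(teamsDir, assoc string, teamID int64) error {
        return os.Symlink(fmt.Sprintf("%d", teamID), path.Join(teamsDir, assoc))
    }

    // resolveTeam maps an association (the subdirectory name used by the
    // receiver under submissions/) back to a team id by reading the
    // corresponding symlink in TEAMS.
    func resolveTeam(teamsDir, assoc string) (int64, error) {
        lnk, err := os.Readlink(path.Join(teamsDir, assoc))
        if err != nil {
            return 0, err
        }
        return strconv.ParseInt(lnk, 10, 64)
    }

    func main() {
        // Throw-away directory standing in for TEAMS.
        teamsDir, _ := os.MkdirTemp("", "TEAMS")
        defer os.RemoveAll(teamsDir)

        if err := associateTeam(teamsDir, "cert-5F2A", 1); err != nil {
            panic(err)
        }
        id, err := resolveTeam(teamsDir, "cert-5F2A")
        if err != nil {
            panic(err)
        }
        fmt.Println("association cert-5F2A belongs to team", id)
    }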
Local developer setup
---------------------
### Using Docker
Use `docker-compose build`, then `docker-compose up` to launch the infrastructure.
After booting, you'll be able to reach the main interface at:
<http://localhost:8042/> and the admin one at: <http://localhost:8081/> (or at <http://localhost:8042/admin/>).
The dashboard is available at <http://localhost:8042/dashboard/> and the QA service at <http://localhost:8042/qa/>.
In this setup, there is no authentication. You are identified [as a team](./configs/nginx/get-team/team-1.conf). On first use you'll need to register.
#### Import folder
##### Local import folder
The following change is only required if you want to change the location of the local import folder `~/fic`.
Make the following change in `docker-compose.yml`:
23 volumes:
24 - - ~/fic:/mnt/fic:ro
24 + - <custom-path-to-import-folder>/fic:/mnt/fic:ro
##### Git import
A git repository can be used:
29 - command: --baseurl /admin/ -localimport /mnt/fic -localimportsymlink
29 + command: --baseurl /admin/ -localimport /mnt/fic -localimportsymlink -git-import-remote git@gitlab.cri.epita.fr:ing/majeures/srs/fic/2042/challenges.git
##### Owncloud import folder
If you want to use the folder available through the Owncloud service, make the following change in `docker-compose.yml`:
29 - command: --baseurl /admin/ -localimport /mnt/fic -localimportsymlink
29 + command: --baseurl /admin/ -clouddav=https://owncloud.srs.epita.fr/remote.php/webdav/FIC%202019/ -clouduser <login_x> -cloudpass '<passwd>'
### Manual builds
Running this project requires a web server (configuration is given for nginx),
a database (currently only MySQL/MariaDB is supported), a Go compiler (version
1.18 or later) and an `inotify`-aware system. You'll also need Node.js to
build some of the user interfaces.
1. Above all, you need to build Node projects:
cd frontend/fic; npm install && npm run build
cd qa/ui; npm install && npm run build
2. Then, you'll need to retrieve the dependencies:
go mod vendor
3. Next, build the Go projects:
go build -o fic-admin ./admin
go build -o fic-checker ./checker
go build -o fic-dashboard ./dashboard
go build -o fic-generator ./generator
go build -o fic-qa ./qa
go build -o fic-receiver ./receiver
go build -o fic-repochecker ./repochecker
...
4. Before launching anything, you need to create a database:
mysql -u root -p <<EOF
CREATE DATABASE fic;
CREATE USER fic@localhost IDENTIFIED BY 'fic';
GRANT ALL ON fic.* TO fic@localhost;
EOF
By default, the expected credentials for development are `fic` for
the username, the password and the database name. If you want to use
other credentials, define the corresponding environment variables:
`MYSQL_HOST`, `MYSQL_USER`, `MYSQL_PASSWORD` and
`MYSQL_DATABASE`. These variables are the ones used by the `mysql`
docker image, so just link them together if you use containers (see
the sketch after this list).
5. Launch it!
./fic-admin &
After initializing the database, the server will listen on
<http://localhost:8081/>: this is the administration part.
./fic-generator &
This daemon generates static and team-related files, then waits for
another process to tell it to regenerate some of them.
./fic-receiver &
This one exposes an API that gives time synchronization to clients and
handles submission reception (without processing the submissions).
./fic-checker &
This service waits for new submissions (expected in the `submissions`
directory). It only watches modifications on the file system; it has no web
interface.
./fic-dashboard &
This last server runs the public dashboard. It serves all its files itself,
without needing a web server. It listens on port 8082 by default.
./fic-qa &
If you need it, this launches a web interface, on port 8083 by
default, to perform quality control.
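As a rough illustration of the database step above, the sketch below shows how the `MYSQL_*` variables could be turned into a connection string, using the standard `github.com/go-sql-driver/mysql` DSN format and falling back to the default `fic` credentials. The `envOr` helper and the hard-coded port 3306 are assumptions made for the example; the project's actual connection code may differ.

    package main

    import (
        "database/sql"
        "fmt"
        "log"
        "os"

        _ "github.com/go-sql-driver/mysql"
    )

    // envOr returns the value of an environment variable, or a fallback.
    func envOr(name, fallback string) string {
        if v := os.Getenv(name); v != "" {
            return v
        }
        return fallback
    }

    func main() {
        // Default development credentials: fic/fic on database fic.
        host := envOr("MYSQL_HOST", "localhost")
        user := envOr("MYSQL_USER", "fic")
        pass := envOr("MYSQL_PASSWORD", "fic")
        dbname := envOr("MYSQL_DATABASE", "fic")

        dsn := fmt.Sprintf("%s:%s@tcp(%s:3306)/%s", user, pass, host, dbname)
        db, err := sql.Open("mysql", dsn)
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        if err := db.Ping(); err != nil {
            log.Fatal("unable to reach MySQL/MariaDB: ", err)
        }
        log.Println("database connection OK")
    }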
For the moment, a web server is mandatory to serve static files; look
at the samples given in the `configs/nginx` directory. You need to
pick one base configuration flavor from the `configs/nginx/base`
directory, associate it with an authentication mechanism from
`configs/nginx/auth` (name that file `fic-auth.conf` in `/etc/nginx`),
and also pick the corresponding `configs/nginx/get-team` file, which
you name `fic-get-team.conf`.

admin/.gitignore vendored

@ -2,4 +2,3 @@ admin
fic.db
PKI/
FILES/
static/full_import_report.json


@ -1,479 +1,60 @@
package api
import (
"crypto/rand"
"crypto/sha1"
"crypto/x509"
"crypto/x509/pkix"
"encoding/base32"
"encoding/base64"
"errors"
"fmt"
"io/ioutil"
"log"
"math"
"math/big"
"net/http"
"os"
"path"
"strconv"
"strings"
"time"
"srs.epita.fr/fic-server/admin/pki"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
"github.com/julienschmidt/httprouter"
)
var TeamsDir string
func init() {
router.GET("/api/ca.pem", apiHandler(GetCAPEM))
router.POST("/api/ca/new", apiHandler(
func(_ httprouter.Params, _ []byte) (interface{}, error) { return fic.GenerateCA() }))
router.GET("/api/ca/crl", apiHandler(GetCRL))
router.POST("/api/ca/crl", apiHandler(
func(_ httprouter.Params, _ []byte) (interface{}, error) { return fic.GenerateCRL() }))
func declareCertificateRoutes(router *gin.RouterGroup) {
router.GET("/htpasswd", func(c *gin.Context) {
ret, err := genHtpasswd(true)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
c.String(http.StatusOK, ret)
})
router.POST("/htpasswd", func(c *gin.Context) {
if htpasswd, err := genHtpasswd(true); err != nil {
log.Println("Unable to generate htpasswd:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
} else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "ficpasswd"), []byte(htpasswd), 0644); err != nil {
log.Println("Unable to write htpasswd:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.AbortWithStatus(http.StatusOK)
})
router.DELETE("/htpasswd", func(c *gin.Context) {
if err := os.Remove(path.Join(pki.PKIDir, "shared", "ficpasswd")); err != nil {
log.Println("Unable to remove htpasswd:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.AbortWithStatus(http.StatusOK)
})
router.GET("/htpasswd.apr1", func(c *gin.Context) {
ret, err := genHtpasswd(false)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
c.String(http.StatusOK, ret)
})
router.GET("/ca", infoCA)
router.GET("/ca.pem", getCAPEM)
router.POST("/ca/new", func(c *gin.Context) {
var upki PKISettings
err := c.ShouldBindJSON(&upki)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
if err := pki.GenerateCA(upki.NotBefore, upki.NotAfter); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusCreated, true)
})
router.GET("/certs", getCertificates)
router.POST("/certs", generateClientCert)
router.DELETE("/certs", func(c *gin.Context) {
v, err := fic.ClearCertificates()
if err != nil {
log.Println("Unable to ClearCertificates:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, v)
})
apiCertificatesRoutes := router.Group("/certs/:certid")
apiCertificatesRoutes.Use(CertificateHandler)
apiCertificatesRoutes.HEAD("", getTeamP12File)
apiCertificatesRoutes.GET("", getTeamP12File)
apiCertificatesRoutes.PUT("", updateCertificateAssociation)
apiCertificatesRoutes.DELETE("", func(c *gin.Context) {
cert := c.MustGet("cert").(*fic.Certificate)
v, err := cert.Revoke()
if err != nil {
log.Println("Unable to Revoke:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, v)
})
router.HEAD("/api/teams/:tid/certificate.p12", apiHandler(teamHandler(GetTeamCertificate)))
router.GET("/api/teams/:tid/certificate.p12", apiHandler(teamHandler(GetTeamCertificate)))
router.DELETE("/api/teams/:tid/certificate.p12", apiHandler(teamHandler(
func(team fic.Team, _ []byte) (interface{}, error) { return team.RevokeCert() })))
router.GET("/api/teams/:tid/certificate/generate", apiHandler(teamHandler(
func(team fic.Team, _ []byte) (interface{}, error) { return team.GenerateCert() })))
}
func declareTeamCertificateRoutes(router *gin.RouterGroup) {
router.GET("/certificates", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
if serials, err := pki.GetTeamSerials(TeamsDir, team.Id); err != nil {
log.Println("Unable to GetTeamSerials:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
} else {
var certs []CertExported
for _, serial := range serials {
if cert, err := fic.GetCertificate(serial); err == nil {
certs = append(certs, CertExported{fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)), cert.Creation, cert.Password, &team.Id, cert.Revoked})
} else {
log.Println("Unable to get back certificate, whereas an association exists on disk: ", err)
}
}
c.JSON(http.StatusOK, certs)
}
})
router.GET("/associations", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
assocs, err := pki.GetTeamAssociations(TeamsDir, team.Id)
if err != nil {
log.Println("Unable to GetTeamAssociations:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, assocs)
})
apiTeamAssociationsRoutes := router.Group("/associations/:assoc")
apiTeamAssociationsRoutes.POST("", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
if err := os.Symlink(fmt.Sprintf("%d", team.Id), path.Join(TeamsDir, c.Params.ByName("assoc"))); err != nil {
log.Println("Unable to create association symlink:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to create association symlink: %s", err.Error())})
return
}
c.JSON(http.StatusOK, c.Params.ByName("assoc"))
})
apiTeamAssociationsRoutes.DELETE("", func(c *gin.Context) {
err := pki.DeleteTeamAssociation(TeamsDir, c.Params.ByName("assoc"))
if err != nil {
log.Printf("Unable to DeleteTeamAssociation(%s): %s", c.Params.ByName("assoc"), err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to delete association symlink: %s", err.Error())})
return
}
c.JSON(http.StatusOK, nil)
})
}
func CertificateHandler(c *gin.Context) {
var certid uint64
var err error
cid := strings.TrimSuffix(string(c.Params.ByName("certid")), ".p12")
if certid, err = strconv.ParseUint(cid, 10, 64); err != nil {
if certid, err = strconv.ParseUint(cid, 16, 64); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid certficate identifier"})
return
}
}
cert, err := fic.GetCertificate(certid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Certificate not found"})
return
}
c.Set("cert", cert)
c.Next()
}
func genHtpasswd(ssha bool) (ret string, err error) {
var teams []*fic.Team
teams, err = fic.GetTeams()
if err != nil {
return
}
for _, team := range teams {
var serials []uint64
serials, err = pki.GetTeamSerials(TeamsDir, team.Id)
if err != nil {
return
}
if len(serials) == 0 {
// Don't include teams that don't have associated certificates
continue
}
for _, serial := range serials {
var cert *fic.Certificate
cert, err = fic.GetCertificate(serial)
if err != nil {
// Ignore invalid/incorrect/non-existant certificates
continue
}
if cert.Revoked != nil {
continue
}
salt := make([]byte, 5)
if _, err = rand.Read(salt); err != nil {
return
}
if ssha {
hash := sha1.New()
hash.Write([]byte(cert.Password))
hash.Write([]byte(salt))
passwdline := fmt.Sprintf(":{SSHA}%s\n", base64.StdEncoding.EncodeToString(append(hash.Sum(nil), salt...)))
ret += strings.ToLower(team.Name) + passwdline
ret += fmt.Sprintf("%0[2]*[1]x", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)) + passwdline
ret += fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)) + passwdline
teamAssociations, _ := pki.GetTeamAssociations(TeamsDir, team.Id)
log.Println(path.Join(TeamsDir, fmt.Sprintf("%d", team.Id)), teamAssociations)
for _, ta := range teamAssociations {
ret += strings.Replace(ta, ":", "", -1) + passwdline
}
} else {
salt32 := base32.StdEncoding.EncodeToString(salt)
ret += fmt.Sprintf(
"%s:$apr1$%s$%s\n",
strings.ToLower(team.Name),
salt32,
fic.Apr1Md5(cert.Password, salt32),
)
}
}
}
return
}
type PKISettings struct {
Version int `json:"version"`
SerialNumber *big.Int `json:"serialnumber"`
Issuer pkix.Name `json:"issuer"`
Subject pkix.Name `json:"subject"`
NotBefore time.Time `json:"notbefore"`
NotAfter time.Time `json:"notafter"`
SignatureAlgorithm x509.SignatureAlgorithm `json:"signatureAlgorithm,"`
PublicKeyAlgorithm x509.PublicKeyAlgorithm `json:"publicKeyAlgorithm"`
}
func infoCA(c *gin.Context) {
_, cacert, err := pki.LoadCA()
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "CA not found"})
return
}
c.JSON(http.StatusOK, PKISettings{
Version: cacert.Version,
SerialNumber: cacert.SerialNumber,
Issuer: cacert.Issuer,
Subject: cacert.Subject,
NotBefore: cacert.NotBefore,
NotAfter: cacert.NotAfter,
SignatureAlgorithm: cacert.SignatureAlgorithm,
PublicKeyAlgorithm: cacert.PublicKeyAlgorithm,
})
}
func getCAPEM(c *gin.Context) {
if _, err := os.Stat(pki.CACertPath()); os.IsNotExist(err) {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Unable to locate the CA root certificate. Have you generated it?"})
return
} else if fd, err := os.Open(pki.CACertPath()); err != nil {
log.Println("Unable to open CA root certificate:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
func GetCAPEM(_ httprouter.Params, _ []byte) (interface{}, error) {
if _, err := os.Stat("../PKI/shared/cacert.crt"); os.IsNotExist(err) {
return nil, errors.New("Unable to locate the CA root certificate. Have you generated it?")
} else if fd, err := os.Open("../PKI/shared/cacert.crt"); err == nil {
return ioutil.ReadAll(fd)
} else {
defer fd.Close()
cnt, err := ioutil.ReadAll(fd)
if err != nil {
log.Println("Unable to read CA root certificate:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.String(http.StatusOK, string(cnt))
return nil, err
}
}
func getTeamP12File(c *gin.Context) {
cert := c.MustGet("cert").(*fic.Certificate)
// Create p12 if necessary
if _, err := os.Stat(pki.ClientP12Path(cert.Id)); os.IsNotExist(err) {
if err := pki.WriteP12(cert.Id, cert.Password); err != nil {
log.Println("Unable to WriteP12:", err.Error())
c.AbortWithError(http.StatusInternalServerError, err)
return
}
}
if _, err := os.Stat(pki.ClientP12Path(cert.Id)); os.IsNotExist(err) {
log.Println("Unable to compute ClientP12Path:", err.Error())
c.AbortWithError(http.StatusInternalServerError, errors.New("Unable to locate the p12. Have you generated it?"))
return
} else if fd, err := os.Open(pki.ClientP12Path(cert.Id)); err != nil {
log.Println("Unable to open ClientP12Path:", err.Error())
c.AbortWithError(http.StatusInternalServerError, fmt.Errorf("Unable to open the p12: %w", err))
return
func GetCRL(_ httprouter.Params, _ []byte) (interface{}, error) {
if _, err := os.Stat("../PKI/shared/crl.pem"); os.IsNotExist(err) {
return nil, errors.New("Unable to locate the CRL. Have you generated it?")
} else if fd, err := os.Open("../PKI/shared/crl.pem"); err == nil {
return ioutil.ReadAll(fd)
} else {
defer fd.Close()
data, err := ioutil.ReadAll(fd)
if err != nil {
log.Println("Unable to open ClientP12Path:", err.Error())
c.AbortWithError(http.StatusInternalServerError, fmt.Errorf("Unable to open the p12: %w", err))
return
}
c.Data(http.StatusOK, "application/x-pkcs12", data)
return nil, err
}
}
func generateClientCert(c *gin.Context) {
// First, generate a new, unique, serial
var serial_gen [8]byte
if _, err := rand.Read(serial_gen[:]); err != nil {
log.Println("Unable to read enough entropy to generate client certificate:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to read enough entropy"})
return
}
for fic.ExistingCertSerial(serial_gen) {
if _, err := rand.Read(serial_gen[:]); err != nil {
log.Println("Unable to read enough entropy to generate client certificate:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to read enough entropy"})
return
}
}
var serial_b big.Int
serial_b.SetBytes(serial_gen[:])
serial := serial_b.Uint64()
// Let's pick a random password
password, err := fic.GeneratePassword()
if err != nil {
log.Println("Unable to generate password:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to generate password: " + err.Error()})
return
}
// Ok, now load CA
capriv, cacert, err := pki.LoadCA()
if err != nil {
log.Println("Unable to load the CA:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to load the CA"})
return
}
// Generate our privkey
if err := pki.GenerateClient(serial, cacert.NotBefore, cacert.NotAfter, &cacert, &capriv); err != nil {
log.Println("Unable to generate private key:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to generate private key: " + err.Error()})
return
}
// Save in DB
cert, err := fic.RegisterCertificate(serial, password)
if err != nil {
log.Println("Unable to register certificate:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to register certificate."})
return
}
c.JSON(http.StatusOK, CertExported{fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)), cert.Creation, cert.Password, nil, cert.Revoked})
}
type CertExported struct {
Id string `json:"id"`
Creation time.Time `json:"creation"`
Password string `json:"password,omitempty"`
IdTeam *int64 `json:"id_team"`
Revoked *time.Time `json:"revoked"`
}
func getCertificates(c *gin.Context) {
certificates, err := fic.GetCertificates()
if err != nil {
log.Println("Unable to retrieve certificates list:", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during certificates retrieval."})
return
}
ret := make([]CertExported, 0)
for _, cert := range certificates {
dstLinkPath := path.Join(TeamsDir, pki.GetCertificateAssociation(cert.Id))
var idTeam *int64 = nil
if lnk, err := os.Readlink(dstLinkPath); err == nil {
if tid, err := strconv.ParseInt(lnk, 10, 64); err == nil {
idTeam = &tid
}
}
ret = append(ret, CertExported{fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)), cert.Creation, "", idTeam, cert.Revoked})
}
c.JSON(http.StatusOK, ret)
}
type CertUploaded struct {
Team *int64 `json:"id_team"`
}
func updateCertificateAssociation(c *gin.Context) {
cert := c.MustGet("cert").(*fic.Certificate)
var uc CertUploaded
err := c.ShouldBindJSON(&uc)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
dstLinkPath := path.Join(TeamsDir, pki.GetCertificateAssociation(cert.Id))
if uc.Team != nil {
srcLinkPath := fmt.Sprintf("%d", *uc.Team)
if err := os.Symlink(srcLinkPath, dstLinkPath); err != nil {
log.Println("Unable to create certificate symlink:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to create certificate symlink: %s", err.Error())})
return
}
// Mark team as active to ensure it'll be generated
if ut, err := fic.GetTeam(*uc.Team); err != nil {
log.Println("Unable to GetTeam:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during team retrieval."})
return
} else if !ut.Active {
ut.Active = true
_, err := ut.Update()
if err != nil {
log.Println("Unable to UpdateTeam after updateCertificateAssociation:", err.Error())
}
}
func GetTeamCertificate(team fic.Team, _ []byte) (interface{}, error) {
if _, err := os.Stat("../PKI/pkcs/" + team.InitialName + ".p12"); os.IsNotExist(err) {
return nil, errors.New("Unable to locate the p12. Have you generated it?")
} else if fd, err := os.Open("../PKI/pkcs/" + team.InitialName + ".p12"); err == nil {
return ioutil.ReadAll(fd)
} else {
os.Remove(dstLinkPath)
return nil, err
}
c.JSON(http.StatusOK, cert)
}


@ -1,499 +0,0 @@
package api
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"path"
"strconv"
"time"
"srs.epita.fr/fic-server/admin/generation"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
)
func declareClaimsRoutes(router *gin.RouterGroup) {
// Tasks
router.GET("/claims", getClaims)
router.POST("/claims", newClaim)
router.DELETE("/claims", clearClaims)
apiClaimsRoutes := router.Group("/claims/:cid")
apiClaimsRoutes.Use(ClaimHandler)
apiClaimsRoutes.GET("", showClaim)
apiClaimsRoutes.PUT("", updateClaim)
apiClaimsRoutes.POST("", addClaimDescription)
apiClaimsRoutes.DELETE("", deleteClaim)
apiClaimsRoutes.GET("/last_update", getClaimLastUpdate)
apiClaimsRoutes.PUT("/descriptions", updateClaimDescription)
// Assignees
router.GET("/claims-assignees", getAssignees)
router.POST("/claims-assignees", newAssignee)
apiClaimAssigneesRoutes := router.Group("/claims-assignees/:aid")
apiClaimAssigneesRoutes.Use(ClaimAssigneeHandler)
router.GET("/claims-assignees/:aid", showClaimAssignee)
router.PUT("/claims-assignees/:aid", updateClaimAssignee)
router.DELETE("/claims-assignees/:aid", deleteClaimAssignee)
}
func declareExerciceClaimsRoutes(router *gin.RouterGroup) {
router.GET("/claims", getExerciceClaims)
}
func declareTeamClaimsRoutes(router *gin.RouterGroup) {
router.GET("/api/teams/:tid/issue.json", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
issues, err := team.MyIssueFile()
if err != nil {
log.Printf("Unable to MyIssueFile(tid=%d): %s", team.Id, err.Error())
c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to generate issues.json."})
return
}
c.JSON(http.StatusOK, issues)
})
router.GET("/claims", getTeamClaims)
}
func ClaimHandler(c *gin.Context) {
cid, err := strconv.ParseInt(string(c.Params.ByName("cid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid claim identifier"})
return
}
claim, err := fic.GetClaim(cid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Requested claim not found"})
return
}
c.Set("claim", claim)
c.Next()
}
func ClaimAssigneeHandler(c *gin.Context) {
aid, err := strconv.ParseInt(string(c.Params.ByName("aid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid claim assignee identifier"})
return
}
assignee, err := fic.GetAssignee(aid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Requested claim-assignee not found"})
return
}
c.Set("claim-assignee", assignee)
c.Next()
}
func getClaims(c *gin.Context) {
claims, err := fic.GetClaims()
if err != nil {
log.Println("Unable to getClaims:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claims retrieval."})
return
}
c.JSON(http.StatusOK, claims)
}
func getTeamClaims(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
claims, err := team.GetClaims()
if err != nil {
log.Printf("Unable to GetClaims(tid=%d): %s", team.Id, err.Error())
c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve claim list."})
return
}
c.JSON(http.StatusOK, claims)
}
func getExerciceClaims(c *gin.Context) {
exercice := c.MustGet("exercice").(*fic.Exercice)
claims, err := exercice.GetClaims()
if err != nil {
log.Printf("Unable to GetClaims(eid=%d): %s", exercice.Id, err.Error())
c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve claim list."})
return
}
c.JSON(http.StatusOK, claims)
}
func getClaimLastUpdate(c *gin.Context) {
claim := c.MustGet("claim").(*fic.Claim)
v, err := claim.GetLastUpdate()
if err != nil {
log.Printf("Unable to GetLastUpdate: %s", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claim last update retrieval."})
return
}
c.JSON(http.StatusOK, v)
}
type ClaimExported struct {
Id int64 `json:"id"`
Subject string `json:"subject"`
IdTeam *int64 `json:"id_team"`
Team *fic.Team `json:"team"`
IdExercice *int64 `json:"id_exercice"`
Exercice *fic.Exercice `json:"exercice"`
IdAssignee *int64 `json:"id_assignee"`
Assignee *fic.ClaimAssignee `json:"assignee"`
Creation time.Time `json:"creation"`
LastUpdate time.Time `json:"last_update"`
State string `json:"state"`
Priority string `json:"priority"`
Descriptions []*fic.ClaimDescription `json:"descriptions"`
}
func showClaim(c *gin.Context) {
claim := c.MustGet("claim").(*fic.Claim)
var e ClaimExported
var err error
if e.Team, err = claim.GetTeam(); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find associated team: %s", err.Error())})
return
}
if e.Exercice, err = claim.GetExercice(); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find associated exercice: %s", err.Error())})
return
}
if e.Assignee, err = claim.GetAssignee(); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find associated assignee: %s", err.Error())})
return
}
if e.Descriptions, err = claim.GetDescriptions(); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find claim's descriptions: %s", err.Error())})
return
}
e.LastUpdate = e.Creation
for _, d := range e.Descriptions {
if d.Date.After(e.LastUpdate) {
e.LastUpdate = d.Date
}
}
e.Id = claim.Id
e.IdAssignee = claim.IdAssignee
e.IdTeam = claim.IdTeam
e.IdExercice = claim.IdExercice
e.Subject = claim.Subject
e.Creation = claim.Creation
e.State = claim.State
e.Priority = claim.Priority
c.JSON(http.StatusOK, e)
}
type ClaimUploaded struct {
fic.Claim
Whoami *int64 `json:"whoami"`
}
func newClaim(c *gin.Context) {
var uc ClaimUploaded
err := c.ShouldBindJSON(&uc)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
if uc.Subject == "" {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Claim's subject cannot be empty."})
return
}
var t *fic.Team
if uc.IdTeam != nil {
if team, err := fic.GetTeam(*uc.IdTeam); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated team: %s", err.Error())})
return
} else {
t = team
}
} else {
t = nil
}
var e *fic.Exercice
if uc.IdExercice != nil {
if exercice, err := fic.GetExercice(*uc.IdExercice); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated exercice: %s", err.Error())})
return
} else {
e = exercice
}
} else {
e = nil
}
var a *fic.ClaimAssignee
if uc.IdAssignee != nil {
if assignee, err := fic.GetAssignee(*uc.IdAssignee); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated assignee: %s", err.Error())})
return
} else {
a = assignee
}
} else {
a = nil
}
if uc.Priority == "" {
uc.Priority = "medium"
}
claim, err := fic.NewClaim(uc.Subject, t, e, a, uc.Priority)
if err != nil {
log.Println("Unable to newClaim:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to register new claim"})
return
}
c.JSON(http.StatusOK, claim)
}
func clearClaims(c *gin.Context) {
nb, err := fic.ClearClaims()
if err != nil {
log.Printf("Unable to clearClaims: %s", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claims clearing."})
return
}
c.JSON(http.StatusOK, nb)
}
func generateTeamIssuesFile(team fic.Team) error {
if generation.GeneratorSocket == "" {
if my, err := team.MyIssueFile(); err != nil {
return fmt.Errorf("Unable to generate issue FILE (tid=%d): %w", team.Id, err)
} else if j, err := json.Marshal(my); err != nil {
return fmt.Errorf("Unable to encode issues' file JSON: %w", err)
} else if err = ioutil.WriteFile(path.Join(TeamsDir, fmt.Sprintf("%d", team.Id), "issues.json"), j, 0644); err != nil {
return fmt.Errorf("Unable to write issues' file: %w", err)
}
} else {
resp, err := generation.PerformGeneration(fic.GenStruct{Type: fic.GenTeamIssues, TeamId: team.Id})
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
v, _ := ioutil.ReadAll(resp.Body)
return fmt.Errorf("%s", string(v))
}
}
return nil
}
func addClaimDescription(c *gin.Context) {
claim := c.MustGet("claim").(*fic.Claim)
var ud fic.ClaimDescription
err := c.ShouldBindJSON(&ud)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
assignee, err := fic.GetAssignee(ud.IdAssignee)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated assignee: %s", err.Error())})
return
}
description, err := claim.AddDescription(ud.Content, assignee, ud.Publish)
if err != nil {
log.Println("Unable to addClaimDescription:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to add description"})
return
}
if team, _ := claim.GetTeam(); team != nil {
err = generateTeamIssuesFile(*team)
if err != nil {
log.Println("Unable to generateTeamIssuesFile after addClaimDescription:", err.Error())
}
}
c.JSON(http.StatusOK, description)
}
func updateClaimDescription(c *gin.Context) {
claim := c.MustGet("claim").(*fic.Claim)
var ud fic.ClaimDescription
err := c.ShouldBindJSON(&ud)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
if _, err := ud.Update(); err != nil {
log.Println("Unable to updateClaimDescription:", err.Error())
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "An error occurs during claim description updating."})
return
}
if team, _ := claim.GetTeam(); team != nil {
err = generateTeamIssuesFile(*team)
if err != nil {
log.Println("Unable to generateTeamIssuesFile:", err.Error())
}
}
c.JSON(http.StatusOK, ud)
}
func updateClaim(c *gin.Context) {
claim := c.MustGet("claim").(*fic.Claim)
var uc ClaimUploaded
err := c.ShouldBindJSON(&uc)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
uc.Id = claim.Id
_, err = uc.Update()
if err != nil {
log.Printf("Unable to updateClaim: %s", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claim update."})
return
}
if claim.State != uc.State {
if uc.Whoami != nil {
if assignee, err := fic.GetAssignee(*uc.Whoami); err == nil {
claim.AddDescription(fmt.Sprintf("%s a changé l'état de la tâche vers %q (était %q).", assignee.Name, uc.State, claim.State), assignee, true)
}
}
}
if claim.IdAssignee != uc.IdAssignee {
if uc.Whoami != nil {
if whoami, err := fic.GetAssignee(*uc.Whoami); err == nil {
if uc.IdAssignee != nil {
if assignee, err := fic.GetAssignee(*uc.IdAssignee); err == nil {
if assignee.Id != whoami.Id {
claim.AddDescription(fmt.Sprintf("%s a assigné la tâche à %s.", whoami.Name, assignee.Name), whoami, false)
} else {
claim.AddDescription(fmt.Sprintf("%s s'est assigné la tâche.", assignee.Name), whoami, false)
}
}
} else {
claim.AddDescription(fmt.Sprintf("%s a retiré l'attribution de la tâche.", whoami.Name), whoami, false)
}
}
}
}
if team, _ := claim.GetTeam(); team != nil {
err = generateTeamIssuesFile(*team)
}
c.JSON(http.StatusOK, uc)
}
func deleteClaim(c *gin.Context) {
claim := c.MustGet("claim").(*fic.Claim)
if nb, err := claim.Delete(); err != nil {
log.Println("Unable to deleteClaim:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claim deletion."})
return
} else {
c.JSON(http.StatusOK, nb)
}
}
func getAssignees(c *gin.Context) {
assignees, err := fic.GetAssignees()
if err != nil {
log.Println("Unable to getAssignees:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during assignees retrieval."})
return
}
c.JSON(http.StatusOK, assignees)
}
func showClaimAssignee(c *gin.Context) {
c.JSON(http.StatusOK, c.MustGet("claim-assignee").(*fic.ClaimAssignee))
}
func newAssignee(c *gin.Context) {
var ua fic.ClaimAssignee
err := c.ShouldBindJSON(&ua)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
assignee, err := fic.NewClaimAssignee(ua.Name)
if err != nil {
log.Println("Unable to newAssignee:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during assignee creation."})
return
}
c.JSON(http.StatusOK, assignee)
}
func updateClaimAssignee(c *gin.Context) {
assignee := c.MustGet("claim-assignee").(*fic.ClaimAssignee)
var ua fic.ClaimAssignee
err := c.ShouldBindJSON(&ua)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
ua.Id = assignee.Id
if _, err := ua.Update(); err != nil {
log.Println("Unable to updateClaimAssignee:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claim assignee update."})
return
}
c.JSON(http.StatusOK, ua)
}
func deleteClaimAssignee(c *gin.Context) {
assignee := c.MustGet("claim-assignee").(*fic.ClaimAssignee)
if _, err := assignee.Delete(); err != nil {
log.Println("Unable to deleteClaimAssignee:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("An error occurs during claim assignee deletion: %s", err.Error())})
return
}
c.JSON(http.StatusOK, true)
}


@ -2,153 +2,75 @@ package api
import (
"encoding/json"
"io/ioutil"
"log"
"net/http"
"path"
"strconv"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
"github.com/julienschmidt/httprouter"
)
func declareEventsRoutes(router *gin.RouterGroup) {
router.GET("/events", getEvents)
router.GET("/events.json", getLastEvents)
router.POST("/events", newEvent)
router.DELETE("/events", clearEvents)
func init() {
router.GET("/api/events/", apiHandler(getEvents))
router.GET("/api/events.json", apiHandler(getLastEvents))
router.POST("/api/events/", apiHandler(newEvent))
router.DELETE("/api/events/", apiHandler(clearEvents))
apiEventsRoutes := router.Group("/events/:evid")
apiEventsRoutes.Use(EventHandler)
apiEventsRoutes.GET("", showEvent)
apiEventsRoutes.PUT("", updateEvent)
apiEventsRoutes.DELETE("", deleteEvent)
router.GET("/api/events/:evid", apiHandler(eventHandler(showEvent)))
router.PUT("/api/events/:evid", apiHandler(eventHandler(updateEvent)))
router.DELETE("/api/events/:evid", apiHandler(eventHandler(deleteEvent)))
}
func EventHandler(c *gin.Context) {
evid, err := strconv.ParseInt(string(c.Params.ByName("evid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid event identifier"})
return
func getEvents(_ httprouter.Params, _ []byte) (interface{}, error) {
if evts, err := fic.GetEvents(); err != nil {
return nil, err
} else {
return evts, nil
}
event, err := fic.GetEvent(evid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Event not found"})
return
}
c.Set("event", event)
c.Next()
}
func genEventsFile() error {
func getLastEvents(_ httprouter.Params, _ []byte) (interface{}, error) {
if evts, err := fic.GetLastEvents(); err != nil {
return err
} else if j, err := json.Marshal(evts); err != nil {
return err
} else if err := ioutil.WriteFile(path.Join(TeamsDir, "events.json"), j, 0666); err != nil {
return err
return nil, err
} else {
return evts, nil
}
return nil
}
func getEvents(c *gin.Context) {
evts, err := fic.GetEvents()
if err != nil {
log.Println("Unable to GetEvents:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve events list"})
return
}
c.JSON(http.StatusOK, evts)
func showEvent(event fic.Event, _ []byte) (interface{}, error) {
return event, nil
}
func getLastEvents(c *gin.Context) {
evts, err := fic.GetLastEvents()
if err != nil {
log.Println("Unable to GetLastEvents:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve last events list"})
return
}
c.JSON(http.StatusOK, evts)
}
func newEvent(c *gin.Context) {
func newEvent(_ httprouter.Params, body []byte) (interface{}, error) {
var ue fic.Event
err := c.ShouldBindJSON(&ue)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
if err := json.Unmarshal(body, &ue); err != nil {
return nil, err
}
event, err := fic.NewEvent(ue.Text, ue.Kind)
if err != nil {
log.Printf("Unable to newEvent: %s", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event creation."})
return
if event, err := fic.NewEvent(ue.Text, ue.Kind); err != nil {
return nil, err
} else {
return event, nil
}
genEventsFile()
c.JSON(http.StatusOK, event)
}
func clearEvents(c *gin.Context) {
nb, err := fic.ClearEvents()
if err != nil {
log.Printf("Unable to clearEvent: %s", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event clearing."})
return
}
c.JSON(http.StatusOK, nb)
func clearEvents(_ httprouter.Params, _ []byte) (interface{}, error) {
return fic.ClearEvents()
}
func showEvent(c *gin.Context) {
event := c.MustGet("event").(*fic.Event)
c.JSON(http.StatusOK, event)
}
func updateEvent(c *gin.Context) {
event := c.MustGet("event").(*fic.Event)
func updateEvent(event fic.Event, body []byte) (interface{}, error) {
var ue fic.Event
err := c.ShouldBindJSON(&ue)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
if err := json.Unmarshal(body, &ue); err != nil {
return nil, err
}
ue.Id = event.Id
if _, err := ue.Update(); err != nil {
log.Printf("Unable to updateEvent: %s", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event update."})
return
return nil, err
} else {
return ue, nil
}
genEventsFile()
c.JSON(http.StatusOK, ue)
}
func deleteEvent(c *gin.Context) {
event := c.MustGet("event").(*fic.Event)
_, err := event.Delete()
if err != nil {
log.Printf("Unable to deleteEvent: %s", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event deletion."})
return
}
genEventsFile()
c.JSON(http.StatusOK, true)
func deleteEvent(event fic.Event, _ []byte) (interface{}, error) {
return event.Delete()
}

File diff suppressed because it is too large.


@ -1,126 +0,0 @@
package api
import (
"archive/zip"
"encoding/json"
"io"
"log"
"net/http"
"path"
"srs.epita.fr/fic-server/admin/sync"
"srs.epita.fr/fic-server/libfic"
"srs.epita.fr/fic-server/settings"
"github.com/gin-gonic/gin"
)
func declareExportRoutes(router *gin.RouterGroup) {
router.GET("/archive.zip", func(c *gin.Context) {
challengeinfo, err := GetChallengeInfo()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
my, err := fic.MyJSONTeam(nil, true)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
s, err := settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile))
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
s.End = nil
s.NextChangeTime = nil
s.DelegatedQA = []string{}
teams, err := fic.ExportTeams(false)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
themes, err := fic.ExportThemes()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.Writer.WriteHeader(http.StatusOK)
c.Header("Content-Disposition", "attachment; filename=archive.zip")
c.Header("Content-Type", "application/zip")
w := zip.NewWriter(c.Writer)
// challenge.json
f, err := w.Create("challenge.json")
if err == nil {
json.NewEncoder(f).Encode(challengeinfo)
}
// Include partners' logos from challenge.json
if sync.GlobalImporter != nil {
if len(challengeinfo.MainLogo) > 0 {
for _, logo := range challengeinfo.MainLogo {
fd, closer, err := sync.OpenOrGetFile(sync.GlobalImporter, logo)
if err != nil {
log.Printf("Unable to archive main logo %q: %s", logo, err.Error())
continue
}
f, err := w.Create(path.Join("logo", path.Base(logo)))
if err == nil {
io.Copy(f, fd)
}
closer()
}
}
if len(challengeinfo.Partners) > 0 {
for _, partner := range challengeinfo.Partners {
fd, closer, err := sync.OpenOrGetFile(sync.GlobalImporter, partner.Src)
if err != nil {
log.Printf("Unable to archive partner logo %q: %s", partner.Src, err.Error())
continue
}
f, err := w.Create(path.Join("partner", path.Base(partner.Src)))
if err == nil {
io.Copy(f, fd)
}
closer()
}
}
}
// my.json
f, err = w.Create("my.json")
if err == nil {
json.NewEncoder(f).Encode(my)
}
// settings.json
f, err = w.Create("settings.json")
if err == nil {
json.NewEncoder(f).Encode(s)
}
// teams.json
f, err = w.Create("teams.json")
if err == nil {
json.NewEncoder(f).Encode(teams)
}
// themes.json
f, err = w.Create("themes.json")
if err == nil {
json.NewEncoder(f).Encode(themes)
}
w.Close()
})
}


@ -1,297 +1,153 @@
package api
import (
"bufio"
"crypto/sha512"
"encoding/base32"
"encoding/hex"
"errors"
"fmt"
"log"
"net/http"
"os"
"path/filepath"
"strconv"
"path"
"strings"
"srs.epita.fr/fic-server/admin/sync"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
)
func declareFilesGlobalRoutes(router *gin.RouterGroup) {
router.DELETE("/files/", clearFiles)
// Remote
router.GET("/remote/themes/:thid/exercices/:exid/files", sync.ApiGetRemoteExerciceFiles)
}
func declareFilesRoutes(router *gin.RouterGroup) {
router.GET("/files", listFiles)
router.POST("/files", createExerciceFile)
apiFilesRoutes := router.Group("/files/:fileid")
apiFilesRoutes.Use(FileHandler)
apiFilesRoutes.GET("", showFile)
apiFilesRoutes.PUT("", updateFile)
apiFilesRoutes.DELETE("", deleteFile)
apiFileDepsRoutes := apiFilesRoutes.Group("/dependancies/:depid")
apiFileDepsRoutes.Use(FileDepHandler)
apiFileDepsRoutes.DELETE("", deleteFileDep)
// Check
apiFilesRoutes.POST("/check", checkFile)
apiFilesRoutes.POST("/gunzip", gunzipFile)
}
func FileHandler(c *gin.Context) {
fileid, err := strconv.ParseInt(string(c.Params.ByName("fileid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid file identifier"})
return
}
var file *fic.EFile
if exercice, exists := c.Get("exercice"); exists {
file, err = exercice.(*fic.Exercice).GetFile(fileid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "File not found"})
return
}
} else {
file, err = fic.GetFile(fileid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "File not found"})
return
}
}
c.Set("file", file)
c.Next()
}
func FileDepHandler(c *gin.Context) {
depid, err := strconv.ParseInt(string(c.Params.ByName("depid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid dependency identifier"})
return
}
c.Set("file-depid", depid)
c.Next()
}
type APIFile struct {
*fic.EFile
Depends []fic.Flag `json:"depends,omitempty"`
}
func genFileList(in []*fic.EFile, e error) (out []APIFile, err error) {
if e != nil {
return nil, e
}
for _, f := range in {
g := APIFile{EFile: f}
var deps []fic.Flag
deps, err = f.GetDepends()
if err != nil {
return
}
for _, d := range deps {
if k, ok := d.(*fic.FlagKey); ok {
k, err = fic.GetFlagKey(k.Id)
if err != nil {
return
}
g.Depends = append(g.Depends, k)
} else if m, ok := d.(*fic.MCQ); ok {
m, err = fic.GetMCQ(m.Id)
if err != nil {
return
}
g.Depends = append(g.Depends, m)
} else {
err = fmt.Errorf("Unknown type %T to handle file dependancy", k)
return
}
}
out = append(out, g)
}
return
}
func listFiles(c *gin.Context) {
var files []APIFile
var err error
if exercice, exists := c.Get("exercice"); exists {
files, err = genFileList(exercice.(*fic.Exercice).GetFiles())
} else {
files, err = genFileList(fic.GetFiles())
}
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, files)
}
func clearFiles(c *gin.Context) {
err := os.RemoveAll(fic.FilesDir)
if err != nil {
log.Println("Unable to remove files:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
err = os.MkdirAll(fic.FilesDir, 0751)
if err != nil {
log.Println("Unable to create FILES:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
_, err = fic.ClearFiles()
if err != nil {
log.Println("Unable to clean DB files:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Les fichiers ont bien été effacés. Mais il n'a pas été possible d'effacer la base de données. Refaites une synchronisation maintenant. " + err.Error()})
return
}
c.JSON(http.StatusOK, true)
}
func showFile(c *gin.Context) {
c.JSON(http.StatusOK, c.MustGet("file").(*fic.EFile))
}
var CloudDAVBase string
var CloudUsername string
var CloudPassword string
var RapidImport bool
type uploadedFile struct {
URI string
Digest string
URI string
Digest string
Path string
Parts []string
}
func createExerciceFile(c *gin.Context) {
exercice, exists := c.Get("exercice")
if !exists {
c.AbortWithStatusJSON(http.StatusMethodNotAllowed, gin.H{"errmsg": "File can only be added inside an exercice."})
return
}
func importFile(uf uploadedFile, next func(string, string, []byte) (interface{}, error)) (interface{}, error) {
var hash [sha512.Size]byte
var logStr string
var fromURI string
var getFile func(string) (error)
paramsFiles, err := sync.GetExerciceFilesParams(sync.GlobalImporter, exercice.(*fic.Exercice))
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
var uf uploadedFile
err = c.ShouldBindJSON(&uf)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
ret, err := sync.ImportFile(sync.GlobalImporter, uf.URI,
func(filePath string, origin string) (interface{}, error) {
if digest, err := hex.DecodeString(uf.Digest); err != nil {
return nil, err
if uf.URI != "" && len(uf.Parts) > 0 {
hash = sha512.Sum512([]byte(uf.URI))
logStr = fmt.Sprintf("Import file from Cloud: %s =>", uf.Parts)
fromURI = uf.URI
getFile = func(dest string) error {
if fdto, err := os.Create(dest); err != nil {
return err
} else {
published := true
disclaimer := ""
if f, exists := paramsFiles[filepath.Base(filePath)]; exists {
published = !f.Hidden
if disclaimer, err = sync.ProcessMarkdown(sync.GlobalImporter, f.Disclaimer, exercice.(*fic.Exercice).Path); err != nil {
return nil, fmt.Errorf("error during markdown formating of disclaimer: %w", err)
writer := bufio.NewWriter(fdto)
for _, partname := range uf.Parts {
if err := getCloudPart(partname, writer); err != nil {
return err
}
}
return exercice.(*fic.Exercice).ImportFile(filePath, origin, digest, nil, disclaimer, published)
fdto.Close()
}
})
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
return nil
}
} else if uf.URI != "" {
hash = sha512.Sum512([]byte(uf.URI))
logStr = "Import file from Cloud: " + uf.URI + " =>"
fromURI = uf.URI
getFile = func(dest string) error { return getCloudFile(uf.URI, dest); }
} else if uf.Path != "" && len(uf.Parts) > 0 {
hash = sha512.Sum512([]byte(uf.Path))
logStr = fmt.Sprintf("Import file from local FS: %s =>", uf.Parts)
fromURI = uf.Path
getFile = func(dest string) error {
if fdto, err := os.Create(dest); err != nil {
return err
} else {
writer := bufio.NewWriter(fdto)
for _, partname := range uf.Parts {
if fdfrm, err := os.Open(partname); err != nil {
return err
} else {
reader := bufio.NewReader(fdfrm)
reader.WriteTo(writer)
writer.Flush()
fdfrm.Close()
}
}
fdto.Close()
}
return nil
}
} else if uf.Path != "" {
hash = sha512.Sum512([]byte(uf.Path))
logStr = "Import file from local FS: " + uf.Path + " =>"
fromURI = uf.Path
getFile = func(dest string) error { return os.Symlink(uf.Path, dest); }
} else {
return nil, errors.New("URI or path not filled")
}
c.JSON(http.StatusOK, ret)
pathname := path.Join(fic.FilesDir, strings.ToLower(base32.StdEncoding.EncodeToString(hash[:])), path.Base(fromURI))
// Remove the file if it exists
// TODO: check if this is symlink => remove to avoid File not found error after, because the file is writen at the adresse pointed.
if _, err := os.Stat(pathname); !os.IsNotExist(err) && !RapidImport {
if err := os.Remove(pathname); err != nil {
return nil, err
}
}
if _, err := os.Stat(pathname); os.IsNotExist(err) {
log.Println(logStr, pathname)
if err := os.MkdirAll(path.Dir(pathname), 0777); err != nil {
return nil, err
} else if err := getFile(pathname); err != nil {
return nil, err
}
}
if digest, err := hex.DecodeString(uf.Digest); err != nil {
return nil, err
} else {
return next(pathname, fromURI, digest)
}
}
func updateFile(c *gin.Context) {
file := c.MustGet("file").(*fic.EFile)
func getCloudFile(pathname string, dest string) error {
if fd, err := os.Create(dest); err != nil {
return err
} else {
defer fd.Close()
var uf fic.EFile
err := c.ShouldBindJSON(&uf)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
writer := bufio.NewWriter(fd)
if err := getCloudPart(pathname, writer); err != nil {
return err
}
}
uf.Id = file.Id
if _, err := uf.Update(); err != nil {
log.Println("Unable to updateFile:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to update file."})
return
}
c.JSON(http.StatusOK, uf)
return nil
}
func deleteFile(c *gin.Context) {
file := c.MustGet("file").(*fic.EFile)
func getCloudPart(pathname string, writer *bufio.Writer) error {
client := http.Client{}
if req, err := http.NewRequest("GET", CloudDAVBase+pathname, nil); err != nil {
return err
} else {
req.SetBasicAuth(CloudUsername, CloudPassword)
if resp, err := client.Do(req); err != nil {
return err
} else {
defer resp.Body.Close()
_, err := file.Delete()
if err != nil {
log.Println("Unable to updateFile:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to update file."})
return
if resp.StatusCode != http.StatusOK {
return errors.New(resp.Status)
} else {
reader := bufio.NewReader(resp.Body)
reader.WriteTo(writer)
writer.Flush()
}
}
}
c.JSON(http.StatusOK, true)
}
func deleteFileDep(c *gin.Context) {
file := c.MustGet("file").(*fic.EFile)
depid := c.MustGet("file-depid").(int64)
err := file.DeleteDepend(&fic.FlagKey{Id: int(depid)})
if err != nil {
log.Println("Unable to deleteFileDep:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to delete file dependency."})
return
}
c.JSON(http.StatusOK, true)
}
func checkFile(c *gin.Context) {
file := c.MustGet("file").(*fic.EFile)
err := file.CheckFileOnDisk()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, true)
}
func gunzipFile(c *gin.Context) {
file := c.MustGet("file").(*fic.EFile)
err := file.GunzipFileOnDisk()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, true)
return nil
}

admin/api/handlers.go Normal file

@ -0,0 +1,227 @@
package api
import (
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"strconv"
"srs.epita.fr/fic-server/libfic"
"github.com/julienschmidt/httprouter"
)
type DispatchFunction func(httprouter.Params, []byte) (interface{}, error)
func apiHandler(f DispatchFunction) func(http.ResponseWriter, *http.Request, httprouter.Params) {
return func(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
log.Printf("Handling %s request from %s: %s [%s]\n", r.Method, r.RemoteAddr, r.URL.Path, r.UserAgent())
w.Header().Set("Content-Type", "application/json")
var ret interface{}
var err error = nil
// Read the body
if r.ContentLength < 0 || r.ContentLength > 6553600 {
http.Error(w, fmt.Sprintf("{errmsg:\"Request too large or request size unknown\"}", err), http.StatusRequestEntityTooLarge)
return
}
var body []byte
if r.ContentLength > 0 {
tmp := make([]byte, 1024)
for {
n, err := r.Body.Read(tmp)
for j := 0; j < n; j++ {
body = append(body, tmp[j])
}
if err != nil || n <= 0 {
break
}
}
}
ret, err = f(ps, body)
// Format response
resStatus := http.StatusOK
if err != nil {
ret = map[string]string{"errmsg": err.Error()}
resStatus = http.StatusBadRequest
log.Println(r.RemoteAddr, resStatus, err.Error())
}
if ret == nil {
ret = map[string]string{"errmsg": "Page not found"}
resStatus = http.StatusNotFound
}
if str, found := ret.(string); found {
w.WriteHeader(resStatus)
io.WriteString(w, str)
} else if bts, found := ret.([]byte); found {
w.WriteHeader(resStatus)
w.Write(bts)
} else if j, err := json.Marshal(ret); err != nil {
http.Error(w, fmt.Sprintf("{\"errmsg\":\"%q\"}", err), http.StatusInternalServerError)
} else {
w.WriteHeader(resStatus)
w.Write(j)
}
}
}
func teamPublicHandler(f func(*fic.Team,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
if tid, err := strconv.Atoi(string(ps.ByName("tid"))); err != nil {
if team, err := fic.GetTeamByInitialName(ps.ByName("tid")); err != nil {
return nil, err
} else {
return f(&team, body)
}
} else if tid == 0 {
return f(nil, body)
} else if team, err := fic.GetTeam(tid); err != nil {
return nil, err
} else {
return f(&team, body)
}
}
}
func teamHandler(f func(fic.Team,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
if tid, err := strconv.Atoi(string(ps.ByName("tid"))); err != nil {
if team, err := fic.GetTeamByInitialName(ps.ByName("tid")); err != nil {
return nil, err
} else {
return f(team, body)
}
} else if team, err := fic.GetTeam(tid); err != nil {
return nil, err
} else {
return f(team, body)
}
}
}
func themeHandler(f func(fic.Theme,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
if thid, err := strconv.Atoi(string(ps.ByName("thid"))); err != nil {
return nil, err
} else if theme, err := fic.GetTheme(thid); err != nil {
return nil, err
} else {
return f(theme, body)
}
}
}
func exerciceHandler(f func(fic.Exercice,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
if eid, err := strconv.Atoi(string(ps.ByName("eid"))); err != nil {
return nil, err
} else if exercice, err := fic.GetExercice(int64(eid)); err != nil {
return nil, err
} else {
return f(exercice, body)
}
}
}
func themedExerciceHandler(f func(fic.Theme,fic.Exercice,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
var theme fic.Theme
var exercice fic.Exercice
themeHandler(func (th fic.Theme, _[]byte) (interface{}, error) {
theme = th
return nil,nil
})(ps, body)
exerciceHandler(func (ex fic.Exercice, _[]byte) (interface{}, error) {
exercice = ex
return nil,nil
})(ps, body)
return f(theme, exercice, body)
}
}
func hintHandler(f func(fic.EHint,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
if hid, err := strconv.Atoi(string(ps.ByName("hid"))); err != nil {
return nil, err
} else if hint, err := fic.GetHint(int64(hid)); err != nil {
return nil, err
} else {
return f(hint, body)
}
}
}
func keyHandler(f func(fic.Key,fic.Exercice,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
var exercice fic.Exercice
exerciceHandler(func (ex fic.Exercice, _[]byte) (interface{}, error) {
exercice = ex
return nil,nil
})(ps, body)
if kid, err := strconv.Atoi(string(ps.ByName("kid"))); err != nil {
return nil, err
} else if keys, err := exercice.GetKeys(); err != nil {
return nil, err
} else {
for _, key := range keys {
if (key.Id == int64(kid)) {
return f(key, exercice, body)
}
}
return nil, errors.New("Unable to find the requested key")
}
}
}
func fileHandler(f func(fic.EFile,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
var exercice fic.Exercice
exerciceHandler(func (ex fic.Exercice, _[]byte) (interface{}, error) {
exercice = ex
return nil,nil
})(ps, body)
if fid, err := strconv.Atoi(string(ps.ByName("fid"))); err != nil {
return nil, err
} else if files, err := exercice.GetFiles(); err != nil {
return nil, err
} else {
for _, file := range files {
if (file.Id == int64(fid)) {
return f(file, body)
}
}
return nil, errors.New("Unable to find the requested file")
}
}
}
func eventHandler(f func(fic.Event,[]byte) (interface{}, error)) func (httprouter.Params,[]byte) (interface{}, error) {
return func (ps httprouter.Params, body []byte) (interface{}, error) {
if evid, err := strconv.Atoi(string(ps.ByName("evid"))); err != nil {
return nil, err
} else if event, err := fic.GetEvent(evid); err != nil {
return nil, err
} else {
return f(event, body)
}
}
}
func notFound(ps httprouter.Params, _ []byte) (interface{}, error) {
return nil, nil
}
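Note: a minimal sketch of how these wrappers are meant to compose when routes are declared on the httprouter instance; the path and the showTeam handler below are hypothetical, only apiHandler and teamHandler come from the file above.

    package api

    import (
        "github.com/julienschmidt/httprouter"

        "srs.epita.fr/fic-server/libfic"
    )

    // Hypothetical route declaration: apiHandler adapts a DispatchFunction to the
    // httprouter signature, while teamHandler resolves the :tid parameter (numeric
    // id or initial name) into a fic.Team before calling the business handler.
    func declareTeamRoutesSketch(router *httprouter.Router) {
        router.GET("/api/teams/:tid", apiHandler(teamHandler(showTeam)))
    }

    // showTeam is a placeholder handler: it just echoes the resolved team as JSON.
    func showTeam(team fic.Team, _ []byte) (interface{}, error) {
        return team, nil
    }

teamPublicHandler differs only in accepting a tid of 0, in which case it passes a nil team to the callback, presumably to address all teams at once.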

@ -1,156 +0,0 @@
package api
import (
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"path"
"strings"
"time"
"srs.epita.fr/fic-server/admin/pki"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
)
var TimestampCheck = "submissions"
func declareHealthRoutes(router *gin.RouterGroup) {
router.GET("/timestamps.json", func(c *gin.Context) {
stat, err := os.Stat(TimestampCheck)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": fmt.Sprintf("timestamp.json: %s", err.Error())})
return
}
now := time.Now().UTC()
c.JSON(http.StatusOK, gin.H{
"frontend": stat.ModTime().UTC(),
"backend": now,
"diffFB": now.Sub(stat.ModTime()),
})
})
router.GET("/health.json", GetHealth)
router.GET("/submissions-stats.json", GetSubmissionsStats)
router.GET("/validations-stats.json", GetValidationsStats)
router.DELETE("/submissions/*path", func(c *gin.Context) {
err := os.Remove(path.Join(TimestampCheck, c.Params.ByName("path")))
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": err.Error()})
return
}
c.Status(http.StatusOK)
})
}
type healthFileReport struct {
IdTeam string `json:"id_team,omitempty"`
Path string `json:"path"`
Error string `json:"error"`
}
func getHealth(pathname string) (ret []healthFileReport) {
if ds, err := ioutil.ReadDir(pathname); err != nil {
ret = append(ret, healthFileReport{
Path: strings.TrimPrefix(pathname, TimestampCheck),
Error: fmt.Sprintf("unable to ReadDir: %s", err),
})
return
} else {
for _, d := range ds {
p := path.Join(pathname, d.Name())
if d.IsDir() && d.Name() != ".tmp" && d.Mode()&os.ModeSymlink == 0 {
ret = append(ret, getHealth(p)...)
} else if !d.IsDir() && d.Mode()&os.ModeSymlink == 0 && time.Since(d.ModTime()) > 2*time.Second {
if d.Name() == ".locked" {
continue
}
teamDir := strings.TrimPrefix(pathname, TimestampCheck)
idteam, _ := pki.GetAssociation(path.Join(TeamsDir, teamDir))
ret = append(ret, healthFileReport{
IdTeam: idteam,
Path: path.Join(teamDir, d.Name()),
Error: "existing untreated file",
})
}
}
return
}
}
func GetHealth(c *gin.Context) {
if _, err := os.Stat(TimestampCheck); err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": fmt.Sprintf("health.json: %s", err.Error())})
return
}
c.JSON(http.StatusOK, getHealth(TimestampCheck))
}
type SubmissionsStats struct {
NbSubmissionLastMinute uint `json:"nbsubminute"`
NbSubmissionLast5Minute uint `json:"nbsub5minute"`
NbSubmissionLastQuarter uint `json:"nbsubquarter"`
NbSubmissionLastHour uint `json:"nbsubhour"`
NbSubmissionLastDay uint `json:"nbsubday"`
}
func calcSubmissionsStats(tries []time.Time) (stats SubmissionsStats) {
lastMinute := time.Now().Add(-1 * time.Minute)
last5Minute := time.Now().Add(-5 * time.Minute)
lastQuarter := time.Now().Add(-15 * time.Minute)
lastHour := time.Now().Add(-1 * time.Hour)
lastDay := time.Now().Add(-24 * time.Hour)
for _, t := range tries {
if lastMinute.Before(t) {
stats.NbSubmissionLastMinute += 1
stats.NbSubmissionLast5Minute += 1
stats.NbSubmissionLastQuarter += 1
stats.NbSubmissionLastHour += 1
stats.NbSubmissionLastDay += 1
} else if last5Minute.Before(t) {
stats.NbSubmissionLast5Minute += 1
stats.NbSubmissionLastQuarter += 1
stats.NbSubmissionLastHour += 1
stats.NbSubmissionLastDay += 1
} else if lastQuarter.Before(t) {
stats.NbSubmissionLastQuarter += 1
stats.NbSubmissionLastHour += 1
stats.NbSubmissionLastDay += 1
} else if lastHour.Before(t) {
stats.NbSubmissionLastHour += 1
stats.NbSubmissionLastDay += 1
} else if lastDay.Before(t) {
stats.NbSubmissionLastDay += 1
}
}
return
}
func GetSubmissionsStats(c *gin.Context) {
tries, err := fic.GetTries(nil, nil)
if err != nil {
log.Println("Unable to GetTries:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieves tries."})
return
}
c.JSON(http.StatusOK, calcSubmissionsStats(tries))
}
func GetValidationsStats(c *gin.Context) {
tries, err := fic.GetValidations(nil, nil)
if err != nil {
log.Println("Unable to GetTries:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieves tries."})
return
}
c.JSON(http.StatusOK, calcSubmissionsStats(tries))
}
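Note: a minimal usage sketch for the calcSubmissionsStats bucketing above, with synthetic timestamps in place of the fic.GetTries result; the exampleSubmissionsStats wrapper is hypothetical and assumes it sits next to the file above.

    package api

    import (
        "fmt"
        "time"
    )

    // exampleSubmissionsStats feeds three synthetic submission times to
    // calcSubmissionsStats; each try is counted in every window that contains it.
    func exampleSubmissionsStats() {
        now := time.Now()
        tries := []time.Time{
            now.Add(-30 * time.Second), // counted in all five windows
            now.Add(-10 * time.Minute), // counted from the quarter-hour window upward
            now.Add(-2 * time.Hour),    // only counted in the 24h window
        }
        // Prints: {NbSubmissionLastMinute:1 NbSubmissionLast5Minute:1
        //          NbSubmissionLastQuarter:2 NbSubmissionLastHour:2 NbSubmissionLastDay:3}
        fmt.Printf("%+v\n", calcSubmissionsStats(tries))
    }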

@ -1,98 +0,0 @@
package api
import (
"bufio"
"io/ioutil"
"net/http"
"os"
"strconv"
"strings"
"github.com/gin-gonic/gin"
)
func declareMonitorRoutes(router *gin.RouterGroup) {
router.GET("/monitor", func(c *gin.Context) {
c.JSON(http.StatusOK, gin.H{
"localhost": genLocalConstants(),
})
})
}
func readLoadAvg(fd *os.File) (ret map[string]float64) {
if s, err := ioutil.ReadAll(fd); err == nil {
f := strings.Fields(strings.TrimSpace(string(s)))
if len(f) >= 3 {
ret = map[string]float64{}
ret["1m"], _ = strconv.ParseFloat(f[0], 64)
ret["5m"], _ = strconv.ParseFloat(f[1], 64)
ret["15m"], _ = strconv.ParseFloat(f[2], 64)
}
}
return
}
func readMeminfo(fd *os.File) (ret map[string]uint64) {
ret = map[string]uint64{}
scanner := bufio.NewScanner(fd)
for scanner.Scan() {
f := strings.Fields(strings.TrimSpace(scanner.Text()))
if len(f) >= 2 {
if v, err := strconv.ParseUint(f[1], 10, 64); err == nil {
ret[strings.ToLower(strings.TrimSuffix(f[0], ":"))] = v * 1024
}
}
}
return
}
func readCPUStats(fd *os.File) (ret map[string]map[string]uint64) {
ret = map[string]map[string]uint64{}
scanner := bufio.NewScanner(fd)
for scanner.Scan() {
f := strings.Fields(strings.TrimSpace(scanner.Text()))
if len(f[0]) >= 4 && f[0][0:3] == "cpu" && len(f) >= 8 {
ret[f[0]] = map[string]uint64{}
var total uint64 = 0
for i, k := range []string{"user", "nice", "system", "idle", "iowait", "irq", "softirq"} {
if v, err := strconv.ParseUint(f[i+1], 10, 64); err == nil {
ret[f[0]][k] = v
total += v
}
}
ret[f[0]]["total"] = total
}
}
return
}
func genLocalConstants() interface{} {
ret := map[string]interface{}{}
fi, err := os.Open("/proc/loadavg")
if err != nil {
return err
}
defer fi.Close()
ret["loadavg"] = readLoadAvg(fi)
fi, err = os.Open("/proc/meminfo")
if err != nil {
return err
}
defer fi.Close()
ret["meminfo"] = readMeminfo(fi)
fi, err = os.Open("/proc/stat")
if err != nil {
return err
}
defer fi.Close()
ret["cpustat"] = readCPUStats(fi)
return ret
}
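Note: the counters returned by readCPUStats are cumulative since boot, so a utilisation figure needs two samples. Below is a hedged sketch of that computation (not something the /monitor route does today); the parser above only keeps the per-core lines (cpu0, cpu1, ...), since the aggregate "cpu" line fails the len(f[0]) >= 4 test.

    package api

    import (
        "os"
        "time"
    )

    // cpuBusyRatio is a hypothetical helper: it samples /proc/stat twice through
    // readCPUStats and derives the busy ratio of core cpu0 from the counter deltas.
    func cpuBusyRatio(interval time.Duration) (float64, error) {
        sample := func() (map[string]map[string]uint64, error) {
            fd, err := os.Open("/proc/stat")
            if err != nil {
                return nil, err
            }
            defer fd.Close()
            return readCPUStats(fd), nil
        }

        before, err := sample()
        if err != nil {
            return 0, err
        }
        time.Sleep(interval)
        after, err := sample()
        if err != nil {
            return 0, err
        }

        // The kernel counters only grow, so the deltas are non-negative.
        dTotal := after["cpu0"]["total"] - before["cpu0"]["total"]
        dIdle := after["cpu0"]["idle"] - before["cpu0"]["idle"]
        if dTotal == 0 {
            return 0, nil
        }
        return 1 - float64(dIdle)/float64(dTotal), nil
    }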

@ -1,360 +0,0 @@
package api
import (
"bytes"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"path"
"text/template"
"unicode"
"srs.epita.fr/fic-server/admin/pki"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
)
var (
OidcIssuer = "live.fic.srs.epita.fr"
OidcClientId = "epita-challenge"
OidcSecret = ""
)
func declarePasswordRoutes(router *gin.RouterGroup) {
router.POST("/password", func(c *gin.Context) {
passwd, err := fic.GeneratePassword()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, gin.H{"password": passwd})
})
router.GET("/oauth-status", func(c *gin.Context) {
c.JSON(http.StatusOK, gin.H{
"secret_defined": OidcSecret != "",
})
})
router.GET("/dex.yaml", func(c *gin.Context) {
cfg, err := genDexConfig()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.String(http.StatusOK, string(cfg))
})
router.POST("/dex.yaml", func(c *gin.Context) {
if dexcfg, err := genDexConfig(); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
} else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "dex-config.yaml"), []byte(dexcfg), 0644); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, true)
})
router.GET("/dex-password.tpl", func(c *gin.Context) {
passtpl, err := genDexPasswordTpl()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.String(http.StatusOK, string(passtpl))
})
router.POST("/dex-password.tpl", func(c *gin.Context) {
if dexcfg, err := genDexPasswordTpl(); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
} else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "dex-password.tpl"), []byte(dexcfg), 0644); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, true)
})
router.GET("/vouch-proxy.yaml", func(c *gin.Context) {
cfg, err := genVouchProxyConfig()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.String(http.StatusOK, string(cfg))
})
router.POST("/vouch-proxy.yaml", func(c *gin.Context) {
if dexcfg, err := genVouchProxyConfig(); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
} else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "vouch-config.yaml"), []byte(dexcfg), 0644); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, true)
})
}
func declareTeamsPasswordRoutes(router *gin.RouterGroup) {
router.GET("/password", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
if team.Password != nil {
c.String(http.StatusOK, *team.Password)
} else {
c.AbortWithStatusJSON(http.StatusNotFound, nil)
}
})
router.POST("/password", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
if passwd, err := fic.GeneratePassword(); err != nil {
log.Println("Unable to GeneratePassword:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Something went wrong when generating the new team password"})
return
} else {
team.Password = &passwd
_, err := team.Update()
if err != nil {
log.Println("Unable to Update Team:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Something went wrong when updating the new team password"})
return
}
c.JSON(http.StatusOK, team)
}
})
}
const dexcfgtpl = `issuer: {{ .Issuer }}
storage:
type: sqlite3
config:
file: /var/dex/dex.db
web:
http: 0.0.0.0:5556
frontend:
issuer: {{ .Name }}
logoURL: {{ .LogoPath }}
dir: /srv/dex/web/
oauth2:
skipApprovalScreen: true
staticClients:
{{ range $c := .Clients }}
- id: {{ $c.Id }}
name: {{ $c.Name }}
redirectURIs: [{{ range $u := $c.RedirectURIs }}'{{ $u }}'{{ end }}]
secret: {{ $c.Secret }}
{{ end }}
enablePasswordDB: true
staticPasswords:
{{ range $t := .Teams }}
- email: "team{{ printf "%02d" $t.Id }}"
hash: "{{with $t }}{{ .HashedPassword }}{{end}}"
{{ end }}
`
const dexpasswdtpl = `{{ "{{" }} template "header.html" . {{ "}}" }}
<div class="theme-panel">
<h2 class="theme-heading">
Bienvenue au {{ .Name }}&nbsp;!
</h2>
<form method="post" action="{{ "{{" }} .PostURL {{ "}}" }}">
<div class="theme-form-row">
<div class="theme-form-label">
<label for="userid">Votre équipe</label>
</div>
<select tabindex="1" required id="login" name="login" class="theme-form-input" autofocus>
{{ range $t := .Teams }} <option value="team{{ printf "%02d" $t.Id }}">{{ $t.Name }}</option>
{{ end }} </select>
</div>
<div class="theme-form-row">
<div class="theme-form-label">
<label for="password">Mot de passe</label>
</div>
<input tabindex="2" required id="password" name="password" type="password" class="theme-form-input" placeholder="mot de passe" {{ "{{" }} if .Invalid {{ "}}" }} autofocus {{ "{{" }} end {{ "}}" }}/>
</div>
{{ "{{" }} if .Invalid {{ "}}" }}
<div id="login-error" class="dex-error-box">
Identifiants incorrects.
</div>
{{ "{{" }} end {{ "}}" }}
<button tabindex="3" id="submit-login" type="submit" class="dex-btn theme-btn--primary">C'est parti&nbsp;!</button>
</form>
{{ "{{" }} if .BackLink {{ "}}" }}
<div class="theme-link-back">
<a class="dex-subtle-text" href="{{ "{{" }} .BackLink {{ "}}" }}">Sélectionner une autre méthode d'authentification.</a>
</div>
{{ "{{" }} end {{ "}}" }}
</div>
{{ "{{" }} template "footer.html" . {{ "}}" }}
`
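Note: the {{ "{{" }} and {{ "}}" }} actions in dexpasswdtpl are there so the rendered file still contains literal {{ ... }} actions for dex's own templating; only .Name and .Teams are expanded at generation time. A minimal standalone illustration of that escaping:

    package main

    import (
        "os"
        "text/template"
    )

    func main() {
        // {{ "{{" }} writes the two characters "{{" verbatim, leaving the inner
        // action for the next templating stage (dex) to expand.
        const tpl = `{{ "{{" }} template "header.html" . {{ "}}" }} Bienvenue au {{ .Name }} !`
        t := template.Must(template.New("demo").Parse(tpl))
        // Prints: {{ template "header.html" . }} Bienvenue au challenge !
        if err := t.Execute(os.Stdout, map[string]string{"Name": "challenge"}); err != nil {
            panic(err)
        }
    }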
type dexConfigClient struct {
Id string
Name string
RedirectURIs []string
Secret string
}
type dexConfig struct {
Name string
Issuer string
Clients []dexConfigClient
Teams []*fic.Team
LogoPath string
}
func genDexConfig() ([]byte, error) {
if OidcSecret == "" {
return nil, fmt.Errorf("Unable to generate dex configuration: OIDC Secret not defined. Please define FICOIDC_SECRET in your environment.")
}
teams, err := fic.GetTeams()
if err != nil {
return nil, err
}
b := bytes.NewBufferString("")
challengeInfo, err := GetChallengeInfo()
if err != nil {
return nil, fmt.Errorf("Cannot create template: %w", err)
}
// Lower the first letter to be included in a sentence.
name := []rune(challengeInfo.Title)
if len(name) > 0 {
name[0] = unicode.ToLower(name[0])
}
logoPath := ""
if len(challengeInfo.MainLogo) > 0 {
logoPath = path.Join("../../files", "logo", path.Base(challengeInfo.MainLogo[len(challengeInfo.MainLogo)-1]))
}
dexTmpl, err := template.New("dexcfg").Parse(dexcfgtpl)
if err != nil {
return nil, fmt.Errorf("Cannot create template: %w", err)
}
err = dexTmpl.Execute(b, dexConfig{
Name: string(name),
Issuer: "https://" + OidcIssuer,
Clients: []dexConfigClient{
dexConfigClient{
Id: OidcClientId,
Name: challengeInfo.Title,
RedirectURIs: []string{"https://" + OidcIssuer + "/challenge_access/auth"},
Secret: OidcSecret,
},
},
Teams: teams,
LogoPath: logoPath,
})
if err != nil {
return nil, fmt.Errorf("An error occurs during template execution: %w", err)
}
// Also generate team associations
for _, team := range teams {
if _, err := os.Stat(path.Join(TeamsDir, fmt.Sprintf("team%02d", team.Id))); err == nil {
if err = os.Remove(path.Join(TeamsDir, fmt.Sprintf("team%02d", team.Id))); err != nil {
log.Println("Unable to remove existing association symlink:", err.Error())
return nil, fmt.Errorf("Unable to remove existing association symlink: %s", err.Error())
}
}
if err := os.Symlink(fmt.Sprintf("%d", team.Id), path.Join(TeamsDir, fmt.Sprintf("team%02d", team.Id))); err != nil {
log.Println("Unable to create association symlink:", err.Error())
return nil, fmt.Errorf("Unable to create association symlink: %s", err.Error())
}
}
return b.Bytes(), nil
}
func genDexPasswordTpl() ([]byte, error) {
challengeInfo, err := GetChallengeInfo()
if err != nil {
return nil, fmt.Errorf("Cannot create template: %w", err)
}
if teams, err := fic.GetTeams(); err != nil {
return nil, err
} else {
b := bytes.NewBufferString("")
if dexTmpl, err := template.New("dexpasswd").Parse(dexpasswdtpl); err != nil {
return nil, fmt.Errorf("Cannot create template: %w", err)
} else if err = dexTmpl.Execute(b, dexConfig{
Teams: teams,
Name: challengeInfo.Title,
}); err != nil {
return nil, fmt.Errorf("An error occurs during template execution: %w", err)
} else {
return b.Bytes(), nil
}
}
}
const vouchcfgtpl = `# CONFIGURATION FILE HANDLED BY fic-admin
# DO NOT MODIFY IT BY HAND
vouch:
logLevel: debug
allowAllUsers: true
document_root: /challenge_access
cookie:
domain: {{ .Domain }}
oauth:
provider: oidc
client_id: {{ .ClientId }}
client_secret: {{ .ClientSecret }}
callback_urls:
- https://{{ .Domain }}/challenge_access/auth
auth_url: https://{{ .Domain }}/auth
token_url: http://127.0.0.1:5556/token
user_info_url: http://127.0.0.1:5556/userinfo
scopes:
- openid
- email
`
type vouchProxyConfig struct {
Domain string
ClientId string
ClientSecret string
}
func genVouchProxyConfig() ([]byte, error) {
if OidcSecret == "" {
return nil, fmt.Errorf("Unable to generate vouch proxy configuration: OIDC Secret not defined. Please define FICOIDC_SECRET in your environment.")
}
b := bytes.NewBufferString("")
if vouchTmpl, err := template.New("vouchcfg").Parse(vouchcfgtpl); err != nil {
return nil, fmt.Errorf("Cannot create template: %w", err)
} else if err = vouchTmpl.Execute(b, vouchProxyConfig{
Domain: OidcIssuer,
ClientId: OidcClientId,
ClientSecret: OidcSecret,
}); err != nil {
return nil, fmt.Errorf("An error occurs during template execution: %w", err)
} else {
return b.Bytes(), nil
}
}
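Note: OidcSecret is only tested for emptiness above; the error messages imply it is injected from the FICOIDC_SECRET environment variable by the admin binary, whose main package is not part of this diff. A hedged sketch of that wiring (the import path and the init hook are assumptions):

    package main

    import (
        "log"
        "os"

        "srs.epita.fr/fic-server/admin/api"
    )

    func init() {
        // Assumption: the admin entry point copies FICOIDC_SECRET into
        // api.OidcSecret before the first /dex.yaml or /vouch-proxy.yaml request;
        // otherwise genDexConfig and genVouchProxyConfig refuse to run.
        if v, ok := os.LookupEnv("FICOIDC_SECRET"); ok {
            api.OidcSecret = v
        } else {
            log.Println("FICOIDC_SECRET is not set: OIDC configuration generation stays disabled")
        }
    }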

@ -2,48 +2,25 @@ package api
import (
"encoding/json"
"fmt"
"log"
"net/http"
"os"
"path"
"strconv"
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/julienschmidt/httprouter"
)
var DashboardDir string
func declarePublicRoutes(router *gin.RouterGroup) {
router.GET("/public/", listPublic)
router.GET("/public/:sid", getPublic)
router.DELETE("/public/:sid", deletePublic)
router.PUT("/public/:sid", savePublic)
func init() {
router.GET("/api/public.json", apiHandler(getPublic))
router.DELETE("/api/public.json", apiHandler(deletePublic))
router.PUT("/api/public.json", apiHandler(savePublic))
}
type FICPublicScene struct {
Type string `json:"type"`
Params map[string]interface{} `json:"params"`
Type string `json:"type"`
Params map[string]interface{} `json:"params"`
}
type FICPublicDisplay struct {
Scenes []FICPublicScene `json:"scenes"`
Side []FICPublicScene `json:"side"`
CustomCountdown map[string]interface{} `json:"customCountdown"`
HideEvents bool `json:"hideEvents"`
HideCountdown bool `json:"hideCountdown"`
HideCarousel bool `json:"hideCarousel"`
PropagationTime *time.Time `json:"propagationTime,omitempty"`
}
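Note: a minimal sketch of the JSON document these handlers read and write, derived from the struct tags above; the "message" scene type and its params are illustrative only.

    package api

    import (
        "encoding/json"
        "fmt"
    )

    // examplePublicJSON marshals a one-scene dashboard description to show the
    // shape of a public<sid>.json file.
    func examplePublicJSON() {
        doc := FICPublicDisplay{
            Scenes: []FICPublicScene{{
                Type:   "message",
                Params: map[string]interface{}{"text": "Bienvenue !"},
            }},
            Side:          []FICPublicScene{},
            HideCountdown: true,
        }
        out, _ := json.MarshalIndent(doc, "", "  ")
        fmt.Println(string(out))
        // Output (abbreviated): scenes, side, customCountdown (null), hideEvents,
        // hideCountdown, hideCarousel; propagationTime is omitted while nil.
    }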
func InitDashboardPresets(dir string) error {
return nil
}
func readPublic(path string) (FICPublicDisplay, error) {
var s FICPublicDisplay
func readPublic(path string) ([]FICPublicScene, error) {
var s []FICPublicScene
if fd, err := os.Open(path); err != nil {
return s, err
} else {
@ -58,7 +35,7 @@ func readPublic(path string) (FICPublicDisplay, error) {
}
}
func savePublicTo(path string, s FICPublicDisplay) error {
func savePublicTo(path string, s []FICPublicScene) error {
if fd, err := os.Create(path); err != nil {
return err
} else {
@ -73,134 +50,31 @@ func savePublicTo(path string, s FICPublicDisplay) error {
}
}
type DashboardFiles struct {
Presets []string `json:"presets"`
Nexts []*NextDashboardFile `json:"nexts"`
}
type NextDashboardFile struct {
Name string `json:"name"`
Screen int `json:"screen"`
Date time.Time `json:"date"`
}
func listPublic(c *gin.Context) {
files, err := os.ReadDir(DashboardDir)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
var ret DashboardFiles
for _, file := range files {
if strings.HasPrefix(file.Name(), "preset-") {
ret.Presets = append(ret.Presets, strings.TrimSuffix(strings.TrimPrefix(file.Name(), "preset-"), ".json"))
continue
}
if !strings.HasPrefix(file.Name(), "public") || len(file.Name()) < 18 {
continue
}
ts, err := strconv.ParseInt(file.Name()[8:18], 10, 64)
if err == nil {
s, _ := strconv.Atoi(file.Name()[6:7])
ret.Nexts = append(ret.Nexts, &NextDashboardFile{
Name: file.Name()[6:18],
Screen: s,
Date: time.Unix(ts, 0),
})
}
}
c.JSON(http.StatusOK, ret)
}
func getPublic(c *gin.Context) {
if strings.Contains(c.Params.ByName("sid"), "/") {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "sid cannot contains /"})
return
}
filename := fmt.Sprintf("public%s.json", c.Params.ByName("sid"))
if strings.HasPrefix(c.Params.ByName("sid"), "preset-") {
filename = fmt.Sprintf("%s.json", c.Params.ByName("sid"))
}
if _, err := os.Stat(path.Join(DashboardDir, filename)); !os.IsNotExist(err) {
p, err := readPublic(path.Join(DashboardDir, filename))
if err != nil {
log.Println("Unable to readPublic in getPublic:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene retrieval."})
return
}
c.JSON(http.StatusOK, p)
return
}
c.JSON(http.StatusOK, FICPublicDisplay{Scenes: []FICPublicScene{}, Side: []FICPublicScene{}})
}
func deletePublic(c *gin.Context) {
if strings.Contains(c.Params.ByName("sid"), "/") {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "sid cannot contains /"})
return
}
filename := fmt.Sprintf("public%s.json", c.Params.ByName("sid"))
if strings.HasPrefix(c.Params.ByName("sid"), "preset-") {
filename = fmt.Sprintf("%s.json", c.Params.ByName("sid"))
}
if len(filename) == 12 {
if err := savePublicTo(path.Join(DashboardDir, filename), FICPublicDisplay{}); err != nil {
log.Println("Unable to deletePublic:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene deletion."})
return
}
func getPublic(_ httprouter.Params, body []byte) (interface{}, error) {
if _, err := os.Stat(path.Join(TeamsDir, "_public", "public.json")); !os.IsNotExist(err) {
return readPublic(path.Join(TeamsDir, "_public", "public.json"))
} else {
if err := os.Remove(path.Join(DashboardDir, filename)); err != nil {
log.Println("Unable to deletePublic:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene deletion."})
return
}
return []FICPublicScene{}, nil
}
c.JSON(http.StatusOK, FICPublicDisplay{Scenes: []FICPublicScene{}, Side: []FICPublicScene{}})
}
func savePublic(c *gin.Context) {
if strings.Contains(c.Params.ByName("sid"), "/") {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "sid cannot contains /"})
return
func deletePublic(_ httprouter.Params, body []byte) (interface{}, error) {
if err := savePublicTo(path.Join(TeamsDir, "_public", "public.json"), []FICPublicScene{}); err != nil {
return nil, err
} else {
return []FICPublicScene{}, err
}
}
func savePublic(_ httprouter.Params, body []byte) (interface{}, error) {
var scenes []FICPublicScene
if err := json.Unmarshal(body, &scenes); err != nil {
return nil, err
}
if err := savePublicTo(path.Join(TeamsDir, "_public", "public.json"), scenes); err != nil {
return nil, err
} else {
return scenes, err
}
var scenes FICPublicDisplay
err := c.ShouldBindJSON(&scenes)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
filename := fmt.Sprintf("public%s.json", c.Params.ByName("sid"))
if c.Request.URL.Query().Has("t") {
t, err := time.Parse(time.RFC3339, c.Request.URL.Query().Get("t"))
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
filename = fmt.Sprintf("public%s-%d.json", c.Params.ByName("sid"), t.Unix())
} else if c.Request.URL.Query().Has("p") {
filename = fmt.Sprintf("preset-%s.json", c.Request.URL.Query().Get("p"))
}
if err := savePublicTo(path.Join(DashboardDir, filename), scenes); err != nil {
log.Println("Unable to savePublicTo:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene saving."})
return
}
c.JSON(http.StatusOK, scenes)
}

@ -1,119 +0,0 @@
package api
import (
"log"
"net/http"
"strconv"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
)
func declareQARoutes(router *gin.RouterGroup) {
router.POST("/qa/", importExerciceQA)
apiQARoutes := router.Group("/qa/:qid")
apiQARoutes.Use(QAHandler)
apiQARoutes.POST("/comments", importQAComment)
}
func QAHandler(c *gin.Context) {
qid, err := strconv.ParseInt(string(c.Params.ByName("qid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid QA identifier"})
return
}
qa, err := fic.GetQAQuery(qid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "QA query not found"})
return
}
c.Set("qa-query", qa)
c.Next()
}
func importExerciceQA(c *gin.Context) {
// Create a new query
var uq fic.QAQuery
err := c.ShouldBindJSON(&uq)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
var exercice *fic.Exercice
if uq.IdExercice == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "id_exercice not filled"})
return
} else if exercice, err = fic.GetExercice(uq.IdExercice); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Unable to find requested exercice"})
return
}
if len(uq.State) == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "State not filled"})
return
}
if len(uq.Subject) == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Subject not filled"})
return
}
if qa, err := exercice.NewQAQuery(uq.Subject, uq.IdTeam, uq.User, uq.State, nil); err != nil {
log.Println("Unable to importExerciceQA:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during query creation."})
return
} else {
qa.Creation = uq.Creation
qa.Solved = uq.Solved
qa.Closed = uq.Closed
_, err = qa.Update()
if err != nil {
log.Println("Unable to update in importExerciceQA:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during query updating."})
return
}
c.JSON(http.StatusOK, qa)
}
}
func importQAComment(c *gin.Context) {
query := c.MustGet("qa-query").(*fic.QAQuery)
// Create a new query
var uc fic.QAComment
err := c.ShouldBindJSON(&uc)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
if len(uc.Content) == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Empty comment"})
return
}
if qac, err := query.AddComment(uc.Content, uc.IdTeam, uc.User); err != nil {
log.Println("Unable to AddComment in importQAComment:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during comment creation."})
return
} else {
qac.Date = uc.Date
_, err = qac.Update()
if err != nil {
log.Println("Unable to Update comment in importQAComment")
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during comment creation."})
return
}
c.JSON(http.StatusOK, qac)
}
}

@ -1,67 +0,0 @@
package api
import (
"net/http"
"strings"
"srs.epita.fr/fic-server/admin/sync"
"github.com/gin-gonic/gin"
)
func declareRepositoriesRoutes(router *gin.RouterGroup) {
if gi, ok := sync.GlobalImporter.(sync.GitImporter); ok {
router.GET("/repositories", func(c *gin.Context) {
mod, err := gi.GetSubmodules()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, gin.H{"repositories": mod})
})
router.GET("/repositories/*repopath", func(c *gin.Context) {
repopath := strings.TrimPrefix(c.Param("repopath"), "/")
mod, err := gi.GetSubmodule(repopath)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, mod)
})
router.POST("/repositories/*repopath", func(c *gin.Context) {
repopath := strings.TrimPrefix(c.Param("repopath"), "/")
mod, err := gi.IsRepositoryUptodate(repopath)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, mod)
})
router.DELETE("/repositories/*repopath", func(c *gin.Context) {
di, ok := sync.GlobalImporter.(sync.DeletableImporter)
if !ok {
c.AbortWithStatusJSON(http.StatusNotImplemented, gin.H{"errmsg": "Not implemented"})
return
}
if strings.Contains(c.Param("repopath"), "..") {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Repopath contains invalid characters"})
return
}
repopath := strings.TrimPrefix(c.Param("repopath"), "/")
err := di.DeleteDir(repopath)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, true)
})
}
}

@ -1,29 +1,11 @@
package api
import (
"github.com/gin-gonic/gin"
"github.com/julienschmidt/httprouter"
)
func DeclareRoutes(router *gin.RouterGroup) {
apiRoutes := router.Group("/api")
var router = httprouter.New()
declareCertificateRoutes(apiRoutes)
declareClaimsRoutes(apiRoutes)
declareEventsRoutes(apiRoutes)
declareExercicesRoutes(apiRoutes)
declareExportRoutes(apiRoutes)
declareFilesGlobalRoutes(apiRoutes)
declareFilesRoutes(apiRoutes)
declareGlobalExercicesRoutes(apiRoutes)
declareHealthRoutes(apiRoutes)
declareMonitorRoutes(apiRoutes)
declarePasswordRoutes(apiRoutes)
declarePublicRoutes(apiRoutes)
declareQARoutes(apiRoutes)
declareRepositoriesRoutes(apiRoutes)
declareTeamsRoutes(apiRoutes)
declareThemesRoutes(apiRoutes)
declareSettingsRoutes(apiRoutes)
declareSyncRoutes(apiRoutes)
DeclareVersionRoutes(apiRoutes)
func Router() *httprouter.Router {
return router
}

@ -2,424 +2,61 @@ package api
import (
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"os"
"errors"
"path"
"strconv"
"time"
"srs.epita.fr/fic-server/admin/generation"
"srs.epita.fr/fic-server/admin/sync"
"srs.epita.fr/fic-server/libfic"
"srs.epita.fr/fic-server/settings"
"github.com/gin-gonic/gin"
"github.com/julienschmidt/httprouter"
)
var IsProductionEnv = false
var TeamsDir string
func declareSettingsRoutes(router *gin.RouterGroup) {
router.GET("/challenge.json", getChallengeInfo)
router.PUT("/challenge.json", saveChallengeInfo)
func init() {
router.GET("/api/settings.json", apiHandler(getSettings))
router.PUT("/api/settings.json", apiHandler(saveSettings))
router.GET("/settings.json", getSettings)
router.PUT("/settings.json", saveSettings)
router.DELETE("/settings.json", func(c *gin.Context) {
err := ResetSettings()
if err != nil {
log.Println("Unable to ResetSettings:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during setting reset."})
return
}
c.JSON(http.StatusOK, true)
})
router.GET("/settings-next", listNextSettings)
apiNextSettingsRoutes := router.Group("/settings-next/:ts")
apiNextSettingsRoutes.Use(NextSettingsHandler)
apiNextSettingsRoutes.GET("", getNextSettings)
apiNextSettingsRoutes.DELETE("", deleteNextSettings)
router.POST("/reset", reset)
router.POST("/full-generation", fullGeneration)
router.GET("/prod", func(c *gin.Context) {
c.JSON(http.StatusOK, IsProductionEnv)
})
router.PUT("/prod", func(c *gin.Context) {
err := c.ShouldBindJSON(&IsProductionEnv)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, IsProductionEnv)
})
router.POST("/api/reset", apiHandler(reset))
}
func NextSettingsHandler(c *gin.Context) {
ts, err := strconv.ParseInt(string(c.Params.ByName("ts")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid next settings identifier"})
return
}
nsf, err := settings.ReadNextSettingsFile(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", ts)), ts)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Next settings not found"})
return
}
c.Set("next-settings", nsf)
c.Next()
}
func fullGeneration(c *gin.Context) {
resp, err := generation.FullGeneration()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
"errmsg": err.Error(),
})
return
}
defer resp.Body.Close()
v, _ := io.ReadAll(resp.Body)
c.JSON(resp.StatusCode, gin.H{
"errmsg": string(v),
})
}
func GetChallengeInfo() (*settings.ChallengeInfo, error) {
var challengeinfo string
var err error
if sync.GlobalImporter == nil {
if fd, err := os.Open(path.Join(settings.SettingsDir, settings.ChallengeFile)); err == nil {
defer fd.Close()
var buf []byte
buf, err = io.ReadAll(fd)
if err == nil {
challengeinfo = string(buf)
}
}
func getSettings(_ httprouter.Params, body []byte) (interface{}, error) {
if settings.ExistsSettings(path.Join(TeamsDir, settings.SettingsFile)) {
return settings.ReadSettings(path.Join(TeamsDir, settings.SettingsFile))
} else {
challengeinfo, err = sync.GetFileContent(sync.GlobalImporter, settings.ChallengeFile)
return settings.FICSettings{"Challenge FIC", "Laboratoire SRS, ÉPITA", time.Unix(0,0), time.Unix(0,0), time.Unix(0,0), fic.FirstBlood, fic.SubmissionCostBase, false, false, false, true, true}, nil
}
if err != nil {
log.Println("Unable to retrieve challenge.json:", err.Error())
return nil, fmt.Errorf("Unable to retrive challenge.json: %w", err)
}
s, err := settings.ReadChallengeInfo(challengeinfo)
if err != nil {
log.Println("Unable to ReadChallengeInfo:", err.Error())
return nil, fmt.Errorf("Unable to read challenge info: %w", err)
}
return s, nil
}
func getChallengeInfo(c *gin.Context) {
if s, err := GetChallengeInfo(); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
func saveSettings(_ httprouter.Params, body []byte) (interface{}, error) {
var config settings.FICSettings
if err := json.Unmarshal(body, &config); err != nil {
return nil, err
}
if err := settings.SaveSettings(path.Join(TeamsDir, settings.SettingsFile), config); err != nil {
return nil, err
} else {
c.JSON(http.StatusOK, s)
return config, err
}
}
func saveChallengeInfo(c *gin.Context) {
var info *settings.ChallengeInfo
err := c.ShouldBindJSON(&info)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
if sync.GlobalImporter != nil {
jenc, err := json.Marshal(info)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
err = sync.WriteFileContent(sync.GlobalImporter, "challenge.json", jenc)
if err != nil {
log.Println("Unable to SaveChallengeInfo:", err.Error())
// Ignore the error, try to continue
}
err = sync.ImportChallengeInfo(info, DashboardDir)
if err != nil {
log.Println("Unable to ImportChallengeInfo:", err.Error())
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Something goes wrong when trying to import related files: %s", err.Error())})
return
}
}
if err := settings.SaveChallengeInfo(path.Join(settings.SettingsDir, settings.ChallengeFile), info); err != nil {
log.Println("Unable to SaveChallengeInfo:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to save distributed challenge info: %s", err.Error())})
return
}
c.JSON(http.StatusOK, info)
}
func getSettings(c *gin.Context) {
s, err := settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile))
if err != nil {
log.Println("Unable to ReadSettings:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to read settings: %s", err.Error())})
return
}
s.WorkInProgress = !IsProductionEnv
c.Writer.Header().Add("X-FIC-Time", fmt.Sprintf("%d", time.Now().Unix()))
c.JSON(http.StatusOK, s)
}
func saveSettings(c *gin.Context) {
var config *settings.Settings
err := c.ShouldBindJSON(&config)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
// Is this a future setting?
if c.Request.URL.Query().Has("t") {
t, err := time.Parse(time.RFC3339, c.Request.URL.Query().Get("t"))
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
// Load current settings to perform diff later
init_settings, err := settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile))
if err != nil {
log.Println("Unable to ReadSettings:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to read settings: %s", err.Error())})
return
}
current_settings := init_settings
// Apply already registered settings
nsu, err := settings.MergeNextSettingsUntil(&t)
if err == nil {
current_settings = settings.MergeSettings(*init_settings, nsu)
} else {
log.Println("Unable to MergeNextSettingsUntil:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to merge next settings: %s", err.Error())})
return
}
// Keep only diff
diff := settings.DiffSettings(current_settings, config)
if len(diff) == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "No difference to apply."})
return
}
if !c.Request.URL.Query().Has("erase") {
// Check if there is already diff to apply at the given time
if nsf, err := settings.ReadNextSettingsFile(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", t.Unix())), t.Unix()); err == nil {
for k, v := range nsf.Values {
if _, ok := diff[k]; !ok {
diff[k] = v
}
}
}
}
// Save the diff
settings.SaveSettings(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", t.Unix())), diff)
// Return current settings
c.JSON(http.StatusOK, current_settings)
} else {
// Just apply settings right now!
if err := settings.SaveSettings(path.Join(settings.SettingsDir, settings.SettingsFile), config); err != nil {
log.Println("Unable to SaveSettings:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to save settings: %s", err.Error())})
return
}
ApplySettings(config)
c.JSON(http.StatusOK, config)
}
}
func listNextSettings(c *gin.Context) {
nsf, err := settings.ListNextSettingsFiles()
if err != nil {
log.Println("Unable to ListNextSettingsFiles:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to list next settings files: %s", err.Error())})
return
}
c.JSON(http.StatusOK, nsf)
}
func getNextSettings(c *gin.Context) {
c.JSON(http.StatusOK, c.MustGet("next-settings").(*settings.NextSettingsFile))
}
func deleteNextSettings(c *gin.Context) {
nsf := c.MustGet("next-settings").(*settings.NextSettingsFile)
err := os.Remove(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", nsf.Id)))
if err != nil {
log.Println("Unable to remove the file:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to remove the file: %s", err.Error())})
return
}
c.JSON(http.StatusOK, true)
}
func ApplySettings(config *settings.Settings) {
fic.PartialValidation = config.PartialValidation
fic.UnlockedChallengeDepth = config.UnlockedChallengeDepth
fic.UnlockedChallengeUpTo = config.UnlockedChallengeUpTo
fic.DisplayAllFlags = config.DisplayAllFlags
fic.HideCaseSensitivity = config.HideCaseSensitivity
fic.UnlockedStandaloneExercices = config.UnlockedStandaloneExercices
fic.UnlockedStandaloneExercicesByThemeStepValidation = config.UnlockedStandaloneExercicesByThemeStepValidation
fic.UnlockedStandaloneExercicesByStandaloneExerciceValidation = config.UnlockedStandaloneExercicesByStandaloneExerciceValidation
fic.DisplayMCQBadCount = config.DisplayMCQBadCount
fic.FirstBlood = config.FirstBlood
fic.SubmissionCostBase = config.SubmissionCostBase
fic.HintCoefficient = config.HintCurCoefficient
fic.WChoiceCoefficient = config.WChoiceCurCoefficient
fic.ExerciceCurrentCoefficient = config.ExerciceCurCoefficient
fic.GlobalScoreCoefficient = config.GlobalScoreCoefficient
fic.SubmissionCostBase = config.SubmissionCostBase
fic.SubmissionUniqueness = config.SubmissionUniqueness
fic.CountOnlyNotGoodTries = config.CountOnlyNotGoodTries
fic.QuestionGainRatio = config.QuestionGainRatio
if config.DiscountedFactor != fic.DiscountedFactor {
fic.DiscountedFactor = config.DiscountedFactor
if err := fic.DBRecreateDiscountedView(); err != nil {
log.Println("Unable to recreate exercices_discounted view:", err.Error())
}
}
}
func ResetSettings() error {
return settings.SaveSettings(path.Join(settings.SettingsDir, settings.SettingsFile), &settings.Settings{
WorkInProgress: IsProductionEnv,
FirstBlood: fic.FirstBlood,
SubmissionCostBase: fic.SubmissionCostBase,
ExerciceCurCoefficient: 1,
HintCurCoefficient: 1,
WChoiceCurCoefficient: 1,
GlobalScoreCoefficient: 1,
DiscountedFactor: 0,
QuestionGainRatio: 0,
UnlockedStandaloneExercices: 10,
UnlockedStandaloneExercicesByThemeStepValidation: 1,
UnlockedStandaloneExercicesByStandaloneExerciceValidation: 0,
AllowRegistration: false,
CanJoinTeam: false,
DenyTeamCreation: false,
DenyNameChange: false,
AcceptNewIssue: true,
QAenabled: false,
EnableResolutionRoute: false,
PartialValidation: true,
UnlockedChallengeDepth: 0,
SubmissionUniqueness: true,
CountOnlyNotGoodTries: true,
DisplayAllFlags: false,
DisplayMCQBadCount: false,
EventKindness: false,
})
}
func ResetChallengeInfo() error {
return settings.SaveChallengeInfo(path.Join(settings.SettingsDir, settings.ChallengeFile), &settings.ChallengeInfo{
Title: "Challenge forensic",
SubTitle: "sous le patronage du commandement de la cyberdéfense",
Authors: "Laboratoire SRS, ÉPITA",
VideosLink: "",
Description: `<p>Le challenge <em>forensic</em> vous place dans la peau de <strong>spécialistes en investigation numérique</strong>. Nous mettons à votre disposition une <strong>vingtaine de scénarios différents</strong>, dans lesquels vous devrez faire les différentes étapes <strong>de la caractérisation d’une réponse à incident</strong> proposées.</p>
<p>Chaque scénario met en scène un contexte d’<strong>entreprise</strong>, ayant découvert récemment qu’elle a été <strong>victime d’une cyberattaque</strong>. Elle vous demande alors de l’aider à <strong>caractériser</strong>, afin de mieux comprendre <strong>la situation</strong>, notamment le <strong>mode opératoire de l’adversaire</strong>, les <strong>impacts</strong> de la cyberattaque, le <strong>périmètre technique compromis</strong>, etc. Il faudra parfois aussi l’éclairer sur les premières étapes de la réaction.</p>`,
Rules: `<h3>Déroulement</h3>
<p>Pendant toute la durée du challenge, vous aurez <strong>accès à tous les scénarios</strong>, mais seulement à la première des 5 étapes. <strong>Chaque étape</strong> supplémentaire <strong>est débloquée lorsque vous validez l’intégralité de l’étape précédente</strong>. Toutefois, pour dynamiser le challenge toutes les étapes et tous les scénarios seront débloquées pour la dernière heure du challenge.</p>
<p>Nous mettons à votre disposition une <strong>plateforme</strong> sur laquelle vous pourrez <strong>obtenir les informations sur le contexte</strong> de l’entreprise et, généralement, une <strong>série de fichiers</strong> qui semblent appropriés pour avancer dans l’investigation.</p>
<p>La <strong>validation d’une étape</strong> se fait sur la plateforme, après avoir analysé les informations fournies, en <strong>répondant à des questions</strong> plus ou moins précises. Il s’agit le plus souvent des <strong>mots-clefs</strong> que l’on placerait dans un <strong>rapport</strong>.</p>
<p>Pour vous débloquer ou accélérer votre investigation, vous pouvez accéder à quelques <strong><em>indices</em></strong>, en échange d’une décote sur votre score d’un certain nombre de points préalablement affichés.</p>
<h3>Calcul des points, bonus, malus et classement</h3>
<p>Chaque équipe dispose d’un <strong>compteur de points</strong> dans l’intervalle ]-∞;+∞[ (aux détails techniques près), à partir duquel <strong>le classement est établi</strong>.</p>
<p>Vous <strong>perdez des points</strong> en <strong>dévoilant des indices</strong>, en <strong>demandant des propositions de réponses</strong> en remplacement de certains champs de texte, ou en <strong>essayant un trop grand nombre de fois une réponse</strong>.</p>
<p>Le nombre de points que vous fait perdre un indice dépend habituellement de l’aide qu’il vous apportera et est indiqué avant de le dévoiler, car il peut fluctuer en fonction de l’avancement du challenge.</p>
<p>Pour chaque champ de texte, vous disposez de 10 tentatives avant de perdre des points (vous perdez les points même si vous ne validez pas l’étape) pour chaque tentative supplémentaire : -0,25&nbsp;point entre 11 et 20, -0,5 entre 21 et 30, -0,75 entre 31 et 40,&nbsp;…</p>
<p>La seule manière de <strong>gagner des points</strong> est de <strong>valider une étape d’un scénario dans son intégralité</strong>. Le nombre de points gagnés <strong>dépend de la difficulté théorique</strong> de l’étape ainsi que <strong>d’éventuels bonus</strong>. Un bonus de <strong>10&nbsp;%</strong> est accordé à la première équipe qui valide une étape. D’<strong>autres bonus</strong> peuvent ponctuer le challenge, détaillé dans la partie suivante.</p>
<p>Le classement est établi par équipe, selon le nombre de points récoltés et perdus par tous les membres. En cas d’égalité au score, les équipes sont départagées en fonction de leur ordre d’arrivée à ce score.</p>
<h3>Temps forts</h3>
<p>Le challenge <em>forensic</em> est jalonné de plusieurs temps forts durant lesquels <strong>certains calculs</strong> détaillés dans la partie précédente <strong>peuvent être altérés</strong>. L’équipe d’animation du challenge vous <strong>avertira</strong> environ <strong>15 minutes avant</strong> le début de la modification.</p>
<p>Chaque modification se répercute instantanément dans votre interface, attendez simplement qu’elle apparaisse afin d’être certain d’en bénéficier. Un compte à rebours est généralement affiché sur les écrans pour indiquer la fin d’un temps fort. La fin d’application d’un bonus est déterminé par l’heure d’arrivée de votre demande sur nos serveurs.</p>
<p>Sans y être limité ou assuré, sachez que durant les précédentes éditions du challenge <em>forensic</em>, nous avons par exemple : <strong>doublé les points</strong> de défis peu tentés, <strong>doublé les points de tous les défis</strong> pendant 30 minutes, <strong>réduit le coût des indices</strong> pendant 15 minutes, etc.</p>
<p></p>
<p>Tous les étudiants de la majeure Système, Réseaux et Sécurité de l’ÉPITA, son équipe enseignante ainsi que le commandement de la cyberdéfense vous souhaitent bon courage pour cette nouvelle édition du challenge !</p>`,
YourMission: `<h4>Bienvenue au challenge forensic&nbsp;!</h4>
<p>Vous voici aujourd'hui dans la peau de <strong>spécialistes en investigation numérique</strong>. Vous avez à votre disposition une vingtaine de scénarios différents dans lesquels vous devrez faire les différentes étapes <strong>de la caractérisation d’une réponse à incident</strong>.</p>
<p>Chaque scénario est découpé en 5 grandes <strong>étapes de difficulté croissante</strong>. Un certain nombre de points est attribué à chaque étape, avec un processus de validation automatique.</p>
<p>Un classement est établi en temps réel, tenant compte des différents bonus, en fonction du nombre de points de chaque équipe.</p>`,
})
}
func reset(c *gin.Context) {
func reset(_ httprouter.Params, body []byte) (interface{}, error) {
var m map[string]string
err := c.ShouldBindJSON(&m)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
if err := json.Unmarshal(body, &m); err != nil {
return nil, err
}
t, ok := m["type"]
if !ok {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Field type not found"})
if t, ok := m["type"]; !ok {
return nil, errors.New("Field type not found")
} else if t == "teams" {
return true, fic.ResetTeams()
} else if t == "challenges" {
return true, fic.ResetExercices()
} else if t == "game" {
return true, fic.ResetGame()
} else {
return nil, errors.New("Unknown reset type")
}
switch t {
case "teams":
err = fic.ResetTeams()
case "challenges":
err = fic.ResetExercices()
case "game":
err = fic.ResetGame()
case "annexes":
err = fic.ResetAnnexes()
case "settings":
err = ResetSettings()
case "challengeInfo":
err = ResetChallengeInfo()
default:
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Unknown reset type"})
return
}
if err != nil {
log.Printf("Unable to reset (type=%q): %s", t, err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to performe the reset: %s", err.Error())})
return
}
c.JSON(http.StatusOK, true)
}

admin/api/stats.go

@ -0,0 +1,44 @@
package api
import (
"fmt"
"srs.epita.fr/fic-server/libfic"
)
type statsTheme struct {
SolvedByLevel []int `json:"solvedByLevel"`
}
type stats struct {
Themes map[string]statsTheme `json:"themes"`
TryRank []int64 `json:"tryRank"`
}
func genStats() (interface{}, error) {
ret := map[string]statsTheme{}
if themes, err := fic.GetThemes(); err != nil {
return nil, err
} else {
for _, theme := range themes {
if exercices, err := theme.GetExercices(); err != nil {
return nil, err
} else {
exos := map[string]fic.ExportedExercice{}
for _, exercice := range exercices {
exos[fmt.Sprintf("%d", exercice.Id)] = fic.ExportedExercice{
exercice.Title,
exercice.Gain,
exercice.Coefficient,
exercice.SolvedCount(),
exercice.TriedTeamCount(),
}
}
ret[fmt.Sprintf("%d", theme.Id)] = statsTheme{}
}
}
return ret, nil
}
}

@ -1,411 +0,0 @@
package api
import (
"fmt"
"log"
"net/http"
"net/url"
"os"
"path"
"reflect"
"strings"
"srs.epita.fr/fic-server/admin/generation"
"srs.epita.fr/fic-server/admin/sync"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
"go.uber.org/multierr"
)
var lastSyncError = ""
func flatifySyncErrors(errs error) (ret []string) {
for _, err := range multierr.Errors(errs) {
ret = append(ret, err.Error())
}
return
}
func declareSyncRoutes(router *gin.RouterGroup) {
apiSyncRoutes := router.Group("/sync")
// Return the global sync status
apiSyncRoutes.GET("/status", func(c *gin.Context) {
syncMtd := "Disabled"
if sync.GlobalImporter != nil {
syncMtd = sync.GlobalImporter.Kind()
}
var syncId *string
syncType := ""
if sync.GlobalImporter != nil {
syncId = sync.GlobalImporter.Id()
syncType = reflect.TypeOf(sync.GlobalImporter).Name()
}
c.JSON(http.StatusOK, gin.H{
"sync-type": syncType,
"sync-id": syncId,
"sync": syncMtd,
"pullMutex": !sync.OneGitPullStatus(),
"syncMutex": !sync.OneDeepSyncStatus() && !sync.OneThemeDeepSyncStatus(),
"progress": sync.DeepSyncProgress,
"lastError": lastSyncError,
})
})
// Base sync checks if the local directory is in sync with remote one.
apiSyncRoutes.POST("/base", func(c *gin.Context) {
err := sync.GlobalImporter.Sync()
if err != nil {
lastSyncError = err.Error()
c.JSON(http.StatusExpectationFailed, gin.H{"errmsg": err.Error()})
} else {
lastSyncError = ""
c.JSON(http.StatusOK, true)
}
})
// Speedy sync performs a recursive synchronization without importing files.
apiSyncRoutes.POST("/speed", func(c *gin.Context) {
st := sync.SpeedySyncDeep(sync.GlobalImporter)
sync.EditDeepReport(&st, false)
c.JSON(http.StatusOK, st)
})
// Deep sync: a fully recursive synchronization (can be limited by theme).
apiSyncRoutes.POST("/deep", func(c *gin.Context) {
r := sync.SyncDeep(sync.GlobalImporter)
lastSyncError = ""
c.JSON(http.StatusOK, r)
})
apiSyncRoutes.POST("/local-diff", APIDiffDBWithRemote)
apiSyncDeepRoutes := apiSyncRoutes.Group("/deep/:thid")
apiSyncDeepRoutes.Use(ThemeHandler)
// Special route to handle standalone exercices
apiSyncRoutes.POST("/deep/0", func(c *gin.Context) {
var st []string
for _, se := range multierr.Errors(sync.SyncThemeDeep(sync.GlobalImporter, &fic.StandaloneExercicesTheme, 0, 250, nil)) {
st = append(st, se.Error())
}
sync.EditDeepReport(&sync.SyncReport{Exercices: st}, false)
sync.DeepSyncProgress = 255
lastSyncError = ""
c.JSON(http.StatusOK, st)
})
apiSyncDeepRoutes.POST("", func(c *gin.Context) {
theme := c.MustGet("theme").(*fic.Theme)
exceptions := sync.LoadThemeException(sync.GlobalImporter, theme)
var st []string
for _, se := range multierr.Errors(sync.SyncThemeDeep(sync.GlobalImporter, theme, 0, 250, exceptions)) {
st = append(st, se.Error())
}
sync.EditDeepReport(&sync.SyncReport{Themes: map[string][]string{theme.Name: st}}, false)
sync.DeepSyncProgress = 255
lastSyncError = ""
c.JSON(http.StatusOK, st)
})
// Auto sync: to use with continuous deployment, in a development env
apiSyncRoutes.POST("/auto/*p", autoSync)
// Themes
apiSyncRoutes.POST("/fixurlids", fixAllURLIds)
apiSyncRoutes.POST("/themes", func(c *gin.Context) {
_, errs := sync.SyncThemes(sync.GlobalImporter)
lastSyncError = ""
c.JSON(http.StatusOK, flatifySyncErrors(errs))
})
apiSyncThemesRoutes := apiSyncRoutes.Group("/themes/:thid")
apiSyncThemesRoutes.Use(ThemeHandler)
apiSyncThemesRoutes.POST("/fixurlid", func(c *gin.Context) {
theme := c.MustGet("theme").(*fic.Theme)
if theme.FixURLId() {
v, err := theme.Update()
if err != nil {
log.Println("Unable to UpdateTheme after fixurlid:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when saving the theme."})
return
}
c.JSON(http.StatusOK, v)
} else {
c.AbortWithStatusJSON(http.StatusOK, 0)
}
})
// Exercices
declareSyncExercicesRoutes(apiSyncRoutes)
declareSyncExercicesRoutes(apiSyncThemesRoutes)
// Videos sync imports resolution.mp4 from path stored in database.
apiSyncRoutes.POST("/videos", func(c *gin.Context) {
exercices, err := fic.GetExercices()
if err != nil {
log.Println("Unable to GetExercices:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve exercices list."})
return
}
for _, e := range exercices {
if len(e.VideoURI) == 0 || !strings.HasPrefix(e.VideoURI, "$RFILES$/") {
continue
}
vpath, err := url.PathUnescape(strings.TrimPrefix(e.VideoURI, "$RFILES$/"))
if err != nil {
c.JSON(http.StatusExpectationFailed, gin.H{"errmsg": fmt.Sprintf("Unable to perform URL unescape: %s", err.Error())})
return
}
_, err = sync.ImportFile(sync.GlobalImporter, vpath, func(filePath, URI string) (interface{}, error) {
e.VideoURI = path.Join("$FILES$", strings.TrimPrefix(filePath, fic.FilesDir))
return e.Update()
})
if err != nil {
c.JSON(http.StatusExpectationFailed, gin.H{"errmsg": err.Error()})
return
}
}
c.JSON(http.StatusOK, true)
})
// Remove soluces from the database.
apiSyncRoutes.POST("/drop_soluces", func(c *gin.Context) {
exercices, err := fic.GetExercices()
if err != nil {
log.Println("Unable to GetExercices:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve exercices list."})
return
}
var errs error
for _, e := range exercices {
// Remove any published video
if len(e.VideoURI) > 0 && strings.HasPrefix(e.VideoURI, "$FILES$") {
vpath := path.Join(fic.FilesDir, strings.TrimPrefix(e.VideoURI, "$FILES$/"))
err = os.Remove(vpath)
if err != nil {
errs = multierr.Append(errs, fmt.Errorf("unable to delete published video (%q): %w", e.VideoURI, err))
}
}
// Clean the database
if len(e.VideoURI) > 0 || len(e.Resolution) > 0 {
e.VideoURI = ""
e.Resolution = ""
_, err = e.Update()
if err != nil {
errs = multierr.Append(errs, fmt.Errorf("unable to update exercice (%d: %s): %w", e.Id, e.Title, err))
}
}
}
if errs != nil {
c.JSON(http.StatusInternalServerError, gin.H{"errmsg": flatifySyncErrors(errs)})
} else {
c.JSON(http.StatusOK, true)
}
})
}
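A standalone sketch (not part of this changeset) of the path handling done by the "/videos" handler above: a VideoURI prefixed with "$RFILES$/" is URL-unescaped to obtain the remote path handed to sync.ImportFile, and the imported file's on-disk location under fic.FilesDir is re-rooted under "$FILES$" before being written back to the exercice. The filesDir value and sample paths below are made up for illustration.

package main

import (
	"fmt"
	"net/url"
	"path"
	"strings"
)

// remotePath extracts the importer-side path from a "$RFILES$/" URI.
func remotePath(videoURI string) (string, error) {
	return url.PathUnescape(strings.TrimPrefix(videoURI, "$RFILES$/"))
}

// publishedURI rebuilds the URI stored in the database once the file has
// been imported to importedPath (somewhere below filesDir).
func publishedURI(importedPath, filesDir string) string {
	return path.Join("$FILES$", strings.TrimPrefix(importedPath, filesDir))
}

func main() {
	rp, _ := remotePath("$RFILES$/theme1/ex2/resolution%20finale.mp4")
	fmt.Println(rp) // theme1/ex2/resolution finale.mp4

	fmt.Println(publishedURI("/srv/fic/files/abc123/resolution.mp4", "/srv/fic/files"))
	// $FILES$/abc123/resolution.mp4
}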
func declareSyncExercicesRoutes(router *gin.RouterGroup) {
router.POST("/exercices", func(c *gin.Context) {
theme := c.MustGet("theme").(*fic.Theme)
exceptions := sync.LoadThemeException(sync.GlobalImporter, theme)
_, errs := sync.SyncExercices(sync.GlobalImporter, theme, exceptions)
c.JSON(http.StatusOK, flatifySyncErrors(errs))
})
apiSyncExercicesRoutes := router.Group("/exercices/:eid")
apiSyncExercicesRoutes.Use(ExerciceHandler)
apiSyncExercicesRoutes.POST("", func(c *gin.Context) {
theme := c.MustGet("theme").(*fic.Theme)
exercice := c.MustGet("exercice").(*fic.Exercice)
exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil)
_, _, _, errs := sync.SyncExercice(sync.GlobalImporter, theme, exercice.Path, nil, exceptions)
c.JSON(http.StatusOK, flatifySyncErrors(errs))
})
apiSyncExercicesRoutes.POST("/files", func(c *gin.Context) {
exercice := c.MustGet("exercice").(*fic.Exercice)
theme := c.MustGet("theme").(*fic.Theme)
exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil)
c.JSON(http.StatusOK, flatifySyncErrors(sync.ImportExerciceFiles(sync.GlobalImporter, exercice, exceptions)))
})
apiSyncExercicesRoutes.POST("/fixurlid", func(c *gin.Context) {
exercice := c.MustGet("exercice").(*fic.Exercice)
if exercice.FixURLId() {
v, err := exercice.Update()
if err != nil {
log.Println("Unable to UpdateExercice after fixurlid:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred while saving the exercice."})
return
}
c.JSON(http.StatusOK, v)
} else {
c.AbortWithStatusJSON(http.StatusOK, 0)
}
})
apiSyncExercicesRoutes.POST("/hints", func(c *gin.Context) {
exercice := c.MustGet("exercice").(*fic.Exercice)
theme := c.MustGet("theme").(*fic.Theme)
exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil)
_, errs := sync.SyncExerciceHints(sync.GlobalImporter, exercice, sync.ExerciceFlagsMap(sync.GlobalImporter, exercice), exceptions)
c.JSON(http.StatusOK, flatifySyncErrors(errs))
})
apiSyncExercicesRoutes.POST("/flags", func(c *gin.Context) {
exercice := c.MustGet("exercice").(*fic.Exercice)
theme := c.MustGet("theme").(*fic.Theme)
exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil)
_, errs := sync.SyncExerciceFlags(sync.GlobalImporter, exercice, exceptions)
_, herrs := sync.SyncExerciceHints(sync.GlobalImporter, exercice, sync.ExerciceFlagsMap(sync.GlobalImporter, exercice), exceptions)
c.JSON(http.StatusOK, flatifySyncErrors(multierr.Append(errs, herrs)))
})
}
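The two calls to declareSyncExercicesRoutes above attach the same exercice sub-routes to both the root sync group and the per-theme group. A minimal gin sketch of that pattern, with purely illustrative route names:

package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

func declareChildRoutes(parent *gin.RouterGroup) {
	parent.GET("/items", func(c *gin.Context) {
		c.JSON(http.StatusOK, []string{"a", "b"})
	})
}

func main() {
	r := gin.Default()
	api := r.Group("/api")
	themed := api.Group("/themes/:thid")

	// Same handlers, two mount points: /api/items and /api/themes/:thid/items.
	declareChildRoutes(api)
	declareChildRoutes(themed)

	r.Run(":8080")
}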
// autoSync tries to perform a smart synchronization when running in a development environment.
// It syncs most modified items and deletes out-of-sync data.
// Avoid using it in a production environment.
func autoSync(c *gin.Context) {
p := strings.Split(strings.TrimPrefix(c.Params.ByName("p"), "/"), "/")
if !IsProductionEnv {
if err := sync.GlobalImporter.Sync(); err != nil {
lastSyncError = err.Error()
log.Println("Unable to sync.GI.Sync:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to perform the pull."})
return
}
lastSyncError = ""
}
themes, err := fic.GetThemes()
if err != nil {
log.Println("Unable to GetThemes:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve theme list."})
return
}
// No argument given: do a deep sync (strings.Split never returns an empty slice, so also check for an empty first element)
if len(p) == 0 || p[0] == "" {
if !IsProductionEnv {
for _, theme := range themes {
theme.DeleteDeep()
}
}
st := sync.SyncDeep(sync.GlobalImporter)
c.JSON(http.StatusOK, st)
return
}
var theTheme *fic.Theme
// Find the given theme
for _, theme := range themes {
if theme.Path == p[0] {
theTheme = theme
break
}
}
if theTheme == nil {
// The theme doesn't exist locally; perhaps it hasn't been imported yet?
rThemes, err := sync.GetThemes(sync.GlobalImporter)
if err == nil {
for _, theme := range rThemes {
if theme == p[0] {
sync.SyncThemes(sync.GlobalImporter)
themes, err := fic.GetThemes()
if err == nil {
for _, theme := range themes {
if theme.Path == p[0] {
theTheme = theme
break
}
}
}
break
}
}
}
if theTheme == nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": fmt.Sprintf("Theme not found %q", p[0])})
return
}
}
if !IsProductionEnv {
exercices, err := theTheme.GetExercices()
if err == nil {
for _, exercice := range exercices {
if len(p) <= 1 || exercice.Path == path.Join(p[0], p[1]) {
exercice.DeleteDeep()
}
}
}
}
exceptions := sync.LoadThemeException(sync.GlobalImporter, theTheme)
var st []string
for _, se := range multierr.Errors(sync.SyncThemeDeep(sync.GlobalImporter, theTheme, 0, 250, exceptions)) {
st = append(st, se.Error())
}
sync.EditDeepReport(&sync.SyncReport{Themes: map[string][]string{theTheme.Name: st}}, false)
sync.DeepSyncProgress = 255
resp, err := generation.FullGeneration()
if err == nil {
defer resp.Body.Close()
}
c.JSON(http.StatusOK, st)
}
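A hedged sketch of how a development deployment hook might call the auto-sync route ("POST /auto/*p") handled by autoSync above. Only the "/auto/<theme>" suffix comes from the handler; the host, port and "/api/sync" prefix are assumptions, since the group's mount point is declared elsewhere.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Re-sync a single theme directory after a push; use "/auto/" (no theme)
	// to request a full deep sync instead.
	resp, err := http.Post("http://127.0.0.1:8081/api/sync/auto/mytheme", "application/json", nil)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("auto-sync: %s -> %s\n", resp.Status, body)
}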
func diffDBWithRemote() (map[string][]syncDiff, error) {
diffs := map[string][]syncDiff{}
themes, err := fic.GetThemesExtended()
if err != nil {
return nil, err
}
// Compare inner themes
for _, theme := range themes {
diffs[theme.Name], err = diffThemeWithRemote(theme)
if err != nil {
return nil, fmt.Errorf("Unable to diffThemeWithRemote: %w", err)
}
}
return diffs, err
}
func APIDiffDBWithRemote(c *gin.Context) {
diffs, err := diffDBWithRemote()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()})
return
}
c.JSON(http.StatusOK, diffs)
}


@ -3,639 +3,221 @@ package api
import (
"encoding/json"
"fmt"
"log"
"net/http"
"strconv"
"strings"
"time"
"srs.epita.fr/fic-server/admin/pki"
"srs.epita.fr/fic-server/libfic"
"github.com/gin-gonic/gin"
"github.com/julienschmidt/httprouter"
)
func declareTeamsRoutes(router *gin.RouterGroup) {
router.GET("/teams.json", func(c *gin.Context) {
teams, err := fic.ExportTeams(false)
if err != nil {
log.Println("Unable to ExportTeams:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during teams export."})
return
}
c.JSON(http.StatusOK, teams)
})
router.GET("/teams-members.json", func(c *gin.Context) {
teams, err := fic.ExportTeams(true)
if err != nil {
log.Println("Unable to ExportTeams:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during teams export."})
return
}
c.JSON(http.StatusOK, teams)
})
router.GET("/teams-associations.json", allAssociations)
router.GET("/teams-binding", bindingTeams)
router.GET("/teams-nginx", nginxGenTeams)
router.POST("/refine_colors", refineTeamsColors)
router.POST("/disableinactiveteams", disableInactiveTeams)
router.POST("/enableallteams", enableAllTeams)
router.GET("/teams-members-nginx", nginxGenMember)
router.GET("/teams-tries.json", func(c *gin.Context) {
tries, err := fic.GetTries(nil, nil)
if err != nil {
log.Println("Unable to GetTries:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve tries."})
return
}
c.JSON(http.StatusOK, tries)
})
router.GET("/teams", func(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during teams listing."})
return
}
c.JSON(http.StatusOK, teams)
})
router.POST("/teams", createTeam)
apiTeamsRoutes := router.Group("/teams/:tid")
apiTeamsRoutes.Use(TeamHandler)
apiTeamsRoutes.GET("/", func(c *gin.Context) {
c.JSON(http.StatusOK, c.MustGet("team").(*fic.Team))
})
apiTeamsRoutes.PUT("/", updateTeam)
apiTeamsRoutes.POST("/", addTeamMember)
apiTeamsRoutes.DELETE("/", deleteTeam)
apiTeamsRoutes.GET("/score-grid.json", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
sg, err := team.ScoreGrid()
if err != nil {
log.Printf("Unable to get ScoreGrid(tid=%d): %s", team.Id, err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during score grid calculation."})
return
}
c.JSON(http.StatusOK, sg)
})
apiTeamsPublicRoutes := router.Group("/teams/:tid")
apiTeamsPublicRoutes.Use(TeamPublicHandler)
apiTeamsPublicRoutes.GET("/my.json", func(c *gin.Context) {
var team *fic.Team
if t, ok := c.Get("team"); ok && t != nil {
team = t.(*fic.Team)
}
tfile, err := fic.MyJSONTeam(team, true)
if err != nil {
log.Println("Unable to get MyJSONTeam:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during team JSON generation."})
return
}
c.JSON(http.StatusOK, tfile)
})
apiTeamsPublicRoutes.GET("/wait.json", func(c *gin.Context) {
var team *fic.Team
if t, ok := c.Get("team"); ok && t != nil {
team = t.(*fic.Team)
}
tfile, err := fic.MyJSONTeam(team, false)
if err != nil {
log.Println("Unable to get MyJSONTeam:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during team JSON generation."})
return
}
c.JSON(http.StatusOK, tfile)
})
apiTeamsPublicRoutes.GET("/stats.json", func(c *gin.Context) {
var team *fic.Team
if t, ok := c.Get("team"); ok && t != nil {
team = t.(*fic.Team)
}
if team != nil {
stats, err := team.GetStats()
if err != nil {
log.Println("Unable to get GetStats:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during stats calculation."})
return
}
c.JSON(http.StatusOK, stats)
} else {
stats, err := fic.GetTeamsStats(nil)
if err != nil {
log.Println("Unable to get GetTeamsStats:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during global stats calculation."})
return
}
c.JSON(http.StatusOK, stats)
}
})
apiTeamsRoutes.GET("/history.json", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
history, err := team.GetHistory()
if err != nil {
log.Println("Unable to get GetHistory:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during history calculation."})
return
}
c.JSON(http.StatusOK, history)
})
apiTeamsRoutes.PATCH("/history.json", updateHistory)
apiTeamsRoutes.DELETE("/history.json", delHistory)
apiTeamsPublicRoutes.GET("/tries", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
tries, err := fic.GetTries(team, nil)
if err != nil {
log.Println("Unable to GetTries:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during tries calculation."})
return
}
c.JSON(http.StatusOK, tries)
})
apiTeamsRoutes.GET("/members", func(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
members, err := team.GetMembers()
if err != nil {
log.Println("Unable to GetMembers:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during members retrieval."})
return
}
c.JSON(http.StatusOK, members)
})
apiTeamsRoutes.POST("/members", addTeamMember)
apiTeamsRoutes.PUT("/members", setTeamMember)
declareTeamsPasswordRoutes(apiTeamsRoutes)
declareTeamClaimsRoutes(apiTeamsRoutes)
declareTeamCertificateRoutes(apiTeamsRoutes)
// Import teams from cyberrange
router.POST("/cyberrange-teams.json", importTeamsFromCyberrange)
}
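A hedged sketch of calling the "/cyberrange-teams.json" import route declared above: importTeamsFromCyberrange expects a multipart form with a "file" field containing the cyberrange JSON export. The base URL, prefix and file name below are assumptions.

package main

import (
	"bytes"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"os"
)

func main() {
	var buf bytes.Buffer
	w := multipart.NewWriter(&buf)

	part, err := w.CreateFormFile("file", "teams.json")
	if err != nil {
		panic(err)
	}
	f, err := os.Open("teams.json") // cyberrange export, produced elsewhere
	if err != nil {
		panic(err)
	}
	defer f.Close()
	if _, err := io.Copy(part, f); err != nil {
		panic(err)
	}
	w.Close()

	resp, err := http.Post("http://localhost:8081/api/cyberrange-teams.json", w.FormDataContentType(), &buf)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}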
func TeamHandler(c *gin.Context) {
tid, err := strconv.ParseInt(string(c.Params.ByName("tid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid team identifier"})
return
}
team, err := fic.GetTeam(tid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Team not found"})
return
}
c.Set("team", team)
c.Next()
}
func TeamPublicHandler(c *gin.Context) {
tid, err := strconv.ParseInt(string(c.Params.ByName("tid")), 10, 64)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid team identifier"})
return
}
if tid != 0 {
team, err := fic.GetTeam(tid)
if err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Team not found"})
return
}
c.Set("team", team)
} else {
c.Set("team", nil)
}
c.Next()
}
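Minimal usage sketch for the public team routes guarded by TeamPublicHandler above: requesting tid 0 stores a nil team in the context, so the stats handler falls back to fic.GetTeamsStats(nil) (global statistics), while a real team id returns that team's own stats. The base URL and route prefix are assumptions.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func fetchStats(base string, teamID int64) (string, error) {
	resp, err := http.Get(fmt.Sprintf("%s/teams/%d/stats.json", base, teamID))
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	return string(body), err
}

func main() {
	global, _ := fetchStats("http://localhost:8081/api", 0) // global stats
	team, _ := fetchStats("http://localhost:8081/api", 42)  // one team's stats
	fmt.Println(global, team)
}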
func nginxGenTeams(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithError(http.StatusInternalServerError, err)
return
}
ret := ""
for _, team := range teams {
ret += fmt.Sprintf(" if ($remote_user = \"%s\") { set $team \"%d\"; }\n", strings.ToLower(team.Name), team.Id)
}
c.String(http.StatusOK, ret)
}
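For illustration, the fragment emitted by nginxGenTeams above maps each basic-auth user ($remote_user) to a numeric $team variable. A small sketch with fabricated team names and ids, showing the exact line format produced by the loop:

package main

import (
	"fmt"
	"strings"
)

type team struct {
	id   int64
	name string
}

func main() {
	teams := []team{{1, "Alpha"}, {2, "Bravo"}}
	var b strings.Builder
	for _, t := range teams {
		fmt.Fprintf(&b, "    if ($remote_user = \"%s\") { set $team \"%d\"; }\n", strings.ToLower(t.name), t.id)
	}
	fmt.Print(b.String())
	// Output:
	//     if ($remote_user = "alpha") { set $team "1"; }
	//     if ($remote_user = "bravo") { set $team "2"; }
}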
func nginxGenMember(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithError(http.StatusInternalServerError, err)
return
}
ret := ""
for _, team := range teams {
if members, err := team.GetMembers(); err == nil {
for _, member := range members {
ret += fmt.Sprintf(" if ($remote_user = \"%s\") { set $team \"%d\"; }\n", member.Nickname, team.Id)
}
} else {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
}
c.String(http.StatusOK, ret)
}
func bindingTeams(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithError(http.StatusInternalServerError, err)
return
}
ret := ""
for _, team := range teams {
if members, err := team.GetMembers(); err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
} else {
var mbs []string
for _, member := range members {
mbs = append(mbs, fmt.Sprintf("%s %s", member.Firstname, member.Lastname))
}
ret += fmt.Sprintf("%d;%s;%s\n", team.Id, team.Name, strings.Join(mbs, ";"))
}
}
c.String(http.StatusOK, ret)
}
type teamAssociation struct {
Association string `json:"association"`
TeamId int64 `json:"team_id"`
}
func allAssociations(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithError(http.StatusInternalServerError, err)
return
}
var ret []teamAssociation
for _, team := range teams {
assocs, err := pki.GetTeamAssociations(TeamsDir, team.Id)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
for _, a := range assocs {
ret = append(ret, teamAssociation{a, team.Id})
}
}
c.JSON(http.StatusOK, ret)
}
func importTeamsFromCyberrange(c *gin.Context) {
file, err := c.FormFile("file")
if err != nil {
c.JSON(http.StatusBadRequest, gin.H{"errmsg": "Failed to get file: " + err.Error()})
return
}
src, err := file.Open()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Failed to open file: " + err.Error()})
return
}
defer src.Close()
var ut []fic.CyberrangeTeamBase
err = json.NewDecoder(src).Decode(&fic.CyberrangeAPIResponse{Data: &ut})
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
teams, err := fic.GetTeams()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to retrieve the current team list: %s", err.Error())})
return
}
for _, crteam := range ut {
var exist_team *fic.Team
for _, team := range teams {
if team.Name == crteam.Name || team.ExternalId == crteam.UUID {
exist_team = team
break
}
}
if exist_team != nil {
exist_team.Name = crteam.Name
exist_team.ExternalId = crteam.UUID
_, err = exist_team.Update()
} else {
exist_team, err = fic.CreateTeam(crteam.Name, fic.RandomColor().ToRGB(), crteam.UUID)
}
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to add or update team %v: %s", crteam, err.Error())})
return
}
// Import members, unless the "nomembers" query parameter is set
if c.DefaultQuery("nomembers", "0") == "0" && len(crteam.Members) > 0 {
exist_team.ClearMembers()
for _, member := range crteam.Members {
_, err = exist_team.AddMember(member.Name, "", member.Nickname, exist_team.Name)
if err != nil {
log.Printf("Unable to add member %q to team %s (tid=%d): %s", member.UUID, exist_team.Name, exist_team.Id, err.Error())
func init() {
router.GET("/api/teams.json", apiHandler(
func(httprouter.Params,[]byte) (interface{}, error) {
return fic.ExportTeams() }))
router.GET("/api/teams-stats.json", apiHandler(
func(httprouter.Params,[]byte) (interface{}, error) {
if teams, err := fic.GetTeams(); err != nil {
return "", err
} else {
ret := map[int64]interface{}{}
if stats, err := fic.GetTeamsStats(nil); err != nil {
return ret, err
} else {
ret[0] = stats
}
for _, team := range teams {
if stats, err := team.GetStats(); err != nil {
return ret, err
} else {
ret[team.Id] = stats
}
}
return ret, nil
}
}))
router.GET("/api/teams-binding", apiHandler(
func(httprouter.Params,[]byte) (interface{}, error) {
return bindingTeams() }))
router.GET("/api/teams-nginx", apiHandler(
func(httprouter.Params,[]byte) (interface{}, error) {
return nginxGenTeam() }))
router.GET("/api/teams-nginx-members", apiHandler(
func(httprouter.Params,[]byte) (interface{}, error) {
return nginxGenMember() }))
router.GET("/api/teams/", apiHandler(
func(httprouter.Params,[]byte) (interface{}, error) {
return fic.GetTeams() }))
router.POST("/api/teams/", apiHandler(createTeam))
router.GET("/api/teams/:tid/", apiHandler(teamHandler(
func(team fic.Team, _ []byte) (interface{}, error) {
return team, nil })))
router.PUT("/api/teams/:tid/", apiHandler(teamHandler(updateTeam)))
router.POST("/api/teams/:tid/", apiHandler(teamHandler(addTeamMember)))
router.DELETE("/api/teams/:tid/", apiHandler(teamHandler(
func(team fic.Team, _ []byte) (interface{}, error) {
return team.Delete() })))
router.GET("/api/teams/:tid/my.json", apiHandler(teamPublicHandler(
func(team *fic.Team, _ []byte) (interface{}, error) {
return fic.MyJSONTeam(team, true) })))
router.GET("/api/teams/:tid/wait.json", apiHandler(teamPublicHandler(
func(team *fic.Team, _ []byte) (interface{}, error) {
return fic.MyJSONTeam(team, false) })))
router.GET("/api/teams/:tid/stats.json", apiHandler(teamPublicHandler(
func(team *fic.Team, _ []byte) (interface{}, error) {
if team != nil {
return team.GetStats()
} else {
return fic.GetTeamsStats(nil)
}
})))
router.GET("/api/teams/:tid/tries", apiHandler(teamPublicHandler(
func(team *fic.Team, _ []byte) (interface{}, error) {
return fic.GetTries(team, nil) })))
router.GET("/api/teams/:tid/members", apiHandler(teamHandler(
func(team fic.Team, _ []byte) (interface{}, error) {
return team.GetMembers() })))
router.POST("/api/teams/:tid/members", apiHandler(teamHandler(addTeamMember)))
router.PUT("/api/teams/:tid/members", apiHandler(teamHandler(setTeamMember)))
router.GET("/api/teams/:tid/name", apiHandler(teamHandler(
func(team fic.Team, _ []byte) (interface{}, error) {
return team.InitialName, nil })))
router.GET("/api/members/:mid/team", apiHandler(dispMemberTeam))
router.GET("/api/members/:mid/team/name", apiHandler(dispMemberTeamName))
}
func nginxGenMember() (string, error) {
if teams, err := fic.GetTeams(); err != nil {
return "", err
} else {
ret := ""
for _, team := range teams {
if members, err := team.GetMembers(); err == nil {
for _, member := range members {
ret += fmt.Sprintf(" if ($remote_user = \"%s\") { set $team \"%s\"; }\n", member.Nickname, team.InitialName)
}
} else {
return "", err
}
}
}
teams, err = fic.GetTeams()
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to retrieve the team list after import: %s", err.Error())})
return
return ret, nil
}
c.JSON(http.StatusOK, teams)
}
func createTeam(c *gin.Context) {
var ut fic.Team
err := c.ShouldBindJSON(&ut)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
}
func nginxGenTeam() (string, error) {
if teams, err := fic.GetTeams(); err != nil {
return "", err
} else {
ret := ""
for _, team := range teams {
ret += fmt.Sprintf(" if ($ssl_client_s_dn ~ \"/C=FR/ST=France/O=Epita/OU=SRS/CN=%s\") { set $team \"%s\"; }\n", team.InitialName, team.InitialName)
}
if ut.Color == 0 {
ut.Color = fic.RandomColor().ToRGB()
return ret, nil
}
team, err := fic.CreateTeam(strings.TrimSpace(ut.Name), ut.Color, ut.ExternalId)
if err != nil {
log.Println("Unable to CreateTeam:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred during team creation."})
return
}
c.JSON(http.StatusOK, team)
}
func updateTeam(c *gin.Context) {
team := c.MustGet("team").(*fic.Team)
func bindingTeams() (string, error) {
if teams, err := fic.GetTeams(); err != nil {
return "", err
} else {
ret := ""
for _, team := range teams {
if members, err := team.GetMembers(); err != nil {
return "", err
} else {
var mbs []string
for _, member := range members {
mbs = append(mbs, fmt.Sprintf("%s %s", member.Firstname, member.Lastname))
}
ret += fmt.Sprintf("%d;%s;%s\n", team.Id, team.Name, strings.Join(mbs, ";"))
}
}
return ret, nil
}
}
type uploadedTeam struct {
Name string
Color uint32
}
type uploadedMember struct {
Firstname string
Lastname string
Nickname string
Company string
}
func createTeam(_ httprouter.Params, body []byte) (interface{}, error) {
var ut uploadedTeam
if err := json.Unmarshal(body, &ut); err != nil {
return nil, err
}
return fic.CreateTeam(strings.TrimSpace(ut.Name), ut.Color)
}
func updateTeam(team fic.Team, body []byte) (interface{}, error) {
var ut fic.Team
err := c.ShouldBindJSON(&ut)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()})
return
if err := json.Unmarshal(body, &ut); err != nil {
return nil, err
}
ut.Id = team.Id
if ut.Password != nil && *ut.Password == "" {
ut.Password = nil
if _, err := ut.Update(); err != nil {
return nil, err
}
_, err = ut.Update()
if err != nil {
log.Println("Unable to updateTeam:", err.Error())
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurred while updating the team."})
return
}
c.JSON(http.StatusOK, ut)
return ut, nil
}
func refineTeamsColors(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithError(http.StatusInternalServerError, err)
return
}
for i, team := range teams {
team.Color = fic.HSL{
H: float64(i)/float64(len(teams)) - 0.2,
S: float64(1) / float64(1+i%2),
L: 0.25 + float64(0.5)/float64(1+i%3),
}.ToRGB()
_, err = team.Update()
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
}
c.JSON(http.StatusOK, teams)
}
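Illustrative sketch of the colour-spreading formula used by refineTeamsColors above: hue advances with the team index while saturation and lightness alternate on short cycles, keeping neighbouring teams visually distinct. fic.HSL and ToRGB are replaced by plain printing here.

package main

import "fmt"

func main() {
	n := 6 // pretend there are 6 teams
	for i := 0; i < n; i++ {
		h := float64(i)/float64(n) - 0.2
		s := float64(1) / float64(1+i%2)        // alternates 1.0, 0.5
		l := 0.25 + float64(0.5)/float64(1+i%3) // cycles 0.75, 0.50, ~0.42
		fmt.Printf("team %d -> H=%.2f S=%.2f L=%.2f\n", i, h, s, l)
	}
}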
func disableInactiveTeams(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithError(http.StatusInternalServerError, err)
return
}
for _, team := range teams {
var serials []uint64
serials, err = pki.GetTeamSerials(TeamsDir, team.Id)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
var assocs []string
assocs, err = pki.GetTeamAssociations(TeamsDir, team.Id)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
if len(serials) == 0 && len(assocs) == 0 {
if team.Active {
team.Active = false
team.Update()
}
} else if !team.Active {
team.Active = true
team.Update()
}
}
c.JSON(http.StatusOK, true)
}
func enableAllTeams(c *gin.Context) {
teams, err := fic.GetTeams()
if err != nil {
log.Println("Unable to GetTeams:", err.Error())
c.AbortWithError(http.<