diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..fedf55d1 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,33 @@ +admin/admin +checker/checker +dashboard/dashboard +evdist/evdist +generator/generator +receiver/receiver +repochecker/repochecker +frontend/fic/build +frontend/fic/node_modules +qa/ui/build +qa/ui/node_modules +fickit-backend-initrd.img +fickit-backend-kernel +fickit-backend-squashfs.img +fickit-backend-state +fickit-frontend-initrd.img +fickit-frontend-kernel +fickit-frontend-squashfs.img +fickit-frontend-state +fickit-prepare-initrd.img +fickit-prepare-kernel +fickit-update-initrd.img +fickit-update-kernel +DASHBOARD +FILES +PKI +REMOTE +repochecker/*.so +SETTINGS +SETTINGSDIST +submissions +TEAMS +vendor \ No newline at end of file diff --git a/.drone-manifest-fic-admin.yml b/.drone-manifest-fic-admin.yml new file mode 100644 index 00000000..fc1154f7 --- /dev/null +++ b/.drone-manifest-fic-admin.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-admin:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-checker.yml b/.drone-manifest-fic-checker.yml new file mode 100644 index 00000000..13721117 --- /dev/null +++ b/.drone-manifest-fic-checker.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - 
image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-checker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-dashboard.yml b/.drone-manifest-fic-dashboard.yml new file mode 100644 index 00000000..4f74d234 --- /dev/null +++ b/.drone-manifest-fic-dashboard.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-dashboard:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-evdist.yml b/.drone-manifest-fic-evdist.yml new file mode 100644 index 00000000..70b1e5c5 --- /dev/null +++ b/.drone-manifest-fic-evdist.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: 
nemunaire/fic-evdist:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-frontend-ui.yml b/.drone-manifest-fic-frontend-ui.yml new file mode 100644 index 00000000..0fc4a3a6 --- /dev/null +++ b/.drone-manifest-fic-frontend-ui.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-frontend-ui:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-generator.yml b/.drone-manifest-fic-generator.yml new file mode 100644 index 00000000..a2b2443f --- /dev/null +++ b/.drone-manifest-fic-generator.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-generator:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-get-remote-files.yml b/.drone-manifest-fic-get-remote-files.yml new file mode 100644 index 
00000000..0f64b8c1 --- /dev/null +++ b/.drone-manifest-fic-get-remote-files.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-get-remote-files:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-nginx.yml b/.drone-manifest-fic-nginx.yml new file mode 100644 index 00000000..cbd2dcce --- /dev/null +++ b/.drone-manifest-fic-nginx.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-nginx:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-qa.yml b/.drone-manifest-fic-qa.yml new file mode 100644 index 00000000..5a158970 --- /dev/null +++ b/.drone-manifest-fic-qa.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: 
nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-qa:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-receiver.yml b/.drone-manifest-fic-receiver.yml new file mode 100644 index 00000000..eda2fe25 --- /dev/null +++ b/.drone-manifest-fic-receiver.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fic-receiver:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fic-repochecker.yml b/.drone-manifest-fic-repochecker.yml new file mode 100644 index 00000000..9b239931 --- /dev/null +++ b/.drone-manifest-fic-repochecker.yml @@ -0,0 +1,22 @@ +image: nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: 
nemunaire/fic-repochecker:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone-manifest-fickit-deploy.yml b/.drone-manifest-fickit-deploy.yml new file mode 100644 index 00000000..bfa37052 --- /dev/null +++ b/.drone-manifest-fickit-deploy.yml @@ -0,0 +1,22 @@ +image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} +{{#if build.tags}} +tags: +{{#each build.tags}} + - {{this}} +{{/each}} +{{/if}} +manifests: + - image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 + platform: + architecture: amd64 + os: linux + - image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 + platform: + architecture: arm64 + os: linux + variant: v8 + - image: nemunaire/fickit-deploy:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm + platform: + architecture: arm + os: linux + variant: v7 diff --git a/.drone.yml b/.drone.yml new file mode 100644 index 00000000..3e0e2cbc --- /dev/null +++ b/.drone.yml @@ -0,0 +1,816 @@ +--- +kind: pipeline +type: docker +name: build-amd64 + +platform: + os: linux + arch: amd64 + +workspace: + base: /go + path: src/srs.epita.fr/fic-server + +steps: + - name: get deps + image: golang:alpine + commands: + - apk --no-cache add git + - go get -v -d ./... + - mkdir deploy + + - name: build qa ui + image: node:23-alpine + commands: + - cd qa/ui + - npm install --network-timeout=100000 + - npm run build + - tar chjf ../../deploy/htdocs-qa.tar.bz2 build + + - name: vet and tests + image: golang:alpine + commands: + - apk --no-cache add build-base + - go vet -buildvcs=false -tags gitgo ./... + - go vet -buildvcs=false ./... + - go test ./... 
+ + - name: build admin + image: golang:alpine + commands: + - go build -buildvcs=false -tags gitgo -o deploy/admin-gitgo-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/admin + - go build -buildvcs=false -o deploy/admin-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/admin + - tar chjf deploy/htdocs-admin.tar.bz2 htdocs-admin + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build checker + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/checker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/checker + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build evdist + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/evdist-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/evdist + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build generator + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/generator-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/generator + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build receiver + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/receiver-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/receiver + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build frontend fic ui + image: node:23-alpine + commands: + - cd frontend/fic + - npm install --network-timeout=100000 + - npm run build + - tar chjf ../../deploy/htdocs-frontend-fic.tar.bz2 build + when: + branch: + exclude: + - master + + - name: build dashboard + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/dashboard-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/dashboard + - tar chjf deploy/htdocs-dashboard.tar.bz2 htdocs-dashboard + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - 
master + + - name: build repochecker + image: golang:alpine + commands: + - apk --no-cache add build-base + - go build -buildvcs=false --tags checkupdate -o deploy/repochecker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/repochecker + - go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-epita-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/epita + - go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-file-inspector-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/file-inspector + - go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-grammalecte-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/grammalecte + - go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-pcap-inspector-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/pcap-inspector + - go build -buildvcs=false -buildmode=plugin -o deploy/repochecker-videos-rules-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH}.so srs.epita.fr/fic-server/repochecker/videos + - grep "const version" repochecker/update.go | sed -r 's/^.*=\s*(\S.*)$/\1/' > deploy/repochecker.version + when: + branch: + exclude: + - master + + - name: build qa + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/qa-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/qa + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: docker admin + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-admin + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-admin + when: + branch: + - master + + - name: docker checker + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-checker + 
auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-checker + when: + branch: + - master + + - name: docker evdist + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-evdist + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-evdist + when: + branch: + - master + + - name: docker generator + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-generator + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-generator + when: + branch: + - master + + - name: docker receiver + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-receiver + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-receiver + when: + branch: + - master + + - name: docker frontend nginx + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-nginx + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-nginx + when: + branch: + - master + + - name: docker frontend ui + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-frontend-ui + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-frontend-ui + when: + branch: + - master + + - name: docker dashboard + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-dashboard + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + 
dockerfile: Dockerfile-dashboard + when: + branch: + - master + + - name: docker qa + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-qa + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-qa + when: + branch: + - master + + - name: docker repochecker + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-repochecker + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-repochecker + when: + branch: + - master + + - name: docker remote-scores-sync-zqds + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-remote-scores-sync-zqds + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-remote-scores-sync-zqds + when: + branch: + - master + + - name: docker remote-challenge-sync-airbus + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-remote-challenge-sync-airbus + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-remote-challenge-sync-airbus + when: + branch: + - master + + - name: docker fic-get-remote-files + failure: ignore + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-get-remote-files + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-get-remote-files + when: + branch: + - master + + - name: docker fickit-deploy + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fickit-deploy + auto_tag: true + 
auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-deploy + when: + branch: + - master + +trigger: + event: + - cron + - push + - tag + +--- +kind: pipeline +type: docker +name: build-arm64 + +platform: + os: linux + arch: arm64 + +workspace: + base: /go + path: src/srs.epita.fr/fic-server + +steps: + - name: get deps + image: golang:alpine + commands: + - apk --no-cache add git + - go get -d ./... + - mkdir deploy + + - name: build admin + image: golang:alpine + commands: + - apk --no-cache add build-base + - go build -buildvcs=false -o deploy/admin-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/admin + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build checker + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/checker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/checker + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build evdist + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/evdist-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/evdist + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build generator + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/generator-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/generator + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build receiver + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/receiver-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/receiver + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build frontend fic ui + image: node:23-alpine + commands: + - cd frontend/fic + - npm install --network-timeout=100000 + - npm run build + when: + branch: + exclude: + - master + + - name: build dashboard + image: golang:alpine + commands: + - go build 
-buildvcs=false -o deploy/dashboard-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/dashboard + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build repochecker + image: golang:alpine + commands: + - apk --no-cache add build-base + - go build -buildvcs=false --tags checkupdate -o deploy/repochecker-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/repochecker + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: build repochecker for macOS + image: golang:alpine + commands: + - apk --no-cache add build-base + - go build -buildvcs=false --tags checkupdate -o deploy/repochecker-darwin-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/repochecker + environment: + CGO_ENABLED: 0 + GOOS: darwin + GOARCH: arm64 + when: + branch: + exclude: + - master + + - name: build qa ui + image: node:23-alpine + commands: + - cd qa/ui + - npm install --network-timeout=100000 + - npm run build + - tar chjf ../../deploy/htdocs-qa.tar.bz2 build + when: + branch: + exclude: + - master + + - name: build qa + image: golang:alpine + commands: + - go build -buildvcs=false -o deploy/qa-${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} srs.epita.fr/fic-server/qa + environment: + CGO_ENABLED: 0 + when: + branch: + exclude: + - master + + - name: docker admin + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-admin + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-admin + when: + branch: + - master + + - name: docker checker + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-checker + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-checker + when: + branch: + - master + + - name: docker evdist + image: plugins/docker + settings: + username: + 
from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-evdist + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-evdist + when: + branch: + - master + + - name: docker generator + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-generator + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-generator + when: + branch: + - master + + - name: docker fic-get-remote-files + failure: ignore + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-get-remote-files + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-get-remote-files + when: + branch: + - master + + - name: docker receiver + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-receiver + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-receiver + when: + branch: + - master + + - name: docker frontend nginx + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-nginx + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-nginx + when: + branch: + - master + + - name: docker dashboard + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-dashboard + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-dashboard + when: + branch: + - master + + - name: docker qa + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + 
from_secret: docker_password + repo: nemunaire/fic-qa + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-qa + when: + branch: + - master + + - name: docker repochecker + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-repochecker + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-repochecker + when: + branch: + - master + +trigger: + event: + - cron + - push + - tag + +--- +kind: pipeline +name: docker-manifest +steps: + - name: publish admin + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-admin.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish checker + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-checker.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish evdist + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-evdist.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish generator + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-generator.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish receiver + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-receiver.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish frontend nginx + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-nginx.yml + username: + from_secret: docker_username + password: + from_secret: 
docker_password + + - name: publish frontend ui + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-frontend-ui.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish dashboard + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-dashboard.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish repochecker + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-repochecker.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: publish qa + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fic-qa.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + + - name: docker fic-get-remote-files + failure: ignore + image: plugins/docker + settings: + username: + from_secret: docker_username + password: + from_secret: docker_password + repo: nemunaire/fic-get-remote-files + auto_tag: true + auto_tag_suffix: ${DRONE_STAGE_OS}-${DRONE_STAGE_ARCH} + dockerfile: Dockerfile-get-remote-files + when: + branch: + - master + + - name: publish fickit-deploy + image: plugins/manifest + settings: + auto_tag: true + ignore_missing: true + spec: .drone-manifest-fickit-deploy.yml + username: + from_secret: docker_username + password: + from_secret: docker_password + +trigger: + event: + - push + - tag + +depends_on: +- build-amd64 +- build-arm64 diff --git a/.gitignore b/.gitignore index 55080fa9..4327ccd0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,43 @@ +vendor/ +DASHBOARD/ +FILES/ +PKI/ +REMOTE/ +SETTINGS/ +SETTINGSDIST/ +TEAMS/ +submissions/ +admin/sync/README.html +fickit-boot-cmdline +fickit-boot-initrd.img +fickit-boot-kernel fickit-backend-cmdline fickit-backend-initrd.img 
+fickit-backend-squashfs.img fickit-backend-kernel +fickit-backend-state fickit-frontend-cmdline fickit-frontend-initrd.img +fickit-frontend-squashfs.img fickit-frontend-kernel +fickit-frontend-state +fickit-prepare-bios.img +fickit-prepare-cmdline +fickit-prepare-initrd.img +fickit-prepare-kernel +fickit-prepare-state +fickit-update-cmdline +fickit-update-initrd.img +fickit-update-kernel +fickit-update-squashfs.img +result +started + +# Standalone binaries +admin/get-remote-files/get-remote-files +fic-admin +fic-backend +fic-dashboard +fic-frontend +fic-qa +fic-repochecker diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000..0e57016f --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,122 @@ +--- + +stages: + - deps + - build + - fickit + - sast + - qa + - image + - container_scanning + +cache: + paths: + - .go/pkg/mod/ + - qa/ui/node_modules/ + - frontend/ui/node_modules/ + +include: + - '.gitlab-ci/build.yml' + - '.gitlab-ci/image.yml' + - template: SAST.gitlab-ci.yml + - template: Security/Dependency-Scanning.gitlab-ci.yml + - template: Security/Secret-Detection.gitlab-ci.yml + - template: Security/Container-Scanning.gitlab-ci.yml + +.scanners-matrix: + parallel: + matrix: + - IMAGE_NAME: [checker, admin, evdist, frontend-ui, nginx, dashboard, repochecker, qa, receiver, generator, remote-challenge-sync-airbus] + +container_scanning: + stage: container_scanning + extends: + - .scanners-matrix + variables: + DOCKER_SERVICE: localhost + DOCKERFILE_PATH: Dockerfile-${IMAGE_NAME} + CI_APPLICATION_REPOSITORY: ${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/${IMAGE_NAME} + CI_APPLICATION_TAG: latest + GIT_STRATEGY: fetch + before_script: + - 'echo "Scanning: ${IMAGE_NAME}"' + rules: + - if: '$CI_COMMIT_BRANCH == "master"' + +sast: + stage: sast + interruptible: true + needs: [] + before_script: + - rm -rf .go/ + +secret_detection: + stage: sast + interruptible: true + needs: [] + +dependency_scanning: + stage: qa + interruptible: true + needs: [] + 
+get-deps: + stage: deps + image: golang:1-alpine + before_script: + - export GOPATH="$CI_PROJECT_DIR/.go" + - mkdir -p .go + script: + - apk --no-cache add git + - go get -v -d ./... + +vet: + stage: sast + needs: ["build-qa-ui"] + dependencies: + - build-qa-ui + image: golang:1-alpine + before_script: + - export GOPATH="$CI_PROJECT_DIR/.go" + - mkdir -p .go + script: + - apk --no-cache add build-base + - go vet -v -buildvcs=false -tags gitgo ./... + - go vet -v -buildvcs=false ./... + +fickit: + stage: fickit + interruptible: true + needs: ["build-admin","build-checker","build-dashboard","build-evdist","build-generator","build-qa","build-receiver","build-repochecker"] + image: nemunaire/linuxkit + tags: ['docker'] + before_script: + - mkdir -p ~/.docker + - echo "{\"auths\":{\"${CI_REGISTRY}\":{\"username\":\"${CI_REGISTRY_USER}\",\"password\":\"${CI_REGISTRY_PASSWORD}\"}}}" > ~/.docker/config.json + script: + - dockerd & sleep 5 + + - linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/boot/ + - linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/kexec/ + - linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/mariadb-client/ + - linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/mdadm/ + - linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/rsync/ + - linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/syslinux/ + - linuxkit pkg push -force -org "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}" fickit-pkg/unbound/ + + - sed -i "s@nemunaire/fic-@${CI_REGISTRY_IMAGE}/master/@;s@nemunaire/@${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/@" fickit-backend.yml fickit-boot.yml fickit-frontend.yml fickit-prepare.yml fickit-update.yml + + - linuxkit build -format kernel+squashfs fickit-backend.yml + - linuxkit build -format kernel+squashfs fickit-frontend.yml + - linuxkit build 
-format kernel+initrd fickit-boot.yml + - linuxkit build -format kernel+initrd fickit-prepare.yml + - linuxkit build -format kernel+initrd fickit-update.yml + artifacts: + expire_in: 8 hours + paths: + - fickit-backend-squashfs.img + - fickit-frontend-squashfs.img + - fickit-boot-kernel + - fickit-boot-initrd.img + - fickit-prepare-initrd.img + - fickit-update-initrd.img diff --git a/.gitlab-ci/build.yml b/.gitlab-ci/build.yml new file mode 100644 index 00000000..621d8bbb --- /dev/null +++ b/.gitlab-ci/build.yml @@ -0,0 +1,93 @@ +--- + +.build: + stage: build + image: golang:1-alpine + before_script: + - export GOPATH="$CI_PROJECT_DIR/.go" + - mkdir -p .go + variables: + CGO_ENABLED: 0 + +build-qa-ui: + stage: build + image: node:21-alpine + before_script: + script: + - cd qa/ui + - npm install --network-timeout=100000 + - npm run build + artifacts: + paths: + - qa/ui/build/ + when: on_success + +build-checker: + extends: + - .build + script: + - go build -v -buildvcs=false -o deploy/checker srs.epita.fr/fic-server/checker + +build-generator: + extends: + - .build + script: + - go build -v -buildvcs=false -o deploy/generator srs.epita.fr/fic-server/generator + +build-receiver: + extends: + - .build + script: + - go build -v -buildvcs=false -o deploy/receiver srs.epita.fr/fic-server/receiver + +build-admin: + extends: + - .build + script: + - go build -v -buildvcs=false -tags gitgo -o deploy/admin-gitgo srs.epita.fr/fic-server/admin + - go build -v -buildvcs=false -o deploy/admin srs.epita.fr/fic-server/admin + +build-evdist: + extends: + - .build + script: + - go build -v -buildvcs=false -o deploy/evdist srs.epita.fr/fic-server/evdist + +build-frontend-ui: + stage: build + image: node:21-alpine + before_script: + script: + - cd frontend/fic + - npm install --network-timeout=100000 + - npm run build + +build-dashboard: + extends: + - .build + script: + - go build -v -buildvcs=false -o deploy/dashboard srs.epita.fr/fic-server/dashboard + +build-repochecker: + 
extends: + - .build + variables: + CGO_ENABLED: 1 + script: + - apk --no-cache add build-base + - go build -buildvcs=false --tags checkupdate -v -o deploy/repochecker srs.epita.fr/fic-server/repochecker + - go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-epita-rules.so srs.epita.fr/fic-server/repochecker/epita + - go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-file-inspector-rules.so srs.epita.fr/fic-server/repochecker/file-inspector + - go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-grammalecte-rules.so srs.epita.fr/fic-server/repochecker/grammalecte + - go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-pcap-inspector-rules.so srs.epita.fr/fic-server/repochecker/pcap-inspector + - go build -buildvcs=false -buildmode=plugin -v -o deploy/repochecker-videos-rules.so srs.epita.fr/fic-server/repochecker/videos + - grep "const version" repochecker/update.go | sed -r 's/^.*=\s*(\S.*)$/\1/' > deploy/repochecker.version + +build-qa: + extends: + - .build + needs: ["build-qa-ui"] + dependencies: + - build-qa-ui + script: + - go build -v -buildvcs=false -o deploy/qa srs.epita.fr/fic-server/qa diff --git a/.gitlab-ci/image.yml b/.gitlab-ci/image.yml new file mode 100644 index 00000000..988bb2b0 --- /dev/null +++ b/.gitlab-ci/image.yml @@ -0,0 +1,99 @@ +--- + +.push: + stage: image + interruptible: true + needs: [] + image: + name: gcr.io/kaniko-project/executor:v1.9.0-debug + entrypoint: [""] + before_script: + - mkdir -p /kaniko/.docker + - echo "{\"auths\":{\"${CI_REGISTRY}\":{\"username\":\"${CI_REGISTRY_USER}\",\"password\":\"${CI_REGISTRY_PASSWORD}\"}}}" > /kaniko/.docker/config.json + script: + - | + /kaniko/executor \ + --context . 
\ + --dockerfile "${DOCKERFILE}" \ + --destination "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/${CI_JOB_NAME}:${CI_COMMIT_SHA}" \ + --destination "${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/${CI_JOB_NAME}:latest" + only: + - master + +checker: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-checker + +receiver: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-receiver + +generator: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-generator + +admin: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-admin + +fickit-deploy: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-deploy + +get-remote-files: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-get-remote-files + +evdist: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-evdist + +frontend-ui: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-frontend-ui + +nginx: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-nginx + +dashboard: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-dashboard + +repochecker: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-repochecker + +qa: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-qa + +remote-challenge-sync-airbus: + extends: + - .push + variables: + DOCKERFILE: Dockerfile-remote-challenge-sync-airbus diff --git a/Dockerfile-admin b/Dockerfile-admin index 75b68ce8..f2c285f2 100644 --- a/Dockerfile-admin +++ b/Dockerfile-admin @@ -1,24 +1,33 @@ -FROM golang:alpine as gobuild +FROM golang:1-alpine AS gobuild RUN apk add --no-cache git -WORKDIR /go/src/srs.epita.fr/fic-server/admin +WORKDIR /go/src/srs.epita.fr/fic-server/ -ADD settings ../settings/ -ADD frontend/time ../frontend/time/ -ADD libfic ../libfic/ -ADD admin/api ./api/ -ADD admin/pki ./pki/ -ADD admin/sync ./sync/ -ADD admin/*.go ./ +RUN apk add --no-cache binutils-gold build-base -RUN go get -d -v -RUN go build -v +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY admin 
./admin/ +COPY repochecker ./repochecker/ + +RUN go get -d -v ./admin && \ + go build -v -o admin/admin ./admin && \ + go build -v -buildmode=plugin -o repochecker/epita-rules.so ./repochecker/epita && \ + go build -v -buildmode=plugin -o repochecker/file-inspector.so ./repochecker/file-inspector && \ + go build -v -buildmode=plugin -o repochecker/grammalecte-rules.so ./repochecker/grammalecte && \ + go build -v -buildmode=plugin -o repochecker/videos-rules.so ./repochecker/videos -FROM alpine +FROM alpine:3.21 -RUN apk add --no-cache openssl +RUN apk add --no-cache \ + ca-certificates \ + git \ + git-lfs \ + openssh-client-default \ + openssl EXPOSE 8081 @@ -27,8 +36,7 @@ WORKDIR /srv ENTRYPOINT ["/srv/admin", "-bind=:8081", "-baseurl=/admin/"] COPY --from=gobuild /go/src/srs.epita.fr/fic-server/admin/admin /srv/admin -COPY admin/static/css/bootstrap.min.css frontend/static/css/glyphicon.css /srv/htdocs-admin/css/ -COPY frontend/static/fonts /srv/htdocs-admin/fonts -COPY admin/static/img /srv/htdocs-admin/img -COPY admin/static/views /srv/htdocs-admin/views -COPY admin/static/js/app.js frontend/static/js/angular.min.js admin/static/js/angular-resource.min.js frontend/static/js/angular-route.min.js frontend/static/js/angular-sanitize.min.js frontend/static/js/bootstrap.min.js frontend/static/js/d3.v3.min.js frontend/static/js/i18n frontend/static/js/jquery.min.js frontend/static/js/popper.min.js /srv/htdocs-admin/js/ +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/epita-rules.so /srv/epita-rules.so +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/file-inspector.so /usr/lib/file-inspector.so +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/grammalecte-rules.so /usr/lib/grammalecte-rules.so +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/videos-rules.so /usr/lib/videos-rules.so diff --git a/Dockerfile-backend b/Dockerfile-backend deleted file mode 100644 index f8727a2e..00000000 --- 
a/Dockerfile-backend +++ /dev/null @@ -1,21 +0,0 @@ -FROM golang:alpine as gobuild - -RUN apk add --no-cache git - -WORKDIR /go/src/srs.epita.fr/fic-server/backend - -ADD backend/*.go ./ -ADD libfic ../libfic/ -ADD settings ../settings/ - -RUN go get -d -v -RUN go build -v - - -FROM alpine - -WORKDIR /srv - -ENTRYPOINT ["/srv/backend"] - -COPY --from=gobuild /go/src/srs.epita.fr/fic-server/backend/backend /srv/backend diff --git a/Dockerfile-checker b/Dockerfile-checker new file mode 100644 index 00000000..144ac0a7 --- /dev/null +++ b/Dockerfile-checker @@ -0,0 +1,22 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY checker ./checker/ + +RUN go get -d -v ./checker && \ + go build -v -buildvcs=false -o checker/checker ./checker + + +FROM alpine:3.21 + +WORKDIR /srv + +ENTRYPOINT ["/srv/checker"] + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/checker/checker /srv/checker diff --git a/Dockerfile-dashboard b/Dockerfile-dashboard new file mode 100644 index 00000000..b8291bba --- /dev/null +++ b/Dockerfile-dashboard @@ -0,0 +1,32 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY dashboard ./dashboard/ + +RUN go get -d -v ./dashboard && \ + go build -v -buildvcs=false -o dashboard/dashboard ./dashboard + + +FROM alpine:3.21 + +EXPOSE 8082 + +WORKDIR /srv + +ENTRYPOINT ["/srv/dashboard", "--bind=:8082"] + +VOLUME /srv/htdocs-dashboard/ + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/dashboard/dashboard /srv/dashboard +COPY dashboard/static/index.html /srv/htdocs-dashboard/ +COPY admin/static/css/bootstrap.min.css dashboard/static/css/fic.css admin/static/css/glyphicon.css /srv/htdocs-dashboard/css/ +COPY admin/static/fonts /srv/htdocs-dashboard/fonts +COPY dashboard/static/img/srs.png 
/srv/htdocs-dashboard/img/ +COPY dashboard/static/js/dashboard.js admin/static/js/angular.min.js dashboard/static/js/angular-animate.min.js admin/static/js/angular-route.min.js admin/static/js/angular-sanitize.min.js admin/static/js/bootstrap.min.js admin/static/js/common.js admin/static/js/d3.v3.min.js admin/static/js/jquery.min.js /srv/htdocs-dashboard/js/ +COPY admin/static/js/i18n/* /srv/htdocs-dashboard/js/i18n/ diff --git a/Dockerfile-deploy b/Dockerfile-deploy new file mode 100644 index 00000000..99765543 --- /dev/null +++ b/Dockerfile-deploy @@ -0,0 +1,24 @@ +FROM alpine:3.21 + +EXPOSE 67/udp +EXPOSE 69/udp +EXPOSE 80/tcp + +ENTRYPOINT ["/usr/sbin/initial-config.sh"] +CMD ["/usr/bin/supervisord", "-c", "/etc/supervisord.conf"] + +WORKDIR /srv/s + +RUN apk add --no-cache \ + busybox-extras \ + supervisor \ + syslinux \ + tftp-hpa + +RUN touch /var/lib/udhcpd/udhcpd.leases && \ + mv /usr/share/syslinux/* /srv + +COPY configs/deploy-initial-config.sh /usr/sbin/initial-config.sh +COPY configs/deploy-supervisord.conf /etc/supervisord.conf +COPY configs/udhcpd-sample.conf /etc/udhcpd.conf +COPY configs/pxelinux.cfg /srv/pxelinux.cfg/default \ No newline at end of file diff --git a/Dockerfile-evdist b/Dockerfile-evdist new file mode 100644 index 00000000..45a2c506 --- /dev/null +++ b/Dockerfile-evdist @@ -0,0 +1,21 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY evdist ./evdist/ + +RUN go get -d -v ./evdist && \ + go build -v -buildvcs=false -o evdist/evdist ./evdist + + +FROM alpine:3.21 + +WORKDIR /srv + +ENTRYPOINT ["/srv/evdist"] + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/evdist/evdist /srv/evdist diff --git a/Dockerfile-frontend b/Dockerfile-frontend deleted file mode 100644 index 7bb1ff6f..00000000 --- a/Dockerfile-frontend +++ /dev/null @@ -1,27 +0,0 @@ -FROM golang:alpine as gobuild - -RUN apk add --no-cache git - -WORKDIR 
/go/src/srs.epita.fr/fic-server/frontend - -ADD frontend/*.go ./ -ADD frontend/time ./time/ -ADD libfic ../libfic/ -ADD settings ../settings/ - -RUN go get -d -v -RUN go build -v - - -FROM alpine - -EXPOSE 8080 - -WORKDIR /srv - -ENTRYPOINT ["/srv/frontend", "--bind=:8080"] - -VOLUME /srv/htdocs-frontend/ - -COPY --from=gobuild /go/src/srs.epita.fr/fic-server/frontend/frontend /srv/frontend -COPY frontend/static /srv/htdocs-frontend diff --git a/Dockerfile-frontend-ui b/Dockerfile-frontend-ui new file mode 100644 index 00000000..a14b0c58 --- /dev/null +++ b/Dockerfile-frontend-ui @@ -0,0 +1,13 @@ +FROM node:23-alpine AS nodebuild + +WORKDIR /ui + +COPY frontend/fic/ . + +RUN npm install --network-timeout=100000 && \ + npm run build + + +FROM scratch + +COPY --from=nodebuild /ui/build/ /www/htdocs-frontend diff --git a/Dockerfile-generator b/Dockerfile-generator new file mode 100644 index 00000000..2574e614 --- /dev/null +++ b/Dockerfile-generator @@ -0,0 +1,22 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY generator ./generator/ + +RUN go get -d -v ./generator && \ + go build -v -buildvcs=false -o generator/generator ./generator + + +FROM alpine:3.21 + +WORKDIR /srv + +ENTRYPOINT ["/srv/generator"] + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/generator/generator /srv/generator diff --git a/Dockerfile-get-remote-files b/Dockerfile-get-remote-files new file mode 100644 index 00000000..95e1c5f3 --- /dev/null +++ b/Dockerfile-get-remote-files @@ -0,0 +1,27 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +RUN apk add --no-cache build-base + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY admin ./admin/ + +RUN go get -d -v ./admin && \ + go build -v -o get-remote-files ./admin/get-remote-files + + +FROM alpine:3.21 + +RUN apk add 
--no-cache \ + ca-certificates + +WORKDIR /srv + +ENTRYPOINT ["/srv/get-remote-files", "/mnt/fic/"] + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/get-remote-files /srv/get-remote-files diff --git a/Dockerfile-nginx b/Dockerfile-nginx new file mode 100644 index 00000000..41326840 --- /dev/null +++ b/Dockerfile-nginx @@ -0,0 +1,32 @@ +FROM node:23-alpine AS nodebuild + +WORKDIR /ui + +COPY frontend/fic/ . + +RUN npm install --network-timeout=100000 && \ + npm run build + + +FROM nginx:stable-alpine-slim + +ENV FIC_BASEURL / +ENV HOST_RECEIVER receiver:8080 +ENV HOST_ADMIN admin:8081 +ENV HOST_DASHBOARD dashboard:8082 +ENV HOST_QA qa:8083 +ENV PATH_FILES /srv/FILES +ENV PATH_STARTINGBLOCK /srv/STARTINGBLOCK +ENV PATH_STATIC /srv/htdocs-frontend +ENV PATH_SETTINGS /srv/SETTINGSDIST +ENV PATH_TEAMS /srv/TEAMS + +EXPOSE 80 + +COPY configs/nginx-chbase.sh /docker-entrypoint.d/40-update-baseurl.sh + +COPY configs/nginx/get-team/upstream.conf /etc/nginx/fic-get-team.conf +COPY configs/nginx/auth/none.conf /etc/nginx/fic-auth.conf +COPY configs/nginx/base/docker.conf /etc/nginx/templates/default.conf.template + +COPY --from=nodebuild /ui/build/ /srv/htdocs-frontend diff --git a/Dockerfile-qa b/Dockerfile-qa new file mode 100644 index 00000000..37f3a1b0 --- /dev/null +++ b/Dockerfile-qa @@ -0,0 +1,38 @@ +FROM node:23-alpine AS nodebuild + +WORKDIR /ui + +COPY qa/ui/ . 
+ +RUN npm install --network-timeout=100000 && \ + npm run build + + +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY --from=nodebuild /ui ./qa/ui +COPY qa ./qa/ +COPY admin ./admin/ + +RUN go get -d -v ./qa && \ + go build -v -buildvcs=false -o qa/qa ./qa + + +FROM alpine:3.21 + +EXPOSE 8083 + +WORKDIR /srv + +ENTRYPOINT ["/srv/qa", "--bind=:8083"] + +VOLUME /srv/htdocs-qa/ + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/qa/qa /srv/qa diff --git a/Dockerfile-receiver b/Dockerfile-receiver new file mode 100644 index 00000000..f2cac038 --- /dev/null +++ b/Dockerfile-receiver @@ -0,0 +1,27 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY receiver ./receiver/ + +RUN go get -d -v ./receiver && \ + go build -v -buildvcs=false -o ./receiver/receiver ./receiver + + +FROM alpine:3.21 + +EXPOSE 8080 + +WORKDIR /srv + +ENTRYPOINT ["/usr/sbin/entrypoint.sh"] +CMD ["--bind=:8080"] + +COPY entrypoint-receiver.sh /usr/sbin/entrypoint.sh + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/receiver/receiver /srv/receiver diff --git a/Dockerfile-remote-challenge-sync-airbus b/Dockerfile-remote-challenge-sync-airbus new file mode 100644 index 00000000..47a5e167 --- /dev/null +++ b/Dockerfile-remote-challenge-sync-airbus @@ -0,0 +1,24 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY libfic ./libfic/ +COPY settings ./settings/ +COPY remote/challenge-sync-airbus ./remote/challenge-sync-airbus/ + +RUN go get -d -v ./remote/challenge-sync-airbus && \ + go build -v -buildvcs=false -o ./challenge-sync-airbus ./remote/challenge-sync-airbus + + +FROM alpine:3.21 + +RUN apk add --no-cache openssl ca-certificates + 
+WORKDIR /srv + +ENTRYPOINT ["/srv/challenge-sync-airbus"] + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/challenge-sync-airbus /srv/challenge-sync-airbus diff --git a/Dockerfile-remote-scores-sync-zqds b/Dockerfile-remote-scores-sync-zqds new file mode 100644 index 00000000..e5ff87fb --- /dev/null +++ b/Dockerfile-remote-scores-sync-zqds @@ -0,0 +1,24 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +COPY go.mod go.sum ./ +COPY libfic ./libfic/ +COPY settings ./settings/ +COPY remote/scores-sync-zqds ./remote/scores-sync-zqds/ + +RUN go get -d -v ./remote/scores-sync-zqds && \ + go build -v -buildvcs=false -o ./scores-sync-zqds ./remote/scores-sync-zqds + + +FROM alpine:3.21 + +RUN apk add --no-cache openssl ca-certificates + +WORKDIR /srv + +ENTRYPOINT ["/srv/scores-sync-zqds"] + +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/scores-sync-zqds /srv/scores-sync-zqds diff --git a/Dockerfile-repochecker b/Dockerfile-repochecker new file mode 100644 index 00000000..d02bc1de --- /dev/null +++ b/Dockerfile-repochecker @@ -0,0 +1,42 @@ +FROM golang:1-alpine AS gobuild + +RUN apk add --no-cache git + +WORKDIR /go/src/srs.epita.fr/fic-server/ + +RUN apk add --no-cache binutils-gold build-base + +COPY go.mod go.sum ./ +COPY settings settings/ +COPY libfic ./libfic/ +COPY admin ./admin/ +COPY repochecker ./repochecker/ + +RUN go get -d -v ./repochecker && \ + go build -v -o repochecker/repochecker ./repochecker && \ + go build -v -buildmode=plugin -o repochecker/epita-rules.so ./repochecker/epita && \ + go build -v -buildmode=plugin -o repochecker/file-inspector.so ./repochecker/file-inspector && \ + go build -v -buildmode=plugin -o repochecker/grammalecte-rules.so ./repochecker/grammalecte && \ + go build -v -buildmode=plugin -o repochecker/pcap-inspector.so ./repochecker/pcap-inspector && \ + go build -v -buildmode=plugin -o repochecker/videos-rules.so ./repochecker/videos + + +ENV 
GRAMMALECTE_VERSION 2.1.1 + +ADD https://web.archive.org/web/20240926154729if_/https://grammalecte.net/zip/Grammalecte-fr-v$GRAMMALECTE_VERSION.zip /srv/grammalecte.zip + +RUN mkdir /srv/grammalecte && cd /srv/grammalecte && unzip /srv/grammalecte.zip && sed -i 's/if sys.version_info.major < (3, 7):/if False:/' /srv/grammalecte/grammalecte-server.py + +FROM alpine:3.19 + +ENTRYPOINT ["/usr/bin/repochecker", "--rules-plugins=/usr/lib/epita-rules.so", "--rules-plugins=/usr/lib/file-inspector.so", "--rules-plugins=/usr/lib/grammalecte-rules.so", "--rules-plugins=/usr/lib/pcap-inspector.so", "--rules-plugins=/usr/lib/videos-rules.so"] + +RUN apk add --no-cache git python3 ffmpeg + +COPY --from=gobuild /srv/grammalecte /srv/grammalecte +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/repochecker /usr/bin/repochecker +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/epita-rules.so /usr/lib/epita-rules.so +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/file-inspector.so /usr/lib/file-inspector.so +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/grammalecte-rules.so /usr/lib/grammalecte-rules.so +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/pcap-inspector.so /usr/lib/pcap-inspector.so +COPY --from=gobuild /go/src/srs.epita.fr/fic-server/repochecker/videos-rules.so /usr/lib/videos-rules.so diff --git a/README.md b/README.md index d1d64667..d908ce27 100644 --- a/README.md +++ b/README.md @@ -6,45 +6,180 @@ to be robust, so it uses some uncommon technics like client certificate for authentication, lots of state of the art cryptographic methods and aims to be deployed in a DMZ network architecture. +## Features + +- **Collaborative Challenge Design and Review:** Facilitates large team collaboration for challenge creation and review. 
+- **Versatile Flag Formats:** Supports flags as strings, numbers, multiple-choice questions, unique-choice questions, selects, multiline inputs, and strings with capture regexp. +- **Engaging Challenge Interface:** A visually appealing interface that incorporates images to illustrate exercises. +- **Public Dashboard:** Allow spectators to follow the competition alongside players. +- **Archival Mode:** Preserve past challenges and data in a static form, with no code. Your archive can lie on an S3 bucket. +- **Export Capabilities:** Export challenges to other CTF platforms. +- **Security-Focused:** Designed with security as a top priority. Each service aims to be isolated with the right restrictions. Answers are not stored in the database, ... +- **Choose your Authentication:** Authentication is not part of this project, integrate your own authentication methods. +- **Extensible:** Easily extend and customize the platform. The main codebase in Golang is highly documented, each frontend part can be recreated in another language with ease. +- **Comprehensive Settings:** A wide range of settings for challenge customization. You can have first blood or not, dynamic exercice gain, event-based bonuses, ... +- **Git Integration:** Seamless verification and integration with Git. +- **Infrastructure as Code (IaC):** Ensure read-only and reproducible infrastructure. +- **Last-Minute Checks:** Ensure your challenge is ready with a comprehensive set of checks that can be performed anytime, verifying that downloadable files are as expected by the challenge creators. +- **Lightweight:** Optimized for minimal resource consumption, supporting features like serving gzipped files directly to browsers without CPU usage. +- **Scalable:** Designed to handle large-scale competitions with multiple receivers and frontend servers, smoothly queuing activity peaks on the backend. +- **Offline Capability:** Run your challenges offline.
+- **Integrated Exercise Issue Ticketing System:** Manage and track issues related to exercises during the competition directly with teams. During the design phase, this transforms into a complete dedicated QA platform. +- **Detailed Statistics:** Provide administrators with insights into exercise preferences and complexity. +- **Change Planning:** Schedule events in advance, such as new exercise availability or ephemeral bonuses, with second-by-second precision. +- **Frontend Time Synchronization:** Ensure accurate remaining time and event synchronization between servers and players. + + +## Overview + +This is a [monorepo](https://danluu.com/monorepo/), containing several +micro-services: + +- `admin` is the web interface and API used to control the challenge + and to do synchronization. +- `checker` is an inotify reacting service that handles submissions + checking. +- `dashboard` is a public interface to explain and follow the + conquest, aims to animate the challenge for visitors. +- `evdist` is an inotify reacting service that handles settings + changes during the challenge (eg. a 30 minutes event where hints are + free, ...). +- `generator` takes care of global and team's files generation. +- `qa` is an interface dedicated to challenge development, it stores + reports to be treated by challenge creators. +- `receiver` is only responsible for receiving submissions. It is the + only dynamic part accessible to players, so its codebase is reduced + to the minimum. It does not parse or try to understand players' + submissions, it just writes it down to a file in the file + system. Parsing and treatment are done by the `checker`. +- `remote/challenge-sync-airbus` is an inotify reacting service that + allows us to synchronize scores and exercice validations with the + Airbus scoring platform. +- `remote/scores-sync-zqds` is an inotify reacting service that allows + us to synchronize scores with the ZQDS scoring platform.
+- `repochecker` is a side project to check offline for synchronization + issues. + +Here is how those services speak to each other: + +![Overview of the micro-services](doc/micro-services.png) + +In the production setup, each micro-service runs in a dedicated +container, isolated from each other. Moreover, two physical machines +should be used: + +- `phobos` communicates with players: displaying the web interface, + authenticating teams and players, storing contest files and handling + submissions retrieval without understanding them. It can't access + `deimos` so its job stops after writing requests on the filesystem. +- `deimos` is hidden from players, isolated from the network. It can + only access `phobos` via a restricted ssh connection, to retrieve + requests from `phobos` filesystem and push to it newly generated + static files. + +Concretely, the L2 looks like this: + +![Layer 2 connections](doc/l2.png) + +So, the general filesystem is organized this way: + +- `DASHBOARD` contains files structuring the content of the dashboard + screen(s). +- `FILES` stores the contest files to be downloaded by players. To be + accessible without authentication and to avoid bruteforce, each file + is placed into a directory with a hashed name (the original file + name is preserved). It's rsynced as is to `deimos`. +- `GENERATOR` contains a socket to allow other services to communicate + with the `generator`. +- `PKI` takes care of the PKI used for the client certificate + authorization process, and more generally, all authentication-related + files (htpasswd, dexidp config, ...). Only the `shared` subdirectory + is shared with `deimos`, private key and teams P12 don't go out. +- `SETTINGS` stores the challenge config as wanted by admins. It's not + always the config in use: its use can be delayed waiting for a + trigger. +- `SETTINGSDIST` is the challenge configuration in use. It is the one + shared with players.
+- `startingblock` keeps the `started` state of the challenge. This + helps `nginx` to know when it can start distributing exercices + related files. +- `TEAMS` stores the static files generated by the `generator`, there is + one subdirectory by team (id of the team), plus some files at the + root, which are common to all teams. There are also symlinks pointing + to team directories, each symlink represents an authentication + association (certificate ID, OpenID username, htpasswd user, ...). +- `submissions` is the directory where the `receiver` writes + requests. It creates subdirectories named after the + authentication association, as seen in `TEAMS`, `checker` then + resolves the association against the `TEAMS` directory. There is also a + special directory to handle team registration. + +Here is a diagram showing how each micro-service uses directories it has access to (blue for read access, red for write access): + +![Usage of directories by each micro-service](doc/directories.png) Local developer setup --------------------- -### The importance of clone location - -This is a [monorepo](https://danluu.com/monorepo/), primarly intended for Go -programming. If you want to be able to do programming stuff, you should take -care of the path where you clone this repository, as it should be located -inside [your `GOPATH`](https://github.com/golang/go/wiki/SettingGOPATH): - - git clone https://git.nemunai.re/fic/server.git $GOPATH/src/srs.epita.fr/fic-server - - ### Using Docker Use `docker-compose build`, then `docker-compose up` to launch the infrastructure. After booting, you'll be able to reach the main interface at: - and the admin one at: . + and the admin one at: (or at ). +The dashboard is available at and the QA service at . +In this setup, there is no authentication. You are identified [as a team](./configs/nginx/get-team/team-1.conf). On first use you'll need to register.
+ +#### Import folder + +##### Local import folder +The following changes is only required if your are trying to change the local import folder `~/fic` location. + +Make the following changes inside this file `docker-compose.yml`: + + 23 volumes: + 24 - - ~/fic:/mnt/fic:ro + 24 + - /fic:/mnt/fic:ro + +##### Git import +A git repository can be used: + + 29 - command: --baseurl /admin/ -localimport /mnt/fic -localimportsymlink + 29 + command: --baseurl /admin/ -localimport /mnt/fic -localimportsymlink -git-import-remote git@gitlab.cri.epita.fr:ing/majeures/srs/fic/2042/challenges.git + +##### Owncloud import folder +If your are trying to use the folder available with the Owncloud service, make the following changes inside this file `docker-compose.yml`: + + 29 - command: --baseurl /admin/ -localimport /mnt/fic -localimportsymlink + 29 + command: --baseurl /admin/ -clouddav=https://owncloud.srs.epita.fr/remote.php/webdav/FIC%202019/ -clouduser -cloudpass '' ### Manual builds Running this project requires a web server (configuration is given for nginx), -a database (currently supporting only MySQL), a go compiler for the revision -1.6 at least and a `inotify`-aware system. +a database (currently supporting only MySQL/MariaDB), a Go compiler for the +revision 1.18 at least and a `inotify`-aware system. You'll also need NodeJS to +compile some user interfaces. -1. First, you'll need to retrieve the dependencies: +1. Above all, you need to build Node projects: - go get -d srs.epita.fr/fic-server/admin - go get -d srs.epita.fr/fic-server/backend - go get -d srs.epita.fr/fic-server/frontend + cd frontend/fic; npm install && npm run build + cd qa/ui; npm install && npm run build + +2. First, you'll need to retrieve the dependencies: + + go mod vendor 2. 
Then, build the three Go projects: - go build -o $GOPATH/src/srs.epita.fr/fic-server/fic-admin srs.epita.fr/fic-server/admin - go build -o $GOPATH/src/srs.epita.fr/fic-server/fic-backend srs.epita.fr/fic-server/backend - go build -o $GOPATH/src/srs.epita.fr/fic-server/fic-frontend srs.epita.fr/fic-server/frontend + go build -o fic-admin ./admin + go build -o fic-checker ./checker + go build -o fic-dashboard ./dashboard + go build -o fic-generator ./generator + go build -o fic-qa ./qa + go build -o fic-receiver ./receiver + go build -o fic-repochecker ./repochecker + ... 3. Before launching anything, you need to create a database: @@ -68,17 +203,36 @@ a database (currently supporting only MySQL), a go compiler for the revision After initializing the database, the server will listen on : this is the administration part. - ./fic-backend & + ./fic-generator & This daemon generates static and team related files and then waits - for new submissions (expected in `submissions` directory). It only - watchs modifications on the file system, it has no web interface. + another process to tell it to regenerate some files. - ./fic-frontend & + ./fic-receiver & - This last server exposes an API that gives time synchronization to - clients and handle submission reception (but without treating - them). + This one exposes an API that gives time synchronization to clients and + handle submission reception (but without treating them). -For the moment, a web server is mandatory to serve static files, look at the -samples given in the `configs/` directory. + ./fic-checker & + + This service waits for new submissions (expected in `submissions` + directory). It only watchs modifications on the file system, it has no web + interface. + + ./fic-dashboard & + + This last server runs the public dashboard. It serves all file, without the + need of a webserver. It listens on port 8082 by default. 
+ + ./fic-qa & + + If you need it, this will launch a web interface on the port 8083 by + default, to perform quality control. + +For the moment, a web server is mandatory to serve static files, look +at the samples given in the `configs/nginx` directory. You need to +pick one base configation flavor in the `configs/nginx/base` +directory, and associated with an authentication mechanism in +`configs/nginx/auth` (named the file `fic-auth.conf` in `/etc/nginx`), +and also pick the corresponding `configs/nginx/get-team` file, you +named `fic-get-team.conf`. diff --git a/admin/api/certificate.go b/admin/api/certificate.go index bdf56454..5c98c116 100644 --- a/admin/api/certificate.go +++ b/admin/api/certificate.go @@ -2,117 +2,366 @@ package api import ( "crypto/rand" - "encoding/json" + "crypto/sha1" + "crypto/x509" + "crypto/x509/pkix" + "encoding/base32" + "encoding/base64" "errors" "fmt" "io/ioutil" "log" + "math" "math/big" + "net/http" "os" "path" - "time" "strconv" + "strings" + "time" "srs.epita.fr/fic-server/admin/pki" "srs.epita.fr/fic-server/libfic" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/ca/", apiHandler(infoCA)) - router.GET("/api/ca.pem", apiHandler(getCAPEM)) - router.POST("/api/ca/new", apiHandler( - func(_ httprouter.Params, _ []byte) (interface{}, error) { - return true, pki.GenerateCA(time.Date(2018, 01, 21, 0, 0, 0, 0, time.UTC), time.Date(2018, 01, 24, 23, 59, 59, 0, time.UTC)) - })) +var TeamsDir string - router.GET("/api/teams/:tid/certificates", apiHandler(teamHandler( - func(team fic.Team, _ []byte) (interface{}, error) { - if serials, err := pki.GetTeamSerials(TeamsDir, team.Id); err != nil { - return nil, err - } else { - var certs []fic.Certificate - for _, serial := range serials { - if cert, err := fic.GetCertificate(serial); err == nil { - certs = append(certs, cert) - } else { - log.Println("Unable to get back certificate, whereas an association exists on disk: ", err) - } 
- } - return certs, nil - } - }))) +func declareCertificateRoutes(router *gin.RouterGroup) { + router.GET("/htpasswd", func(c *gin.Context) { + ret, err := genHtpasswd(true) + if err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + c.String(http.StatusOK, ret) + }) + router.POST("/htpasswd", func(c *gin.Context) { + if htpasswd, err := genHtpasswd(true); err != nil { + log.Println("Unable to generate htpasswd:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "ficpasswd"), []byte(htpasswd), 0644); err != nil { + log.Println("Unable to write htpasswd:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + c.AbortWithStatus(http.StatusOK) + }) + router.DELETE("/htpasswd", func(c *gin.Context) { + if err := os.Remove(path.Join(pki.PKIDir, "shared", "ficpasswd")); err != nil { + log.Println("Unable to remove htpasswd:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + c.AbortWithStatus(http.StatusOK) + }) + router.GET("/htpasswd.apr1", func(c *gin.Context) { + ret, err := genHtpasswd(false) + if err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + c.String(http.StatusOK, ret) + }) + router.GET("/ca", infoCA) + router.GET("/ca.pem", getCAPEM) + router.POST("/ca/new", func(c *gin.Context) { + var upki PKISettings + err := c.ShouldBindJSON(&upki) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } - router.GET("/api/certs/", apiHandler(getCertificates)) - router.POST("/api/certs/", apiHandler(generateClientCert)) - router.DELETE("/api/certs/", apiHandler(func(_ httprouter.Params, _ []byte) (interface{}, error) { return fic.ClearCertificates() })) + if err := pki.GenerateCA(upki.NotBefore, upki.NotAfter); err != nil { + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } - router.HEAD("/api/certs/:certid", apiHandler(certificateHandler(getTeamP12File))) - router.GET("/api/certs/:certid", apiHandler(certificateHandler(getTeamP12File))) - router.PUT("/api/certs/:certid", apiHandler(certificateHandler(updateCertificateAssociation))) - router.DELETE("/api/certs/:certid", apiHandler(certificateHandler( - func(cert fic.Certificate, _ []byte) (interface{}, error) { return cert.Revoke() }))) + c.JSON(http.StatusCreated, true) + }) + + router.GET("/certs", getCertificates) + router.POST("/certs", generateClientCert) + router.DELETE("/certs", func(c *gin.Context) { + v, err := fic.ClearCertificates() + if err != nil { + log.Println("Unable to ClearCertificates:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, v) + }) + + apiCertificatesRoutes := router.Group("/certs/:certid") + apiCertificatesRoutes.Use(CertificateHandler) + apiCertificatesRoutes.HEAD("", getTeamP12File) + apiCertificatesRoutes.GET("", getTeamP12File) + apiCertificatesRoutes.PUT("", updateCertificateAssociation) + apiCertificatesRoutes.DELETE("", func(c *gin.Context) { + cert := c.MustGet("cert").(*fic.Certificate) + + v, err := cert.Revoke() + if err != nil { + log.Println("Unable to Revoke:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, v) + }) } -func infoCA(_ httprouter.Params, _ []byte) (interface{}, error) { +func declareTeamCertificateRoutes(router *gin.RouterGroup) { + router.GET("/certificates", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + if serials, err := pki.GetTeamSerials(TeamsDir, team.Id); err != nil { + log.Println("Unable to GetTeamSerials:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } else { 
+ var certs []CertExported + for _, serial := range serials { + if cert, err := fic.GetCertificate(serial); err == nil { + certs = append(certs, CertExported{fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)), cert.Creation, cert.Password, &team.Id, cert.Revoked}) + } else { + log.Println("Unable to get back certificate, whereas an association exists on disk: ", err) + } + } + c.JSON(http.StatusOK, certs) + } + }) + + router.GET("/associations", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + assocs, err := pki.GetTeamAssociations(TeamsDir, team.Id) + if err != nil { + log.Println("Unable to GetTeamAssociations:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, assocs) + }) + + apiTeamAssociationsRoutes := router.Group("/associations/:assoc") + apiTeamAssociationsRoutes.POST("", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + if err := os.Symlink(fmt.Sprintf("%d", team.Id), path.Join(TeamsDir, c.Params.ByName("assoc"))); err != nil { + log.Println("Unable to create association symlink:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to create association symlink: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, c.Params.ByName("assoc")) + }) + apiTeamAssociationsRoutes.DELETE("", func(c *gin.Context) { + err := pki.DeleteTeamAssociation(TeamsDir, c.Params.ByName("assoc")) + if err != nil { + log.Printf("Unable to DeleteTeamAssociation(%s): %s", c.Params.ByName("assoc"), err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to delete association symlink: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, nil) + }) + +} + +func CertificateHandler(c *gin.Context) { + var certid uint64 + var err error + + cid := strings.TrimSuffix(string(c.Params.ByName("certid")), ".p12") + if certid, 
err = strconv.ParseUint(cid, 10, 64); err != nil { + if certid, err = strconv.ParseUint(cid, 16, 64); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid certficate identifier"}) + return + } + } + + cert, err := fic.GetCertificate(certid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Certificate not found"}) + return + } + + c.Set("cert", cert) + + c.Next() +} + +func genHtpasswd(ssha bool) (ret string, err error) { + var teams []*fic.Team + teams, err = fic.GetTeams() + if err != nil { + return + } + + for _, team := range teams { + var serials []uint64 + serials, err = pki.GetTeamSerials(TeamsDir, team.Id) + if err != nil { + return + } + + if len(serials) == 0 { + // Don't include teams that don't have associated certificates + continue + } + + for _, serial := range serials { + var cert *fic.Certificate + cert, err = fic.GetCertificate(serial) + if err != nil { + // Ignore invalid/incorrect/non-existant certificates + continue + } + + if cert.Revoked != nil { + continue + } + + salt := make([]byte, 5) + if _, err = rand.Read(salt); err != nil { + return + } + + if ssha { + hash := sha1.New() + hash.Write([]byte(cert.Password)) + hash.Write([]byte(salt)) + + passwdline := fmt.Sprintf(":{SSHA}%s\n", base64.StdEncoding.EncodeToString(append(hash.Sum(nil), salt...))) + + ret += strings.ToLower(team.Name) + passwdline + ret += fmt.Sprintf("%0[2]*[1]x", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)) + passwdline + ret += fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)) + passwdline + teamAssociations, _ := pki.GetTeamAssociations(TeamsDir, team.Id) + log.Println(path.Join(TeamsDir, fmt.Sprintf("%d", team.Id)), teamAssociations) + for _, ta := range teamAssociations { + ret += strings.Replace(ta, ":", "", -1) + passwdline + } + } else { + salt32 := base32.StdEncoding.EncodeToString(salt) + ret += fmt.Sprintf( + "%s:$apr1$%s$%s\n", + 
strings.ToLower(team.Name), + salt32, + fic.Apr1Md5(cert.Password, salt32), + ) + } + } + } + + return +} + +type PKISettings struct { + Version int `json:"version"` + SerialNumber *big.Int `json:"serialnumber"` + Issuer pkix.Name `json:"issuer"` + Subject pkix.Name `json:"subject"` + NotBefore time.Time `json:"notbefore"` + NotAfter time.Time `json:"notafter"` + SignatureAlgorithm x509.SignatureAlgorithm `json:"signatureAlgorithm,"` + PublicKeyAlgorithm x509.PublicKeyAlgorithm `json:"publicKeyAlgorithm"` +} + +func infoCA(c *gin.Context) { _, cacert, err := pki.LoadCA() if err != nil { - return nil, err + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "CA not found"}) + return } - ret := map[string]interface{}{} - - ret["version"] = cacert.Version - ret["serialnumber"] = cacert.SerialNumber - ret["issuer"] = cacert.Issuer - ret["subject"] = cacert.Subject - ret["notbefore"] = cacert.NotBefore - ret["notafter"] = cacert.NotAfter - ret["signatureAlgorithm"] = cacert.SignatureAlgorithm - ret["publicKeyAlgorithm"] = cacert.PublicKeyAlgorithm - - return ret, nil + c.JSON(http.StatusOK, PKISettings{ + Version: cacert.Version, + SerialNumber: cacert.SerialNumber, + Issuer: cacert.Issuer, + Subject: cacert.Subject, + NotBefore: cacert.NotBefore, + NotAfter: cacert.NotAfter, + SignatureAlgorithm: cacert.SignatureAlgorithm, + PublicKeyAlgorithm: cacert.PublicKeyAlgorithm, + }) } -func getCAPEM(_ httprouter.Params, _ []byte) (interface{}, error) { +func getCAPEM(c *gin.Context) { if _, err := os.Stat(pki.CACertPath()); os.IsNotExist(err) { - return nil, errors.New("Unable to locate the CA root certificate. Have you generated it?") + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Unable to locate the CA root certificate. 
Have you generated it?"}) + return } else if fd, err := os.Open(pki.CACertPath()); err != nil { - return nil, err + log.Println("Unable to open CA root certificate:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return } else { defer fd.Close() - return ioutil.ReadAll(fd) + + cnt, err := ioutil.ReadAll(fd) + if err != nil { + log.Println("Unable to read CA root certificate:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.String(http.StatusOK, string(cnt)) } } -func getTeamP12File(cert fic.Certificate, _ []byte) (interface{}, error) { +func getTeamP12File(c *gin.Context) { + cert := c.MustGet("cert").(*fic.Certificate) + // Create p12 if necessary if _, err := os.Stat(pki.ClientP12Path(cert.Id)); os.IsNotExist(err) { if err := pki.WriteP12(cert.Id, cert.Password); err != nil { - return nil, err + log.Println("Unable to WriteP12:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return } } if _, err := os.Stat(pki.ClientP12Path(cert.Id)); os.IsNotExist(err) { - return nil, errors.New("Unable to locate the p12. Have you generated it?") + log.Println("Unable to compute ClientP12Path:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, errors.New("Unable to locate the p12. 
Have you generated it?")) + return } else if fd, err := os.Open(pki.ClientP12Path(cert.Id)); err != nil { - return nil, err + log.Println("Unable to open ClientP12Path:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, fmt.Errorf("Unable to open the p12: %w", err)) + return } else { defer fd.Close() - return ioutil.ReadAll(fd) + + data, err := ioutil.ReadAll(fd) + if err != nil { + log.Println("Unable to open ClientP12Path:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, fmt.Errorf("Unable to open the p12: %w", err)) + return + } + + c.Data(http.StatusOK, "application/x-pkcs12", data) } } -func generateClientCert(_ httprouter.Params, _ []byte) (interface{}, error) { +func generateClientCert(c *gin.Context) { // First, generate a new, unique, serial var serial_gen [8]byte if _, err := rand.Read(serial_gen[:]); err != nil { - return nil, err + log.Println("Unable to read enough entropy to generate client certificate:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to read enough entropy"}) + return } for fic.ExistingCertSerial(serial_gen) { if _, err := rand.Read(serial_gen[:]); err != nil { - return nil, err + log.Println("Unable to read enough entropy to generate client certificate:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to read enough entropy"}) + return } } @@ -121,62 +370,83 @@ func generateClientCert(_ httprouter.Params, _ []byte) (interface{}, error) { serial := serial_b.Uint64() // Let's pick a random password - password, err := pki.GeneratePassword() + password, err := fic.GeneratePassword() if err != nil { - return nil, err + log.Println("Unable to generate password:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to generate password: " + err.Error()}) + return } // Ok, now load CA capriv, cacert, err := pki.LoadCA() if err != nil { - return nil, err + log.Println("Unable to load the CA:", err) + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to load the CA"}) + return } // Generate our privkey if err := pki.GenerateClient(serial, cacert.NotBefore, cacert.NotAfter, &cacert, &capriv); err != nil { - return nil, err + log.Println("Unable to generate private key:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to generate private key: " + err.Error()}) + return } // Save in DB - return fic.RegisterCertificate(serial, password) + cert, err := fic.RegisterCertificate(serial, password) + if err != nil { + log.Println("Unable to register certificate:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to register certificate."}) + return + } + + c.JSON(http.StatusOK, CertExported{fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)), cert.Creation, cert.Password, nil, cert.Revoked}) } type CertExported struct { Id string `json:"id"` Creation time.Time `json:"creation"` - IdTeam *uint64 `json:"id_team"` + Password string `json:"password,omitempty"` + IdTeam *int64 `json:"id_team"` Revoked *time.Time `json:"revoked"` } -func getCertificates(_ httprouter.Params, _ []byte) (interface{}, error) { - if certificates, err := fic.GetCertificates(); err != nil { - return nil, err - } else { - ret := make([]CertExported, 0) - for _, cert := range certificates { - dstLinkPath := path.Join(TeamsDir, pki.GetCertificateAssociation(cert.Id)) - - var idTeam *uint64 = nil - if lnk, err := os.Readlink(dstLinkPath); err == nil { - if tid, err := strconv.ParseUint(lnk, 10, 64); err == nil { - idTeam = &tid - } - } - - ret = append(ret, CertExported{fmt.Sprintf("%d", cert.Id), cert.Creation, idTeam, cert.Revoked}) - } - return ret, nil +func getCertificates(c *gin.Context) { + certificates, err := fic.GetCertificates() + if err != nil { + log.Println("Unable to retrieve certificates list:", err) + c.AbortWithStatusJSON(http.StatusInternalServerError, 
gin.H{"errmsg": "An error occurs during certificates retrieval."}) + return } + ret := make([]CertExported, 0) + for _, cert := range certificates { + dstLinkPath := path.Join(TeamsDir, pki.GetCertificateAssociation(cert.Id)) + + var idTeam *int64 = nil + if lnk, err := os.Readlink(dstLinkPath); err == nil { + if tid, err := strconv.ParseInt(lnk, 10, 64); err == nil { + idTeam = &tid + } + } + + ret = append(ret, CertExported{fmt.Sprintf("%0[2]*[1]X", cert.Id, int(math.Ceil(math.Log2(float64(cert.Id))/8)*2)), cert.Creation, "", idTeam, cert.Revoked}) + } + + c.JSON(http.StatusOK, ret) } type CertUploaded struct { Team *int64 `json:"id_team"` } -func updateCertificateAssociation(cert fic.Certificate, body []byte) (interface{}, error) { +func updateCertificateAssociation(c *gin.Context) { + cert := c.MustGet("cert").(*fic.Certificate) + var uc CertUploaded - if err := json.Unmarshal(body, &uc); err != nil { - return nil, err + err := c.ShouldBindJSON(&uc) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } dstLinkPath := path.Join(TeamsDir, pki.GetCertificateAssociation(cert.Id)) @@ -184,11 +454,26 @@ func updateCertificateAssociation(cert fic.Certificate, body []byte) (interface{ if uc.Team != nil { srcLinkPath := fmt.Sprintf("%d", *uc.Team) if err := os.Symlink(srcLinkPath, dstLinkPath); err != nil { - return nil, err + log.Println("Unable to create certificate symlink:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to create certificate symlink: %s", err.Error())}) + return + } + + // Mark team as active to ensure it'll be generated + if ut, err := fic.GetTeam(*uc.Team); err != nil { + log.Println("Unable to GetTeam:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during team retrieval."}) + return + } else if !ut.Active { + ut.Active = true + _, err := ut.Update() + if err != nil { + 
log.Println("Unable to UpdateTeam after updateCertificateAssociation:", err.Error()) + } } } else { os.Remove(dstLinkPath) } - return cert, nil + c.JSON(http.StatusOK, cert) } diff --git a/admin/api/claim.go b/admin/api/claim.go index bdc22475..cf545fff 100644 --- a/admin/api/claim.go +++ b/admin/api/claim.go @@ -2,62 +2,191 @@ package api import ( "encoding/json" + "fmt" + "io/ioutil" + "log" + "net/http" + "path" + "strconv" "time" + "srs.epita.fr/fic-server/admin/generation" "srs.epita.fr/fic-server/libfic" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { +func declareClaimsRoutes(router *gin.RouterGroup) { // Tasks - router.GET("/api/claims/", apiHandler(getClaims)) - router.POST("/api/claims/", apiHandler(newClaim)) - router.DELETE("/api/claims/", apiHandler(clearClaims)) + router.GET("/claims", getClaims) + router.POST("/claims", newClaim) + router.DELETE("/claims", clearClaims) - router.GET("/api/claims/:cid", apiHandler(claimHandler(showClaim))) - router.PUT("/api/claims/:cid", apiHandler(claimHandler(updateClaim))) - router.POST("/api/claims/:cid", apiHandler(claimHandler(addClaimDescription))) - router.DELETE("/api/claims/:cid", apiHandler(claimHandler(deleteClaim))) + apiClaimsRoutes := router.Group("/claims/:cid") + apiClaimsRoutes.Use(ClaimHandler) + apiClaimsRoutes.GET("", showClaim) + apiClaimsRoutes.PUT("", updateClaim) + apiClaimsRoutes.POST("", addClaimDescription) + apiClaimsRoutes.DELETE("", deleteClaim) + + apiClaimsRoutes.GET("/last_update", getClaimLastUpdate) + apiClaimsRoutes.PUT("/descriptions", updateClaimDescription) // Assignees - router.GET("/api/claims-assignees/", apiHandler(getAssignees)) - router.POST("/api/claims-assignees/", apiHandler(newAssignee)) + router.GET("/claims-assignees", getAssignees) + router.POST("/claims-assignees", newAssignee) - router.GET("/api/claims-assignees/:aid", apiHandler(claimAssigneeHandler(showClaimAssignee))) - router.PUT("/api/claims-assignees/:aid", 
apiHandler(claimAssigneeHandler(updateClaimAssignee))) - router.DELETE("/api/claims-assignees/:aid", apiHandler(claimAssigneeHandler(deleteClaimAssignee))) + apiClaimAssigneesRoutes := router.Group("/claims-assignees/:aid") + apiClaimAssigneesRoutes.Use(ClaimAssigneeHandler) + router.GET("/claims-assignees/:aid", showClaimAssignee) + router.PUT("/claims-assignees/:aid", updateClaimAssignee) + router.DELETE("/claims-assignees/:aid", deleteClaimAssignee) } -func getClaims(_ httprouter.Params, _ []byte) (interface{}, error) { - return fic.GetClaims() +func declareExerciceClaimsRoutes(router *gin.RouterGroup) { + router.GET("/claims", getExerciceClaims) +} + +func declareTeamClaimsRoutes(router *gin.RouterGroup) { + router.GET("/api/teams/:tid/issue.json", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + issues, err := team.MyIssueFile() + if err != nil { + log.Printf("Unable to MyIssueFile(tid=%d): %s", team.Id, err.Error()) + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to generate issues.json."}) + return + } + + c.JSON(http.StatusOK, issues) + }) + + router.GET("/claims", getTeamClaims) +} + +func ClaimHandler(c *gin.Context) { + cid, err := strconv.ParseInt(string(c.Params.ByName("cid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid claim identifier"}) + return + } + + claim, err := fic.GetClaim(cid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Requested claim not found"}) + return + } + + c.Set("claim", claim) + + c.Next() +} + +func ClaimAssigneeHandler(c *gin.Context) { + aid, err := strconv.ParseInt(string(c.Params.ByName("aid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid claim assignee identifier"}) + return + } + + assignee, err := fic.GetAssignee(aid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Requested claim-assignee not found"}) + return + } 
+ + c.Set("claim-assignee", assignee) + + c.Next() +} + +func getClaims(c *gin.Context) { + claims, err := fic.GetClaims() + + if err != nil { + log.Println("Unable to getClaims:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claims retrieval."}) + return + } + + c.JSON(http.StatusOK, claims) +} + +func getTeamClaims(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + claims, err := team.GetClaims() + if err != nil { + log.Printf("Unable to GetClaims(tid=%d): %s", team.Id, err.Error()) + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve claim list."}) + return + } + + c.JSON(http.StatusOK, claims) +} + +func getExerciceClaims(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + claims, err := exercice.GetClaims() + if err != nil { + log.Printf("Unable to GetClaims(eid=%d): %s", exercice.Id, err.Error()) + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve claim list."}) + return + } + + c.JSON(http.StatusOK, claims) +} + +func getClaimLastUpdate(c *gin.Context) { + claim := c.MustGet("claim").(*fic.Claim) + + v, err := claim.GetLastUpdate() + if err != nil { + log.Printf("Unable to GetLastUpdate: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claim last update retrieval."}) + return + } + + c.JSON(http.StatusOK, v) } type ClaimExported struct { - Id int64 `json:"id"` - Subject string `json:"subject"` - IdTeam *int64 `json:"id_team"` - Team *fic.Team `json:"team"` - IdAssignee *int64 `json:"id_assignee"` - Assignee *fic.ClaimAssignee `json:"assignee"` - Creation time.Time `json:"creation"` - LastUpdate time.Time `json:"last_update"` - State string `json:"state"` - Priority string `json:"priority"` - Descriptions []fic.ClaimDescription `json:"descriptions"` + Id int64 `json:"id"` + Subject string `json:"subject"` + IdTeam *int64 `json:"id_team"` + Team 
*fic.Team `json:"team"` + IdExercice *int64 `json:"id_exercice"` + Exercice *fic.Exercice `json:"exercice"` + IdAssignee *int64 `json:"id_assignee"` + Assignee *fic.ClaimAssignee `json:"assignee"` + Creation time.Time `json:"creation"` + LastUpdate time.Time `json:"last_update"` + State string `json:"state"` + Priority string `json:"priority"` + Descriptions []*fic.ClaimDescription `json:"descriptions"` } -func showClaim(claim fic.Claim, _ []byte) (interface{}, error) { +func showClaim(c *gin.Context) { + claim := c.MustGet("claim").(*fic.Claim) + var e ClaimExported var err error if e.Team, err = claim.GetTeam(); err != nil { - return nil, err + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find associated team: %s", err.Error())}) + return + } + if e.Exercice, err = claim.GetExercice(); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find associated exercice: %s", err.Error())}) + return } if e.Assignee, err = claim.GetAssignee(); err != nil { - return nil, err + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find associated assignee: %s", err.Error())}) + return } if e.Descriptions, err = claim.GetDescriptions(); err != nil { - return nil, err + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to find claim's descriptions: %s", err.Error())}) + return } e.LastUpdate = e.Creation @@ -70,118 +199,301 @@ func showClaim(claim fic.Claim, _ []byte) (interface{}, error) { e.Id = claim.Id e.IdAssignee = claim.IdAssignee e.IdTeam = claim.IdTeam + e.IdExercice = claim.IdExercice e.Subject = claim.Subject e.Creation = claim.Creation e.State = claim.State e.Priority = claim.Priority - return e, nil + + c.JSON(http.StatusOK, e) } type ClaimUploaded struct { - Subject string `json:"subject"` - Team *int64 `json:"id_team"` - Assignee *int64 `json:"id_assignee"` - Priority string `json:"priority"` + fic.Claim + Whoami 
*int64 `json:"whoami"` } -func newClaim(_ httprouter.Params, body []byte) (interface{}, error) { +func newClaim(c *gin.Context) { var uc ClaimUploaded - if err := json.Unmarshal(body, &uc); err != nil { - return nil, err + err := c.ShouldBindJSON(&uc) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + if uc.Subject == "" { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Claim's subject cannot be empty."}) + return } var t *fic.Team - if uc.Team != nil { - if team, err := fic.GetTeam(*uc.Team); err != nil { - return nil, err + if uc.IdTeam != nil { + if team, err := fic.GetTeam(*uc.IdTeam); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated team: %s", err.Error())}) + return } else { - t = &team + t = team } } else { t = nil } - var a *fic.ClaimAssignee - if uc.Assignee != nil { - if assignee, err := fic.GetAssignee(*uc.Assignee); err != nil { - return nil, err + var e *fic.Exercice + if uc.IdExercice != nil { + if exercice, err := fic.GetExercice(*uc.IdExercice); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated exercice: %s", err.Error())}) + return } else { - a = &assignee + e = exercice + } + } else { + e = nil + } + + var a *fic.ClaimAssignee + if uc.IdAssignee != nil { + if assignee, err := fic.GetAssignee(*uc.IdAssignee); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated assignee: %s", err.Error())}) + return + } else { + a = assignee } } else { a = nil } - return fic.NewClaim(uc.Subject, t, a, uc.Priority) -} - -func clearClaims(_ httprouter.Params, _ []byte) (interface{}, error) { - return fic.ClearClaims() -} - -func addClaimDescription(claim fic.Claim, body []byte) (interface{}, error) { - var ud fic.ClaimDescription - if err := json.Unmarshal(body, &ud); err != nil { - return nil, err + if 
uc.Priority == "" { + uc.Priority = "medium" } - if assignee, err := fic.GetAssignee(ud.IdAssignee); err != nil { - return nil, err + claim, err := fic.NewClaim(uc.Subject, t, e, a, uc.Priority) + if err != nil { + log.Println("Unable to newClaim:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to register new claim"}) + return + } + + c.JSON(http.StatusOK, claim) +} + +func clearClaims(c *gin.Context) { + nb, err := fic.ClearClaims() + if err != nil { + log.Printf("Unable to clearClaims: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claims clearing."}) + return + } + + c.JSON(http.StatusOK, nb) +} + +func generateTeamIssuesFile(team fic.Team) error { + if generation.GeneratorSocket == "" { + if my, err := team.MyIssueFile(); err != nil { + return fmt.Errorf("Unable to generate issue FILE (tid=%d): %w", team.Id, err) + } else if j, err := json.Marshal(my); err != nil { + return fmt.Errorf("Unable to encode issues' file JSON: %w", err) + } else if err = ioutil.WriteFile(path.Join(TeamsDir, fmt.Sprintf("%d", team.Id), "issues.json"), j, 0644); err != nil { + return fmt.Errorf("Unable to write issues' file: %w", err) + } } else { - return claim.AddDescription(ud.Content, assignee) + resp, err := generation.PerformGeneration(fic.GenStruct{Type: fic.GenTeamIssues, TeamId: team.Id}) + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + v, _ := ioutil.ReadAll(resp.Body) + return fmt.Errorf("%s", string(v)) + } } + return nil } -func updateClaim(claim fic.Claim, body []byte) (interface{}, error) { - var uc fic.Claim - if err := json.Unmarshal(body, &uc); err != nil { - return nil, err +func addClaimDescription(c *gin.Context) { + claim := c.MustGet("claim").(*fic.Claim) + + var ud fic.ClaimDescription + err := c.ShouldBindJSON(&ud) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, 
gin.H{"errmsg": err.Error()}) + return + } + + assignee, err := fic.GetAssignee(ud.IdAssignee) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Unable to get associated assignee: %s", err.Error())}) + return + } + + description, err := claim.AddDescription(ud.Content, assignee, ud.Publish) + if err != nil { + log.Println("Unable to addClaimDescription:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to add description"}) + return + } + + if team, _ := claim.GetTeam(); team != nil { + err = generateTeamIssuesFile(*team) + if err != nil { + log.Println("Unable to generateTeamIssuesFile after addClaimDescription:", err.Error()) + } + } + + c.JSON(http.StatusOK, description) +} + +func updateClaimDescription(c *gin.Context) { + claim := c.MustGet("claim").(*fic.Claim) + + var ud fic.ClaimDescription + err := c.ShouldBindJSON(&ud) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + if _, err := ud.Update(); err != nil { + log.Println("Unable to updateClaimDescription:", err.Error()) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "An error occurs during claim description updating."}) + return + } + if team, _ := claim.GetTeam(); team != nil { + err = generateTeamIssuesFile(*team) + if err != nil { + log.Println("Unable to generateTeamIssuesFile:", err.Error()) + } + } + + c.JSON(http.StatusOK, ud) +} + +func updateClaim(c *gin.Context) { + claim := c.MustGet("claim").(*fic.Claim) + + var uc ClaimUploaded + err := c.ShouldBindJSON(&uc) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } uc.Id = claim.Id - if _, err := uc.Update(); err != nil { - return nil, err + _, err = uc.Update() + if err != nil { + log.Printf("Unable to updateClaim: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during 
claim update."}) + return + } + + if claim.State != uc.State { + if uc.Whoami != nil { + if assignee, err := fic.GetAssignee(*uc.Whoami); err == nil { + claim.AddDescription(fmt.Sprintf("%s a changé l'état de la tâche vers %q (était %q).", assignee.Name, uc.State, claim.State), assignee, true) + } + } + } + + if claim.IdAssignee != uc.IdAssignee { + if uc.Whoami != nil { + if whoami, err := fic.GetAssignee(*uc.Whoami); err == nil { + if uc.IdAssignee != nil { + if assignee, err := fic.GetAssignee(*uc.IdAssignee); err == nil { + if assignee.Id != whoami.Id { + claim.AddDescription(fmt.Sprintf("%s a assigné la tâche à %s.", whoami.Name, assignee.Name), whoami, false) + } else { + claim.AddDescription(fmt.Sprintf("%s s'est assigné la tâche.", assignee.Name), whoami, false) + } + } + } else { + claim.AddDescription(fmt.Sprintf("%s a retiré l'attribution de la tâche.", whoami.Name), whoami, false) + } + } + } + } + + if team, _ := claim.GetTeam(); team != nil { + err = generateTeamIssuesFile(*team) + } + + c.JSON(http.StatusOK, uc) +} + +func deleteClaim(c *gin.Context) { + claim := c.MustGet("claim").(*fic.Claim) + + if nb, err := claim.Delete(); err != nil { + log.Println("Unable to deleteClaim:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claim deletion."}) + return } else { - return uc, nil + c.JSON(http.StatusOK, nb) } } -func deleteClaim(claim fic.Claim, _ []byte) (interface{}, error) { - return claim.Delete() -} - -func getAssignees(_ httprouter.Params, _ []byte) (interface{}, error) { - return fic.GetAssignees() -} - -func showClaimAssignee(assignee fic.ClaimAssignee, _ []byte) (interface{}, error) { - return assignee, nil -} -func newAssignee(_ httprouter.Params, body []byte) (interface{}, error) { - var ua fic.ClaimAssignee - if err := json.Unmarshal(body, &ua); err != nil { - return nil, err +func getAssignees(c *gin.Context) { + assignees, err := fic.GetAssignees() + if err != nil { + 
log.Println("Unable to getAssignees:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during assignees retrieval."}) + return } - return fic.NewClaimAssignee(ua.Name) + c.JSON(http.StatusOK, assignees) } -func updateClaimAssignee(assignee fic.ClaimAssignee, body []byte) (interface{}, error) { +func showClaimAssignee(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("claim-assignee").(*fic.ClaimAssignee)) +} +func newAssignee(c *gin.Context) { var ua fic.ClaimAssignee - if err := json.Unmarshal(body, &ua); err != nil { - return nil, err + err := c.ShouldBindJSON(&ua) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + assignee, err := fic.NewClaimAssignee(ua.Name) + if err != nil { + log.Println("Unable to newAssignee:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during assignee creation."}) + return + } + + c.JSON(http.StatusOK, assignee) +} + +func updateClaimAssignee(c *gin.Context) { + assignee := c.MustGet("claim-assignee").(*fic.ClaimAssignee) + + var ua fic.ClaimAssignee + err := c.ShouldBindJSON(&ua) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } ua.Id = assignee.Id if _, err := ua.Update(); err != nil { - return nil, err - } else { - return ua, nil + log.Println("Unable to updateClaimAssignee:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during claim assignee update."}) + return } + + c.JSON(http.StatusOK, ua) } -func deleteClaimAssignee(assignee fic.ClaimAssignee, _ []byte) (interface{}, error) { - return assignee.Delete() +func deleteClaimAssignee(c *gin.Context) { + assignee := c.MustGet("claim-assignee").(*fic.ClaimAssignee) + + if _, err := assignee.Delete(); err != nil { + log.Println("Unable to deleteClaimAssignee:", err.Error()) + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("An error occurs during claim assignee deletion: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, true) } diff --git a/admin/api/events.go b/admin/api/events.go index 8665f62c..17500535 100644 --- a/admin/api/events.go +++ b/admin/api/events.go @@ -3,22 +3,45 @@ package api import ( "encoding/json" "io/ioutil" + "log" + "net/http" "path" + "strconv" "srs.epita.fr/fic-server/libfic" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/events/", apiHandler(getEvents)) - router.GET("/api/events.json", apiHandler(getLastEvents)) - router.POST("/api/events/", apiHandler(newEvent)) - router.DELETE("/api/events/", apiHandler(clearEvents)) +func declareEventsRoutes(router *gin.RouterGroup) { + router.GET("/events", getEvents) + router.GET("/events.json", getLastEvents) + router.POST("/events", newEvent) + router.DELETE("/events", clearEvents) - router.GET("/api/events/:evid", apiHandler(eventHandler(showEvent))) - router.PUT("/api/events/:evid", apiHandler(eventHandler(updateEvent))) - router.DELETE("/api/events/:evid", apiHandler(eventHandler(deleteEvent))) + apiEventsRoutes := router.Group("/events/:evid") + apiEventsRoutes.Use(EventHandler) + apiEventsRoutes.GET("", showEvent) + apiEventsRoutes.PUT("", updateEvent) + apiEventsRoutes.DELETE("", deleteEvent) +} + +func EventHandler(c *gin.Context) { + evid, err := strconv.ParseInt(string(c.Params.ByName("evid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid event identifier"}) + return + } + + event, err := fic.GetEvent(evid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Event not found"}) + return + } + + c.Set("event", event) + + c.Next() } func genEventsFile() error { @@ -33,60 +56,99 @@ func genEventsFile() error { return nil } -func getEvents(_ httprouter.Params, _ []byte) (interface{}, 
error) { - if evts, err := fic.GetEvents(); err != nil { - return nil, err - } else { - return evts, nil +func getEvents(c *gin.Context) { + evts, err := fic.GetEvents() + if err != nil { + log.Println("Unable to GetEvents:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve events list"}) + return } + + c.JSON(http.StatusOK, evts) } -func getLastEvents(_ httprouter.Params, _ []byte) (interface{}, error) { - if evts, err := fic.GetLastEvents(); err != nil { - return nil, err - } else { - return evts, nil +func getLastEvents(c *gin.Context) { + evts, err := fic.GetLastEvents() + + if err != nil { + log.Println("Unable to GetLastEvents:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve last events list"}) + return } + + c.JSON(http.StatusOK, evts) } -func showEvent(event fic.Event, _ []byte) (interface{}, error) { - return event, nil -} - -func newEvent(_ httprouter.Params, body []byte) (interface{}, error) { +func newEvent(c *gin.Context) { var ue fic.Event - if err := json.Unmarshal(body, &ue); err != nil { - return nil, err + err := c.ShouldBindJSON(&ue) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } - if event, err := fic.NewEvent(ue.Text, ue.Kind); err != nil { - return nil, err - } else { - genEventsFile() - return event, nil + event, err := fic.NewEvent(ue.Text, ue.Kind) + if err != nil { + log.Printf("Unable to newEvent: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event creation."}) + return } + + genEventsFile() + + c.JSON(http.StatusOK, event) } -func clearEvents(_ httprouter.Params, _ []byte) (interface{}, error) { - return fic.ClearEvents() +func clearEvents(c *gin.Context) { + nb, err := fic.ClearEvents() + if err != nil { + log.Printf("Unable to clearEvent: %s", err.Error()) + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event clearing."}) + return + } + + c.JSON(http.StatusOK, nb) } -func updateEvent(event fic.Event, body []byte) (interface{}, error) { +func showEvent(c *gin.Context) { + event := c.MustGet("event").(*fic.Event) + c.JSON(http.StatusOK, event) +} + +func updateEvent(c *gin.Context) { + event := c.MustGet("event").(*fic.Event) + var ue fic.Event - if err := json.Unmarshal(body, &ue); err != nil { - return nil, err + err := c.ShouldBindJSON(&ue) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } ue.Id = event.Id if _, err := ue.Update(); err != nil { - return nil, err - } else { - genEventsFile() - return ue, nil + log.Printf("Unable to updateEvent: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event update."}) + return } + + genEventsFile() + + c.JSON(http.StatusOK, ue) } -func deleteEvent(event fic.Event, _ []byte) (interface{}, error) { - return event.Delete() +func deleteEvent(c *gin.Context) { + event := c.MustGet("event").(*fic.Event) + + _, err := event.Delete() + if err != nil { + log.Printf("Unable to deleteEvent: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during event deletion."}) + return + } + + genEventsFile() + + c.JSON(http.StatusOK, true) } diff --git a/admin/api/exercice.go b/admin/api/exercice.go index e3ca6f3e..2c196153 100644 --- a/admin/api/exercice.go +++ b/admin/api/exercice.go @@ -1,141 +1,771 @@ package api import ( - "encoding/hex" - "encoding/json" - "errors" + "bytes" + "fmt" + "log" + "net/http" + "path" + "reflect" + "strconv" "strings" + "time" "srs.epita.fr/fic-server/admin/sync" "srs.epita.fr/fic-server/libfic" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/exercices/", apiHandler(listExercices)) +func 
declareGlobalExercicesRoutes(router *gin.RouterGroup) { + router.GET("/resolutions.json", exportResolutionMovies) + router.GET("/exercices_stats.json", getExercicesStats) + router.GET("/exercices_forge_bindings.json", getExercicesForgeLinks) + router.GET("/tags", listTags) +} - router.GET("/api/exercices/:eid", apiHandler(exerciceHandler(showExercice))) - router.PUT("/api/exercices/:eid", apiHandler(exerciceHandler(updateExercice))) - router.DELETE("/api/exercices/:eid", apiHandler(exerciceHandler(deleteExercice))) +func declareExercicesRoutes(router *gin.RouterGroup) { + router.GET("/exercices", listExercices) + router.POST("/exercices", createExercice) - router.GET("/api/exercices/:eid/files", apiHandler(exerciceHandler(listExerciceFiles))) - router.POST("/api/exercices/:eid/files", apiHandler(exerciceHandler(createExerciceFile))) - router.GET("/api/exercices/:eid/files/:fid", apiHandler(exerciceFileHandler(showExerciceFile))) - router.DELETE("/api/exercices/:eid/files/:fid", apiHandler(exerciceFileHandler(deleteExerciceFile))) + apiExercicesRoutes := router.Group("/exercices/:eid") + apiExercicesRoutes.Use(ExerciceHandler) + apiExercicesRoutes.GET("", showExercice) + apiExercicesRoutes.PUT("", updateExercice) + apiExercicesRoutes.PATCH("", partUpdateExercice) + apiExercicesRoutes.DELETE("", deleteExercice) - router.GET("/api/exercices/:eid/hints", apiHandler(exerciceHandler(listExerciceHints))) - router.POST("/api/exercices/:eid/hints", apiHandler(exerciceHandler(createExerciceHint))) - router.GET("/api/exercices/:eid/hints/:hid", apiHandler(hintHandler(showExerciceHint))) - router.PUT("/api/exercices/:eid/hints/:hid", apiHandler(hintHandler(updateExerciceHint))) - router.DELETE("/api/exercices/:eid/hints/:hid", apiHandler(hintHandler(deleteExerciceHint))) + apiExercicesRoutes.POST("/diff-sync", APIDiffExerciceWithRemote) - router.GET("/api/exercices/:eid/keys", apiHandler(exerciceHandler(listExerciceKeys))) - router.POST("/api/exercices/:eid/keys", 
apiHandler(exerciceHandler(createExerciceKey))) - router.GET("/api/exercices/:eid/keys/:kid", apiHandler(keyHandler(showExerciceKey))) - router.PUT("/api/exercices/:eid/keys/:kid", apiHandler(keyHandler(updateExerciceKey))) - router.DELETE("/api/exercices/:eid/keys/:kid", apiHandler(keyHandler(deleteExerciceKey))) + apiExercicesRoutes.GET("/history.json", getExerciceHistory) - router.GET("/api/exercices/:eid/quiz", apiHandler(exerciceHandler(listExerciceQuiz))) - router.GET("/api/exercices/:eid/quiz/:qid", apiHandler(quizHandler(showExerciceQuiz))) - router.DELETE("/api/exercices/:eid/quiz/:qid", apiHandler(quizHandler(deleteExerciceQuiz))) + apiExercicesRoutes.GET("/stats.json", getExerciceStats) - // Synchronize - router.POST("/api/sync/exercices/:eid/files", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceFiles(sync.GlobalImporter, exercice), nil - }))) - router.POST("/api/sync/exercices/:eid/hints", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceHints(sync.GlobalImporter, exercice), nil - }))) - router.POST("/api/sync/exercices/:eid/keys", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceKeys(sync.GlobalImporter, exercice), nil - }))) - router.POST("/api/sync/exercices/:eid/quiz", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceMCQ(sync.GlobalImporter, exercice), nil - }))) + apiExercicesRoutes.GET("/tries", listTries) - router.POST("/api/sync/exercices/:eid/fixurlid", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - if exercice.FixURLId() { - return exercice.Update() + apiTriesRoutes := apiExercicesRoutes.Group("/tries/:trid") + apiTriesRoutes.Use(ExerciceTryHandler) + apiTriesRoutes.GET("", getExerciceTry) + apiTriesRoutes.DELETE("", 
deleteExerciceTry) + + apiHistoryRoutes := apiExercicesRoutes.Group("/history.json") + apiHistoryRoutes.Use(AssigneeCookieHandler) + apiHistoryRoutes.PUT("", appendExerciceHistory) + apiHistoryRoutes.PATCH("", updateExerciceHistory) + apiHistoryRoutes.DELETE("", delExerciceHistory) + + apiExercicesRoutes.GET("/hints", listExerciceHints) + apiExercicesRoutes.POST("/hints", createExerciceHint) + + apiHintsRoutes := apiExercicesRoutes.Group("/hints/:hid") + apiHintsRoutes.Use(HintHandler) + apiHintsRoutes.GET("", showExerciceHint) + apiHintsRoutes.PUT("", updateExerciceHint) + apiHintsRoutes.DELETE("", deleteExerciceHint) + apiHintsRoutes.GET("/dependancies", showExerciceHintDeps) + + apiExercicesRoutes.GET("/flags", listExerciceFlags) + apiExercicesRoutes.POST("/flags", createExerciceFlag) + + apiFlagsRoutes := apiExercicesRoutes.Group("/flags/:kid") + apiFlagsRoutes.Use(FlagKeyHandler) + apiFlagsRoutes.GET("", showExerciceFlag) + apiFlagsRoutes.PUT("", updateExerciceFlag) + apiFlagsRoutes.POST("/try", tryExerciceFlag) + apiFlagsRoutes.DELETE("/", deleteExerciceFlag) + apiFlagsRoutes.GET("/dependancies", showExerciceFlagDeps) + apiFlagsRoutes.GET("/statistics", showExerciceFlagStats) + apiFlagsRoutes.DELETE("/tries", deleteExerciceFlagTries) + apiFlagsRoutes.GET("/choices/", listFlagChoices) + apiFlagsChoicesRoutes := apiExercicesRoutes.Group("/choices/:cid") + apiFlagsChoicesRoutes.Use(FlagChoiceHandler) + apiFlagsChoicesRoutes.GET("", showFlagChoice) + apiFlagsRoutes.POST("/choices/", createFlagChoice) + apiFlagsChoicesRoutes.PUT("", updateFlagChoice) + apiFlagsChoicesRoutes.DELETE("", deleteFlagChoice) + + apiQuizRoutes := apiExercicesRoutes.Group("/quiz/:qid") + apiQuizRoutes.Use(FlagQuizHandler) + apiExercicesRoutes.GET("/quiz", listExerciceQuiz) + apiQuizRoutes.GET("", showExerciceQuiz) + apiQuizRoutes.PUT("", updateExerciceQuiz) + apiQuizRoutes.DELETE("", deleteExerciceQuiz) + apiQuizRoutes.GET("/dependancies", showExerciceQuizDeps) + 
apiQuizRoutes.GET("/statistics", showExerciceQuizStats) + apiQuizRoutes.DELETE("/tries", deleteExerciceQuizTries) + + apiExercicesRoutes.GET("/tags", listExerciceTags) + apiExercicesRoutes.POST("/tags", addExerciceTag) + apiExercicesRoutes.PUT("/tags", updateExerciceTags) + + declareFilesRoutes(apiExercicesRoutes) + declareExerciceClaimsRoutes(apiExercicesRoutes) + + // Remote + router.GET("/remote/themes/:thid/exercices/:exid", sync.ApiGetRemoteExercice) + router.GET("/remote/themes/:thid/exercices/:exid/flags", sync.ApiGetRemoteExerciceFlags) + router.GET("/remote/themes/:thid/exercices/:exid/hints", sync.ApiGetRemoteExerciceHints) +} + +type Exercice struct { + *fic.Exercice + ForgeLink string `json:"forge_link,omitempty"` +} + +func ExerciceHandler(c *gin.Context) { + eid, err := strconv.ParseInt(string(c.Params.ByName("eid")), 10, 32) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid exercice identifier"}) + return + } + + var exercice *fic.Exercice + if theme, exists := c.Get("theme"); exists { + exercice, err = theme.(*fic.Theme).GetExercice(int(eid)) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Exercice not found"}) + return + } + } else { + exercice, err = fic.GetExercice(eid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Exercice not found"}) + return + } + + if exercice.IdTheme != nil { + theme, err = exercice.GetTheme() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to find the attached theme."}) + return } - return 0, nil - }))) + + c.Set("theme", theme) + } else { + c.Set("theme", &fic.StandaloneExercicesTheme) + } + } + + c.Set("exercice", exercice) + + c.Next() } -func listExercices(_ httprouter.Params, body []byte) (interface{}, error) { - // List all exercices - return fic.GetExercices() +func HintHandler(c *gin.Context) { + hid, err := strconv.ParseInt(string(c.Params.ByName("hid")), 10, 
32) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid hint identifier"}) + return + } + + exercice := c.MustGet("exercice").(*fic.Exercice) + hint, err := exercice.GetHint(hid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Hint not found"}) + return + } + + c.Set("hint", hint) + + c.Next() } -func listExerciceFiles(exercice fic.Exercice, body []byte) (interface{}, error) { - return exercice.GetFiles() +func FlagKeyHandler(c *gin.Context) { + kid, err := strconv.ParseInt(string(c.Params.ByName("kid")), 10, 32) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid flag identifier"}) + return + } + + var flag *fic.FlagKey + if exercice, exists := c.Get("exercice"); exists { + flag, err = exercice.(*fic.Exercice).GetFlagKey(int(kid)) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Flag not found"}) + return + } + } else { + flag, err = fic.GetFlagKey(int(kid)) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Flag not found"}) + return + } + } + + c.Set("flag-key", flag) + + c.Next() } -func listExerciceHints(exercice fic.Exercice, body []byte) (interface{}, error) { - return exercice.GetHints() +func FlagChoiceHandler(c *gin.Context) { + cid, err := strconv.ParseInt(string(c.Params.ByName("cid")), 10, 32) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid choice identifier"}) + return + } + + flagkey := c.MustGet("flag-key").(*fic.FlagKey) + choice, err := flagkey.GetChoice(int(cid)) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Choice not found"}) + return + } + + c.Set("flag-choice", choice) + + c.Next() } -func listExerciceKeys(exercice fic.Exercice, body []byte) (interface{}, error) { - return exercice.GetKeys() +func FlagQuizHandler(c *gin.Context) { + qid, err := strconv.ParseInt(string(c.Params.ByName("qid")), 10, 64) + 
if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid quiz identifier"}) + return + } + + var quiz *fic.MCQ + if exercice, exists := c.Get("exercice"); exists { + quiz, err = exercice.(*fic.Exercice).GetMCQById(int(qid)) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Quiz not found"}) + return + } + } else { + quiz, err = fic.GetMCQ(int(qid)) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Quiz not found"}) + return + } + } + + c.Set("flag-quiz", quiz) + + c.Next() } -func listExerciceQuiz(exercice fic.Exercice, body []byte) (interface{}, error) { - return exercice.GetMCQ() +func listExercices(c *gin.Context) { + if theme, exists := c.Get("theme"); exists { + exercices, err := theme.(*fic.Theme).GetExercices() + if err != nil { + log.Println("Unable to listThemedExercices:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during exercices listing."}) + return + } + + c.JSON(http.StatusOK, exercices) + } else { + exercices, err := fic.GetExercices() + if err != nil { + log.Println("Unable to listThemedExercices:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during exercices listing."}) + return + } + + c.JSON(http.StatusOK, exercices) + } } -func showExercice(exercice fic.Exercice, body []byte) (interface{}, error) { - return exercice, nil +func listTags(c *gin.Context) { + exercices, err := fic.GetExercices() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + ret := map[string][]*fic.Exercice{} + for _, exercice := range exercices { + tags, err := exercice.GetTags() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + for _, t := range tags { + if _, ok := ret[t]; !ok { + ret[t] = []*fic.Exercice{} + } + + ret[t] = 
append(ret[t], exercice) + } + } + + c.JSON(http.StatusOK, ret) } -func deleteExercice(exercice fic.Exercice, _ []byte) (interface{}, error) { - return exercice.Delete() +// Generate the csv to export with: +// +// curl -s http://127.0.0.1:8081/api/resolutions.json | jq -r ".[] | [ .theme,.level,.title, @uri \"https://fic.srs.epita.fr/$(date +%Y)/\\(.videoURI)\" ] | join(\";\")" +func exportResolutionMovies(c *gin.Context) { + exercices, err := fic.GetExercices() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + export := []map[string]string{} + for _, exercice := range exercices { + var tname string + if exercice.IdTheme != nil { + theme, err := fic.GetTheme(*exercice.IdTheme) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + tname = theme.Name + } + if len(exercice.VideoURI) > 0 { + level, _ := exercice.GetLevel() + export = append(export, map[string]string{ + "videoURI": strings.Replace(exercice.VideoURI, "$FILES$/", "files/", 1), + "theme": tname, + "title": exercice.Title, + "level": fmt.Sprintf("%d", level), + }) + } + } + + c.JSON(http.StatusOK, export) } -func updateExercice(exercice fic.Exercice, body []byte) (interface{}, error) { - var ue fic.Exercice - if err := json.Unmarshal(body, &ue); err != nil { +func loadFlags(n func() ([]fic.Flag, error)) (interface{}, error) { + if flags, err := n(); err != nil { return nil, err + } else { + var ret []fic.Flag + + for _, flag := range flags { + if f, ok := flag.(*fic.FlagKey); ok { + if k, err := fic.GetFlagKey(f.Id); err != nil { + return nil, err + } else { + ret = append(ret, k) + } + } else if f, ok := flag.(*fic.MCQ); ok { + if m, err := fic.GetMCQ(f.Id); err != nil { + return nil, err + } else { + ret = append(ret, m) + } + } else { + return nil, fmt.Errorf("Flag type %T not implemented for this flag.", f) + } + } + + return ret, nil + } +} + +func listExerciceHints(c 
*gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + hints, err := exercice.GetHints() + if err != nil { + log.Println("Unable to listExerciceHints:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving hints"}) + return + } + + c.JSON(http.StatusOK, hints) +} + +func listExerciceFlags(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + flags, err := exercice.GetFlagKeys() + if err != nil { + log.Println("Unable to listExerciceFlags:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving exercice flags"}) + return + } + + c.JSON(http.StatusOK, flags) +} + +func listFlagChoices(c *gin.Context) { + flag := c.MustGet("flag-key").(*fic.FlagKey) + + choices, err := flag.GetChoices() + if err != nil { + log.Println("Unable to listFlagChoices:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving flag choices"}) + return + } + + c.JSON(http.StatusOK, choices) +} + +func listExerciceQuiz(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + quiz, err := exercice.GetMCQ() + if err != nil { + log.Println("Unable to listExerciceQuiz:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving quiz list"}) + return + } + + c.JSON(http.StatusOK, quiz) +} + +func showExercice(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + var forgelink string + if fli, ok := sync.GlobalImporter.(sync.ForgeLinkedImporter); ok { + if u, _ := fli.GetExerciceLink(exercice); u != nil { + forgelink = u.String() + } + } + + c.JSON(http.StatusOK, Exercice{exercice, forgelink}) +} + +func getExerciceHistory(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + history, err := exercice.GetHistory() + if err != nil { + log.Println("Unable to 
getExerciceHistory:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving exercice history"}) + return + } + + c.JSON(http.StatusOK, history) +} + +type exerciceStats struct { + IdExercice int64 `json:"id_exercice,omitempty"` + TeamTries int64 `json:"team_tries"` + TotalTries int64 `json:"total_tries"` + SolvedCount int64 `json:"solved_count"` + FlagSolved []int64 `json:"flag_solved"` + MCQSolved []int64 `json:"mcq_solved"` + CurrentGain int64 `json:"current_gain"` +} + +func getExerciceStats(c *gin.Context) { + e := c.MustGet("exercice").(*fic.Exercice) + + current_gain := e.Gain + if fic.DiscountedFactor > 0 { + decoted_exercice, err := fic.GetDiscountedExercice(e.Id) + if err == nil { + current_gain = decoted_exercice.Gain + } else { + log.Println("Unable to fetch decotedExercice:", err.Error()) + } + } + + c.JSON(http.StatusOK, exerciceStats{ + TeamTries: e.TriedTeamCount(), + TotalTries: e.TriedCount(), + SolvedCount: e.SolvedCount(), + FlagSolved: e.FlagSolved(), + MCQSolved: e.MCQSolved(), + CurrentGain: current_gain, + }) +} + +func getExercicesStats(c *gin.Context) { + exercices, err := fic.GetDiscountedExercices() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + ret := []exerciceStats{} + for _, e := range exercices { + ret = append(ret, exerciceStats{ + IdExercice: e.Id, + TeamTries: e.TriedTeamCount(), + TotalTries: e.TriedCount(), + SolvedCount: e.SolvedCount(), + FlagSolved: e.FlagSolved(), + MCQSolved: e.MCQSolved(), + CurrentGain: e.Gain, + }) + } + + c.JSON(http.StatusOK, ret) +} + +type themeForgeBinding struct { + ThemeName string `json:"name"` + ThemePath string `json:"path"` + ForgeLink string `json:"forge_link"` + Exercices []exerciceForgeBinding `json:"exercices"` +} + +type exerciceForgeBinding struct { + ExerciceName string `json:"name"` + ExercicePath string `json:"path"` + ForgeLink string 
`json:"forge_link"` +} + +func getExercicesForgeLinks(c *gin.Context) { + themes, err := fic.GetThemesExtended() + if err != nil { + log.Println("Unable to listThemes:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during themes listing."}) + return + } + + fli, ok := sync.GlobalImporter.(sync.ForgeLinkedImporter) + if !ok { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Current importer is not compatible with ForgeLinkedImporter"}) + return + } + + ret := []themeForgeBinding{} + for _, theme := range themes { + exercices, err := theme.GetExercices() + if err != nil { + log.Println("Unable to listExercices:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during exercice listing."}) + return + } + + var exlinks []exerciceForgeBinding + for _, exercice := range exercices { + var forgelink string + if u, _ := fli.GetExerciceLink(exercice); u != nil { + forgelink = u.String() + } + + exlinks = append(exlinks, exerciceForgeBinding{ + ExerciceName: exercice.Title, + ExercicePath: exercice.Path, + ForgeLink: forgelink, + }) + } + + var forgelink string + if u, _ := fli.GetThemeLink(theme); u != nil { + forgelink = u.String() + } + ret = append(ret, themeForgeBinding{ + ThemeName: theme.Name, + ThemePath: theme.Path, + ForgeLink: forgelink, + Exercices: exlinks, + }) + } + + c.JSON(http.StatusOK, ret) +} + +func AssigneeCookieHandler(c *gin.Context) { + myassignee, err := c.Cookie("myassignee") + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "You must be authenticated to perform this action."}) + return + } + + aid, err := strconv.ParseInt(myassignee, 10, 32) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "You must be authenticated to perform this action: invalid assignee identifier."}) + return + } + + assignee, err := fic.GetAssignee(aid) + if err != nil { + 
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "You must be authenticated to perform this action: assignee not found."}) + return + } + + c.Set("assignee", assignee) + + c.Next() +} + +type uploadedExerciceHistory struct { + IdTeam int64 `json:"team_id"` + Kind string + Time time.Time + Secondary *int64 + Coeff float32 +} + +func appendExerciceHistory(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + myassignee := c.MustGet("assignee").(*fic.ClaimAssignee) + + var uh uploadedExerciceHistory + err := c.ShouldBindJSON(&uh) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + err = exercice.AppendHistoryItem(uh.IdTeam, uh.Kind, uh.Secondary) + if err != nil { + log.Println("Unable to appendExerciceHistory:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during history moditication."}) + return + } + log.Printf("AUDIT: %s performs an history append: %s for team %d, exercice %d and optional %v", myassignee.Name, uh.Kind, uh.IdTeam, exercice.Id, uh.Secondary) + + c.JSON(http.StatusOK, uh) +} + +func updateExerciceHistory(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + myassignee := c.MustGet("assignee").(*fic.ClaimAssignee) + + var uh uploadedExerciceHistory + err := c.ShouldBindJSON(&uh) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + _, err = exercice.UpdateHistoryItem(uh.Coeff, uh.IdTeam, uh.Kind, uh.Time, uh.Secondary) + if err != nil { + log.Println("Unable to updateExerciceHistory:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during history update."}) + return + } + log.Printf("AUDIT: %s performs an history update: %s for team %d, exercice %d and optional %v, with coeff %f", myassignee.Name, uh.Kind, uh.IdTeam, exercice.Id, uh.Secondary, uh.Coeff) + + c.JSON(http.StatusOK, 
uh) +} + +func delExerciceHistory(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + myassignee := c.MustGet("assignee").(*fic.ClaimAssignee) + + var uh uploadedExerciceHistory + err := c.ShouldBindJSON(&uh) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + _, err = exercice.DelHistoryItem(uh.IdTeam, uh.Kind, uh.Time, uh.Secondary) + if err != nil { + log.Println("Unable to delExerciceHistory:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during history deletion."}) + return + } + log.Printf("AUDIT: %s performs an history deletion: %s for team %d, exercice %d and optional %v", myassignee.Name, uh.Kind, uh.IdTeam, exercice.Id, uh.Secondary) + + c.JSON(http.StatusOK, true) +} + +func deleteExercice(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + _, err := exercice.DeleteCascade() + if err != nil { + log.Println("Unable to deleteExercice:", err.Error()) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "An error occurs during exercice deletion"}) + return + } + + c.JSON(http.StatusOK, true) +} + +func updateExercice(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + var ue fic.Exercice + err := c.ShouldBindJSON(&ue) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } ue.Id = exercice.Id if len(ue.Title) == 0 { - return nil, errors.New("Exercice's title not filled") + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Exercice's title not filled"}) + return } if _, err := ue.Update(); err != nil { - return nil, err + log.Println("Unable to updateExercice:", err.Error()) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "An error occurs during exercice update"}) + return } - return ue, nil + c.JSON(http.StatusOK, ue) } -func createExercice(theme fic.Theme, body []byte) (interface{}, error) { 
+type patchExercice struct { + Language *string `json:"lang,omitempty"` + Title *string `json:"title"` + Disabled *bool `json:"disabled"` + WIP *bool `json:"wip"` + URLId *string `json:"urlid"` + Statement *string `json:"statement"` + Overview *string `json:"overview"` + Headline *string `json:"headline"` + Finished *string `json:"finished"` + Issue *string `json:"issue"` + IssueKind *string `json:"issuekind"` + Gain *int64 `json:"gain"` + Coefficient *float64 `json:"coefficient"` +} + +func partUpdateExercice(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + var ue patchExercice + err := c.ShouldBindJSON(&ue) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + for _, field := range reflect.VisibleFields(reflect.TypeOf(ue)) { + if !reflect.ValueOf(ue).FieldByName(field.Name).IsNil() { + reflect.ValueOf(exercice).Elem().FieldByName(field.Name).Set(reflect.ValueOf(reflect.ValueOf(ue).FieldByName(field.Name).Elem().Interface())) + } + } + + if _, err := exercice.Update(); err != nil { + log.Println("Unable to partUpdateExercice:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during exercice update."}) + return + } + + c.JSON(http.StatusOK, exercice) +} + +func createExercice(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + // Create a new exercice var ue fic.Exercice - if err := json.Unmarshal(body, &ue); err != nil { - return nil, err + err := c.ShouldBindJSON(&ue) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } if len(ue.Title) == 0 { - return nil, errors.New("Title not filled") + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Title not filled"}) + return } var depend *fic.Exercice = nil if ue.Depend != nil { if d, err := fic.GetExercice(*ue.Depend); err != nil { - return nil, err + log.Println("Unable to createExercice:", err.Error()) + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during exercice creation."}) + return } else { - depend = &d + depend = d } } - return theme.AddExercice(ue.Title, ue.URLId, ue.Path, ue.Statement, ue.Overview, depend, ue.Gain, ue.VideoURI) + exercice, err := theme.AddExercice(ue.Title, ue.Authors, ue.Image, ue.BackgroundColor, ue.WIP, ue.URLId, ue.Path, ue.Statement, ue.Overview, ue.Headline, depend, ue.Gain, ue.VideoURI, ue.Resolution, ue.SeeAlso, ue.Finished) + if err != nil { + log.Println("Unable to createExercice:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during exercice creation."}) + return + } + + c.JSON(http.StatusOK, exercice) } type uploadedHint struct { @@ -146,132 +776,974 @@ type uploadedHint struct { URI string } -func createExerciceHint(exercice fic.Exercice, body []byte) (interface{}, error) { +func createExerciceHint(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + var uh uploadedHint - if err := json.Unmarshal(body, &uh); err != nil { - return nil, err + err := c.ShouldBindJSON(&uh) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } if len(uh.Content) != 0 { - return exercice.AddHint(uh.Title, uh.Content, uh.Cost) + hint, err := exercice.AddHint(uh.Title, uh.Content, uh.Cost) + if err != nil { + log.Println("Unable to AddHint in createExerciceHint:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to add hint."}) + return + } + + c.JSON(http.StatusOK, hint) } else if len(uh.URI) != 0 { - return sync.ImportFile(sync.GlobalImporter, uh.URI, + hint, err := sync.ImportFile(sync.GlobalImporter, uh.URI, func(filePath string, origin string) (interface{}, error) { return exercice.AddHint(uh.Title, "$FILES"+strings.TrimPrefix(filePath, fic.FilesDir), uh.Cost) }) + + if err != nil { + log.Println("Unable to AddHint 
(after ImportFile) in createExerciceHint:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to add hint."}) + return + } + + c.JSON(http.StatusOK, hint) } else { - return nil, errors.New("Hint's content not filled") + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Hint's content not filled"}) + return } } -func showExerciceHint(hint fic.EHint, body []byte) (interface{}, error) { - return hint, nil +func showExerciceHint(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("hint").(*fic.EHint)) } -func updateExerciceHint(hint fic.EHint, body []byte) (interface{}, error) { +func showExerciceHintDeps(c *gin.Context) { + hint := c.MustGet("hint").(*fic.EHint) + + deps, err := loadFlags(hint.GetDepends) + if err != nil { + log.Println("Unable to loaddeps:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to retrieve hint dependencies."}) + return + } + + c.JSON(http.StatusOK, deps) +} + +func updateExerciceHint(c *gin.Context) { + hint := c.MustGet("hint").(*fic.EHint) + var uh fic.EHint - if err := json.Unmarshal(body, &uh); err != nil { - return nil, err + err := c.ShouldBindJSON(&uh) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } uh.Id = hint.Id if len(uh.Title) == 0 { - return nil, errors.New("Hint's title not filled") + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Hint's title not filled"}) + return } if _, err := uh.Update(); err != nil { - return nil, err + log.Println("Unable to updateExerciceHint:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to update hint."}) + return } - return uh, nil + c.JSON(http.StatusOK, uh) } -func deleteExerciceHint(hint fic.EHint, _ []byte) (interface{}, error) { - return hint.Delete() -} +func deleteExerciceHint(c *gin.Context) { + hint := 
c.MustGet("hint").(*fic.EHint) -type uploadedKey struct { - Label string - Key string - Hash []byte -} - -func createExerciceKey(exercice fic.Exercice, body []byte) (interface{}, error) { - var uk uploadedKey - if err := json.Unmarshal(body, &uk); err != nil { - return nil, err + _, err := hint.Delete() + if err != nil { + log.Println("Unable to deleteExerciceHint:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to delete hint."}) + return } - if len(uk.Key) == 0 { - return nil, errors.New("Key not filled") + c.JSON(http.StatusOK, true) +} + +type uploadedFlag struct { + Type string + Label string + Placeholder string + Help string + IgnoreCase bool + Multiline bool + NoTrim bool + CaptureRe *string `json:"capture_regexp"` + SortReGroups bool `json:"sort_re_grps"` + Flag string + Value []byte + ChoicesCost int32 `json:"choices_cost"` + BonusGain int32 `json:"bonus_gain"` +} + +func createExerciceFlag(c *gin.Context) { + var uk uploadedFlag + err := c.ShouldBindJSON(&uk) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } - return exercice.AddRawKey(uk.Label, uk.Key) + if len(uk.Flag) == 0 { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Flag not filled"}) + return + } + + var vre *string = nil + if uk.CaptureRe != nil && len(*uk.CaptureRe) > 0 { + vre = uk.CaptureRe + } + + exercice := c.MustGet("exercice").(*fic.Exercice) + + flag, err := exercice.AddRawFlagKey(uk.Label, uk.Type, uk.Placeholder, uk.IgnoreCase, uk.NoTrim, uk.Multiline, vre, uk.SortReGroups, []byte(uk.Flag), uk.ChoicesCost, uk.BonusGain) + if err != nil { + log.Println("Unable to createExerciceFlag:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to create flag."}) + return + } + + c.JSON(http.StatusOK, flag) } -func showExerciceKey(key fic.Key, _ fic.Exercice, body []byte) (interface{}, error) { 
- return key, nil +func showExerciceFlag(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("flag-key").(*fic.FlagKey)) } -func updateExerciceKey(key fic.Key, exercice fic.Exercice, body []byte) (interface{}, error) { - var uk uploadedKey - if err := json.Unmarshal(body, &uk); err != nil { - return nil, err +func showExerciceFlagDeps(c *gin.Context) { + flag := c.MustGet("flag-key").(*fic.FlagKey) + + deps, err := loadFlags(flag.GetDepends) + if err != nil { + log.Println("Unable to loaddeps:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to retrieve hint dependencies."}) + return + } + + c.JSON(http.StatusOK, deps) +} + +func showExerciceFlagStats(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + flag := c.MustGet("flag-key").(*fic.FlagKey) + + history, err := exercice.GetHistory() + if err != nil { + log.Println("Unable to getExerciceHistory:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving exercice history"}) + return + } + + var completed int64 + + for _, hline := range history { + if hline["kind"].(string) == "flag_found" { + if int(*hline["secondary"].(*int64)) == flag.Id { + completed += 1 + } + } + } + + tries, err := flag.NbTries() + if err != nil { + log.Println("Unable to nbTries:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving flag tries"}) + return + } + + teams, err := flag.TeamsOnIt() + if err != nil { + log.Println("Unable to teamsOnIt:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving flag related teams"}) + return + } + + c.JSON(http.StatusOK, gin.H{ + "completed": completed, + "tries": tries, + "teams": teams, + }) +} + +func deleteExerciceFlagTries(c *gin.Context) { + flag := c.MustGet("flag-key").(*fic.FlagKey) + + err := flag.DeleteTries() + if 
err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + c.AbortWithStatusJSON(http.StatusOK, true) +} + +func tryExerciceFlag(c *gin.Context) { + flag := c.MustGet("flag-key").(*fic.FlagKey) + + var uk uploadedFlag + err := c.ShouldBindJSON(&uk) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + if len(uk.Flag) == 0 { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Empty submission"}) + return + } + + if flag.Check([]byte(uk.Flag)) == 0 { + c.AbortWithStatusJSON(http.StatusOK, true) + return + } + + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Bad submission"}) +} + +func updateExerciceFlag(c *gin.Context) { + flag := c.MustGet("flag-key").(*fic.FlagKey) + + var uk uploadedFlag + err := c.ShouldBindJSON(&uk) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } if len(uk.Label) == 0 { - key.Label = "Flag" + flag.Label = "Flag" } else { - key.Label = uk.Label + flag.Label = uk.Label } - key.Checksum = uk.Hash + flag.Placeholder = uk.Placeholder + flag.Help = uk.Help + flag.IgnoreCase = uk.IgnoreCase + flag.Multiline = uk.Multiline + flag.ChoicesCost = uk.ChoicesCost + flag.BonusGain = uk.BonusGain - if _, err := key.Update(); err != nil { - return nil, err + if uk.CaptureRe != nil && len(*uk.CaptureRe) > 0 { + if flag.CaptureRegexp != uk.CaptureRe && uk.Flag == "" { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Pour changer la capture_regexp, vous devez rentrer la réponse attendue à nouveau, car le flag doit être recalculé."}) + return + } + flag.CaptureRegexp = uk.CaptureRe + } else { + if flag.CaptureRegexp != nil && uk.Flag == "" { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Pour changer la capture_regexp, vous devez rentrer la réponse attendue à nouveau, car le flag doit être recalculé."}) + return + } + flag.CaptureRegexp = nil } 
- return key, nil -} - -func deleteExerciceKey(key fic.Key, _ fic.Exercice, _ []byte) (interface{}, error) { - return key.Delete() -} - -func showExerciceQuiz(quiz fic.MCQ, _ fic.Exercice, body []byte) (interface{}, error) { - return quiz, nil -} - -func deleteExerciceQuiz(quiz fic.MCQ, _ fic.Exercice, _ []byte) (interface{}, error) { - return quiz.Delete() -} - -type uploadedFile struct { - URI string - Digest string -} - -func createExerciceFile(exercice fic.Exercice, body []byte) (interface{}, error) { - var uf uploadedFile - if err := json.Unmarshal(body, &uf); err != nil { - return nil, err + if len(uk.Flag) > 0 { + var err error + flag.Checksum, err = flag.ComputeChecksum([]byte(uk.Flag)) + if err != nil { + log.Println("Unable to ComputeChecksum:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to compute flag checksum"}) + return + } + } else { + flag.Checksum = uk.Value } - return sync.ImportFile(sync.GlobalImporter, uf.URI, - func(filePath string, origin string) (interface{}, error) { - if digest, err := hex.DecodeString(uf.Digest); err != nil { - return nil, err - } else { - return exercice.ImportFile(filePath, origin, digest) + if _, err := flag.Update(); err != nil { + log.Println("Unable to updateExerciceFlag:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to update flag."}) + return + } + + c.JSON(http.StatusOK, flag) +} + +func deleteExerciceFlag(c *gin.Context) { + flag := c.MustGet("flag-key").(*fic.FlagKey) + + _, err := flag.Delete() + if err != nil { + log.Println("Unable to deleteExerciceFlag:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to delete flag."}) + return + } + + c.JSON(http.StatusOK, true) +} + +func createFlagChoice(c *gin.Context) { + flag := c.MustGet("flag-key").(*fic.FlagKey) + + var uc fic.FlagChoice + err := c.ShouldBindJSON(&uc) + if err != 
nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + if len(uc.Label) == 0 { + uc.Label = uc.Value + } + + choice, err := flag.AddChoice(&uc) + if err != nil { + log.Println("Unable to createFlagChoice:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to create flag choice."}) + return + } + + c.JSON(http.StatusOK, choice) +} + +func showFlagChoice(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("flag-choice").(*fic.FlagChoice)) +} + +func updateFlagChoice(c *gin.Context) { + choice := c.MustGet("flag-choice").(*fic.FlagChoice) + + var uc fic.FlagChoice + err := c.ShouldBindJSON(&uc) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + if len(uc.Label) == 0 { + choice.Label = uc.Value + } else { + choice.Label = uc.Label + } + + choice.Value = uc.Value + + if _, err := choice.Update(); err != nil { + log.Println("Unable to updateFlagChoice:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to update flag choice."}) + return + } + + c.JSON(http.StatusOK, choice) +} + +func deleteFlagChoice(c *gin.Context) { + choice := c.MustGet("flag-choice").(*fic.FlagChoice) + + _, err := choice.Delete() + if err != nil { + log.Println("Unable to deleteExerciceChoice:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to delete choice."}) + return + } + + c.JSON(http.StatusOK, true) +} + +func showExerciceQuiz(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("flag-quiz").(*fic.MCQ)) +} + +func showExerciceQuizDeps(c *gin.Context) { + quiz := c.MustGet("flag-quiz").(*fic.MCQ) + + deps, err := loadFlags(quiz.GetDepends) + if err != nil { + log.Println("Unable to loaddeps:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to retrieve quiz 
dependencies."}) + return + } + + c.JSON(http.StatusOK, deps) +} + +func showExerciceQuizStats(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + quiz := c.MustGet("flag-quiz").(*fic.MCQ) + + history, err := exercice.GetHistory() + if err != nil { + log.Println("Unable to getExerciceHistory:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving exercice history"}) + return + } + + var completed int64 + + for _, hline := range history { + if hline["kind"].(string) == "mcq_found" { + if *hline["secondary"].(*int) == quiz.Id { + completed += 1 } + } + } + + tries, err := quiz.NbTries() + if err != nil { + log.Println("Unable to nbTries:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving flag tries"}) + return + } + + teams, err := quiz.TeamsOnIt() + if err != nil { + log.Println("Unable to teamsOnIt:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when retrieving flag related teams"}) + return + } + + c.JSON(http.StatusOK, gin.H{ + "completed": completed, + "tries": tries, + "teams": teams, + }) +} + +func deleteExerciceQuizTries(c *gin.Context) { + quiz := c.MustGet("flag-quiz").(*fic.MCQ) + + err := quiz.DeleteTries() + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + c.AbortWithStatusJSON(http.StatusOK, true) +} + +func updateExerciceQuiz(c *gin.Context) { + quiz := c.MustGet("flag-quiz").(*fic.MCQ) + + var uq fic.MCQ + err := c.ShouldBindJSON(&uq) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + quiz.Title = uq.Title + + if _, err := quiz.Update(); err != nil { + log.Println("Unable to updateExerciceQuiz:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to update 
quiz."}) + return + } + + // Update and remove old entries + var delete []int + for i, cur := range quiz.Entries { + seen := false + for _, next := range uq.Entries { + if cur.Id == next.Id { + seen = true + + if cur.Label != next.Label || cur.Response != next.Response { + cur.Label = next.Label + cur.Response = next.Response + if _, err := cur.Update(); err != nil { + log.Println("Unable to update MCQ entry:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to update some MCQ entry"}) + return + } + } + + break + } + } + + if seen == false { + if _, err := cur.Delete(); err != nil { + log.Println("Unable to delete MCQ entry:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to delete some MCQ entry"}) + return + } else { + delete = append(delete, i) + } + } + } + for n, i := range delete { + quiz.Entries = append(quiz.Entries[:i-n-1], quiz.Entries[:i-n+1]...) + } + + // Add new choices + for _, choice := range uq.Entries { + if choice.Id == 0 { + if ch, err := quiz.AddEntry(choice); err != nil { + log.Println("Unable to add MCQ entry:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to add some MCQ entry"}) + return + } else { + quiz.Entries = append(quiz.Entries, ch) + } + } + } + + c.JSON(http.StatusOK, quiz) +} + +func deleteExerciceQuiz(c *gin.Context) { + quiz := c.MustGet("flag-quiz").(*fic.MCQ) + + for _, choice := range quiz.Entries { + if _, err := choice.Delete(); err != nil { + log.Println("Unable to deleteExerciceQuiz (entry):", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to delete quiz entry."}) + return + } + } + + _, err := quiz.Delete() + if err != nil { + log.Println("Unable to deleteExerciceQuiz:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to delete quiz."}) + return + } + 
+ c.JSON(http.StatusOK, true) +} + +func listExerciceTags(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + tags, err := exercice.GetTags() + if err != nil { + log.Println("Unable to listExerciceTags:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to get tags."}) + return + } + + c.JSON(http.StatusOK, tags) +} + +func addExerciceTag(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + var ut []string + err := c.ShouldBindJSON(&ut) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + // TODO: a DB transaction should be done here: on error we should rollback + for _, t := range ut { + if _, err := exercice.AddTag(t); err != nil { + log.Println("Unable to addExerciceTag:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to add some tag."}) + return + } + } + + c.JSON(http.StatusOK, ut) +} + +func updateExerciceTags(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + exercice.WipeTags() + addExerciceTag(c) +} + +type syncDiff struct { + Field string `json:"field"` + Link string `json:"link"` + Before interface{} `json:"be"` + After interface{} `json:"af"` +} + +func diffExerciceWithRemote(exercice *fic.Exercice, theme *fic.Theme) ([]syncDiff, error) { + var diffs []syncDiff + + // Compare exercice attributes + thid := exercice.Path[:strings.Index(exercice.Path, "/")] + exid := exercice.Path[strings.Index(exercice.Path, "/")+1:] + exercice_remote, err := sync.GetRemoteExercice(thid, exid, theme) + if err != nil { + return nil, err + } + + for _, field := range reflect.VisibleFields(reflect.TypeOf(*exercice)) { + if ((field.Name == "Image") && path.Base(reflect.ValueOf(*exercice_remote).FieldByName(field.Name).String()) != path.Base(reflect.ValueOf(*exercice).FieldByName(field.Name).String())) || ((field.Name == 
"Depend") && (((exercice_remote.Depend == nil || exercice.Depend == nil) && exercice.Depend != exercice_remote.Depend) || (exercice_remote.Depend != nil && exercice.Depend != nil && *exercice.Depend != *exercice_remote.Depend))) || (field.Name != "Image" && field.Name != "Depend" && !reflect.ValueOf(*exercice_remote).FieldByName(field.Name).Equal(reflect.ValueOf(*exercice).FieldByName(field.Name))) { + if !field.IsExported() || field.Name == "Id" || field.Name == "IdTheme" || field.Name == "IssueKind" || field.Name == "Coefficient" || field.Name == "BackgroundColor" { + continue + } + + diffs = append(diffs, syncDiff{ + Field: field.Name, + Link: fmt.Sprintf("exercices/%d", exercice.Id), + Before: reflect.ValueOf(*exercice).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*exercice_remote).FieldByName(field.Name).Interface(), + }) + } + } + + // Compare files + files, err := exercice.GetFiles() + if err != nil { + return nil, fmt.Errorf("Unable to GetFiles: %w", err) + } + + files_remote, err := sync.GetRemoteExerciceFiles(thid, exid) + if err != nil { + return nil, fmt.Errorf("Unable to GetRemoteFiles: %w", err) + } + + for i, file_remote := range files_remote { + if len(files) <= i { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("files[%d]", i), + Link: fmt.Sprintf("exercices/%d", exercice.Id), + Before: nil, + After: file_remote, + }) + continue + } + + for _, field := range reflect.VisibleFields(reflect.TypeOf(*file_remote)) { + if !field.IsExported() || field.Name == "Id" || field.Name == "IdExercice" { + continue + } + if ((field.Name == "Path") && path.Base(reflect.ValueOf(*file_remote).FieldByName(field.Name).String()) != path.Base(reflect.ValueOf(*files[i]).FieldByName(field.Name).String())) || ((field.Name == "Checksum" || field.Name == "ChecksumShown") && !bytes.Equal(reflect.ValueOf(*file_remote).FieldByName(field.Name).Bytes(), reflect.ValueOf(*files[i]).FieldByName(field.Name).Bytes())) || (field.Name != "Checksum" && field.Name 
!= "ChecksumShown" && field.Name != "Path" && !reflect.ValueOf(*file_remote).FieldByName(field.Name).Equal(reflect.ValueOf(*files[i]).FieldByName(field.Name))) { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("files[%d].%s", i, field.Name), + Link: fmt.Sprintf("exercices/%d", exercice.Id), + Before: reflect.ValueOf(*files[i]).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*file_remote).FieldByName(field.Name).Interface(), + }) + } + } + } + + // Compare flags + flags, err := exercice.GetFlags() + if err != nil { + return nil, fmt.Errorf("Unable to GetFlags: %w", err) + } + + flags_remote, err := sync.GetRemoteExerciceFlags(thid, exid) + if err != nil { + return nil, fmt.Errorf("Unable to GetRemoteFlags: %w", err) + } + + var flags_not_found []interface{} + var flags_extra_found []interface{} + + for i, flag_remote := range flags_remote { + if key_remote, ok := flag_remote.(*fic.FlagKey); ok { + found := false + + for _, flag := range flags { + if key, ok := flag.(*fic.FlagKey); ok && (key.Label == key_remote.Label || key.Order == key_remote.Order) { + found = true + + // Parse flag label + if len(key.Label) > 3 && key.Label[0] == '%' { + spl := strings.Split(key.Label, "%") + key.Label = strings.Join(spl[2:], "%") + } + + for _, field := range reflect.VisibleFields(reflect.TypeOf(*key_remote)) { + if !field.IsExported() || field.Name == "Id" || field.Name == "IdExercice" { + continue + } + if (field.Name == "Checksum" && !bytes.Equal(key.Checksum, key_remote.Checksum)) || (field.Name == "CaptureRegexp" && ((key.CaptureRegexp == nil || key_remote.CaptureRegexp == nil) && key.CaptureRegexp != key_remote.CaptureRegexp) || (key.CaptureRegexp != nil && key_remote.CaptureRegexp != nil && *key.CaptureRegexp != *key_remote.CaptureRegexp)) || (field.Name != "Checksum" && field.Name != "CaptureRegexp" && !reflect.ValueOf(*key_remote).FieldByName(field.Name).Equal(reflect.ValueOf(*key).FieldByName(field.Name))) { + diffs = append(diffs, syncDiff{ + 
Field: fmt.Sprintf("flags[%d].%s", i, field.Name), + Link: fmt.Sprintf("exercices/%d/flags#flag-%d", exercice.Id, key.Id), + Before: reflect.ValueOf(*key).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*key_remote).FieldByName(field.Name).Interface(), + }) + } + } + + break + } + } + + if !found { + flags_not_found = append(flags_not_found, key_remote) + } + } else if mcq_remote, ok := flag_remote.(*fic.MCQ); ok { + found := false + + for _, flag := range flags { + if mcq, ok := flag.(*fic.MCQ); ok && (mcq.Title == mcq_remote.Title || mcq.Order == mcq_remote.Order) { + found = true + + for _, field := range reflect.VisibleFields(reflect.TypeOf(*mcq_remote)) { + if !field.IsExported() || field.Name == "Id" || field.Name == "IdExercice" { + continue + } + if field.Name == "Entries" { + var not_found []*fic.MCQ_entry + var extra_found []*fic.MCQ_entry + + for i, entry_remote := range mcq_remote.Entries { + found := false + + for j, entry := range mcq.Entries { + if entry.Label == entry_remote.Label { + for _, field := range reflect.VisibleFields(reflect.TypeOf(*entry_remote)) { + if field.Name == "Id" { + continue + } + + if !reflect.ValueOf(*entry_remote).FieldByName(field.Name).Equal(reflect.ValueOf(*entry).FieldByName(field.Name)) { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("flags[%d].entries[%d].%s", i, j, field.Name), + Link: fmt.Sprintf("exercices/%d/flags#quiz-%d", exercice.Id, mcq.Id), + Before: reflect.ValueOf(*mcq.Entries[j]).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*entry_remote).FieldByName(field.Name).Interface(), + }) + } + } + + found = true + break + } + } + + if !found { + not_found = append(not_found, entry_remote) + } + } + + for _, entry := range mcq.Entries { + found := false + for _, entry_remote := range mcq_remote.Entries { + if entry.Label == entry_remote.Label { + found = true + break + } + } + + if !found { + extra_found = append(extra_found, entry) + } + } + + if len(not_found) > 0 || 
len(extra_found) > 0 { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("flags[%d].entries", i), + Link: fmt.Sprintf("exercices/%d/flags", exercice.Id), + Before: extra_found, + After: not_found, + }) + } + } else if !reflect.ValueOf(*mcq_remote).FieldByName(field.Name).Equal(reflect.ValueOf(*mcq).FieldByName(field.Name)) { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("flags[%d].%s", i, field.Name), + Link: fmt.Sprintf("exercices/%d/flags", exercice.Id), + Before: reflect.ValueOf(*mcq).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*mcq_remote).FieldByName(field.Name).Interface(), + }) + } + } + + break + } + } + + if !found { + flags_not_found = append(flags_not_found, mcq_remote) + } + } else if label_remote, ok := flag_remote.(*fic.FlagLabel); ok { + found := false + + for _, flag := range flags { + if label, ok := flag.(*fic.FlagLabel); ok && (label.Label == label_remote.Label || label.Order == label_remote.Order) { + found = true + + for _, field := range reflect.VisibleFields(reflect.TypeOf(*label_remote)) { + if !field.IsExported() || field.Name == "Id" || field.Name == "IdExercice" { + continue + } + if !reflect.ValueOf(*label_remote).FieldByName(field.Name).Equal(reflect.ValueOf(*label).FieldByName(field.Name)) { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("flags[%d].%s", i, field.Name), + Link: fmt.Sprintf("exercices/%d/flags#flag-%d", exercice.Id, label.Id), + Before: reflect.ValueOf(*label).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*label_remote).FieldByName(field.Name).Interface(), + }) + } + } + + break + } + } + + if !found { + flags_not_found = append(flags_not_found, label_remote) + } + } else { + log.Printf("unknown flag type: %T", flag_remote) + } + } + + for _, flag := range flags { + if key, ok := flag.(*fic.FlagKey); ok { + found := false + + for _, flag_remote := range flags_remote { + if key_remote, ok := flag_remote.(*fic.FlagKey); ok && (key.Label == key_remote.Label || 
key.Order == key_remote.Order) { + found = true + break + } + } + + if !found { + flags_extra_found = append(flags_extra_found, flag) + } + } else if mcq, ok := flag.(*fic.MCQ); ok { + found := false + + for _, flag_remote := range flags_remote { + if mcq_remote, ok := flag_remote.(*fic.MCQ); ok && (mcq.Title == mcq_remote.Title || mcq.Order == mcq_remote.Order) { + found = true + break + } + } + + if !found { + flags_extra_found = append(flags_extra_found, flag) + } + } else if label, ok := flag.(*fic.FlagLabel); ok { + found := false + + for _, flag_remote := range flags_remote { + if label_remote, ok := flag_remote.(*fic.FlagLabel); ok && (label.Label == label_remote.Label || label.Order == label_remote.Order) { + found = true + break + } + } + + if !found { + flags_extra_found = append(flags_extra_found, flag) + } + } + } + + if len(flags_not_found) > 0 || len(flags_extra_found) > 0 { + diffs = append(diffs, syncDiff{ + Field: "flags", + Link: fmt.Sprintf("exercices/%d/flags", exercice.Id), + Before: flags_extra_found, + After: flags_not_found, }) + } + + // Compare hints + hints, err := exercice.GetHints() + if err != nil { + return nil, fmt.Errorf("Unable to GetHints: %w", err) + } + + hints_remote, err := sync.GetRemoteExerciceHints(thid, exid) + if err != nil { + return nil, fmt.Errorf("Unable to GetRemoteHints: %w", err) + } + + for i, hint_remote := range hints_remote { + hint_remote.Hint.TreatHintContent() + + for _, field := range reflect.VisibleFields(reflect.TypeOf(*hint_remote.Hint)) { + if !field.IsExported() || field.Name == "Id" || field.Name == "IdExercice" { + continue + } + if len(hints) <= i { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("hints[%d].%s", i, field.Name), + Link: fmt.Sprintf("exercices/%d", exercice.Id), + Before: nil, + After: reflect.ValueOf(*hint_remote.Hint).FieldByName(field.Name).Interface(), + }) + } else if 
!reflect.ValueOf(*hint_remote.Hint).FieldByName(field.Name).Equal(reflect.ValueOf(*hints[i]).FieldByName(field.Name)) { + diffs = append(diffs, syncDiff{ + Field: fmt.Sprintf("hints[%d].%s", i, field.Name), + Link: fmt.Sprintf("exercices/%d", exercice.Id), + Before: reflect.ValueOf(*hints[i]).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*hint_remote.Hint).FieldByName(field.Name).Interface(), + }) + } + } + } + + return diffs, err } -func showExerciceFile(file fic.EFile, body []byte) (interface{}, error) { - return file, nil +func APIDiffExerciceWithRemote(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + exercice := c.MustGet("exercice").(*fic.Exercice) + + diffs, err := diffExerciceWithRemote(exercice, theme) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, diffs) } -func deleteExerciceFile(file fic.EFile, _ []byte) (interface{}, error) { - return file.Delete() +func listTries(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + + tries, err := exercice.TriesList() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, tries) +} + +func ExerciceTryHandler(c *gin.Context) { + trid, err := strconv.ParseInt(string(c.Params.ByName("trid")), 10, 32) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid try identifier"}) + return + } + + exercice := c.MustGet("exercice").(*fic.Exercice) + try, err := exercice.GetTry(trid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Try not found"}) + return + } + + c.Set("try", try) + + c.Next() +} + +func getExerciceTry(c *gin.Context) { + try := c.MustGet("try").(*fic.ExerciceTry) + + err := try.FillDetails() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, try) +} + +func 
deleteExerciceTry(c *gin.Context) { + try := c.MustGet("try").(*fic.ExerciceTry) + + _, err := try.Delete() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.Status(http.StatusNoContent) } diff --git a/admin/api/export.go b/admin/api/export.go new file mode 100644 index 00000000..aa24920a --- /dev/null +++ b/admin/api/export.go @@ -0,0 +1,126 @@ +package api + +import ( + "archive/zip" + "encoding/json" + "io" + "log" + "net/http" + "path" + + "srs.epita.fr/fic-server/admin/sync" + "srs.epita.fr/fic-server/libfic" + "srs.epita.fr/fic-server/settings" + + "github.com/gin-gonic/gin" +) + +func declareExportRoutes(router *gin.RouterGroup) { + router.GET("/archive.zip", func(c *gin.Context) { + challengeinfo, err := GetChallengeInfo() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + my, err := fic.MyJSONTeam(nil, true) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + s, err := settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile)) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + s.End = nil + s.NextChangeTime = nil + s.DelegatedQA = []string{} + + teams, err := fic.ExportTeams(false) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + themes, err := fic.ExportThemes() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.Writer.WriteHeader(http.StatusOK) + c.Header("Content-Disposition", "attachment; filename=archive.zip") + c.Header("Content-Type", "application/zip") + + w := zip.NewWriter(c.Writer) + + // challenge.json + f, err := w.Create("challenge.json") + if err == nil { + json.NewEncoder(f).Encode(challengeinfo) + } + + // Include 
partners' logos from challenge.json + if sync.GlobalImporter != nil { + if len(challengeinfo.MainLogo) > 0 { + for _, logo := range challengeinfo.MainLogo { + fd, closer, err := sync.OpenOrGetFile(sync.GlobalImporter, logo) + if err != nil { + log.Printf("Unable to archive main logo %q: %s", logo, err.Error()) + continue + } + + f, err := w.Create(path.Join("logo", path.Base(logo))) + if err == nil { + io.Copy(f, fd) + } + closer() + } + } + + if len(challengeinfo.Partners) > 0 { + for _, partner := range challengeinfo.Partners { + fd, closer, err := sync.OpenOrGetFile(sync.GlobalImporter, partner.Src) + if err != nil { + log.Printf("Unable to archive partner logo %q: %s", partner.Src, err.Error()) + continue + } + + f, err := w.Create(path.Join("partner", path.Base(partner.Src))) + if err == nil { + io.Copy(f, fd) + } + closer() + } + } + } + + // my.json + f, err = w.Create("my.json") + if err == nil { + json.NewEncoder(f).Encode(my) + } + + // settings.json + f, err = w.Create("settings.json") + if err == nil { + json.NewEncoder(f).Encode(s) + } + + // teams.json + f, err = w.Create("teams.json") + if err == nil { + json.NewEncoder(f).Encode(teams) + } + + // themes.json + f, err = w.Create("themes.json") + if err == nil { + json.NewEncoder(f).Encode(themes) + } + + w.Close() + }) +} diff --git a/admin/api/file.go b/admin/api/file.go index b73058f1..6d94776c 100644 --- a/admin/api/file.go +++ b/admin/api/file.go @@ -1,50 +1,297 @@ package api import ( - "encoding/json" + "encoding/hex" + "fmt" + "log" + "net/http" + "os" + "path/filepath" + "strconv" + "srs.epita.fr/fic-server/admin/sync" "srs.epita.fr/fic-server/libfic" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/files/", apiHandler(listFiles)) - router.DELETE("/api/files/", apiHandler(clearFiles)) +func declareFilesGlobalRoutes(router *gin.RouterGroup) { + router.DELETE("/files/", clearFiles) - router.GET("/api/files/:fileid", 
apiHandler(fileHandler(showFile))) - router.PUT("/api/files/:fileid", apiHandler(fileHandler(updateFile))) - router.DELETE("/api/files/:fileid", apiHandler(fileHandler(deleteFile))) + // Remote + router.GET("/remote/themes/:thid/exercices/:exid/files", sync.ApiGetRemoteExerciceFiles) } -func listFiles(_ httprouter.Params, body []byte) (interface{}, error) { - // List all files - return fic.GetFiles() +func declareFilesRoutes(router *gin.RouterGroup) { + router.GET("/files", listFiles) + router.POST("/files", createExerciceFile) + + apiFilesRoutes := router.Group("/files/:fileid") + apiFilesRoutes.Use(FileHandler) + apiFilesRoutes.GET("", showFile) + apiFilesRoutes.PUT("", updateFile) + apiFilesRoutes.DELETE("", deleteFile) + + apiFileDepsRoutes := apiFilesRoutes.Group("/dependancies/:depid") + apiFileDepsRoutes.Use(FileDepHandler) + apiFileDepsRoutes.DELETE("", deleteFileDep) + + // Check + apiFilesRoutes.POST("/check", checkFile) + apiFilesRoutes.POST("/gunzip", gunzipFile) } -func clearFiles(_ httprouter.Params, _ []byte) (interface{}, error) { - return fic.ClearFiles() +func FileHandler(c *gin.Context) { + fileid, err := strconv.ParseInt(string(c.Params.ByName("fileid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid file identifier"}) + return + } + + var file *fic.EFile + if exercice, exists := c.Get("exercice"); exists { + file, err = exercice.(*fic.Exercice).GetFile(fileid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "File not found"}) + return + } + } else { + file, err = fic.GetFile(fileid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "File not found"}) + return + } + } + + c.Set("file", file) + + c.Next() } -func showFile(file fic.EFile, _ []byte) (interface{}, error) { - return file, nil +func FileDepHandler(c *gin.Context) { + depid, err := strconv.ParseInt(string(c.Params.ByName("depid")), 10, 64) + if err != nil { + 
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid dependency identifier"}) + return + } + + c.Set("file-depid", depid) + + c.Next() } -func updateFile(file fic.EFile, body []byte) (interface{}, error) { +type APIFile struct { + *fic.EFile + Depends []fic.Flag `json:"depends,omitempty"` +} + +func genFileList(in []*fic.EFile, e error) (out []APIFile, err error) { + if e != nil { + return nil, e + } + + for _, f := range in { + g := APIFile{EFile: f} + + var deps []fic.Flag + deps, err = f.GetDepends() + if err != nil { + return + } + + for _, d := range deps { + if k, ok := d.(*fic.FlagKey); ok { + k, err = fic.GetFlagKey(k.Id) + if err != nil { + return + } + + g.Depends = append(g.Depends, k) + } else if m, ok := d.(*fic.MCQ); ok { + m, err = fic.GetMCQ(m.Id) + if err != nil { + return + } + + g.Depends = append(g.Depends, m) + } else { + err = fmt.Errorf("Unknown type %T to handle file dependancy", k) + return + } + } + + out = append(out, g) + } + + return +} + +func listFiles(c *gin.Context) { + var files []APIFile + var err error + + if exercice, exists := c.Get("exercice"); exists { + files, err = genFileList(exercice.(*fic.Exercice).GetFiles()) + } else { + files, err = genFileList(fic.GetFiles()) + } + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, files) +} + +func clearFiles(c *gin.Context) { + err := os.RemoveAll(fic.FilesDir) + if err != nil { + log.Println("Unable to remove files:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + err = os.MkdirAll(fic.FilesDir, 0751) + if err != nil { + log.Println("Unable to create FILES:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + _, err = fic.ClearFiles() + if err != nil { + log.Println("Unable to clean DB files:", err.Error()) + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Les fichiers ont bien été effacés. Mais il n'a pas été possible d'effacer la base de données. Refaites une synchronisation maintenant. " + err.Error()}) + return + } + + c.JSON(http.StatusOK, true) +} + +func showFile(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("file").(*fic.EFile)) +} + +type uploadedFile struct { + URI string + Digest string +} + +func createExerciceFile(c *gin.Context) { + exercice, exists := c.Get("exercice") + if !exists { + c.AbortWithStatusJSON(http.StatusMethodNotAllowed, gin.H{"errmsg": "File can only be added inside an exercice."}) + return + } + + paramsFiles, err := sync.GetExerciceFilesParams(sync.GlobalImporter, exercice.(*fic.Exercice)) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + var uf uploadedFile + err = c.ShouldBindJSON(&uf) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + ret, err := sync.ImportFile(sync.GlobalImporter, uf.URI, + func(filePath string, origin string) (interface{}, error) { + if digest, err := hex.DecodeString(uf.Digest); err != nil { + return nil, err + } else { + published := true + disclaimer := "" + + if f, exists := paramsFiles[filepath.Base(filePath)]; exists { + published = !f.Hidden + + if disclaimer, err = sync.ProcessMarkdown(sync.GlobalImporter, f.Disclaimer, exercice.(*fic.Exercice).Path); err != nil { + return nil, fmt.Errorf("error during markdown formating of disclaimer: %w", err) + } + } + + return exercice.(*fic.Exercice).ImportFile(filePath, origin, digest, nil, disclaimer, published) + } + }) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, ret) +} + +func updateFile(c *gin.Context) { + file := c.MustGet("file").(*fic.EFile) + var uf fic.EFile - if err := json.Unmarshal(body, &uf); err != 
nil { - return nil, err + err := c.ShouldBindJSON(&uf) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } uf.Id = file.Id if _, err := uf.Update(); err != nil { - return nil, err - } else { - return uf, nil + log.Println("Unable to updateFile:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to update file."}) + return } + + c.JSON(http.StatusOK, uf) } -func deleteFile(file fic.EFile, _ []byte) (interface{}, error) { - return file.Delete() +func deleteFile(c *gin.Context) { + file := c.MustGet("file").(*fic.EFile) + + _, err := file.Delete() + if err != nil { + log.Println("Unable to updateFile:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to update file."}) + return + } + + c.JSON(http.StatusOK, true) +} + +func deleteFileDep(c *gin.Context) { + file := c.MustGet("file").(*fic.EFile) + depid := c.MustGet("file-depid").(int64) + + err := file.DeleteDepend(&fic.FlagKey{Id: int(depid)}) + if err != nil { + log.Println("Unable to deleteFileDep:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to delete file dependency."}) + return + } + + c.JSON(http.StatusOK, true) +} + +func checkFile(c *gin.Context) { + file := c.MustGet("file").(*fic.EFile) + + err := file.CheckFileOnDisk() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, true) +} + +func gunzipFile(c *gin.Context) { + file := c.MustGet("file").(*fic.EFile) + + err := file.GunzipFileOnDisk() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, true) } diff --git a/admin/api/handlers.go b/admin/api/handlers.go deleted file mode 100644 index 8b04955f..00000000 --- 
a/admin/api/handlers.go +++ /dev/null @@ -1,296 +0,0 @@ -package api - -import ( - "encoding/json" - "errors" - "fmt" - "io" - "log" - "net/http" - "strconv" - - "srs.epita.fr/fic-server/libfic" - - "github.com/julienschmidt/httprouter" -) - -type DispatchFunction func(httprouter.Params, []byte) (interface{}, error) - -func apiHandler(f DispatchFunction) func(http.ResponseWriter, *http.Request, httprouter.Params) { - return func(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { - if addr := r.Header.Get("X-Forwarded-For"); addr != "" { - r.RemoteAddr = addr - } - log.Printf("%s \"%s %s\" [%s]\n", r.RemoteAddr, r.Method, r.URL.Path, r.UserAgent()) - - // Read the body - if r.ContentLength < 0 || r.ContentLength > 6553600 { - http.Error(w, fmt.Sprintf("{errmsg:\"Request too large or request size unknown\"}"), http.StatusRequestEntityTooLarge) - return - } - var body []byte - if r.ContentLength > 0 { - tmp := make([]byte, 1024) - for { - n, err := r.Body.Read(tmp) - for j := 0; j < n; j++ { - body = append(body, tmp[j]) - } - if err != nil || n <= 0 { - break - } - } - } - - var ret interface{} - var err error = nil - - ret, err = f(ps, body) - - // Format response - resStatus := http.StatusOK - if err != nil { - ret = map[string]string{"errmsg": err.Error()} - resStatus = http.StatusBadRequest - log.Println(r.RemoteAddr, resStatus, err.Error()) - } - - if ret == nil { - ret = map[string]string{"errmsg": "Page not found"} - resStatus = http.StatusNotFound - } - - if str, found := ret.(string); found { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(resStatus) - io.WriteString(w, str) - } else if bts, found := ret.([]byte); found { - w.Header().Set("Content-Type", "application/octet-stream") - w.Header().Set("Content-Disposition", "attachment") - w.Header().Set("Content-Transfer-Encoding", "binary") - w.WriteHeader(resStatus) - w.Write(bts) - } else if j, err := json.Marshal(ret); err != nil { - w.Header().Set("Content-Type", 
"application/json") - http.Error(w, fmt.Sprintf("{\"errmsg\":%q}", err), http.StatusInternalServerError) - } else { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(resStatus) - w.Write(j) - } - } -} - -func teamPublicHandler(f func(*fic.Team, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if tid, err := strconv.ParseInt(string(ps.ByName("tid")), 10, 64); err != nil { - return nil, err - } else if tid == 0 { - return f(nil, body) - } else if team, err := fic.GetTeam(tid); err != nil { - return nil, err - } else { - return f(&team, body) - } - } -} - -func teamHandler(f func(fic.Team, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if tid, err := strconv.ParseInt(string(ps.ByName("tid")), 10, 64); err != nil { - return nil, err - } else if team, err := fic.GetTeam(tid); err != nil { - return nil, err - } else { - return f(team, body) - } - } -} - -func themeHandler(f func(fic.Theme, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if thid, err := strconv.Atoi(string(ps.ByName("thid"))); err != nil { - return nil, err - } else if theme, err := fic.GetTheme(thid); err != nil { - return nil, err - } else { - return f(theme, body) - } - } -} - -func exerciceHandler(f func(fic.Exercice, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if eid, err := strconv.Atoi(string(ps.ByName("eid"))); err != nil { - return nil, err - } else if exercice, err := fic.GetExercice(int64(eid)); err != nil { - return nil, err - } else { - return f(exercice, body) - } - } -} - -func themedExerciceHandler(f func(fic.Theme, fic.Exercice, 
[]byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - var theme fic.Theme - var exercice fic.Exercice - - themeHandler(func(th fic.Theme, _ []byte) (interface{}, error) { - theme = th - return nil, nil - })(ps, body) - - exerciceHandler(func(ex fic.Exercice, _ []byte) (interface{}, error) { - exercice = ex - return nil, nil - })(ps, body) - - return f(theme, exercice, body) - } -} - -func hintHandler(f func(fic.EHint, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if hid, err := strconv.Atoi(string(ps.ByName("hid"))); err != nil { - return nil, err - } else if hint, err := fic.GetHint(int64(hid)); err != nil { - return nil, err - } else { - return f(hint, body) - } - } -} - -func keyHandler(f func(fic.Key, fic.Exercice, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - var exercice fic.Exercice - exerciceHandler(func(ex fic.Exercice, _ []byte) (interface{}, error) { - exercice = ex - return nil, nil - })(ps, body) - - if kid, err := strconv.Atoi(string(ps.ByName("kid"))); err != nil { - return nil, err - } else if keys, err := exercice.GetKeys(); err != nil { - return nil, err - } else { - for _, key := range keys { - if key.Id == int64(kid) { - return f(key, exercice, body) - } - } - return nil, errors.New("Unable to find the requested key") - } - } -} - -func quizHandler(f func(fic.MCQ, fic.Exercice, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - var exercice fic.Exercice - exerciceHandler(func(ex fic.Exercice, _ []byte) (interface{}, error) { - exercice = ex - return nil, nil - })(ps, body) - - if qid, err := 
strconv.Atoi(string(ps.ByName("qid"))); err != nil { - return nil, err - } else if mcqs, err := exercice.GetMCQ(); err != nil { - return nil, err - } else { - for _, mcq := range mcqs { - if mcq.Id == int64(qid) { - return f(mcq, exercice, body) - } - } - return nil, errors.New("Unable to find the requested key") - } - } -} - -func exerciceFileHandler(f func(fic.EFile, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - var exercice fic.Exercice - exerciceHandler(func(ex fic.Exercice, _ []byte) (interface{}, error) { - exercice = ex - return nil, nil - })(ps, body) - - if fid, err := strconv.Atoi(string(ps.ByName("fid"))); err != nil { - return nil, err - } else if files, err := exercice.GetFiles(); err != nil { - return nil, err - } else { - for _, file := range files { - if file.Id == int64(fid) { - return f(file, body) - } - } - return nil, errors.New("Unable to find the requested file") - } - } -} - -func eventHandler(f func(fic.Event, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if evid, err := strconv.Atoi(string(ps.ByName("evid"))); err != nil { - return nil, err - } else if event, err := fic.GetEvent(evid); err != nil { - return nil, err - } else { - return f(event, body) - } - } -} - -func claimHandler(f func(fic.Claim, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if cid, err := strconv.Atoi(string(ps.ByName("cid"))); err != nil { - return nil, err - } else if claim, err := fic.GetClaim(cid); err != nil { - return nil, err - } else { - return f(claim, body) - } - } -} - -func claimAssigneeHandler(f func(fic.ClaimAssignee, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps 
httprouter.Params, body []byte) (interface{}, error) { - if aid, err := strconv.Atoi(string(ps.ByName("aid"))); err != nil { - return nil, err - } else if assignee, err := fic.GetAssignee(int64(aid)); err != nil { - return nil, err - } else { - return f(assignee, body) - } - } -} - -func fileHandler(f func(fic.EFile, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if fileid, err := strconv.Atoi(string(ps.ByName("fileid"))); err != nil { - return nil, err - } else if file, err := fic.GetFile(fileid); err != nil { - return nil, err - } else { - return f(file, body) - } - } -} - -func certificateHandler(f func(fic.Certificate, []byte) (interface{}, error)) func(httprouter.Params, []byte) (interface{}, error) { - return func(ps httprouter.Params, body []byte) (interface{}, error) { - if certid, err := strconv.ParseUint(string(ps.ByName("certid")), 10, 64); err != nil { - return nil, err - } else if cert, err := fic.GetCertificate(certid); err != nil { - return nil, err - } else { - return f(cert, body) - } - } -} - -func notFound(ps httprouter.Params, _ []byte) (interface{}, error) { - return nil, nil -} diff --git a/admin/api/health.go b/admin/api/health.go new file mode 100644 index 00000000..13d7a0cc --- /dev/null +++ b/admin/api/health.go @@ -0,0 +1,156 @@ +package api + +import ( + "fmt" + "io/ioutil" + "log" + "net/http" + "os" + "path" + "strings" + "time" + + "srs.epita.fr/fic-server/admin/pki" + "srs.epita.fr/fic-server/libfic" + + "github.com/gin-gonic/gin" +) + +var TimestampCheck = "submissions" + +func declareHealthRoutes(router *gin.RouterGroup) { + router.GET("/timestamps.json", func(c *gin.Context) { + stat, err := os.Stat(TimestampCheck) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": fmt.Sprintf("timestamp.json: %s", err.Error())}) + return + } + now := time.Now().UTC() + c.JSON(http.StatusOK, gin.H{ + 
"frontend": stat.ModTime().UTC(), + "backend": now, + "diffFB": now.Sub(stat.ModTime()), + }) + }) + router.GET("/health.json", GetHealth) + router.GET("/submissions-stats.json", GetSubmissionsStats) + router.GET("/validations-stats.json", GetValidationsStats) + + router.DELETE("/submissions/*path", func(c *gin.Context) { + err := os.Remove(path.Join(TimestampCheck, c.Params.ByName("path"))) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": err.Error()}) + return + } + c.Status(http.StatusOK) + }) +} + +type healthFileReport struct { + IdTeam string `json:"id_team,omitempty"` + Path string `json:"path"` + Error string `json:"error"` +} + +func getHealth(pathname string) (ret []healthFileReport) { + if ds, err := ioutil.ReadDir(pathname); err != nil { + ret = append(ret, healthFileReport{ + Path: strings.TrimPrefix(pathname, TimestampCheck), + Error: fmt.Sprintf("unable to ReadDir: %s", err), + }) + return + } else { + for _, d := range ds { + p := path.Join(pathname, d.Name()) + if d.IsDir() && d.Name() != ".tmp" && d.Mode()&os.ModeSymlink == 0 { + ret = append(ret, getHealth(p)...) 
+ } else if !d.IsDir() && d.Mode()&os.ModeSymlink == 0 && time.Since(d.ModTime()) > 2*time.Second { + if d.Name() == ".locked" { + continue + } + + teamDir := strings.TrimPrefix(pathname, TimestampCheck) + idteam, _ := pki.GetAssociation(path.Join(TeamsDir, teamDir)) + ret = append(ret, healthFileReport{ + IdTeam: idteam, + Path: path.Join(teamDir, d.Name()), + Error: "existing untreated file", + }) + } + } + return + } +} + +func GetHealth(c *gin.Context) { + if _, err := os.Stat(TimestampCheck); err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": fmt.Sprintf("health.json: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, getHealth(TimestampCheck)) +} + +type SubmissionsStats struct { + NbSubmissionLastMinute uint `json:"nbsubminute"` + NbSubmissionLast5Minute uint `json:"nbsub5minute"` + NbSubmissionLastQuarter uint `json:"nbsubquarter"` + NbSubmissionLastHour uint `json:"nbsubhour"` + NbSubmissionLastDay uint `json:"nbsubday"` +} + +func calcSubmissionsStats(tries []time.Time) (stats SubmissionsStats) { + lastMinute := time.Now().Add(-1 * time.Minute) + last5Minute := time.Now().Add(-5 * time.Minute) + lastQuarter := time.Now().Add(-15 * time.Minute) + lastHour := time.Now().Add(-1 * time.Hour) + lastDay := time.Now().Add(-24 * time.Hour) + + for _, t := range tries { + if lastMinute.Before(t) { + stats.NbSubmissionLastMinute += 1 + stats.NbSubmissionLast5Minute += 1 + stats.NbSubmissionLastQuarter += 1 + stats.NbSubmissionLastHour += 1 + stats.NbSubmissionLastDay += 1 + } else if last5Minute.Before(t) { + stats.NbSubmissionLast5Minute += 1 + stats.NbSubmissionLastQuarter += 1 + stats.NbSubmissionLastHour += 1 + stats.NbSubmissionLastDay += 1 + } else if lastQuarter.Before(t) { + stats.NbSubmissionLastQuarter += 1 + stats.NbSubmissionLastHour += 1 + stats.NbSubmissionLastDay += 1 + } else if lastHour.Before(t) { + stats.NbSubmissionLastHour += 1 + stats.NbSubmissionLastDay += 1 + } else if lastDay.Before(t) { + 
stats.NbSubmissionLastDay += 1 + } + } + + return +} + +func GetSubmissionsStats(c *gin.Context) { + tries, err := fic.GetTries(nil, nil) + if err != nil { + log.Println("Unable to GetTries:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieves tries."}) + return + } + + c.JSON(http.StatusOK, calcSubmissionsStats(tries)) +} + +func GetValidationsStats(c *gin.Context) { + tries, err := fic.GetValidations(nil, nil) + if err != nil { + log.Println("Unable to GetTries:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieves tries."}) + return + } + + c.JSON(http.StatusOK, calcSubmissionsStats(tries)) +} diff --git a/admin/api/monitor.go b/admin/api/monitor.go new file mode 100644 index 00000000..3d6a9114 --- /dev/null +++ b/admin/api/monitor.go @@ -0,0 +1,98 @@ +package api + +import ( + "bufio" + "io/ioutil" + "net/http" + "os" + "strconv" + "strings" + + "github.com/gin-gonic/gin" +) + +func declareMonitorRoutes(router *gin.RouterGroup) { + router.GET("/monitor", func(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "localhost": genLocalConstants(), + }) + }) +} + +func readLoadAvg(fd *os.File) (ret map[string]float64) { + if s, err := ioutil.ReadAll(fd); err == nil { + f := strings.Fields(strings.TrimSpace(string(s))) + if len(f) >= 3 { + ret = map[string]float64{} + ret["1m"], _ = strconv.ParseFloat(f[0], 64) + ret["5m"], _ = strconv.ParseFloat(f[1], 64) + ret["15m"], _ = strconv.ParseFloat(f[2], 64) + } + } + return +} + +func readMeminfo(fd *os.File) (ret map[string]uint64) { + ret = map[string]uint64{} + + scanner := bufio.NewScanner(fd) + for scanner.Scan() { + f := strings.Fields(strings.TrimSpace(scanner.Text())) + if len(f) >= 2 { + if v, err := strconv.ParseUint(f[1], 10, 64); err == nil { + ret[strings.ToLower(strings.TrimSuffix(f[0], ":"))] = v * 1024 + } + } + } + + return +} + +func readCPUStats(fd *os.File) (ret map[string]map[string]uint64) { + 
ret = map[string]map[string]uint64{} + + scanner := bufio.NewScanner(fd) + for scanner.Scan() { + f := strings.Fields(strings.TrimSpace(scanner.Text())) + if len(f[0]) >= 4 && f[0][0:3] == "cpu" && len(f) >= 8 { + ret[f[0]] = map[string]uint64{} + var total uint64 = 0 + for i, k := range []string{"user", "nice", "system", "idle", "iowait", "irq", "softirq"} { + if v, err := strconv.ParseUint(f[i+1], 10, 64); err == nil { + ret[f[0]][k] = v + total += v + } + } + ret[f[0]]["total"] = total + } + } + + return +} + +func genLocalConstants() interface{} { + ret := map[string]interface{}{} + + fi, err := os.Open("/proc/loadavg") + if err != nil { + return err + } + defer fi.Close() + ret["loadavg"] = readLoadAvg(fi) + + fi, err = os.Open("/proc/meminfo") + if err != nil { + return err + } + defer fi.Close() + ret["meminfo"] = readMeminfo(fi) + + fi, err = os.Open("/proc/stat") + if err != nil { + return err + } + defer fi.Close() + ret["cpustat"] = readCPUStats(fi) + + return ret +} diff --git a/admin/api/password.go b/admin/api/password.go new file mode 100644 index 00000000..d37153e9 --- /dev/null +++ b/admin/api/password.go @@ -0,0 +1,360 @@ +package api + +import ( + "bytes" + "fmt" + "io/ioutil" + "log" + "net/http" + "os" + "path" + "text/template" + "unicode" + + "srs.epita.fr/fic-server/admin/pki" + "srs.epita.fr/fic-server/libfic" + + "github.com/gin-gonic/gin" +) + +var ( + OidcIssuer = "live.fic.srs.epita.fr" + OidcClientId = "epita-challenge" + OidcSecret = "" +) + +func declarePasswordRoutes(router *gin.RouterGroup) { + router.POST("/password", func(c *gin.Context) { + passwd, err := fic.GeneratePassword() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, gin.H{"password": passwd}) + }) + router.GET("/oauth-status", func(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "secret_defined": OidcSecret != "", + }) + }) + router.GET("/dex.yaml", func(c *gin.Context) { + 
cfg, err := genDexConfig() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.String(http.StatusOK, string(cfg)) + }) + router.POST("/dex.yaml", func(c *gin.Context) { + if dexcfg, err := genDexConfig(); err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "dex-config.yaml"), []byte(dexcfg), 0644); err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, true) + }) + router.GET("/dex-password.tpl", func(c *gin.Context) { + passtpl, err := genDexPasswordTpl() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.String(http.StatusOK, string(passtpl)) + }) + router.POST("/dex-password.tpl", func(c *gin.Context) { + if dexcfg, err := genDexPasswordTpl(); err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "dex-password.tpl"), []byte(dexcfg), 0644); err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, true) + }) + router.GET("/vouch-proxy.yaml", func(c *gin.Context) { + cfg, err := genVouchProxyConfig() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.String(http.StatusOK, string(cfg)) + }) + router.POST("/vouch-proxy.yaml", func(c *gin.Context) { + if dexcfg, err := genVouchProxyConfig(); err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } else if err := ioutil.WriteFile(path.Join(pki.PKIDir, "shared", "vouch-config.yaml"), []byte(dexcfg), 0644); err != nil { + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, true) + }) +} + +func declareTeamsPasswordRoutes(router *gin.RouterGroup) { + router.GET("/password", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + if team.Password != nil { + c.String(http.StatusOK, *team.Password) + } else { + c.AbortWithStatusJSON(http.StatusNotFound, nil) + } + }) + router.POST("/password", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + if passwd, err := fic.GeneratePassword(); err != nil { + log.Println("Unable to GeneratePassword:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Something went wrong when generating the new team password"}) + return + } else { + team.Password = &passwd + + _, err := team.Update() + if err != nil { + log.Println("Unable to Update Team:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Something went wrong when updating the new team password"}) + return + } + + c.JSON(http.StatusOK, team) + } + }) +} + +const dexcfgtpl = `issuer: {{ .Issuer }} +storage: + type: sqlite3 + config: + file: /var/dex/dex.db +web: + http: 0.0.0.0:5556 +frontend: + issuer: {{ .Name }} + logoURL: {{ .LogoPath }} + dir: /srv/dex/web/ +oauth2: + skipApprovalScreen: true +staticClients: +{{ range $c := .Clients }} +- id: {{ $c.Id }} + name: {{ $c.Name }} + redirectURIs: [{{ range $u := $c.RedirectURIs }}'{{ $u }}'{{ end }}] + secret: {{ $c.Secret }} +{{ end }} +enablePasswordDB: true +staticPasswords: +{{ range $t := .Teams }} + - email: "team{{ printf "%02d" $t.Id }}" + hash: "{{with $t }}{{ .HashedPassword }}{{end}}" +{{ end }} +` + +const dexpasswdtpl = `{{ "{{" }} template "header.html" . {{ "}}" }} + +
+

+ Bienvenue au {{ .Name }} ! +

+
+
+
+ +
+ +
+
+
+ +
+ +
+ + {{ "{{" }} if .Invalid {{ "}}" }} +
+ Identifiants incorrects. +
+ {{ "{{" }} end {{ "}}" }} + + + +
+ {{ "{{" }} if .BackLink {{ "}}" }} + + {{ "{{" }} end {{ "}}" }} +
+ +{{ "{{" }} template "footer.html" . {{ "}}" }} +` + +type dexConfigClient struct { + Id string + Name string + RedirectURIs []string + Secret string +} + +type dexConfig struct { + Name string + Issuer string + Clients []dexConfigClient + Teams []*fic.Team + LogoPath string +} + +func genDexConfig() ([]byte, error) { + if OidcSecret == "" { + return nil, fmt.Errorf("Unable to generate dex configuration: OIDC Secret not defined. Please define FICOIDC_SECRET in your environment.") + } + + teams, err := fic.GetTeams() + if err != nil { + return nil, err + } + + b := bytes.NewBufferString("") + + challengeInfo, err := GetChallengeInfo() + if err != nil { + return nil, fmt.Errorf("Cannot create template: %w", err) + } + + // Lower the first letter to be included in a sentence. + name := []rune(challengeInfo.Title) + if len(name) > 0 { + name[0] = unicode.ToLower(name[0]) + } + + logoPath := "" + if len(challengeInfo.MainLogo) > 0 { + logoPath = path.Join("../../files", "logo", path.Base(challengeInfo.MainLogo[len(challengeInfo.MainLogo)-1])) + } + + dexTmpl, err := template.New("dexcfg").Parse(dexcfgtpl) + if err != nil { + return nil, fmt.Errorf("Cannot create template: %w", err) + } + + err = dexTmpl.Execute(b, dexConfig{ + Name: string(name), + Issuer: "https://" + OidcIssuer, + Clients: []dexConfigClient{ + dexConfigClient{ + Id: OidcClientId, + Name: challengeInfo.Title, + RedirectURIs: []string{"https://" + OidcIssuer + "/challenge_access/auth"}, + Secret: OidcSecret, + }, + }, + Teams: teams, + LogoPath: logoPath, + }) + if err != nil { + return nil, fmt.Errorf("An error occurs during template execution: %w", err) + } + + // Also generate team associations + for _, team := range teams { + if _, err := os.Stat(path.Join(TeamsDir, fmt.Sprintf("team%02d", team.Id))); err == nil { + if err = os.Remove(path.Join(TeamsDir, fmt.Sprintf("team%02d", team.Id))); err != nil { + log.Println("Unable to remove existing association symlink:", err.Error()) + return nil, 
fmt.Errorf("Unable to remove existing association symlink: %s", err.Error()) + } + } + if err := os.Symlink(fmt.Sprintf("%d", team.Id), path.Join(TeamsDir, fmt.Sprintf("team%02d", team.Id))); err != nil { + log.Println("Unable to create association symlink:", err.Error()) + return nil, fmt.Errorf("Unable to create association symlink: %s", err.Error()) + } + } + + return b.Bytes(), nil +} + +func genDexPasswordTpl() ([]byte, error) { + challengeInfo, err := GetChallengeInfo() + if err != nil { + return nil, fmt.Errorf("Cannot create template: %w", err) + } + + if teams, err := fic.GetTeams(); err != nil { + return nil, err + } else { + b := bytes.NewBufferString("") + + if dexTmpl, err := template.New("dexpasswd").Parse(dexpasswdtpl); err != nil { + return nil, fmt.Errorf("Cannot create template: %w", err) + } else if err = dexTmpl.Execute(b, dexConfig{ + Teams: teams, + Name: challengeInfo.Title, + }); err != nil { + return nil, fmt.Errorf("An error occurs during template execution: %w", err) + } else { + return b.Bytes(), nil + } + } +} + +const vouchcfgtpl = `# CONFIGURATION FILE HANDLED BY fic-admin +# DO NOT MODIFY IT BY HAND + +vouch: + logLevel: debug + allowAllUsers: true + document_root: /challenge_access + + cookie: + domain: {{ .Domain }} + +oauth: + provider: oidc + client_id: {{ .ClientId }} + client_secret: {{ .ClientSecret }} + callback_urls: + - https://{{ .Domain }}/challenge_access/auth + auth_url: https://{{ .Domain }}/auth + token_url: http://127.0.0.1:5556/token + user_info_url: http://127.0.0.1:5556/userinfo + scopes: + - openid + - email +` + +type vouchProxyConfig struct { + Domain string + ClientId string + ClientSecret string +} + +func genVouchProxyConfig() ([]byte, error) { + if OidcSecret == "" { + return nil, fmt.Errorf("Unable to generate vouch proxy configuration: OIDC Secret not defined. 
Please define FICOIDC_SECRET in your environment.") + } + + b := bytes.NewBufferString("") + + if vouchTmpl, err := template.New("vouchcfg").Parse(vouchcfgtpl); err != nil { + return nil, fmt.Errorf("Cannot create template: %w", err) + } else if err = vouchTmpl.Execute(b, vouchProxyConfig{ + Domain: OidcIssuer, + ClientId: OidcClientId, + ClientSecret: OidcSecret, + }); err != nil { + return nil, fmt.Errorf("An error occurs during template execution: %w", err) + } else { + return b.Bytes(), nil + } +} diff --git a/admin/api/public.go b/admin/api/public.go index 37db0e4a..f2de4ea0 100644 --- a/admin/api/public.go +++ b/admin/api/public.go @@ -3,18 +3,24 @@ package api import ( "encoding/json" "fmt" + "log" + "net/http" "os" "path" + "strconv" + "strings" + "time" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -var TeamsDir string +var DashboardDir string -func init() { - router.GET("/api/public/:sid", apiHandler(getPublic)) - router.DELETE("/api/public/:sid", apiHandler(deletePublic)) - router.PUT("/api/public/:sid", apiHandler(savePublic)) +func declarePublicRoutes(router *gin.RouterGroup) { + router.GET("/public/", listPublic) + router.GET("/public/:sid", getPublic) + router.DELETE("/public/:sid", deletePublic) + router.PUT("/public/:sid", savePublic) } type FICPublicScene struct { @@ -22,8 +28,22 @@ type FICPublicScene struct { Params map[string]interface{} `json:"params"` } -func readPublic(path string) ([]FICPublicScene, error) { - var s []FICPublicScene +type FICPublicDisplay struct { + Scenes []FICPublicScene `json:"scenes"` + Side []FICPublicScene `json:"side"` + CustomCountdown map[string]interface{} `json:"customCountdown"` + HideEvents bool `json:"hideEvents"` + HideCountdown bool `json:"hideCountdown"` + HideCarousel bool `json:"hideCarousel"` + PropagationTime *time.Time `json:"propagationTime,omitempty"` +} + +func InitDashboardPresets(dir string) error { + return nil +} + +func readPublic(path string) (FICPublicDisplay, error) 
{ + var s FICPublicDisplay if fd, err := os.Open(path); err != nil { return s, err } else { @@ -38,7 +58,7 @@ func readPublic(path string) ([]FICPublicScene, error) { } } -func savePublicTo(path string, s []FICPublicScene) error { +func savePublicTo(path string, s FICPublicDisplay) error { if fd, err := os.Create(path); err != nil { return err } else { @@ -53,37 +73,134 @@ func savePublicTo(path string, s []FICPublicScene) error { } } -func getPublic(ps httprouter.Params, body []byte) (interface{}, error) { - if _, err := os.Stat(path.Join(TeamsDir, "public", fmt.Sprintf("public%s.json", ps.ByName("sid")))); !os.IsNotExist(err) { - return readPublic(path.Join(TeamsDir, "public", fmt.Sprintf("public%s.json", ps.ByName("sid")))) - } else { - return []FICPublicScene{}, nil - } +type DashboardFiles struct { + Presets []string `json:"presets"` + Nexts []*NextDashboardFile `json:"nexts"` } -func deletePublic(ps httprouter.Params, body []byte) (interface{}, error) { - if err := savePublicTo(path.Join(TeamsDir, "public", fmt.Sprintf("public%s.json", ps.ByName("sid"))), []FICPublicScene{}); err != nil { - return nil, err - } else { - return []FICPublicScene{}, err - } +type NextDashboardFile struct { + Name string `json:"name"` + Screen int `json:"screen"` + Date time.Time `json:"date"` } -func savePublic(ps httprouter.Params, body []byte) (interface{}, error) { - var scenes []FICPublicScene - if err := json.Unmarshal(body, &scenes); err != nil { - return nil, err +func listPublic(c *gin.Context) { + files, err := os.ReadDir(DashboardDir) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return } - if _, err := os.Stat(path.Join(TeamsDir, "public")); os.IsNotExist(err) { - if err := os.Mkdir(path.Join(TeamsDir, "public"), 0750); err != nil { - return nil, err + var ret DashboardFiles + for _, file := range files { + if strings.HasPrefix(file.Name(), "preset-") { + ret.Presets = append(ret.Presets, 
strings.TrimSuffix(strings.TrimPrefix(file.Name(), "preset-"), ".json")) + continue + } + + if !strings.HasPrefix(file.Name(), "public") || len(file.Name()) < 18 { + continue + } + + ts, err := strconv.ParseInt(file.Name()[8:18], 10, 64) + if err == nil { + s, _ := strconv.Atoi(file.Name()[6:7]) + ret.Nexts = append(ret.Nexts, &NextDashboardFile{ + Name: file.Name()[6:18], + Screen: s, + Date: time.Unix(ts, 0), + }) } } - if err := savePublicTo(path.Join(TeamsDir, "public", fmt.Sprintf("public%s.json", ps.ByName("sid"))), scenes); err != nil { - return nil, err - } else { - return scenes, err - } + c.JSON(http.StatusOK, ret) +} + +func getPublic(c *gin.Context) { + if strings.Contains(c.Params.ByName("sid"), "/") { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "sid cannot contains /"}) + return + } + + filename := fmt.Sprintf("public%s.json", c.Params.ByName("sid")) + if strings.HasPrefix(c.Params.ByName("sid"), "preset-") { + filename = fmt.Sprintf("%s.json", c.Params.ByName("sid")) + } + + if _, err := os.Stat(path.Join(DashboardDir, filename)); !os.IsNotExist(err) { + p, err := readPublic(path.Join(DashboardDir, filename)) + if err != nil { + log.Println("Unable to readPublic in getPublic:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene retrieval."}) + return + } + + c.JSON(http.StatusOK, p) + return + } + + c.JSON(http.StatusOK, FICPublicDisplay{Scenes: []FICPublicScene{}, Side: []FICPublicScene{}}) +} + +func deletePublic(c *gin.Context) { + if strings.Contains(c.Params.ByName("sid"), "/") { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "sid cannot contains /"}) + return + } + + filename := fmt.Sprintf("public%s.json", c.Params.ByName("sid")) + if strings.HasPrefix(c.Params.ByName("sid"), "preset-") { + filename = fmt.Sprintf("%s.json", c.Params.ByName("sid")) + } + + if len(filename) == 12 { + if err := savePublicTo(path.Join(DashboardDir, filename), 
FICPublicDisplay{}); err != nil { + log.Println("Unable to deletePublic:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene deletion."}) + return + } + } else { + if err := os.Remove(path.Join(DashboardDir, filename)); err != nil { + log.Println("Unable to deletePublic:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene deletion."}) + return + } + } + + c.JSON(http.StatusOK, FICPublicDisplay{Scenes: []FICPublicScene{}, Side: []FICPublicScene{}}) +} + +func savePublic(c *gin.Context) { + if strings.Contains(c.Params.ByName("sid"), "/") { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "sid cannot contains /"}) + return + } + + var scenes FICPublicDisplay + err := c.ShouldBindJSON(&scenes) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + filename := fmt.Sprintf("public%s.json", c.Params.ByName("sid")) + if c.Request.URL.Query().Has("t") { + t, err := time.Parse(time.RFC3339, c.Request.URL.Query().Get("t")) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + filename = fmt.Sprintf("public%s-%d.json", c.Params.ByName("sid"), t.Unix()) + } else if c.Request.URL.Query().Has("p") { + filename = fmt.Sprintf("preset-%s.json", c.Request.URL.Query().Get("p")) + } + + if err := savePublicTo(path.Join(DashboardDir, filename), scenes); err != nil { + log.Println("Unable to savePublicTo:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during scene saving."}) + return + } + + c.JSON(http.StatusOK, scenes) } diff --git a/admin/api/qa.go b/admin/api/qa.go new file mode 100644 index 00000000..10bc02c4 --- /dev/null +++ b/admin/api/qa.go @@ -0,0 +1,119 @@ +package api + +import ( + "log" + "net/http" + "strconv" + + "srs.epita.fr/fic-server/libfic" + + 
"github.com/gin-gonic/gin" +) + +func declareQARoutes(router *gin.RouterGroup) { + router.POST("/qa/", importExerciceQA) + + apiQARoutes := router.Group("/qa/:qid") + apiQARoutes.Use(QAHandler) + apiQARoutes.POST("/comments", importQAComment) +} + +func QAHandler(c *gin.Context) { + qid, err := strconv.ParseInt(string(c.Params.ByName("qid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid QA identifier"}) + return + } + + qa, err := fic.GetQAQuery(qid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "QA query not found"}) + return + } + + c.Set("qa-query", qa) + + c.Next() +} + +func importExerciceQA(c *gin.Context) { + // Create a new query + var uq fic.QAQuery + err := c.ShouldBindJSON(&uq) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + var exercice *fic.Exercice + if uq.IdExercice == 0 { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "id_exercice not filled"}) + return + } else if exercice, err = fic.GetExercice(uq.IdExercice); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Unable to find requested exercice"}) + return + } + + if len(uq.State) == 0 { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "State not filled"}) + return + } + + if len(uq.Subject) == 0 { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Subject not filled"}) + return + } + + if qa, err := exercice.NewQAQuery(uq.Subject, uq.IdTeam, uq.User, uq.State, nil); err != nil { + log.Println("Unable to importExerciceQA:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during query creation."}) + return + } else { + qa.Creation = uq.Creation + qa.Solved = uq.Solved + qa.Closed = uq.Closed + + _, err = qa.Update() + if err != nil { + log.Println("Unable to update in importExerciceQA:", err.Error()) + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during query updating."}) + return + } + + c.JSON(http.StatusOK, qa) + } +} + +func importQAComment(c *gin.Context) { + query := c.MustGet("qa-query").(*fic.QAQuery) + + // Create a new query + var uc fic.QAComment + err := c.ShouldBindJSON(&uc) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + if len(uc.Content) == 0 { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Empty comment"}) + return + } + + if qac, err := query.AddComment(uc.Content, uc.IdTeam, uc.User); err != nil { + log.Println("Unable to AddComment in importQAComment:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during comment creation."}) + return + } else { + qac.Date = uc.Date + + _, err = qac.Update() + if err != nil { + log.Println("Unable to Update comment in importQAComment") + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during comment creation."}) + return + } + + c.JSON(http.StatusOK, qac) + } +} diff --git a/admin/api/repositories.go b/admin/api/repositories.go new file mode 100644 index 00000000..9afaea5d --- /dev/null +++ b/admin/api/repositories.go @@ -0,0 +1,67 @@ +package api + +import ( + "net/http" + "strings" + + "srs.epita.fr/fic-server/admin/sync" + + "github.com/gin-gonic/gin" +) + +func declareRepositoriesRoutes(router *gin.RouterGroup) { + if gi, ok := sync.GlobalImporter.(sync.GitImporter); ok { + router.GET("/repositories", func(c *gin.Context) { + mod, err := gi.GetSubmodules() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + c.JSON(http.StatusOK, gin.H{"repositories": mod}) + }) + + router.GET("/repositories/*repopath", func(c *gin.Context) { + repopath := strings.TrimPrefix(c.Param("repopath"), "/") + + mod, err := gi.GetSubmodule(repopath) + 
if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + c.JSON(http.StatusOK, mod) + }) + + router.POST("/repositories/*repopath", func(c *gin.Context) { + repopath := strings.TrimPrefix(c.Param("repopath"), "/") + + mod, err := gi.IsRepositoryUptodate(repopath) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + c.JSON(http.StatusOK, mod) + }) + + router.DELETE("/repositories/*repopath", func(c *gin.Context) { + di, ok := sync.GlobalImporter.(sync.DeletableImporter) + if !ok { + c.AbortWithStatusJSON(http.StatusNotImplemented, gin.H{"errmsg": "Not implemented"}) + return + } + + if strings.Contains(c.Param("repopath"), "..") { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Repopath contains invalid characters"}) + return + } + + repopath := strings.TrimPrefix(c.Param("repopath"), "/") + + err := di.DeleteDir(repopath) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + c.JSON(http.StatusOK, true) + }) + } +} diff --git a/admin/api/router.go b/admin/api/router.go index a6bd873b..743bc22e 100644 --- a/admin/api/router.go +++ b/admin/api/router.go @@ -1,11 +1,29 @@ package api import ( - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -var router = httprouter.New() +func DeclareRoutes(router *gin.RouterGroup) { + apiRoutes := router.Group("/api") -func Router() *httprouter.Router { - return router + declareCertificateRoutes(apiRoutes) + declareClaimsRoutes(apiRoutes) + declareEventsRoutes(apiRoutes) + declareExercicesRoutes(apiRoutes) + declareExportRoutes(apiRoutes) + declareFilesGlobalRoutes(apiRoutes) + declareFilesRoutes(apiRoutes) + declareGlobalExercicesRoutes(apiRoutes) + declareHealthRoutes(apiRoutes) + declareMonitorRoutes(apiRoutes) + declarePasswordRoutes(apiRoutes) + declarePublicRoutes(apiRoutes) + declareQARoutes(apiRoutes) 
+ declareRepositoriesRoutes(apiRoutes) + declareTeamsRoutes(apiRoutes) + declareThemesRoutes(apiRoutes) + declareSettingsRoutes(apiRoutes) + declareSyncRoutes(apiRoutes) + DeclareVersionRoutes(apiRoutes) } diff --git a/admin/api/settings.go b/admin/api/settings.go index 9a3649bb..c16d698d 100644 --- a/admin/api/settings.go +++ b/admin/api/settings.go @@ -2,85 +2,424 @@ package api import ( "encoding/json" - "errors" + "fmt" + "io" + "log" + "net/http" + "os" "path" + "strconv" "time" + "srs.epita.fr/fic-server/admin/generation" "srs.epita.fr/fic-server/admin/sync" "srs.epita.fr/fic-server/libfic" "srs.epita.fr/fic-server/settings" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/settings-ro.json", apiHandler(getROSettings)) - router.GET("/api/settings.json", apiHandler(getSettings)) - router.PUT("/api/settings.json", apiHandler(saveSettings)) +var IsProductionEnv = false - router.POST("/api/reset", apiHandler(reset)) +func declareSettingsRoutes(router *gin.RouterGroup) { + router.GET("/challenge.json", getChallengeInfo) + router.PUT("/challenge.json", saveChallengeInfo) + + router.GET("/settings.json", getSettings) + router.PUT("/settings.json", saveSettings) + router.DELETE("/settings.json", func(c *gin.Context) { + err := ResetSettings() + if err != nil { + log.Println("Unable to ResetSettings:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during setting reset."}) + return + } + + c.JSON(http.StatusOK, true) + }) + + router.GET("/settings-next", listNextSettings) + + apiNextSettingsRoutes := router.Group("/settings-next/:ts") + apiNextSettingsRoutes.Use(NextSettingsHandler) + apiNextSettingsRoutes.GET("", getNextSettings) + apiNextSettingsRoutes.DELETE("", deleteNextSettings) + + router.POST("/reset", reset) + router.POST("/full-generation", fullGeneration) + + router.GET("/prod", func(c *gin.Context) { + c.JSON(http.StatusOK, IsProductionEnv) + }) + 
router.PUT("/prod", func(c *gin.Context) { + err := c.ShouldBindJSON(&IsProductionEnv) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, IsProductionEnv) + }) } -func getROSettings(_ httprouter.Params, body []byte) (interface{}, error) { - syncMtd := "Disabled" +func NextSettingsHandler(c *gin.Context) { + ts, err := strconv.ParseInt(string(c.Params.ByName("ts")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid next settings identifier"}) + return + } + + nsf, err := settings.ReadNextSettingsFile(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", ts)), ts) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Next settings not found"}) + return + } + + c.Set("next-settings", nsf) + + c.Next() +} + +func fullGeneration(c *gin.Context) { + resp, err := generation.FullGeneration() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{ + "errmsg": err.Error(), + }) + return + } + defer resp.Body.Close() + + v, _ := io.ReadAll(resp.Body) + c.JSON(resp.StatusCode, gin.H{ + "errmsg": string(v), + }) +} + +func GetChallengeInfo() (*settings.ChallengeInfo, error) { + var challengeinfo string + var err error + if sync.GlobalImporter == nil { + if fd, err := os.Open(path.Join(settings.SettingsDir, settings.ChallengeFile)); err == nil { + defer fd.Close() + var buf []byte + buf, err = io.ReadAll(fd) + if err == nil { + challengeinfo = string(buf) + } + } + } else { + challengeinfo, err = sync.GetFileContent(sync.GlobalImporter, settings.ChallengeFile) + } + + if err != nil { + log.Println("Unable to retrieve challenge.json:", err.Error()) + return nil, fmt.Errorf("Unable to retrive challenge.json: %w", err) + } + + s, err := settings.ReadChallengeInfo(challengeinfo) + if err != nil { + log.Println("Unable to ReadChallengeInfo:", err.Error()) + return nil, fmt.Errorf("Unable to read 
challenge info: %w", err) + } + + return s, nil +} + +func getChallengeInfo(c *gin.Context) { + if s, err := GetChallengeInfo(); err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + } else { + c.JSON(http.StatusOK, s) + } +} + +func saveChallengeInfo(c *gin.Context) { + var info *settings.ChallengeInfo + err := c.ShouldBindJSON(&info) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + if sync.GlobalImporter != nil { - syncMtd = sync.GlobalImporter.Kind() + jenc, err := json.Marshal(info) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + err = sync.WriteFileContent(sync.GlobalImporter, "challenge.json", jenc) + if err != nil { + log.Println("Unable to SaveChallengeInfo:", err.Error()) + // Ignore the error, try to continue + } + + err = sync.ImportChallengeInfo(info, DashboardDir) + if err != nil { + log.Println("Unable to ImportChallengeInfo:", err.Error()) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("Something goes wrong when trying to import related files: %s", err.Error())}) + return + } } - return map[string]interface{}{ - "sync": syncMtd, - }, nil + if err := settings.SaveChallengeInfo(path.Join(settings.SettingsDir, settings.ChallengeFile), info); err != nil { + log.Println("Unable to SaveChallengeInfo:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to save distributed challenge info: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, info) } -func getSettings(_ httprouter.Params, body []byte) (interface{}, error) { - if settings.ExistsSettings(path.Join(settings.SettingsDir, settings.SettingsFile)) { - return settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile)) +func getSettings(c *gin.Context) { + s, err := settings.ReadSettings(path.Join(settings.SettingsDir, 
settings.SettingsFile)) + if err != nil { + log.Println("Unable to ReadSettings:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to read settings: %s", err.Error())}) + return + } + + s.WorkInProgress = !IsProductionEnv + c.Writer.Header().Add("X-FIC-Time", fmt.Sprintf("%d", time.Now().Unix())) + c.JSON(http.StatusOK, s) +} + +func saveSettings(c *gin.Context) { + var config *settings.Settings + err := c.ShouldBindJSON(&config) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + // Is this a future setting? + if c.Request.URL.Query().Has("t") { + t, err := time.Parse(time.RFC3339, c.Request.URL.Query().Get("t")) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + // Load current settings to perform diff later + init_settings, err := settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile)) + if err != nil { + log.Println("Unable to ReadSettings:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to read settings: %s", err.Error())}) + return + } + + current_settings := init_settings + // Apply already registered settings + nsu, err := settings.MergeNextSettingsUntil(&t) + if err == nil { + current_settings = settings.MergeSettings(*init_settings, nsu) + } else { + log.Println("Unable to MergeNextSettingsUntil:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to merge next settings: %s", err.Error())}) + return + } + + // Keep only diff + diff := settings.DiffSettings(current_settings, config) + + hasItems := false + for _, _ = range diff { + hasItems = true + break + } + + if !hasItems { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "No difference to apply."}) + return + } + + if !c.Request.URL.Query().Has("erase") { + // Check if there is 
already diff to apply at the given time + if nsf, err := settings.ReadNextSettingsFile(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", t.Unix())), t.Unix()); err == nil { + for k, v := range nsf.Values { + if _, ok := diff[k]; !ok { + diff[k] = v + } + } + } + } + + // Save the diff + settings.SaveSettings(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", t.Unix())), diff) + + // Return current settings + c.JSON(http.StatusOK, current_settings) } else { - return settings.FICSettings{ - Title: "Challenge FIC", - Authors: "Laboratoire SRS, ÉPITA", - Start: time.Unix(0, 0), - End: time.Unix(0, 0), - Generation: time.Unix(0, 0), - FirstBlood: fic.FirstBlood, - SubmissionCostBase: fic.SubmissionCostBase, - AllowRegistration: false, - DenyNameChange: false, - EnableResolutionRoute: false, - PartialValidation: true, - EnableExerciceDepend: true, - }, nil + // Just apply settings right now! + if err := settings.SaveSettings(path.Join(settings.SettingsDir, settings.SettingsFile), config); err != nil { + log.Println("Unable to SaveSettings:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to save settings: %s", err.Error())}) + return + } + + ApplySettings(config) + c.JSON(http.StatusOK, config) } } -func saveSettings(_ httprouter.Params, body []byte) (interface{}, error) { - var config settings.FICSettings - if err := json.Unmarshal(body, &config); err != nil { - return nil, err +func listNextSettings(c *gin.Context) { + nsf, err := settings.ListNextSettingsFiles() + if err != nil { + log.Println("Unable to ListNextSettingsFiles:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to list next settings files: %s", err.Error())}) + return } - if err := settings.SaveSettings(path.Join(settings.SettingsDir, settings.SettingsFile), config); err != nil { - return nil, err - } else { - return config, err + c.JSON(http.StatusOK, nsf) +} + +func 
getNextSettings(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("next-settings").(*settings.NextSettingsFile)) +} + +func deleteNextSettings(c *gin.Context) { + nsf := c.MustGet("next-settings").(*settings.NextSettingsFile) + + err := os.Remove(path.Join(settings.SettingsDir, fmt.Sprintf("%d.json", nsf.Id))) + if err != nil { + log.Println("Unable to remove the file:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to remove the file: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, true) +} + +func ApplySettings(config *settings.Settings) { + fic.PartialValidation = config.PartialValidation + fic.UnlockedChallengeDepth = config.UnlockedChallengeDepth + fic.UnlockedChallengeUpTo = config.UnlockedChallengeUpTo + fic.DisplayAllFlags = config.DisplayAllFlags + fic.HideCaseSensitivity = config.HideCaseSensitivity + fic.UnlockedStandaloneExercices = config.UnlockedStandaloneExercices + fic.UnlockedStandaloneExercicesByThemeStepValidation = config.UnlockedStandaloneExercicesByThemeStepValidation + fic.UnlockedStandaloneExercicesByStandaloneExerciceValidation = config.UnlockedStandaloneExercicesByStandaloneExerciceValidation + fic.DisplayMCQBadCount = config.DisplayMCQBadCount + fic.FirstBlood = config.FirstBlood + fic.SubmissionCostBase = config.SubmissionCostBase + fic.HintCoefficient = config.HintCurCoefficient + fic.WChoiceCoefficient = config.WChoiceCurCoefficient + fic.ExerciceCurrentCoefficient = config.ExerciceCurCoefficient + fic.GlobalScoreCoefficient = config.GlobalScoreCoefficient + fic.SubmissionCostBase = config.SubmissionCostBase + fic.SubmissionUniqueness = config.SubmissionUniqueness + fic.CountOnlyNotGoodTries = config.CountOnlyNotGoodTries + fic.QuestionGainRatio = config.QuestionGainRatio + + if config.DiscountedFactor != fic.DiscountedFactor { + fic.DiscountedFactor = config.DiscountedFactor + if err := fic.DBRecreateDiscountedView(); err != nil { + log.Println("Unable to recreate 
exercices_discounted view:", err.Error()) + } } } -func reset(_ httprouter.Params, body []byte) (interface{}, error) { +func ResetSettings() error { + return settings.SaveSettings(path.Join(settings.SettingsDir, settings.SettingsFile), &settings.Settings{ + WorkInProgress: IsProductionEnv, + FirstBlood: fic.FirstBlood, + SubmissionCostBase: fic.SubmissionCostBase, + ExerciceCurCoefficient: 1, + HintCurCoefficient: 1, + WChoiceCurCoefficient: 1, + GlobalScoreCoefficient: 1, + DiscountedFactor: 0, + QuestionGainRatio: 0, + UnlockedStandaloneExercices: 10, + UnlockedStandaloneExercicesByThemeStepValidation: 1, + UnlockedStandaloneExercicesByStandaloneExerciceValidation: 0, + AllowRegistration: false, + CanJoinTeam: false, + DenyTeamCreation: false, + DenyNameChange: false, + AcceptNewIssue: true, + QAenabled: false, + EnableResolutionRoute: false, + PartialValidation: true, + UnlockedChallengeDepth: 0, + SubmissionUniqueness: true, + CountOnlyNotGoodTries: true, + DisplayAllFlags: false, + DisplayMCQBadCount: false, + EventKindness: false, + }) +} + +func ResetChallengeInfo() error { + return settings.SaveChallengeInfo(path.Join(settings.SettingsDir, settings.ChallengeFile), &settings.ChallengeInfo{ + Title: "Challenge forensic", + SubTitle: "sous le patronage du commandement de la cyberdéfense", + Authors: "Laboratoire SRS, ÉPITA", + VideosLink: "", + Description: `

Le challenge forensic vous place dans la peau de spécialistes en investigation numérique. Nous mettons à votre disposition une vingtaine de scénarios différents, dans lesquels vous devrez faire les différentes étapes de la caractérisation d’une réponse à incident proposées.

+

Chaque scénario met en scène un contexte d’entreprise, ayant découvert récemment qu’elle a été victime d’une cyberattaque. Elle vous demande alors de l’aider à caractériser, afin de mieux comprendre la situation, notamment le mode opératoire de l’adversaire, les impacts de la cyberattaque, le périmètre technique compromis, etc. Il faudra parfois aussi l’éclairer sur les premières étapes de la réaction.

`, + Rules: `

Déroulement

+

Pendant toute la durée du challenge, vous aurez accès à tous les scénarios, mais seulement à la première des 5 étapes. Chaque étape supplémentaire est débloquée lorsque vous validez l’intégralité de l’étape précédente. Toutefois, pour dynamiser le challenge toutes les étapes et tous les scénarios seront débloquées pour la dernière heure du challenge.

+

Nous mettons à votre disposition une plateforme sur laquelle vous pourrez obtenir les informations sur le contexte de l’entreprise et, généralement, une série de fichiers qui semblent appropriés pour avancer dans l’investigation.

+

La validation d’une étape se fait sur la plateforme, après avoir analysé les informations fournies, en répondant à des questions plus ou moins précises. Il s’agit le plus souvent des mots-clefs que l’on placerait dans un rapport.

+

Pour vous débloquer ou accélérer votre investigation, vous pouvez accéder à quelques indices, en échange d’une décote sur votre score d’un certain nombre de points préalablement affichés.

+

Calcul des points, bonus, malus et classement

+

Chaque équipe dispose d’un compteur de points dans l’intervalle ]-∞;+∞[ (aux détails techniques près), à partir duquel le classement est établi.

+

Vous perdez des points en dévoilant des indices, en demandant des propositions de réponses en remplacement de certains champs de texte, ou en essayant un trop grand nombre de fois une réponse.

+

Le nombre de points que vous fait perdre un indice dépend habituellement de l’aide qu’il vous apportera et est indiqué avant de le dévoiler, car il peut fluctuer en fonction de l’avancement du challenge.

+

Pour chaque champ de texte, vous disposez de 10 tentatives avant de perdre des points (vous perdez les points même si vous ne validez pas l’étape) pour chaque tentative supplémentaire : -0,25 point entre 11 et 20, -0,5 entre 21 et 30, -0,75 entre 31 et 40, …

+

La seule manière de gagner des points est de valider une étape d’un scénario dans son intégralité. Le nombre de points gagnés dépend de la difficulté théorique de l’étape ainsi que d’éventuels bonus. Un bonus de 10 % est accordé à la première équipe qui valide une étape. D’autres bonus peuvent ponctuer le challenge, détaillé dans la partie suivante.

+

Le classement est établi par équipe, selon le nombre de points récoltés et perdus par tous les membres. En cas d’égalité au score, les équipes sont départagées en fonction de leur ordre d’arrivée à ce score.

+

Temps forts

+

Le challenge forensic est jalonné de plusieurs temps forts durant lesquels certains calculs détaillés dans la partie précédente peuvent être altérés. L’équipe d’animation du challenge vous avertira environ 15 minutes avant le début de la modification.

+

Chaque modification se répercute instantanément dans votre interface, attendez simplement qu’elle apparaisse afin d’être certain d’en bénéficier. Un compte à rebours est généralement affiché sur les écrans pour indiquer la fin d’un temps fort. La fin d’application d’un bonus est déterminé par l’heure d’arrivée de votre demande sur nos serveurs.

+

Sans y être limité ou assuré, sachez que durant les précédentes éditions du challenge forensic, nous avons par exemple : doublé les points de défis peu tentés, doublé les points de tous les défis pendant 30 minutes, réduit le coût des indices pendant 15 minutes, etc.

+

+

Tous les étudiants de la majeure Système, Réseaux et Sécurité de l’ÉPITA, son équipe enseignante ainsi que le commandement de la cyberdéfense vous souhaitent bon courage pour cette nouvelle éditions du challenge !

`, + YourMission: `

Bienvenue au challenge forensic !

+

Vous voici aujourd'hui dans la peau de spécialistes en investigation numérique. Vous avez à votre disposition une vingtaine de scénarios différents dans lesquels vous devrez faire les différentes étapes de la caractérisation d’une réponse à incident.

+

Chaque scénario est découpé en 5 grandes étapes de difficulté croissante. Un certain nombre de points est attribué à chaque étape, avec un processus de validation automatique.

+

Un classement est établi en temps réel, tenant compte des différents bonus, en fonction du nombre de points de chaque équipe.

`, + }) +} + +func reset(c *gin.Context) { var m map[string]string - if err := json.Unmarshal(body, &m); err != nil { - return nil, err + err := c.ShouldBindJSON(&m) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } - if t, ok := m["type"]; !ok { - return nil, errors.New("Field type not found") - } else if t == "teams" { - return true, fic.ResetTeams() - } else if t == "challenges" { - return true, fic.ResetExercices() - } else if t == "game" { - return true, fic.ResetGame() - } else { - return nil, errors.New("Unknown reset type") + t, ok := m["type"] + if !ok { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Field type not found"}) } + + switch t { + case "teams": + err = fic.ResetTeams() + case "challenges": + err = fic.ResetExercices() + case "game": + err = fic.ResetGame() + case "annexes": + err = fic.ResetAnnexes() + case "settings": + err = ResetSettings() + case "challengeInfo": + err = ResetChallengeInfo() + default: + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Unknown reset type"}) + return + } + + if err != nil { + log.Printf("Unable to reset (type=%q): %s", t, err) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to performe the reset: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, true) } diff --git a/admin/api/sync.go b/admin/api/sync.go new file mode 100644 index 00000000..582c55e0 --- /dev/null +++ b/admin/api/sync.go @@ -0,0 +1,411 @@ +package api + +import ( + "fmt" + "log" + "net/http" + "net/url" + "os" + "path" + "reflect" + "strings" + + "srs.epita.fr/fic-server/admin/generation" + "srs.epita.fr/fic-server/admin/sync" + "srs.epita.fr/fic-server/libfic" + + "github.com/gin-gonic/gin" + "go.uber.org/multierr" +) + +var lastSyncError = "" + +func flatifySyncErrors(errs error) (ret []string) { + for _, err := range multierr.Errors(errs) { + ret = append(ret, err.Error()) + } + return +} + +func 
declareSyncRoutes(router *gin.RouterGroup) { + apiSyncRoutes := router.Group("/sync") + + // Return the global sync status + apiSyncRoutes.GET("/status", func(c *gin.Context) { + syncMtd := "Disabled" + if sync.GlobalImporter != nil { + syncMtd = sync.GlobalImporter.Kind() + } + + var syncId *string + if sync.GlobalImporter != nil { + syncId = sync.GlobalImporter.Id() + } + + c.JSON(http.StatusOK, gin.H{ + "sync-type": reflect.TypeOf(sync.GlobalImporter).Name(), + "sync-id": syncId, + "sync": syncMtd, + "pullMutex": !sync.OneGitPullStatus(), + "syncMutex": !sync.OneDeepSyncStatus() && !sync.OneThemeDeepSyncStatus(), + "progress": sync.DeepSyncProgress, + "lastError": lastSyncError, + }) + }) + + // Base sync checks if the local directory is in sync with remote one. + apiSyncRoutes.POST("/base", func(c *gin.Context) { + err := sync.GlobalImporter.Sync() + if err != nil { + lastSyncError = err.Error() + c.JSON(http.StatusExpectationFailed, gin.H{"errmsg": err.Error()}) + } else { + lastSyncError = "" + c.JSON(http.StatusOK, true) + } + }) + + // Speedy sync performs a recursive synchronization without importing files. + apiSyncRoutes.POST("/speed", func(c *gin.Context) { + st := sync.SpeedySyncDeep(sync.GlobalImporter) + sync.EditDeepReport(&st, false) + c.JSON(http.StatusOK, st) + }) + + // Deep sync: a fully recursive synchronization (can be limited by theme). 
+ apiSyncRoutes.POST("/deep", func(c *gin.Context) { + r := sync.SyncDeep(sync.GlobalImporter) + lastSyncError = "" + c.JSON(http.StatusOK, r) + }) + + apiSyncRoutes.POST("/local-diff", APIDiffDBWithRemote) + + apiSyncDeepRoutes := apiSyncRoutes.Group("/deep/:thid") + apiSyncDeepRoutes.Use(ThemeHandler) + // Special route to handle standalone exercices + apiSyncRoutes.POST("/deep/0", func(c *gin.Context) { + var st []string + for _, se := range multierr.Errors(sync.SyncThemeDeep(sync.GlobalImporter, &fic.StandaloneExercicesTheme, 0, 250, nil)) { + st = append(st, se.Error()) + } + sync.EditDeepReport(&sync.SyncReport{Exercices: st}, false) + sync.DeepSyncProgress = 255 + lastSyncError = "" + c.JSON(http.StatusOK, st) + }) + apiSyncDeepRoutes.POST("", func(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + + exceptions := sync.LoadThemeException(sync.GlobalImporter, theme) + + var st []string + for _, se := range multierr.Errors(sync.SyncThemeDeep(sync.GlobalImporter, theme, 0, 250, exceptions)) { + st = append(st, se.Error()) + } + sync.EditDeepReport(&sync.SyncReport{Themes: map[string][]string{theme.Name: st}}, false) + sync.DeepSyncProgress = 255 + lastSyncError = "" + c.JSON(http.StatusOK, st) + }) + + // Auto sync: to use with continuous deployment, in a development env + apiSyncRoutes.POST("/auto/*p", autoSync) + + // Themes + apiSyncRoutes.POST("/fixurlids", fixAllURLIds) + + apiSyncRoutes.POST("/themes", func(c *gin.Context) { + _, errs := sync.SyncThemes(sync.GlobalImporter) + lastSyncError = "" + c.JSON(http.StatusOK, flatifySyncErrors(errs)) + }) + + apiSyncThemesRoutes := apiSyncRoutes.Group("/themes/:thid") + apiSyncThemesRoutes.Use(ThemeHandler) + apiSyncThemesRoutes.POST("/fixurlid", func(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + if theme.FixURLId() { + v, err := theme.Update() + if err != nil { + log.Println("Unable to UpdateTheme after fixurlid:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, 
gin.H{"errmsg": "An error occurs when saving the theme."}) + return + } + + c.JSON(http.StatusOK, v) + } else { + c.AbortWithStatusJSON(http.StatusOK, 0) + } + }) + + // Exercices + declareSyncExercicesRoutes(apiSyncRoutes) + declareSyncExercicesRoutes(apiSyncThemesRoutes) + + // Videos sync imports resolution.mp4 from path stored in database. + apiSyncRoutes.POST("/videos", func(c *gin.Context) { + exercices, err := fic.GetExercices() + if err != nil { + log.Println("Unable to GetExercices:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve exercices list."}) + return + } + + for _, e := range exercices { + if len(e.VideoURI) == 0 || !strings.HasPrefix(e.VideoURI, "$RFILES$/") { + continue + } + + vpath, err := url.PathUnescape(strings.TrimPrefix(e.VideoURI, "$RFILES$/")) + if err != nil { + c.JSON(http.StatusExpectationFailed, gin.H{"errmsg": fmt.Sprintf("Unable to perform URL unescape: %s", err.Error())}) + return + } + + _, err = sync.ImportFile(sync.GlobalImporter, vpath, func(filePath, URI string) (interface{}, error) { + e.VideoURI = path.Join("$FILES$", strings.TrimPrefix(filePath, fic.FilesDir)) + return e.Update() + }) + if err != nil { + c.JSON(http.StatusExpectationFailed, gin.H{"errmsg": err.Error()}) + return + } + } + + c.JSON(http.StatusOK, true) + }) + + // Remove soluces from the database. 
+ apiSyncRoutes.POST("/drop_soluces", func(c *gin.Context) { + exercices, err := fic.GetExercices() + if err != nil { + log.Println("Unable to GetExercices:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve exercices list."}) + return + } + + var errs error + for _, e := range exercices { + // Remove any published video + if len(e.VideoURI) > 0 && strings.HasPrefix(e.VideoURI, "$FILES$") { + vpath := path.Join(fic.FilesDir, strings.TrimPrefix(e.VideoURI, "$FILES$/")) + err = os.Remove(vpath) + if err != nil { + errs = multierr.Append(errs, fmt.Errorf("unable to delete published video (%q): %w", e.VideoURI, err)) + } + } + + // Clean the database + if len(e.VideoURI) > 0 || len(e.Resolution) > 0 { + e.VideoURI = "" + e.Resolution = "" + + _, err = e.Update() + if err != nil { + errs = multierr.Append(errs, fmt.Errorf("unable to update exercice (%d: %s): %w", e.Id, e.Title, err)) + } + } + } + + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": flatifySyncErrors(err)}) + } else { + c.JSON(http.StatusOK, true) + } + }) +} + +func declareSyncExercicesRoutes(router *gin.RouterGroup) { + router.POST("/exercices", func(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + exceptions := sync.LoadThemeException(sync.GlobalImporter, theme) + + _, errs := sync.SyncExercices(sync.GlobalImporter, theme, exceptions) + c.JSON(http.StatusOK, flatifySyncErrors(errs)) + }) + apiSyncExercicesRoutes := router.Group("/exercices/:eid") + apiSyncExercicesRoutes.Use(ExerciceHandler) + apiSyncExercicesRoutes.POST("", func(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + exercice := c.MustGet("exercice").(*fic.Exercice) + + exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil) + + _, _, _, errs := sync.SyncExercice(sync.GlobalImporter, theme, exercice.Path, nil, exceptions) + c.JSON(http.StatusOK, flatifySyncErrors(errs)) + }) + 
apiSyncExercicesRoutes.POST("/files", func(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + theme := c.MustGet("theme").(*fic.Theme) + + exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil) + + c.JSON(http.StatusOK, flatifySyncErrors(sync.ImportExerciceFiles(sync.GlobalImporter, exercice, exceptions))) + }) + apiSyncExercicesRoutes.POST("/fixurlid", func(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + if exercice.FixURLId() { + v, err := exercice.Update() + if err != nil { + log.Println("Unable to UpdateExercice after fixurlid:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when saving the exercice."}) + return + } + + c.JSON(http.StatusOK, v) + } else { + c.AbortWithStatusJSON(http.StatusOK, 0) + } + }) + apiSyncExercicesRoutes.POST("/hints", func(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + theme := c.MustGet("theme").(*fic.Theme) + + exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil) + + _, errs := sync.SyncExerciceHints(sync.GlobalImporter, exercice, sync.ExerciceFlagsMap(sync.GlobalImporter, exercice), exceptions) + c.JSON(http.StatusOK, flatifySyncErrors(errs)) + }) + apiSyncExercicesRoutes.POST("/flags", func(c *gin.Context) { + exercice := c.MustGet("exercice").(*fic.Exercice) + theme := c.MustGet("theme").(*fic.Theme) + + exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil) + _, errs := sync.SyncExerciceFlags(sync.GlobalImporter, exercice, exceptions) + _, herrs := sync.SyncExerciceHints(sync.GlobalImporter, exercice, sync.ExerciceFlagsMap(sync.GlobalImporter, exercice), exceptions) + c.JSON(http.StatusOK, flatifySyncErrors(multierr.Append(errs, herrs))) + }) +} + +// autoSync tries to performs a smart synchronization, when in development environment. +// It'll sync most of modified things, and will delete out of sync data. 
+// Avoid using it in a production environment. +func autoSync(c *gin.Context) { + p := strings.Split(strings.TrimPrefix(c.Params.ByName("p"), "/"), "/") + + if !IsProductionEnv { + if err := sync.GlobalImporter.Sync(); err != nil { + lastSyncError = err.Error() + log.Println("Unable to sync.GI.Sync:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to perform the pull."}) + return + } + lastSyncError = "" + } + + themes, err := fic.GetThemes() + if err != nil { + log.Println("Unable to GetThemes:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve theme list."}) + return + } + + // No argument, do a deep sync + if len(p) == 0 { + if !IsProductionEnv { + for _, theme := range themes { + theme.DeleteDeep() + } + } + + st := sync.SyncDeep(sync.GlobalImporter) + c.JSON(http.StatusOK, st) + return + } + + var theTheme *fic.Theme + + // Find the given theme + for _, theme := range themes { + if theme.Path == p[0] { + theTheme = theme + break + } + } + + if theTheme == nil { + // The theme doesn't exists locally, perhaps it has not been imported already? 
+ rThemes, err := sync.GetThemes(sync.GlobalImporter) + if err == nil { + for _, theme := range rThemes { + if theme == p[0] { + sync.SyncThemes(sync.GlobalImporter) + + themes, err := fic.GetThemes() + if err == nil { + for _, theme := range themes { + if theme.Path == p[0] { + theTheme = theme + break + } + } + } + + break + } + } + } + + if theTheme == nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": fmt.Sprintf("Theme not found %q", p[0])}) + return + } + } + + if !IsProductionEnv { + exercices, err := theTheme.GetExercices() + if err == nil { + for _, exercice := range exercices { + if len(p) <= 1 || exercice.Path == path.Join(p[0], p[1]) { + exercice.DeleteDeep() + } + } + } + } + + exceptions := sync.LoadThemeException(sync.GlobalImporter, theTheme) + + var st []string + for _, se := range multierr.Errors(sync.SyncThemeDeep(sync.GlobalImporter, theTheme, 0, 250, exceptions)) { + st = append(st, se.Error()) + } + sync.EditDeepReport(&sync.SyncReport{Themes: map[string][]string{theTheme.Name: st}}, false) + sync.DeepSyncProgress = 255 + + resp, err := generation.FullGeneration() + if err == nil { + defer resp.Body.Close() + } + + c.JSON(http.StatusOK, st) +} + +func diffDBWithRemote() (map[string][]syncDiff, error) { + diffs := map[string][]syncDiff{} + + themes, err := fic.GetThemesExtended() + if err != nil { + return nil, err + } + + // Compare inner themes + for _, theme := range themes { + diffs[theme.Name], err = diffThemeWithRemote(theme) + if err != nil { + return nil, fmt.Errorf("Unable to diffThemeWithRemote: %w", err) + } + } + + return diffs, err +} + +func APIDiffDBWithRemote(c *gin.Context) { + diffs, err := diffDBWithRemote() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, diffs) +} diff --git a/admin/api/team.go b/admin/api/team.go index 0755468b..d5d21992 100644 --- a/admin/api/team.go +++ b/admin/api/team.go @@ -3,199 +3,639 @@ package api import 
( "encoding/json" "fmt" + "log" + "net/http" + "strconv" "strings" "time" + "srs.epita.fr/fic-server/admin/pki" "srs.epita.fr/fic-server/libfic" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/teams.json", apiHandler( - func(httprouter.Params, []byte) (interface{}, error) { - return fic.ExportTeams() - })) - router.GET("/api/teams-binding", apiHandler( - func(httprouter.Params, []byte) (interface{}, error) { - return bindingTeams() - })) - router.GET("/api/teams-nginx-members", apiHandler( - func(httprouter.Params, []byte) (interface{}, error) { - return nginxGenMember() - })) - router.GET("/api/teams-tries.json", apiHandler( - func(httprouter.Params, []byte) (interface{}, error) { - return fic.GetTries(nil, nil) - })) +func declareTeamsRoutes(router *gin.RouterGroup) { + router.GET("/teams.json", func(c *gin.Context) { + teams, err := fic.ExportTeams(false) + if err != nil { + log.Println("Unable to ExportTeams:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during teams export."}) + return + } - router.GET("/api/teams/", apiHandler( - func(httprouter.Params, []byte) (interface{}, error) { - return fic.GetTeams() - })) - router.POST("/api/teams/", apiHandler(createTeam)) + c.JSON(http.StatusOK, teams) + }) + router.GET("/teams-members.json", func(c *gin.Context) { + teams, err := fic.ExportTeams(true) + if err != nil { + log.Println("Unable to ExportTeams:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during teams export."}) + return + } - router.GET("/api/teams/:tid/", apiHandler(teamHandler( - func(team fic.Team, _ []byte) (interface{}, error) { - return team, nil - }))) - router.PUT("/api/teams/:tid/", apiHandler(teamHandler(updateTeam))) - router.POST("/api/teams/:tid/", apiHandler(teamHandler(addTeamMember))) - router.DELETE("/api/teams/:tid/", apiHandler(teamHandler( - func(team fic.Team, 
_ []byte) (interface{}, error) { - return team.Delete() - }))) - router.GET("/api/teams/:tid/my.json", apiHandler(teamPublicHandler( - func(team *fic.Team, _ []byte) (interface{}, error) { - return fic.MyJSONTeam(team, true) - }))) - router.GET("/api/teams/:tid/wait.json", apiHandler(teamPublicHandler( - func(team *fic.Team, _ []byte) (interface{}, error) { - return fic.MyJSONTeam(team, false) - }))) - router.GET("/api/teams/:tid/stats.json", apiHandler(teamPublicHandler( - func(team *fic.Team, _ []byte) (interface{}, error) { - if team != nil { - return team.GetStats() - } else { - return fic.GetTeamsStats(nil) + c.JSON(http.StatusOK, teams) + }) + router.GET("/teams-associations.json", allAssociations) + router.GET("/teams-binding", bindingTeams) + router.GET("/teams-nginx", nginxGenTeams) + router.POST("/refine_colors", refineTeamsColors) + router.POST("/disableinactiveteams", disableInactiveTeams) + router.POST("/enableallteams", enableAllTeams) + router.GET("/teams-members-nginx", nginxGenMember) + router.GET("/teams-tries.json", func(c *gin.Context) { + tries, err := fic.GetTries(nil, nil) + if err != nil { + log.Println("Unable to GetTries:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieves tries."}) + return + } + + c.JSON(http.StatusOK, tries) + }) + + router.GET("/teams", func(c *gin.Context) { + teams, err := fic.GetTeams() + if err != nil { + log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during teams listing."}) + return + } + + c.JSON(http.StatusOK, teams) + }) + router.POST("/teams", createTeam) + + apiTeamsRoutes := router.Group("/teams/:tid") + apiTeamsRoutes.Use(TeamHandler) + apiTeamsRoutes.GET("/", func(c *gin.Context) { + c.JSON(http.StatusOK, c.MustGet("team").(*fic.Team)) + }) + apiTeamsRoutes.PUT("/", updateTeam) + apiTeamsRoutes.POST("/", addTeamMember) + apiTeamsRoutes.DELETE("/", deleteTeam) + 
apiTeamsRoutes.GET("/score-grid.json", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + sg, err := team.ScoreGrid() + if err != nil { + log.Printf("Unable to get ScoreGrid(tid=%d): %s", team.Id, err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during score grid calculation."}) + return + } + + c.JSON(http.StatusOK, sg) + }) + + apiTeamsPublicRoutes := router.Group("/teams/:tid") + apiTeamsPublicRoutes.Use(TeamPublicHandler) + apiTeamsPublicRoutes.GET("/my.json", func(c *gin.Context) { + var team *fic.Team + if t, ok := c.Get("team"); ok && t != nil { + team = t.(*fic.Team) + } + tfile, err := fic.MyJSONTeam(team, true) + if err != nil { + log.Println("Unable to get MyJSONTeam:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during team JSON generation."}) + return + } + + c.JSON(http.StatusOK, tfile) + }) + apiTeamsPublicRoutes.GET("/wait.json", func(c *gin.Context) { + var team *fic.Team + if t, ok := c.Get("team"); ok && t != nil { + team = t.(*fic.Team) + } + tfile, err := fic.MyJSONTeam(team, false) + if err != nil { + log.Println("Unable to get MyJSONTeam:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during team JSON generation."}) + return + } + + c.JSON(http.StatusOK, tfile) + }) + apiTeamsPublicRoutes.GET("/stats.json", func(c *gin.Context) { + var team *fic.Team + if t, ok := c.Get("team"); ok && t != nil { + team = t.(*fic.Team) + } + if team != nil { + stats, err := team.GetStats() + if err != nil { + log.Println("Unable to get GetStats:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during stats calculation."}) + return } - }))) - router.GET("/api/teams/:tid/history.json", apiHandler(teamPublicHandler( - func(team *fic.Team, _ []byte) (interface{}, error) { - if team != nil { - return team.GetHistory() - } else { - 
return fic.GetTeamsStats(nil) + + c.JSON(http.StatusOK, stats) + } else { + stats, err := fic.GetTeamsStats(nil) + if err != nil { + log.Println("Unable to get GetTeamsStats:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during global stats calculation."}) + return } - }))) - router.DELETE("/api/teams/:tid/history.json", apiHandler(teamPublicHandler(delHistory))) - router.GET("/api/teams/:tid/tries", apiHandler(teamPublicHandler( - func(team *fic.Team, _ []byte) (interface{}, error) { - return fic.GetTries(team, nil) - }))) - router.GET("/api/teams/:tid/members", apiHandler(teamHandler( - func(team fic.Team, _ []byte) (interface{}, error) { - return team.GetMembers() - }))) - router.POST("/api/teams/:tid/members", apiHandler(teamHandler(addTeamMember))) - router.PUT("/api/teams/:tid/members", apiHandler(teamHandler(setTeamMember))) + + c.JSON(http.StatusOK, stats) + } + }) + apiTeamsRoutes.GET("/history.json", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + history, err := team.GetHistory() + if err != nil { + log.Println("Unable to get GetHistory:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during history calculation."}) + return + } + + c.JSON(http.StatusOK, history) + }) + apiTeamsRoutes.PATCH("/history.json", updateHistory) + apiTeamsRoutes.DELETE("/history.json", delHistory) + apiTeamsPublicRoutes.GET("/tries", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + tries, err := fic.GetTries(team, nil) + if err != nil { + log.Println("Unable to GetTries:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during tries calculation."}) + return + } + + c.JSON(http.StatusOK, tries) + }) + apiTeamsRoutes.GET("/members", func(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + members, err := team.GetMembers() + if err != nil { + log.Println("Unable to 
GetMembers:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during members retrieval."}) + return + } + + c.JSON(http.StatusOK, members) + }) + apiTeamsRoutes.POST("/members", addTeamMember) + apiTeamsRoutes.PUT("/members", setTeamMember) + + declareTeamsPasswordRoutes(apiTeamsRoutes) + declareTeamClaimsRoutes(apiTeamsRoutes) + declareTeamCertificateRoutes(apiTeamsRoutes) + + // Import teams from cyberrange + router.POST("/cyberrange-teams.json", importTeamsFromCyberrange) } -func nginxGenMember() (string, error) { - if teams, err := fic.GetTeams(); err != nil { - return "", err +func TeamHandler(c *gin.Context) { + tid, err := strconv.ParseInt(string(c.Params.ByName("tid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid team identifier"}) + return + } + + team, err := fic.GetTeam(tid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Team not found"}) + return + } + + c.Set("team", team) + + c.Next() +} + +func TeamPublicHandler(c *gin.Context) { + tid, err := strconv.ParseInt(string(c.Params.ByName("tid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid team identifier"}) + return + } + + if tid != 0 { + team, err := fic.GetTeam(tid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Team not found"}) + return + } + + c.Set("team", team) } else { - ret := "" + c.Set("team", nil) + } + + c.Next() +} + +func nginxGenTeams(c *gin.Context) { + teams, err := fic.GetTeams() + if err != nil { + log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + ret := "" + for _, team := range teams { + ret += fmt.Sprintf(" if ($remote_user = \"%s\") { set $team \"%d\"; }\n", strings.ToLower(team.Name), team.Id) + } + + c.String(http.StatusOK, ret) +} + +func nginxGenMember(c *gin.Context) { + teams, err 
:= fic.GetTeams() + if err != nil { + log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + ret := "" + for _, team := range teams { + if members, err := team.GetMembers(); err == nil { + for _, member := range members { + ret += fmt.Sprintf(" if ($remote_user = \"%s\") { set $team \"%d\"; }\n", member.Nickname, team.Id) + } + } else { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + } + + c.String(http.StatusOK, ret) +} + +func bindingTeams(c *gin.Context) { + teams, err := fic.GetTeams() + if err != nil { + log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + ret := "" + for _, team := range teams { + if members, err := team.GetMembers(); err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } else { + var mbs []string + for _, member := range members { + mbs = append(mbs, fmt.Sprintf("%s %s", member.Firstname, member.Lastname)) + } + ret += fmt.Sprintf("%d;%s;%s\n", team.Id, team.Name, strings.Join(mbs, ";")) + } + } + + c.String(http.StatusOK, ret) +} + +type teamAssociation struct { + Association string `json:"association"` + TeamId int64 `json:"team_id"` +} + +func allAssociations(c *gin.Context) { + teams, err := fic.GetTeams() + if err != nil { + log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + var ret []teamAssociation + + for _, team := range teams { + assocs, err := pki.GetTeamAssociations(TeamsDir, team.Id) + if err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + for _, a := range assocs { + ret = append(ret, teamAssociation{a, team.Id}) + } + } + + c.JSON(http.StatusOK, ret) +} + +func importTeamsFromCyberrange(c *gin.Context) { + file, err := c.FormFile("file") + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"errmsg": "Failed to get file: " + err.Error()}) + 
return + } + + src, err := file.Open() + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": "Failed to open file: " + err.Error()}) + return + } + defer src.Close() + + var ut []fic.CyberrangeTeamBase + err = json.NewDecoder(src).Decode(&fic.CyberrangeAPIResponse{Data: &ut}) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + teams, err := fic.GetTeams() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Impossible de récupérer la liste des équipes actuelles: %s", err.Error())}) + return + } + + for _, crteam := range ut { + var exist_team *fic.Team for _, team := range teams { - if members, err := team.GetMembers(); err == nil { - for _, member := range members { - ret += fmt.Sprintf(" if ($remote_user = \"%s\") { set $team \"%d\"; }\n", member.Nickname, team.Id) - } - } else { - return "", err + if team.Name == crteam.Name || team.ExternalId == crteam.UUID { + exist_team = team + break } } - return ret, nil - } -} + if exist_team != nil { + exist_team.Name = crteam.Name + exist_team.ExternalId = crteam.UUID + _, err = exist_team.Update() + } else { + exist_team, err = fic.CreateTeam(crteam.Name, fic.RandomColor().ToRGB(), crteam.UUID) + } -func bindingTeams() (string, error) { - if teams, err := fic.GetTeams(); err != nil { - return "", err - } else { - ret := "" - for _, team := range teams { - if members, err := team.GetMembers(); err != nil { - return "", err - } else { - var mbs []string - for _, member := range members { - mbs = append(mbs, fmt.Sprintf("%s %s", member.Firstname, member.Lastname)) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Impossible d'ajouter/de modifier l'équipe %v: %s", crteam, err.Error())}) + return + } + + // Import members + if c.DefaultQuery("nomembers", "0") != "" && len(crteam.Members) > 0 { + exist_team.ClearMembers() + + for _, member := 
range crteam.Members { + _, err = exist_team.AddMember(member.Name, "", member.Nickname, exist_team.Name) + if err != nil { + log.Printf("Unable to add member %q to team %s (tid=%d): %s", member.UUID, exist_team.Name, exist_team.Id, err.Error()) } - ret += fmt.Sprintf("%d;%s;%s\n", team.Id, team.Name, strings.Join(mbs, ";")) } } - return ret, nil - } -} - -type uploadedTeam struct { - Name string - Color uint32 -} - -type uploadedMember struct { - Firstname string - Lastname string - Nickname string - Company string -} - -func createTeam(_ httprouter.Params, body []byte) (interface{}, error) { - var ut uploadedTeam - if err := json.Unmarshal(body, &ut); err != nil { - return nil, err } - return fic.CreateTeam(strings.TrimSpace(ut.Name), ut.Color) + teams, err = fic.GetTeams() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Impossible de récupérer la liste des équipes après import: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, teams) } -func updateTeam(team fic.Team, body []byte) (interface{}, error) { +func createTeam(c *gin.Context) { var ut fic.Team - if err := json.Unmarshal(body, &ut); err != nil { - return nil, err + err := c.ShouldBindJSON(&ut) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + if ut.Color == 0 { + ut.Color = fic.RandomColor().ToRGB() + } + + team, err := fic.CreateTeam(strings.TrimSpace(ut.Name), ut.Color, ut.ExternalId) + if err != nil { + log.Println("Unable to CreateTeam:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during team creation."}) + return + } + + c.JSON(http.StatusOK, team) +} + +func updateTeam(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + var ut fic.Team + err := c.ShouldBindJSON(&ut) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } ut.Id = team.Id - if _, err := 
ut.Update(); err != nil { - return nil, err + if ut.Password != nil && *ut.Password == "" { + ut.Password = nil } - return ut, nil + _, err = ut.Update() + if err != nil { + log.Println("Unable to updateTeam:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during team updating."}) + return + } + + c.JSON(http.StatusOK, ut) } -func addTeamMember(team fic.Team, body []byte) (interface{}, error) { - var members []uploadedMember - if err := json.Unmarshal(body, &members); err != nil { - return nil, err +func refineTeamsColors(c *gin.Context) { + teams, err := fic.GetTeams() + if err != nil { + log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + for i, team := range teams { + team.Color = fic.HSL{ + H: float64(i)/float64(len(teams)) - 0.2, + S: float64(1) / float64(1+i%2), + L: 0.25 + float64(0.5)/float64(1+i%3), + }.ToRGB() + + _, err = team.Update() + if err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + } + + c.JSON(http.StatusOK, teams) +} + +func disableInactiveTeams(c *gin.Context) { + teams, err := fic.GetTeams() + if err != nil { + log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + for _, team := range teams { + var serials []uint64 + serials, err = pki.GetTeamSerials(TeamsDir, team.Id) + if err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + var assocs []string + assocs, err = pki.GetTeamAssociations(TeamsDir, team.Id) + if err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + if len(serials) == 0 && len(assocs) == 0 { + if team.Active { + team.Active = false + team.Update() + } + } else if !team.Active { + team.Active = true + team.Update() + } + } + + c.JSON(http.StatusOK, true) +} + +func enableAllTeams(c *gin.Context) { + teams, err := fic.GetTeams() + if err != nil { 
+ log.Println("Unable to GetTeams:", err.Error()) + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + for _, team := range teams { + if !team.Active { + team.Active = true + team.Update() + } + } + + c.JSON(http.StatusOK, true) +} + +func deleteTeam(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + assocs, err := pki.GetTeamAssociations(TeamsDir, team.Id) + if err != nil { + log.Printf("Unable to GetTeamAssociations(tid=%d): %s", team.Id, err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to retrieve team association."}) + return + } + + for _, assoc := range assocs { + err = pki.DeleteTeamAssociation(TeamsDir, assoc) + if err != nil { + log.Printf("Unable to DeleteTeamAssociation(assoc=%s): %s", assoc, err.Error()) + return + } + } + + _, err = team.Delete() + if err != nil { + log.Println("Unable to deleteTeam:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during team deletion."}) + return + } + + c.JSON(http.StatusOK, true) +} + +func addTeamMember(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + var members []fic.Member + err := c.ShouldBindJSON(&members) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } for _, member := range members { - team.AddMember(strings.TrimSpace(member.Firstname), strings.TrimSpace(member.Lastname), strings.TrimSpace(member.Nickname), strings.TrimSpace(member.Company)) + _, err := team.AddMember(strings.TrimSpace(member.Firstname), strings.TrimSpace(member.Lastname), strings.TrimSpace(member.Nickname), strings.TrimSpace(member.Company)) + if err != nil { + log.Println("Unable to AddMember:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during member creation."}) + return + } } - return team.GetMembers() + mmbrs, err := team.GetMembers() + if err != nil { + 
log.Println("Unable to retrieve members list:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "Unable to retrieve members list."}) + return + } + + c.JSON(http.StatusOK, mmbrs) } -func setTeamMember(team fic.Team, body []byte) (interface{}, error) { - var members []uploadedMember - if err := json.Unmarshal(body, &members); err != nil { - return nil, err - } - +func setTeamMember(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) team.ClearMembers() - for _, member := range members { - team.AddMember(strings.TrimSpace(member.Firstname), strings.TrimSpace(member.Lastname), strings.TrimSpace(member.Nickname), strings.TrimSpace(member.Company)) - } - - return team.GetMembers() + addTeamMember(c) } type uploadedHistory struct { - Kind string - Time time.Time - Primary *int64 - Secondary *int64 + Kind string + Time time.Time + Primary *int64 + Secondary *int64 + Coefficient float32 } -func delHistory(team *fic.Team, body []byte) (interface{}, error) { +func updateHistory(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + var uh uploadedHistory - if err := json.Unmarshal(body, &uh); err != nil { - return nil, err + err := c.ShouldBindJSON(&uh) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } - return team.DelHistoryItem(uh.Kind, uh.Time, uh.Primary, uh.Secondary) + var givenId int64 + if uh.Secondary != nil { + givenId = *uh.Secondary + } else if uh.Primary != nil { + givenId = *uh.Primary + } + + _, err = team.UpdateHistoryCoeff(uh.Kind, uh.Time, givenId, uh.Coefficient) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to update this history line: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, true) +} + +func delHistory(c *gin.Context) { + team := c.MustGet("team").(*fic.Team) + + var uh uploadedHistory + err := c.ShouldBindJSON(&uh) + if err != nil { + 
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return + } + + _, err = team.DelHistoryItem(uh.Kind, uh.Time, uh.Primary, uh.Secondary) + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to delete this history line: %s", err.Error())}) + return + } + + c.JSON(http.StatusOK, true) } diff --git a/admin/api/theme.go b/admin/api/theme.go index 8ad60fae..c1057397 100644 --- a/admin/api/theme.go +++ b/admin/api/theme.go @@ -1,89 +1,95 @@ package api import ( - "encoding/json" - "errors" "fmt" + "log" + "net/http" + "path" + "reflect" "strconv" + "strings" "srs.epita.fr/fic-server/admin/sync" "srs.epita.fr/fic-server/libfic" + "srs.epita.fr/fic-server/settings" - "github.com/julienschmidt/httprouter" + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/themes", apiHandler(listThemes)) - router.POST("/api/themes", apiHandler(createTheme)) - router.GET("/api/themes.json", apiHandler(exportThemes)) - router.GET("/api/files-bindings", apiHandler(bindingFiles)) +func declareThemesRoutes(router *gin.RouterGroup) { + router.GET("/themes", listThemes) + router.POST("/themes", createTheme) + router.GET("/themes.json", exportThemes) + router.GET("/session-forensic.yaml", func(c *gin.Context) { + if s, err := settings.ReadSettings(path.Join(settings.SettingsDir, settings.SettingsFile)); err != nil { + log.Printf("Unable to ReadSettings: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during settings reading."}) + return - router.GET("/api/themes/:thid", apiHandler(themeHandler(showTheme))) - router.PUT("/api/themes/:thid", apiHandler(themeHandler(updateTheme))) - router.DELETE("/api/themes/:thid", apiHandler(themeHandler(deleteTheme))) + } else if challengeinfo, err := sync.GetFileContent(sync.GlobalImporter, "challenge.json"); err != nil { + log.Println("Unable to retrieve challenge.json:", err.Error()) + 
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to retrive challenge.json: %s", err.Error())}) + return + } else if ch, err := settings.ReadChallengeInfo(challengeinfo); err != nil { + log.Printf("Unable to ReadChallengeInfo: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during challenge info reading."}) + return + } else if sf, err := fic.GenZQDSSessionFile(ch, s); err != nil { + log.Printf("Unable to GenZQDSSessionFile: %s", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during session file generation."}) + return + } else { + c.JSON(http.StatusOK, sf) + } + }) + router.GET("/files-bindings", bindingFiles) - router.GET("/api/themes/:thid/exercices", apiHandler(themeHandler(listThemedExercices))) - router.POST("/api/themes/:thid/exercices", apiHandler(themeHandler(createExercice))) + apiThemesRoutes := router.Group("/themes/:thid") + apiThemesRoutes.Use(ThemeHandler) + apiThemesRoutes.GET("", showTheme) + apiThemesRoutes.PUT("", updateTheme) + apiThemesRoutes.DELETE("", deleteTheme) - router.GET("/api/themes/:thid/exercices/:eid", apiHandler(exerciceHandler(showExercice))) - router.PUT("/api/themes/:thid/exercices/:eid", apiHandler(exerciceHandler(updateExercice))) - router.DELETE("/api/themes/:thid/exercices/:eid", apiHandler(exerciceHandler(deleteExercice))) + apiThemesRoutes.POST("/diff-sync", APIDiffThemeWithRemote) - router.GET("/api/themes/:thid/exercices/:eid/files", apiHandler(exerciceHandler(listExerciceFiles))) - router.POST("/api/themes/:thid/exercices/:eid/files", apiHandler(exerciceHandler(createExerciceFile))) + apiThemesRoutes.GET("/exercices_stats.json", getThemedExercicesStats) - router.GET("/api/themes/:thid/exercices/:eid/hints", apiHandler(exerciceHandler(listExerciceHints))) - router.POST("/api/themes/:thid/exercices/:eid/hints", apiHandler(exerciceHandler(createExerciceHint))) - - 
router.GET("/api/themes/:thid/exercices/:eid/keys", apiHandler(exerciceHandler(listExerciceKeys))) - router.POST("/api/themes/:thid/exercices/:eid/keys", apiHandler(exerciceHandler(createExerciceKey))) + declareExercicesRoutes(apiThemesRoutes) // Remote - router.GET("/api/remote/themes", apiHandler(sync.ApiListRemoteThemes)) - router.GET("/api/remote/themes/:thid", apiHandler(sync.ApiGetRemoteTheme)) - router.GET("/api/remote/themes/:thid/exercices", apiHandler(themeHandler(sync.ApiListRemoteExercices))) - - // Synchronize - router.POST("/api/sync/deep", apiHandler( - func(_ httprouter.Params, _ []byte) (interface{}, error) { - return sync.SyncDeep(sync.GlobalImporter), nil - })) - router.POST("/api/sync/themes", apiHandler( - func(_ httprouter.Params, _ []byte) (interface{}, error) { - return sync.SyncThemes(sync.GlobalImporter), nil - })) - router.POST("/api/sync/themes/:thid/exercices", apiHandler(themeHandler( - func(theme fic.Theme, _ []byte) (interface{}, error) { - return sync.SyncExercices(sync.GlobalImporter, theme), nil - }))) - router.POST("/api/sync/themes/:thid/exercices/:eid/files", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceFiles(sync.GlobalImporter, exercice), nil - }))) - router.POST("/api/sync/themes/:thid/exercices/:eid/hints", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceHints(sync.GlobalImporter, exercice), nil - }))) - router.POST("/api/sync/themes/:thid/exercices/:eid/keys", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceKeys(sync.GlobalImporter, exercice), nil - }))) - router.POST("/api/sync/themes/:thid/exercices/:eid/quiz", apiHandler(exerciceHandler( - func(exercice fic.Exercice, _ []byte) (interface{}, error) { - return sync.SyncExerciceMCQ(sync.GlobalImporter, exercice), nil - }))) - - 
router.POST("/api/sync/themes/:thid/fixurlid", apiHandler(themeHandler( - func(theme fic.Theme, _ []byte) (interface{}, error) { - if theme.FixURLId() { - return theme.Update() - } - return 0, nil - }))) - router.POST("/api/sync/fixurlids", apiHandler(fixAllURLIds)) + router.GET("/remote/themes", sync.ApiListRemoteThemes) + router.GET("/remote/themes/:thid", sync.ApiGetRemoteTheme) + router.GET("/remote/themes/:thid/exercices", sync.ApiListRemoteExercices) } -func fixAllURLIds(_ httprouter.Params, _ []byte) (interface{}, error) { +type Theme struct { + *fic.Theme + ForgeLink string `json:"forge_link,omitempty"` +} + +func ThemeHandler(c *gin.Context) { + thid, err := strconv.ParseInt(string(c.Params.ByName("thid")), 10, 64) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Invalid theme identifier"}) + return + } + + if thid == 0 { + c.Set("theme", &fic.StandaloneExercicesTheme) + } else { + theme, err := fic.GetTheme(thid) + if err != nil { + c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"errmsg": "Theme not found"}) + return + } + + c.Set("theme", theme) + } + + c.Next() +} + +func fixAllURLIds(c *gin.Context) { nbFix := 0 if themes, err := fic.GetThemes(); err == nil { for _, theme := range themes { @@ -103,92 +109,268 @@ func fixAllURLIds(_ httprouter.Params, _ []byte) (interface{}, error) { } } - return nbFix, nil + c.JSON(http.StatusOK, nbFix) } -func bindingFiles(_ httprouter.Params, body []byte) (interface{}, error) { - if files, err := fic.GetFiles(); err != nil { - return "", err - } else { - ret := "" - for _, file := range files { - ret += fmt.Sprintf("%s;%s\n", file.GetOrigin(), file.Path) +func bindingFiles(c *gin.Context) { + files, err := fic.GetFiles() + if err != nil { + c.AbortWithError(http.StatusInternalServerError, err) + return + } + + ret := "" + for _, file := range files { + ret += fmt.Sprintf("%s;%s\n", file.GetOrigin(), file.Path) + } + + c.String(http.StatusOK, ret) +} + +func listThemes(c 
*gin.Context) { + themes, err := fic.GetThemes() + if err != nil { + log.Println("Unable to listThemes:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to list themes."}) + return + } + + if has, _ := fic.HasStandaloneExercice(); has { + themes = append([]*fic.Theme{&fic.StandaloneExercicesTheme}, themes...) + } + + c.JSON(http.StatusOK, themes) +} + +func exportThemes(c *gin.Context) { + themes, err := fic.ExportThemes() + if err != nil { + log.Println("Unable to exportthemes:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs when trying to export themes."}) + return + } + + c.JSON(http.StatusOK, themes) +} + +func showTheme(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + + var forgelink string + if fli, ok := sync.GlobalImporter.(sync.ForgeLinkedImporter); ok { + if u, _ := fli.GetThemeLink(theme); u != nil { + forgelink = u.String() } - return ret, nil } + + c.JSON(http.StatusOK, Theme{theme, forgelink}) } -func getExercice(args []string) (fic.Exercice, error) { - if tid, err := strconv.Atoi(string(args[0])); err != nil { - return fic.Exercice{}, err - } else if theme, err := fic.GetTheme(tid); err != nil { - return fic.Exercice{}, err - } else if eid, err := strconv.Atoi(string(args[1])); err != nil { - return fic.Exercice{}, err - } else { - return theme.GetExercice(eid) - } -} - -func listThemes(_ httprouter.Params, _ []byte) (interface{}, error) { - return fic.GetThemes() -} - -func exportThemes(_ httprouter.Params, _ []byte) (interface{}, error) { - return fic.ExportThemes() -} - -func showTheme(theme fic.Theme, _ []byte) (interface{}, error) { - return theme, nil -} - -func listThemedExercices(theme fic.Theme, _ []byte) (interface{}, error) { - return theme.GetExercices() -} - -func showThemedExercice(theme fic.Theme, exercice fic.Exercice, body []byte) (interface{}, error) { - return exercice, nil -} - -type 
uploadedTheme struct { - Name string - URLId string - Authors string - Intro string -} - -func createTheme(_ httprouter.Params, body []byte) (interface{}, error) { - var ut uploadedTheme - if err := json.Unmarshal(body, &ut); err != nil { - return nil, err +func createTheme(c *gin.Context) { + var ut fic.Theme + err := c.ShouldBindJSON(&ut) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } if len(ut.Name) == 0 { - return nil, errors.New("Theme's name not filled") + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Theme's name not filled"}) + return } - return fic.CreateTheme(ut.Name, ut.URLId, ut.Authors, ut.Intro) + th, err := fic.CreateTheme(&ut) + if err != nil { + log.Println("Unable to createTheme:", err.Error()) + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": "An error occurs during theme creation."}) + return + } + + c.JSON(http.StatusOK, th) } -func updateTheme(theme fic.Theme, body []byte) (interface{}, error) { +func updateTheme(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + var ut fic.Theme - if err := json.Unmarshal(body, &ut); err != nil { - return nil, err + err := c.ShouldBindJSON(&ut) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": err.Error()}) + return } ut.Id = theme.Id if len(ut.Name) == 0 { - return nil, errors.New("Theme's name not filled") + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "Theme's name not filled"}) + return } if _, err := ut.Update(); err != nil { - return nil, err - } else { - return ut, nil + log.Println("Unable to updateTheme:", err.Error()) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "An error occurs during theme update."}) + return } + + if theme.Locked != ut.Locked { + exercices, err := theme.GetExercices() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + for _, exercice := range 
exercices { + if exercice.Disabled != ut.Locked { + exercice.Disabled = ut.Locked + _, err = exercice.Update() + if err != nil { + log.Println("Unable to enable/disable exercice: ", exercice.Id, err.Error()) + } + } + } + } + + c.JSON(http.StatusOK, ut) } -func deleteTheme(theme fic.Theme, _ []byte) (interface{}, error) { - return theme.Delete() +func deleteTheme(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + + _, err := theme.Delete() + if err != nil { + log.Println("Unable to deleteTheme:", err.Error()) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": "An error occurs during theme deletion."}) + return + } + + c.JSON(http.StatusOK, true) +} + +func getThemedExercicesStats(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + + exercices, err := theme.GetExercices() + if err != nil { + c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"errmsg": fmt.Sprintf("Unable to fetch exercices: %s", err.Error())}) + return + } + + ret := []exerciceStats{} + for _, e := range exercices { + ret = append(ret, exerciceStats{ + IdExercice: e.Id, + TeamTries: e.TriedTeamCount(), + TotalTries: e.TriedCount(), + SolvedCount: e.SolvedCount(), + FlagSolved: e.FlagSolved(), + MCQSolved: e.MCQSolved(), + }) + } + c.JSON(http.StatusOK, ret) +} + +func diffThemeWithRemote(theme *fic.Theme) ([]syncDiff, error) { + var diffs []syncDiff + + // Compare theme attributes + theme_remote, err := sync.GetRemoteTheme(theme.Path) + if err != nil { + return nil, err + } + + for _, field := range reflect.VisibleFields(reflect.TypeOf(*theme)) { + if ((field.Name == "Image") && path.Base(reflect.ValueOf(*theme_remote).FieldByName(field.Name).String()) != path.Base(reflect.ValueOf(*theme).FieldByName(field.Name).String())) || (field.Name != "Image" && !reflect.ValueOf(*theme_remote).FieldByName(field.Name).Equal(reflect.ValueOf(*theme).FieldByName(field.Name))) { + if !field.IsExported() || field.Name == "Id" || field.Name == "IdTheme" || field.Name == 
"IssueKind" || field.Name == "BackgroundColor" { + continue + } + + diffs = append(diffs, syncDiff{ + Field: field.Name, + Link: fmt.Sprintf("themes/%d", theme.Id), + Before: reflect.ValueOf(*theme).FieldByName(field.Name).Interface(), + After: reflect.ValueOf(*theme_remote).FieldByName(field.Name).Interface(), + }) + } + } + + // Compare exercices list + exercices, err := theme.GetExercices() + if err != nil { + return nil, fmt.Errorf("Unable to GetExercices: %w", err) + } + + exercices_remote, err := sync.ListRemoteExercices(theme.Path) + if err != nil { + return nil, fmt.Errorf("Unable to ListRemoteExercices: %w", err) + } + + var not_found []string + var extra_found []string + + for _, exercice_remote := range exercices_remote { + found := false + for _, exercice := range exercices { + if exercice.Path[strings.Index(exercice.Path, "/")+1:] == exercice_remote { + found = true + break + } + } + + if !found { + not_found = append(not_found, exercice_remote) + } + } + + for _, exercice := range exercices { + found := false + for _, exercice_remote := range exercices_remote { + if exercice.Path[strings.Index(exercice.Path, "/")+1:] == exercice_remote { + found = true + break + } + } + + if !found { + extra_found = append(extra_found, exercice.Path[strings.Index(exercice.Path, "/")+1:]) + } + } + + if len(not_found) > 0 || len(extra_found) > 0 { + diffs = append(diffs, syncDiff{ + Field: "theme.Exercices", + Link: fmt.Sprintf("themes/%d", theme.Id), + Before: strings.Join(extra_found, ", "), + After: strings.Join(not_found, ", "), + }) + } + + // Compare inner exercices + for i, exercice := range exercices { + exdiffs, err := diffExerciceWithRemote(exercice, theme) + if err != nil { + return nil, fmt.Errorf("Unable to diffExerciceWithRemote: %w", err) + } + + for _, exdiff := range exdiffs { + if theme.Id == 0 { + exdiff.Field = fmt.Sprintf("exercices[%d].%s", exercice.Id, exdiff.Field) + } else { + exdiff.Field = fmt.Sprintf("exercices[%d].%s", i, exdiff.Field) + } 
+ diffs = append(diffs, exdiff) + } + } + + return diffs, err +} + +func APIDiffThemeWithRemote(c *gin.Context) { + theme := c.MustGet("theme").(*fic.Theme) + + diffs, err := diffThemeWithRemote(theme) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"errmsg": err.Error()}) + return + } + + c.JSON(http.StatusOK, diffs) } diff --git a/admin/api/version.go b/admin/api/version.go index a65f9c85..52cb0726 100644 --- a/admin/api/version.go +++ b/admin/api/version.go @@ -1,13 +1,15 @@ package api import ( - "github.com/julienschmidt/httprouter" + "net/http" + + "github.com/gin-gonic/gin" ) -func init() { - router.GET("/api/version", apiHandler(showVersion)) +func DeclareVersionRoutes(router *gin.RouterGroup) { + router.GET("/version", showVersion) } -func showVersion(_ httprouter.Params, body []byte) (interface{}, error) { - return map[string]interface{}{"version": 0.5}, nil +func showVersion(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{"version": 1.0}) } diff --git a/admin/app.go b/admin/app.go new file mode 100644 index 00000000..1ba1910e --- /dev/null +++ b/admin/app.go @@ -0,0 +1,81 @@ +package main + +import ( + "context" + "log" + "net/http" + "path/filepath" + "strings" + "time" + + "github.com/gin-gonic/gin" + + "srs.epita.fr/fic-server/admin/api" + "srs.epita.fr/fic-server/settings" +) + +type App struct { + router *gin.Engine + srv *http.Server + cfg *settings.Settings + bind string +} + +func NewApp(cfg *settings.Settings, baseURL string, bind string) App { + if !cfg.WorkInProgress { + gin.SetMode(gin.ReleaseMode) + } + gin.ForceConsoleColor() + router := gin.Default() + + api.DeclareRoutes(router.Group("")) + + var baserouter *gin.RouterGroup + if len(baseURL) > 0 { + router.GET("/", func(c *gin.Context) { + c.Redirect(http.StatusFound, baseURL) + }) + router.GET(filepath.Dir(baseURL)+"/files/*_", func(c *gin.Context) { + path := c.Request.URL.Path + c.Redirect(http.StatusFound, filepath.Join(baseURL, strings.TrimPrefix(path, 
filepath.Dir(baseURL)))) + }) + + baserouter = router.Group(baseURL) + + api.DeclareRoutes(baserouter) + declareStaticRoutes(baserouter, cfg, baseURL) + } else { + declareStaticRoutes(router.Group(""), cfg, "") + } + + app := App{ + router: router, + bind: bind, + } + + return app +} + +func (app *App) Start() { + app.srv = &http.Server{ + Addr: app.bind, + Handler: app.router, + ReadHeaderTimeout: 15 * time.Second, + ReadTimeout: 15 * time.Second, + WriteTimeout: 10 * time.Second, + IdleTimeout: 30 * time.Second, + } + + log.Printf("Ready, listening on %s\n", app.bind) + if err := app.srv.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.Fatalf("listen: %s\n", err) + } +} + +func (app *App) Stop() { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if err := app.srv.Shutdown(ctx); err != nil { + log.Fatal("Server Shutdown:", err) + } +} diff --git a/admin/fill_exercices.sh b/admin/fill_exercices.sh deleted file mode 100755 index e2ce2141..00000000 --- a/admin/fill_exercices.sh +++ /dev/null @@ -1,262 +0,0 @@ -#!/bin/bash - -BASEURL="http://localhost:8081" -BASEURI="https://owncloud.srs.epita.fr/remote.php/webdav/FIC 2018" -BASEFILE="/mnt/fic/" -CLOUDPASS="$CLOUD_USER:$CLOUD_PASS" - -new_theme() { - NAME=`echo $1 | sed 's/"/\\\\"/g'` - AUTHORS=`echo $2 | sed 's/"/\\\\"/g'` - curl -f -s -d "{\"name\": \"$NAME\", \"authors\": \"$AUTHORS\"}" "${BASEURL}/api/themes" | - grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+" -} - -new_exercice() { - THEME="$1" - TITLE=`echo "$2" | sed 's/"/\\\\"/g'` - STATEMENT=`echo "$3" | sed 's/"/\\\\"/g' | sed ':a;N;$!ba;s/\n/
/g'` - DEPEND="$4" - GAIN="$5" - VIDEO="$6" - - curl -f -s -d "{\"title\": \"$TITLE\", \"statement\": \"$STATEMENT\", \"depend\": $DEPEND, \"gain\": $GAIN, \"videoURI\": \"$VIDEO\"}" "${BASEURL}/api/themes/$THEME/exercices" | - grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+" -} - -new_file() ( - THEME="$1" - EXERCICE="$2" - URI="$3" - DIGEST="$4" - ARGS="$5" - - FIRST= - PARTS=$(echo "$ARGS" | while read arg - do - [ -n "$arg" ] && { - [ -z "${FIRST}" ] || echo -n "," - echo "\"$arg\"" - } - FIRST=1 - done) - - [ -n "${DIGEST}" ] && DIGEST=", \"digest\": \"${DIGEST}\"" - - cat <&2 -{"path": "${BASEFILE}${URI}"${DIGEST}, "parts": [${PARTS}]} -EOF - -# curl -f -s -d "{\"URI\": \"${BASEFILE}${URI}\"}" "${BASEURL}/api/themes/$THEME/$EXERCICE/files" | - curl -f -s -d @- "${BASEURL}/api/themes/$THEME/exercices/$EXERCICE/files" </g'` - COST="$5" - URI="$6" - - [ -n "${CONTENT}" ] && CONTENT=", \"content\": \"${CONTENT}\"" - [ -n "${URI}" ] && URI=", \"path\": \"${BASEFILE}${URI}\"" - - curl -f -s -d "{\"title\": \"$TITLE\"$CONTENT$URI, \"cost\": $COST}" "${BASEURL}/api/themes/$THEME/exercices/$EXERCICE/hints" | - grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+" -} - -new_key() { - THEME="$1" - EXERCICE="$2" - TYPE="$3" - KEY=`echo $4 | sed 's#\\\\#\\\\\\\\#g' | sed 's/"/\\\\"/g'` - - curl -f -s -d "{\"type\": \"$TYPE\", \"key\": \"$KEY\"}" "${BASEURL}/api/themes/$THEME/exercices/$EXERCICE/keys" | - grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+" -} - -get_dir_from_cloud() { - curl -f -s -X PROPFIND -u "${CLOUDPASS}" "${BASEURI}$1" | xmllint --format - | grep 'd:href' | sed -E 's/^.*>(.*)<.*$/\1/' -} -get_dir() { - ls "${BASEFILE}$1" 2> /dev/null -} -#alias get_dir=get_dir_from_cloud - -get_file_from_cloud() { - curl -f -s -u "${CLOUDPASS}" "${BASEURI}$1" | tr -d '\r' -} -get_file() { - cat "${BASEFILE}$1" 2> /dev/null | tr -d '\r' - echo -} -#alias get_file=get_file_from_cloud - -unhtmlentities() { - cat | sed -E 's/%20/ /g' | sed -E "s/%27/'/g" | sed -E 's/%c3%a9/é/g' | sed -E 
's/%c3%a8/è/g' -} - -# Theme -{ - if [ $# -ge 1 ]; then - echo $1 - else - get_dir "" - fi -} | while read f; do basename "$f"; done | while read THEME_URI -do - THM_BASEURI="/${THEME_URI}/" - THEME_NAME=$(echo "${THEME_URI#*-}" | unhtmlentities) - THEME_AUTHORS=$(get_file "${THM_BASEURI}/AUTHORS.txt" | sed '/^$/d;s/$/, /' | tr -d '\n' | sed 's/, $//') - THEME_ID=`new_theme "$THEME_NAME" "$THEME_AUTHORS"` - if [ -z "$THEME_ID" ]; then - echo -e "\e[31;01m!!! An error occured during theme add\e[00m" - continue - else - echo -e "\e[33m>>> New theme created:\e[00m $THEME_ID - $THEME_NAME" - fi - - LAST=null - EXO_NUM=0 - { - if [ $# -ge 2 ]; then - echo "$2" - else - get_dir "${THM_BASEURI}" - fi - } | while read f; do basename "$f"; done | while read EXO_URI - do - case ${EXO_URI} in - [0-9]-*) - ;; - *) - continue;; - esac - - #EXO_NUM=$((EXO_NUM + 1)) - EXO_NUM=${EXO_URI%-*} - EXO_NAME=$(echo "${EXO_URI#*-}" | unhtmlentities) - echo - echo -e "\e[36m--- Filling exercice ${EXO_NUM} in theme ${THEME_NAME}\e[00m" - - EXO_BASEURI="${EXO_URI}/" - - EXO_VIDEO=$(get_dir "${THM_BASEURI}${EXO_BASEURI}/resolution/" | grep -E "\.(mov|mkv|mp4|avi|flv|ogv|webm)$" | while read f; do basename "$f"; done | tail -1) - [ -n "$EXO_VIDEO" ] && EXO_VIDEO="/resolution${THM_BASEURI}${EXO_BASEURI}resolution/${EXO_VIDEO}" - - if [ "${LAST}" = "null" ]; then - echo ">>> Assuming this exercice has no dependency" - else - echo ">>> Assuming this exercice depends on the last entry (id=${LAST})" - fi - - EXO_GAIN=$((3 * (2 ** $EXO_NUM) - 1)) - HINT_COST=$(($EXO_GAIN / 4)) - echo ">>> Using default gain: ${EXO_GAIN} points" - - EXO_SCENARIO=$(get_file "${THM_BASEURI}${EXO_BASEURI}/scenario.txt") - - EXO_ID=`new_exercice "${THEME_ID}" "${EXO_NAME}" "${EXO_SCENARIO}" "${LAST}" "${EXO_GAIN}" "${EXO_VIDEO}"` - if [ -z "$EXO_ID" ]; then - echo -e "\e[31;01m!!! 
An error occured during exercice add.\e[00m" - continue - else - echo -e "\e[32m>>> New exercice created:\e[00m $EXO_ID - $EXO_NAME" - fi - - - # Keys - get_file "${THM_BASEURI}${EXO_BASEURI}/flags.txt" | while read KEYLINE - do - [ -z "${KEYLINE}" ] && continue - - KEY_NAME=$(echo "$KEYLINE" | cut -d$'\t' -f 1) - KEY_RAW=$(echo "$KEYLINE" | cut -d$'\t' -f 2-) - - if [ -z "${KEY_RAW}" ] || [ "${KEY_NAME}" = "${KEY_RAW}" ]; then - KEY_NAME=$(echo "$KEYLINE" | cut -d : -f 1) - KEY_RAW=$(echo "$KEYLINE" | cut -d : -f 2-) - fi - - if [ -z "${KEY_NAME}" ]; then - KEY_NAME="Flag" - fi - - KEY_ID=`new_key "${THEME_ID}" "${EXO_ID}" "${KEY_NAME}" "${KEY_RAW}"` - if [ -z "$KEY_ID" ]; then - echo -e "\e[31;01m!!! An error occured during key import!\e[00m (name=${KEYNAME};raw=${KEY_RAW})" - else - echo -e "\e[32m>>> New key added:\e[00m $KEY_ID - $KEY_NAME" - fi - done - - - # Hints - HINTS=$(get_dir "${THM_BASEURI}${EXO_BASEURI}/hints/" | sed -E 's#(.*)#hints/\1#') - [ -z "${HINTS}" ] && HINTS=$(get_dir "${THM_BASEURI}${EXO_BASEURI}/" | grep ^hint.) - [ -z "${HINTS}" ] && HINTS="hint.txt" - HINT_COUNT=1 - echo "${HINTS}" | while read HINT - do - EXO_HINT=$(get_file "${THM_BASEURI}${EXO_BASEURI}/${HINT}") - if [ -n "$EXO_HINT" ]; then - EXO_HINT_TYPE=$(echo "${EXO_HINT}" | file --mime-type -b -) - if echo "${EXO_HINT_TYPE}" | grep text/ && [ $(echo "${EXO_HINT}" | wc -l) -lt 25 ]; then - HINT_ID=`new_hint "${THEME_ID}" "${EXO_ID}" "Astuce #${HINT_COUNT}" "${EXO_HINT}" "${HINT_COST}"` - else - HINT_ID=`new_hint "${THEME_ID}" "${EXO_ID}" "Astuce #${HINT_COUNT}" "" "${HINT_COST}" "${THM_BASEURI}${EXO_BASEURI}/${HINT}"` - fi - - if [ -z "$HINT_ID" ]; then - echo -e "\e[31;01m!!! 
An error occured during hint import!\e[00m (title=Astuce #${HINT_COUNT};content::${EXO_HINT_TYPE};cost=${HINT_COST})" - else - echo -e "\e[32m>>> New hint added:\e[00m $HINT_ID - Astuce #${HINT_COUNT}" - fi - fi - HINT_COUNT=$(($HINT_COUNT + 1)) - done - - - # Files: splited - get_dir "${THM_BASEURI}${EXO_BASEURI}files/" | grep -v DIGESTS.txt | grep '[0-9][0-9]$' | sed -E 's/\.?([0-9][0-9])$//' | sort | uniq | while read f; do basename "$f"; done | while read FILE_URI - do - DIGEST=$(get_file "${THM_BASEURI}${EXO_BASEURI}files/DIGESTS.txt" | grep "${FILE_URI}\$" | awk '{ print $1; }') - - PARTS= - for part in $(get_dir "${THM_BASEURI}${EXO_BASEURI}files/" | grep "${FILE_URI}" | sort) - do - PARTS="${PARTS}${BASEFILE}${THM_BASEURI}${EXO_BASEURI}files/${part} -" - done - echo -e "\e[35mImport splited file ${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI} from\e[00m `echo ${PARTS} | tr '\n' ' '`" - - FILE_ID=`new_file "${THEME_ID}" "${EXO_ID}" "${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI}" "${DIGEST}" "${PARTS}"` - if [ -z "$FILE_ID" ]; then - echo -e "\e[31;01m!!! An error occured during file import! Please check path.\e[00m" - else - echo -e "\e[32m>>> New file added:\e[00m $FILE_ID - $FILE_URI" - fi - done - - # Files: entire - get_dir "${THM_BASEURI}${EXO_BASEURI}files/" | grep -v DIGESTS.txt | grep -v '[0-9][0-9]$' | while read f; do basename "$f"; done | while read FILE_URI - do - DIGEST=$(get_file "${THM_BASEURI}${EXO_BASEURI}files/DIGESTS.txt" | grep "${FILE_URI}\$" | awk '{ print $1; }') - - echo "Import file ${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI}" - FILE_ID=`new_file "${THEME_ID}" "${EXO_ID}" "${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI}" "${DIGEST}"` - if [ -z "$FILE_ID" ]; then - echo -e "\e[31;01m!!! An error occured during file import! 
Please check path.\e[00m" - else - echo -e "\e[32m>>> New file added:\e[00m $FILE_ID - $FILE_URI" - fi - done - - - LAST=$EXO_ID - done - echo -done diff --git a/admin/fill_teams.sh b/admin/fill_teams.sh index 496e93ca..6a1ea7a5 100755 --- a/admin/fill_teams.sh +++ b/admin/fill_teams.sh @@ -2,13 +2,14 @@ BASEURL="http://127.0.0.1:8081/admin" GEN_CERTS=0 +GEN_PASSWD=0 EXTRA_TEAMS=0 CSV_SPLITER="," -CSV_COL_LASTNAME=1 -CSV_COL_FIRSTNAME=2 -CSV_COL_NICKNAME=3 -CSV_COL_COMPANY=7 -CSV_COL_TEAM=7 +CSV_COL_LASTNAME=2 +CSV_COL_FIRSTNAME=3 +CSV_COL_NICKNAME=5 +CSV_COL_COMPANY=6 +CSV_COL_TEAM=1 usage() { echo "$0 [options] csv_file" @@ -16,6 +17,7 @@ usage() { echo " -S -csv-spliter SEP CSV separator (default: $CSV_SPLITER)" echo " -e -extra-teams NBS Number of extra teams to generate (default: ${EXTRA_TEAMS})" echo " -c -generate-certificate Should team certificates be generated? (default: no)" + echo " -p -generate-password Should generate team password to teams.pass? (default: no)" } # Parse options @@ -33,6 +35,8 @@ do shift;; -c|-generate-certificates) GEN_CERTS=1;; + -p|-generate-password) + GEN_PASSWD=1;; *) echo "Unknown option '$1'" usage @@ -41,8 +45,7 @@ do shift done -[ "$#" -lt 1 ] && { usage; exit 1; } -PART_FILE="$1" +[ "$#" -lt 1 ] && [ "${EXTRA_TEAMS}" -eq 0 ] && { usage; exit 1; } new_team() { head -n "$1" team-names.txt | tail -1 | sed -E 's/^.*\|\[\[([^|]+\|)?([^|]+)\]\][^|]*\|([A-Fa-f0-9]{1,2})\|([A-Fa-f0-9]{1,2})\|([A-Fa-f0-9]{1,2})\|([0-9]{1,3})\|([0-9]{1,3})\|([0-9]{1,3})\|.*$/\6 \7 \8 \2/' | @@ -59,7 +62,7 @@ new_team() { COLOR=$((($R*256 + $G) * 256 + $B)) - curl -s -d "{\"name\": \"$N\",\"color\": $COLOR}" "${BASEURL}/api/teams/" + curl -s -d "{\"name\": \"$N\",\"color\": $COLOR}" "${BASEURL}/api/teams" done | grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+" } @@ -76,10 +79,30 @@ do if [ "${GEN_CERTS}" -eq 1 ] && ! 
curl -s -f "${BASEURL}/api/teams/${TID}/certificate" > /dev/null then curl -s -f "${BASEURL}/api/teams/${TID}/certificate/generate" + elif [ "${GEN_PASSWD}" -eq 1 ] + then + TEAMID=$(curl -s -f "${BASEURL}/api/teams/${TID}/" | jq -r .name) + PASSWD=$(curl -X POST -s -f "${BASEURL}/api/teams/${TID}/password" | jq -r .password) + NP=$(echo "${TEAMID}" | cut -d : -f 1 | sed 's/[[:upper:]]/\l&/g;s/[âáàä]/a/g;s/[êéèë]/e/g') + cat >> teams.pass <> htpasswd.ssha <> htpasswd.apr1 < /dev/null then curl -s -f "${BASEURL}/api/teams/${TID}/certificate/generate" + elif [ "${GEN_PASSWD}" -eq 1 ] + then + PASSWD=$(curl -X POST -s -f "${BASEURL}/api/teams/${TID}/password" | jq -r .password) + NP=$(echo "${TEAMID}" | cut -d : -f 1 | sed 's/[[:upper:]]/\l&/g;s/[âáàä]/a/g;s/[êéèë]/e/g') + cat >> teams.pass <> htpasswd.ssha <> htpasswd.apr1 < - Challenge Forensic - Administration - - + {{ .title }} - Administration + + - + -