diff --git a/availabili.tf/src/router/index.ts b/availabili.tf/src/router/index.ts
index 4c87438..2c6e884 100644
--- a/availabili.tf/src/router/index.ts
+++ b/availabili.tf/src/router/index.ts
@@ -2,6 +2,7 @@ import { createRouter, createWebHistory } from "vue-router";
import HomeView from "../views/HomeView.vue";
import ScheduleView from "../views/ScheduleView.vue";
import RosterBuilderView from "../views/RosterBuilderView.vue";
+import LoginView from "../views/LoginView.vue";
const router = createRouter({
history: createWebHistory(import.meta.env.BASE_URL),
@@ -11,6 +12,11 @@ const router = createRouter({
name: "home",
component: HomeView
},
+ {
+ path: "/login",
+ name: "login",
+ component: LoginView
+ },
{
path: "/schedule",
name: "schedule",
diff --git a/availabili.tf/src/stores/auth.ts b/availabili.tf/src/stores/auth.ts
new file mode 100644
index 0000000..ace7d3b
--- /dev/null
+++ b/availabili.tf/src/stores/auth.ts
@@ -0,0 +1,38 @@
+import { defineStore } from "pinia";
+import { ref } from "vue";
+
+export const useAuthStore = defineStore("auth", () => {
+ const steamId = ref(NaN);
+ const username = ref("");
+ const isLoggedIn = ref(false);
+ const isRegistering = ref(false);
+
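+  // Forwards the OpenID query parameters that Steam redirects back with to the
+  // backend, which verifies them against Steam and sets the auth cookie.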
+ async function login(queryParams: { [key: string]: string }) {
+ return fetch(import.meta.env.VITE_API_BASE_URL + "/login/authenticate", {
+ headers: {
+ "Content-Type": "application/json",
+ },
+ credentials: "same-origin",
+ method: "POST",
+ body: JSON.stringify(queryParams),
+ })
+ .then((response) => response.json())
+ .then((response) => {
+ isRegistering.value = response.isRegistering;
+ if (!isRegistering.value) {
+ steamId.value = response.steamId;
+ username.value = response.username;
+ isLoggedIn.value = true;
+ isRegistering.value = false;
+ }
+ });
+ }
+
+ return {
+ steamId,
+ username,
+ isLoggedIn,
+ isRegistering,
+ login,
+ }
+});
diff --git a/availabili.tf/src/stores/schedule.ts b/availabili.tf/src/stores/schedule.ts
index de46a14..6a47d05 100644
--- a/availabili.tf/src/stores/schedule.ts
+++ b/availabili.tf/src/stores/schedule.ts
@@ -1,17 +1,64 @@
+import { computed } from "@vue/reactivity";
import { defineStore } from "pinia";
import { reactive, ref, watch } from "vue";
+import { useRoute, useRouter } from "vue-router";
export const useScheduleStore = defineStore("schedule", () => {
- const dateStart = ref(new Date(2024, 9, 21));
+ const dateStart = ref(new Date(2024, 9, 21, 0, 30));
+
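+  // Unix timestamp (in seconds) for the start of the visible week, sent to the API as window_start.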
+ const windowStart = computed(() => Math.floor(dateStart.value.getTime() / 1000));
const availability = reactive(new Array(168));
+ const route = useRoute();
+ const router = useRouter();
+
+ const teamId = computed({
+ get: () => route.query.teamId,
+ set: (value) => router.push({ query: { teamId: value } }),
+ });
+
watch(dateStart, () => {
availability.fill(0);
+ fetchSchedule();
});
+ async function fetchSchedule() {
+ return fetch(import.meta.env.VITE_API_BASE_URL + "/schedule?" + new URLSearchParams({
+ window_start: windowStart.value.toString(),
+ team_id: "1",
+ }).toString(),{
+ credentials: "include",
+ })
+ .then((response) => response.json())
+ .then((response) => {
+ response.availability.forEach((value: number, i: number) => {
+ availability[i] = value;
+ });
+ return response;
+ });
+ }
+
+ async function saveSchedule() {
+ return fetch(import.meta.env.VITE_API_BASE_URL + "/schedule", {
+ method: "PUT",
+ credentials: "include",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({
+ window_start: Math.floor(dateStart.value.getTime() / 1000),
+ team_id: 1,
+ availability: availability,
+ })
+ });
+ }
+
return {
dateStart,
+ windowStart,
availability,
+ fetchSchedule,
+ saveSchedule,
};
});
diff --git a/availabili.tf/src/stores/teams.ts b/availabili.tf/src/stores/teams.ts
new file mode 100644
index 0000000..a07a589
--- /dev/null
+++ b/availabili.tf/src/stores/teams.ts
@@ -0,0 +1,36 @@
+import Cacheable from "@/cacheable";
+import { defineStore } from "pinia";
+import { computed, reactive, ref, type Reactive, type Ref } from "vue";
+
+interface Team {
+ id: number,
+ teamName: string,
+}
+
+export const useTeamsStore = defineStore("teams", () => {
+  //const teams: Reactive<Cacheable<Team[]>> =
+ // reactive(new Cacheable([], 0));
+ const teams: Ref<{ [id: number]: Team }> = ref({ });
+
+ async function fetchTeams() {
+ return new Promise((res, rej) => {
+ fetch(import.meta.env.VITE_API_BASE_URL + "/team/view", {
+ credentials: "include",
+ })
+ .then((response) => response.json())
+      .then((response: Array<Team>) => {
+        teams.value = response
+          .reduce((acc, team: Team) => {
+            return { ...acc, [team.id]: team };
+          }, {} as { [id: number]: Team });
+ res(teams.value);
+ })
+ .catch(() => rej());
+ });
+ }
+
+ return {
+ teams,
+ fetchTeams,
+ }
+});
diff --git a/availabili.tf/src/views/LoginView.vue b/availabili.tf/src/views/LoginView.vue
new file mode 100644
index 0000000..e59eb91
--- /dev/null
+++ b/availabili.tf/src/views/LoginView.vue
@@ -0,0 +1,48 @@
+
+
+
+
+
+
+ Register
+
+ Register
+
+
+ Logging in...
+
+
+
+
diff --git a/availabili.tf/src/views/ScheduleView.vue b/availabili.tf/src/views/ScheduleView.vue
index 314ea79..519b119 100644
--- a/availabili.tf/src/views/ScheduleView.vue
+++ b/availabili.tf/src/views/ScheduleView.vue
@@ -2,16 +2,22 @@
import AvailabilityGrid from "../components/AvailabilityGrid.vue";
import AvailabilityComboBox from "../components/AvailabilityComboBox.vue";
import WeekSelectionBox from "../components/WeekSelectionBox.vue";
-import { reactive, ref } from "vue";
-import { useScheduleStore } from "../stores/schedule.ts";
+import { computed, onMounted, reactive, ref } from "vue";
+import { useTeamsStore } from "../stores/teams";
+import { useScheduleStore } from "../stores/schedule";
+const teams = useTeamsStore();
const schedule = useScheduleStore();
-const options = reactive([
+const options = ref([
"TEAM PEPEJA forsenCD",
"The Snus Brotherhood",
]);
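+// Collapse the availability grid to 14:00-22:00 unless the user opts to show all hours.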
+const shouldShowAllHours = ref(false);
+const firstHour = computed(() => shouldShowAllHours.value ? 0 : 14);
+const lastHour = computed(() => shouldShowAllHours.value ? 23 : 22);
+
const comboBoxIndex = ref(0);
//const availability = reactive(new Array(168));
@@ -20,6 +26,24 @@ const availability = schedule.availability;
const selectionMode = ref(1);
const isEditing = ref(false);
+
+function saveSchedule() {
+ schedule.saveSchedule()
+ .then(() => {
+ isEditing.value = false;
+ });
+}
+
+onMounted(() => {
+  teams.fetchTeams()
+    .then((teamsList) => {
+      options.value = Object.values(teamsList);
+      schedule.fetchSchedule();
+    });
+});
@@ -31,16 +55,25 @@ const isEditing = ref(false);
-
+
diff --git a/availabili.tf/vite.config.ts b/availabili.tf/vite.config.ts
index 21d97d5..9610216 100644
--- a/availabili.tf/vite.config.ts
+++ b/availabili.tf/vite.config.ts
@@ -12,5 +12,29 @@ export default defineConfig({
alias: {
'@': fileURLToPath(new URL('./src', import.meta.url))
}
+ },
+ server: {
+ proxy: {
+ '/api': {
+ target: 'http://localhost:5000',
+ changeOrigin: true,
+ secure: false,
+ configure: (proxy) => {
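+          // Forward the browser's cookies to the Flask backend and relay its
+          // Set-Cookie headers back, so the auth session works through the dev proxy.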
+ proxy.on('proxyReq', (proxyReq, req, res) => {
+ const cookie = req.headers.cookie;
+ if (cookie) {
+ proxyReq.setHeader('Cookie', cookie);
+ }
+ });
+
+ proxy.on('proxyRes', (proxyRes, req, res) => {
+ const cookie = proxyRes.headers['set-cookie'];
+ if (cookie) {
+ res.setHeader('Set-Cookie', cookie);
+ }
+ });
+ }
+ }
+ }
}
})
diff --git a/backend-flask/.env b/backend-flask/.env
new file mode 100644
index 0000000..3900a3f
--- /dev/null
+++ b/backend-flask/.env
@@ -0,0 +1 @@
+export FLASK_APP=app.py
diff --git a/backend-flask/app.py b/backend-flask/app.py
new file mode 100644
index 0000000..975ccd2
--- /dev/null
+++ b/backend-flask/app.py
@@ -0,0 +1,30 @@
+from flask import Blueprint, Flask, make_response, request
+from flask_sqlalchemy import SQLAlchemy
+from flask_cors import CORS
+
+import login
+import schedule
+import team
+from models import init_db
+
+app = Flask(__name__)
+CORS(app, origins=["http://localhost:5173"], supports_credentials=True)
+CORS(login.api_login, origins=["http://localhost:5173"], supports_credentials=True)
+CORS(schedule.api_schedule, origins=["http://localhost:5173"], supports_credentials=True)
+
+init_db(app)
+
+api = Blueprint("api", __name__, url_prefix="/api")
+api.register_blueprint(login.api_login)
+api.register_blueprint(schedule.api_schedule)
+api.register_blueprint(team.api_team)
+
+@api.get("/debug/set-cookie")
+@api.post("/debug/set-cookie")
+def debug_set_cookie():
+ res = make_response()
+ for key, value in request.args.items():
+ res.set_cookie(key, value)
+ return res, 200
+
+app.register_blueprint(api)
diff --git a/backend-flask/forms.py b/backend-flask/forms.py
new file mode 100644
index 0000000..d7d7fce
--- /dev/null
+++ b/backend-flask/forms.py
@@ -0,0 +1,7 @@
+import pydantic
+
+class User(pydantic.BaseModel):
+ steam_id: int = pydantic.Field(2)
+
+class TestForm(pydantic.BaseModel):
+ value: str = "lol"
diff --git a/backend-flask/login.py b/backend-flask/login.py
new file mode 100644
index 0000000..0179a3e
--- /dev/null
+++ b/backend-flask/login.py
@@ -0,0 +1,137 @@
+import random
+import string
+import urllib.parse
+from flask import Blueprint, abort, make_response, redirect, request, url_for
+import requests
+import models
+from models import AuthSession, Player, db
+from middleware import requires_authentication
+
+api_login = Blueprint("login", __name__, url_prefix="/login")
+
+STEAM_OPENID_URL = "https://steamcommunity.com/openid/login"
+
+@api_login.get("/")
+def index():
+ return "test"
+
+def get_steam_login_url(return_to):
+ """Build the Steam OpenID URL for login"""
+ params = {
+ "openid.ns": "http://specs.openid.net/auth/2.0",
+ "openid.mode": "checkid_setup",
+ "openid.return_to": return_to,
+ "openid.identity": "http://specs.openid.net/auth/2.0/identifier_select",
+ "openid.claimed_id": "http://specs.openid.net/auth/2.0/identifier_select",
+ }
+ return f"{STEAM_OPENID_URL}?{urllib.parse.urlencode(params)}"
+
+#@api_login.get("/steam/")
+#def steam_login():
+# return_to = url_for("api.login.steam_login_callback", _external=True)
+# steam_login_url = get_steam_login_url(return_to)
+# return redirect(steam_login_url)
+#
+#@api_login.get("/steam/callback/")
+#def steam_login_callback():
+# params = request.args.to_dict()
+# params["openid.mode"] = "check_authentication"
+# response = requests.post(STEAM_OPENID_URL, data=params)
+#
+# # Check if authentication was successful
+# if "is_valid:true" in response.text:
+# claimed_id = request.args.get("openid.claimed_id")
+# steam_id = extract_steam_id_from_response(claimed_id)
+# print("User logged in as", steam_id)
+#
+# player = create_or_get_user_from_steam_id(int(steam_id))
+# auth_session = create_auth_session_for_player(player)
+#
+# resp = make_response("Logged in")
+# resp.set_cookie("auth", auth_session.key, secure=True, httponly=True)
+# return resp
+# return "no"
+
+@api_login.post("/authenticate")
+def steam_authenticate():
+ params = request.get_json()
+ params["openid.mode"] = "check_authentication"
+ response = requests.post(STEAM_OPENID_URL, data=params)
+
+ # check if authentication was successful
+ if "is_valid:true" in response.text:
+ claimed_id = params["openid.claimed_id"]
+ steam_id = int(extract_steam_id_from_response(claimed_id))
+ print("User logged in as", steam_id)
+
+ #player = create_or_get_user_from_steam_id(int(steam_id))
+ player = db.session.query(
+ Player
+ ).where(
+ Player.steam_id == steam_id
+ ).one_or_none()
+
+ if not player:
+ if "username" in params:
+ # we are registering, so create user
+                player = Player()
+                player.username = params["username"]
+                player.steam_id = steam_id
+                db.session.add(player)
+ else:
+ # prompt client to resend with username field
+ return make_response({
+ "message": "Awaiting registration",
+ "hint": "Resend the POST request with a username field",
+ "isRegistering": True,
+ })
+
+ auth_session = create_auth_session_for_player(player)
+
+ resp = make_response({
+ "message": "Logged in",
+ "steamId": player.steam_id,
+ "username": player.username,
+ })
+
+ # TODO: secure=True in production
+ resp.set_cookie("auth", auth_session.key, httponly=True)
+ return resp
+ return abort(401)
+
+@api_login.delete("/")
+@requires_authentication
+def logout(**kwargs):
+ auth_session: AuthSession = kwargs["auth_session"]
+    db.session.delete(auth_session)
+    db.session.commit()
+    response = make_response({ "message": "Logged out" })
+ response.delete_cookie("auth")
+ return response
+
+def create_or_get_user_from_steam_id(steam_id: int, username: str) -> Player:
+ statement = db.select(Player).filter_by(steam_id=steam_id)
+ player = db.session.execute(statement).scalar_one_or_none()
+ if not player:
+ player = Player()
+ player.steam_id = steam_id
+ player.username = username
+ db.session.add(player)
+ db.session.commit()
+ return player
+
+def generate_base36(length):
+ alphabet = string.digits + string.ascii_uppercase
+ return "".join(random.choice(alphabet) for _ in range(length))
+
+def create_auth_session_for_player(player: models.Player):
+ session = AuthSession()
+ session.player = player
+
+ random_key = generate_base36(31)
+ session.key = random_key
+
+ player.auth_sessions.append(session)
+ db.session.commit()
+ return session
+
+def extract_steam_id_from_response(claimed_id_url):
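+    # The claimed_id is a URL of the form https://steamcommunity.com/openid/id/<steam64>,
+    # so the SteamID is the last path segment.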
+ return claimed_id_url.split("/")[-1]
diff --git a/backend-flask/middleware.py b/backend-flask/middleware.py
new file mode 100644
index 0000000..46e0175
--- /dev/null
+++ b/backend-flask/middleware.py
@@ -0,0 +1,27 @@
+from functools import wraps
+from flask import abort, make_response, request
+from models import db
+import models
+
+
+def requires_authentication(f):
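+    """Resolve the "auth" cookie to an AuthSession, aborting with 401 if it is
+    missing or invalid, and pass the player and auth_session to the view via kwargs."""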
+ @wraps(f)
+ def decorator(*args, **kwargs):
+ auth = request.cookies.get("auth")
+
+ if not auth:
+ abort(401)
+
+ statement = db.select(models.AuthSession).filter_by(key=auth)
+ auth_session: models.AuthSession | None = \
+ db.session.execute(statement).scalar_one_or_none()
+
+ if not auth_session:
+ abort(make_response({
+ "error": "Invalid auth token"
+ }, 401))
+ player = auth_session.player
+ kwargs["player"] = player
+ kwargs["auth_session"] = auth_session
+ return f(*args, **kwargs)
+ return decorator
diff --git a/backend-flask/migrations/README b/backend-flask/migrations/README
new file mode 100644
index 0000000..0e04844
--- /dev/null
+++ b/backend-flask/migrations/README
@@ -0,0 +1 @@
+Single-database configuration for Flask.
diff --git a/backend-flask/migrations/alembic.ini b/backend-flask/migrations/alembic.ini
new file mode 100644
index 0000000..ec9d45c
--- /dev/null
+++ b/backend-flask/migrations/alembic.ini
@@ -0,0 +1,50 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic,flask_migrate
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[logger_flask_migrate]
+level = INFO
+handlers =
+qualname = flask_migrate
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/backend-flask/migrations/env.py b/backend-flask/migrations/env.py
new file mode 100644
index 0000000..4c97092
--- /dev/null
+++ b/backend-flask/migrations/env.py
@@ -0,0 +1,113 @@
+import logging
+from logging.config import fileConfig
+
+from flask import current_app
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+
+def get_engine():
+ try:
+ # this works with Flask-SQLAlchemy<3 and Alchemical
+ return current_app.extensions['migrate'].db.get_engine()
+ except (TypeError, AttributeError):
+ # this works with Flask-SQLAlchemy>=3
+ return current_app.extensions['migrate'].db.engine
+
+
+def get_engine_url():
+ try:
+ return get_engine().url.render_as_string(hide_password=False).replace(
+ '%', '%%')
+ except AttributeError:
+ return str(get_engine().url).replace('%', '%%')
+
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+config.set_main_option('sqlalchemy.url', get_engine_url())
+target_db = current_app.extensions['migrate'].db
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def get_metadata():
+ if hasattr(target_db, 'metadatas'):
+ return target_db.metadatas[None]
+ return target_db.metadata
+
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url, target_metadata=get_metadata(), literal_binds=True
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+
+ # this callback is used to prevent an auto-migration from being generated
+ # when there are no changes to the schema
+ # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+ def process_revision_directives(context, revision, directives):
+ if getattr(config.cmd_opts, 'autogenerate', False):
+ script = directives[0]
+ if script.upgrade_ops.is_empty():
+ directives[:] = []
+ logger.info('No changes in schema detected.')
+
+ conf_args = current_app.extensions['migrate'].configure_args
+ if conf_args.get("process_revision_directives") is None:
+ conf_args["process_revision_directives"] = process_revision_directives
+
+ connectable = get_engine()
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection,
+ target_metadata=get_metadata(),
+ **conf_args
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/backend-flask/migrations/script.py.mako b/backend-flask/migrations/script.py.mako
new file mode 100644
index 0000000..2c01563
--- /dev/null
+++ b/backend-flask/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
diff --git a/backend-flask/migrations/versions/062a154a0797_add_column_players_teams_availability_.py b/backend-flask/migrations/versions/062a154a0797_add_column_players_teams_availability_.py
new file mode 100644
index 0000000..3688a54
--- /dev/null
+++ b/backend-flask/migrations/versions/062a154a0797_add_column_players_teams_availability_.py
@@ -0,0 +1,32 @@
+"""Add column players_teams_availability.availability
+
+Revision ID: 062a154a0797
+Revises: 4fb63c11ee8c
+Create Date: 2024-10-30 23:54:22.877218
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '062a154a0797'
+down_revision = '4fb63c11ee8c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('players_teams_availability', schema=None) as batch_op:
+ batch_op.add_column(sa.Column('availability', sa.Integer(), nullable=False, default=2))
+
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('players_teams_availability', schema=None) as batch_op:
+ batch_op.drop_column('availability')
+
+ # ### end Alembic commands ###
diff --git a/backend-flask/migrations/versions/273f73c81783_.py b/backend-flask/migrations/versions/273f73c81783_.py
new file mode 100644
index 0000000..d3166b8
--- /dev/null
+++ b/backend-flask/migrations/versions/273f73c81783_.py
@@ -0,0 +1,33 @@
+"""empty message
+
+Revision ID: 273f73c81783
+Revises: ce676db8c655
+Create Date: 2024-10-29 23:12:40.743611
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '273f73c81783'
+down_revision = 'ce676db8c655'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('auth_session',
+ sa.Column('player_id', sa.BigInteger(), nullable=False),
+ sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
+ sa.ForeignKeyConstraint(['player_id'], ['players.steam_id'], ),
+ sa.PrimaryKeyConstraint('player_id')
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('auth_session')
+ # ### end Alembic commands ###
diff --git a/backend-flask/migrations/versions/2b2f3ae2ec7f_make_player_role_primary_key.py b/backend-flask/migrations/versions/2b2f3ae2ec7f_make_player_role_primary_key.py
new file mode 100644
index 0000000..3a38700
--- /dev/null
+++ b/backend-flask/migrations/versions/2b2f3ae2ec7f_make_player_role_primary_key.py
@@ -0,0 +1,26 @@
+"""Make player role primary key
+
+Revision ID: 2b2f3ae2ec7f
+Revises: 958df14798d5
+Create Date: 2024-10-31 19:07:02.960849
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '2b2f3ae2ec7f'
+down_revision = '958df14798d5'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ with op.batch_alter_table('players_teams_roles', schema=None) as batch_op:
+ batch_op.create_primary_key('pk_players_teams_roles', ['player_id', 'team_id', 'role'])
+
+
+def downgrade():
+ with op.batch_alter_table('players_teams_roles', schema=None) as batch_op:
+ batch_op.drop_constraint('pk_players_teams_roles')
diff --git a/backend-flask/migrations/versions/4fb63c11ee8c_rename_table_players_teams_availability.py b/backend-flask/migrations/versions/4fb63c11ee8c_rename_table_players_teams_availability.py
new file mode 100644
index 0000000..0b94f0e
--- /dev/null
+++ b/backend-flask/migrations/versions/4fb63c11ee8c_rename_table_players_teams_availability.py
@@ -0,0 +1,44 @@
+"""Rename table players_teams_availability
+
+Revision ID: 4fb63c11ee8c
+Revises: 8ea29cf493f5
+Create Date: 2024-10-30 22:45:51.227298
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '4fb63c11ee8c'
+down_revision = '8ea29cf493f5'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('players_teams_availability',
+ sa.Column('player_id', sa.Integer(), nullable=False),
+ sa.Column('team_id', sa.Integer(), nullable=False),
+ sa.Column('start_time', sa.TIMESTAMP(), nullable=False),
+ sa.Column('end_time', sa.TIMESTAMP(), nullable=False),
+ sa.ForeignKeyConstraint(['player_id', 'team_id'], ['players_teams.player_id', 'players_teams.team_id'], ),
+ sa.PrimaryKeyConstraint('player_id', 'team_id', 'start_time')
+ )
+ op.drop_table('player_team_availability')
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('player_team_availability',
+ sa.Column('player_id', sa.INTEGER(), nullable=False),
+ sa.Column('team_id', sa.INTEGER(), nullable=False),
+ sa.Column('start_time', sa.TIMESTAMP(), nullable=False),
+ sa.Column('end_time', sa.TIMESTAMP(), nullable=False),
+ sa.ForeignKeyConstraint(['player_id', 'team_id'], ['players_teams.player_id', 'players_teams.team_id'], ),
+ sa.PrimaryKeyConstraint('player_id', 'team_id')
+ )
+ op.drop_table('players_teams_availability')
+ # ### end Alembic commands ###
diff --git a/backend-flask/migrations/versions/8ea29cf493f5_make_playerteamavailability_a_db_model.py b/backend-flask/migrations/versions/8ea29cf493f5_make_playerteamavailability_a_db_model.py
new file mode 100644
index 0000000..430233f
--- /dev/null
+++ b/backend-flask/migrations/versions/8ea29cf493f5_make_playerteamavailability_a_db_model.py
@@ -0,0 +1,35 @@
+"""Make PlayerTeamAvailability a db.Model
+
+Revision ID: 8ea29cf493f5
+Revises: b00632365b58
+Create Date: 2024-10-30 22:21:13.718428
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '8ea29cf493f5'
+down_revision = 'b00632365b58'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('player_team_availability',
+ sa.Column('player_id', sa.Integer(), nullable=False),
+ sa.Column('team_id', sa.Integer(), nullable=False),
+ sa.Column('start_time', sa.TIMESTAMP(), nullable=False),
+ sa.Column('end_time', sa.TIMESTAMP(), nullable=False),
+ sa.ForeignKeyConstraint(['player_id', 'team_id'], ['players_teams.player_id', 'players_teams.team_id'], ),
+ sa.PrimaryKeyConstraint('player_id', 'team_id')
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('player_team_availability')
+ # ### end Alembic commands ###
diff --git a/backend-flask/migrations/versions/958df14798d5_add_team_discord_webhook_url.py b/backend-flask/migrations/versions/958df14798d5_add_team_discord_webhook_url.py
new file mode 100644
index 0000000..c4028fd
--- /dev/null
+++ b/backend-flask/migrations/versions/958df14798d5_add_team_discord_webhook_url.py
@@ -0,0 +1,32 @@
+"""Add team.discord_webhook_url
+
+Revision ID: 958df14798d5
+Revises: 062a154a0797
+Create Date: 2024-10-31 09:56:43.335627
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '958df14798d5'
+down_revision = '062a154a0797'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('teams', schema=None) as batch_op:
+ batch_op.add_column(sa.Column('discord_webhook_url', sa.String(length=255), nullable=True))
+
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('teams', schema=None) as batch_op:
+ batch_op.drop_column('discord_webhook_url')
+
+ # ### end Alembic commands ###
diff --git a/backend-flask/migrations/versions/a340b3da0f2a_add_auth_session_key.py b/backend-flask/migrations/versions/a340b3da0f2a_add_auth_session_key.py
new file mode 100644
index 0000000..33c3459
--- /dev/null
+++ b/backend-flask/migrations/versions/a340b3da0f2a_add_auth_session_key.py
@@ -0,0 +1,32 @@
+"""Add auth_session.key
+
+Revision ID: a340b3da0f2a
+Revises: 273f73c81783
+Create Date: 2024-10-29 23:17:29.296293
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'a340b3da0f2a'
+down_revision = '273f73c81783'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('auth_session', schema=None) as batch_op:
+ batch_op.add_column(sa.Column('key', sa.String(length=31), nullable=False))
+
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('auth_session', schema=None) as batch_op:
+ batch_op.drop_column('key')
+
+ # ### end Alembic commands ###
diff --git a/backend-flask/migrations/versions/b00632365b58_.py b/backend-flask/migrations/versions/b00632365b58_.py
new file mode 100644
index 0000000..cf9ceef
--- /dev/null
+++ b/backend-flask/migrations/versions/b00632365b58_.py
@@ -0,0 +1,42 @@
+"""empty message
+
+Revision ID: b00632365b58
+Revises: a340b3da0f2a
+Create Date: 2024-10-29 23:27:37.306568
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'b00632365b58'
+down_revision = 'a340b3da0f2a'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('auth_sessions',
+ sa.Column('key', sa.String(length=31), nullable=False),
+ sa.Column('player_id', sa.BigInteger(), nullable=False),
+ sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
+ sa.ForeignKeyConstraint(['player_id'], ['players.steam_id'], ),
+ sa.PrimaryKeyConstraint('key')
+ )
+ op.drop_table('auth_session')
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('auth_session',
+ sa.Column('player_id', sa.BIGINT(), nullable=False),
+ sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
+ sa.Column('key', sa.VARCHAR(length=31), nullable=False),
+ sa.ForeignKeyConstraint(['player_id'], ['players.steam_id'], ),
+ sa.PrimaryKeyConstraint('player_id')
+ )
+ op.drop_table('auth_sessions')
+ # ### end Alembic commands ###
diff --git a/backend-flask/migrations/versions/ce676db8c655_initial_migration.py b/backend-flask/migrations/versions/ce676db8c655_initial_migration.py
new file mode 100644
index 0000000..d3b3601
--- /dev/null
+++ b/backend-flask/migrations/versions/ce676db8c655_initial_migration.py
@@ -0,0 +1,61 @@
+"""Initial migration
+
+Revision ID: ce676db8c655
+Revises:
+Create Date: 2024-10-28 17:42:13.639729
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'ce676db8c655'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('players',
+ sa.Column('steam_id', sa.BigInteger(), nullable=False),
+ sa.Column('username', sa.String(length=63), nullable=False),
+ sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
+ sa.PrimaryKeyConstraint('steam_id')
+ )
+ op.create_table('teams',
+ sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+ sa.Column('team_name', sa.String(length=63), nullable=False),
+ sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('team_name')
+ )
+ op.create_table('players_teams',
+ sa.Column('player_id', sa.BigInteger(), nullable=False),
+ sa.Column('team_id', sa.Integer(), nullable=False),
+ sa.Column('team_role', sa.Enum('Player', 'CoachMentor', name='teamrole'), nullable=False),
+ sa.Column('playtime', sa.Interval(), nullable=False),
+ sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
+ sa.ForeignKeyConstraint(['player_id'], ['players.steam_id'], ),
+ sa.ForeignKeyConstraint(['team_id'], ['teams.id'], ),
+ sa.PrimaryKeyConstraint('player_id', 'team_id')
+ )
+ op.create_table('players_teams_roles',
+ sa.Column('player_id', sa.Integer(), nullable=False),
+ sa.Column('team_id', sa.Integer(), nullable=False),
+ sa.Column('role', sa.Enum('Unknown', 'Scout', 'PocketScout', 'FlankScout', 'Soldier', 'PocketSoldier', 'Roamer', 'Pyro', 'Demoman', 'HeavyWeapons', 'Engineer', 'Medic', 'Sniper', 'Spy', name='role'), nullable=False),
+ sa.Column('is_main', sa.Boolean(), nullable=False),
+ sa.ForeignKeyConstraint(['player_id', 'team_id'], ['players_teams.player_id', 'players_teams.team_id'], ),
+ sa.PrimaryKeyConstraint('player_id', 'team_id')
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('players_teams_roles')
+ op.drop_table('players_teams')
+ op.drop_table('teams')
+ op.drop_table('players')
+ # ### end Alembic commands ###
diff --git a/backend-flask/models.py b/backend-flask/models.py
new file mode 100644
index 0000000..b1603fe
--- /dev/null
+++ b/backend-flask/models.py
@@ -0,0 +1,146 @@
+from datetime import date, datetime, timedelta
+import enum
+from typing import List
+from flask import Flask
+from flask_sqlalchemy import SQLAlchemy
+from flask_migrate import Migrate
+from sqlalchemy import TIMESTAMP, BigInteger, Boolean, Enum, ForeignKey, ForeignKeyConstraint, Integer, Interval, MetaData, String, func
+from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
+from sqlalchemy_utc import UtcDateTime
+
+class Base(DeclarativeBase):
+ pass
+
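+# Constraint naming convention so Alembic autogenerate (and batch mode) produces stable names.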
+convention = {
+ "ix": "ix_%(column_0_label)s",
+ "uq": "uq_%(table_name)s_%(column_0_name)s",
+ "ck": "ck_%(table_name)s_%(constraint_name)s",
+ "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+ "pk": "pk_%(table_name)s"
+}
+
+metadata = MetaData(naming_convention=convention)
+db = SQLAlchemy(model_class=Base, metadata=metadata)
+migrate = Migrate(render_as_batch=True)
+
+class Player(db.Model):
+ __tablename__ = "players"
+
+ steam_id: Mapped[int] = mapped_column(BigInteger, primary_key=True)
+ username: Mapped[str] = mapped_column(String(63))
+
+ teams: Mapped[List["PlayerTeam"]] = relationship(back_populates="player")
+ auth_sessions: Mapped[List["AuthSession"]] = relationship(back_populates="player")
+
+ created_at: Mapped[datetime] = mapped_column(TIMESTAMP, server_default=func.now())
+
+class Team(db.Model):
+ __tablename__ = "teams"
+
+ id: Mapped[int] = mapped_column(Integer, autoincrement=True, primary_key=True)
+ team_name: Mapped[str] = mapped_column(String(63), unique=True)
+ discord_webhook_url: Mapped[str] = mapped_column(String(255), nullable=True)
+
+ players: Mapped[List["PlayerTeam"]] = relationship(back_populates="team")
+
+ created_at: Mapped[datetime] = mapped_column(TIMESTAMP, server_default=func.now())
+
+class PlayerTeam(db.Model):
+ __tablename__ = "players_teams"
+
+ class TeamRole(enum.Enum):
+ Player = 0
+ CoachMentor = 1
+
+ player_id: Mapped[int] = mapped_column(ForeignKey("players.steam_id"), primary_key=True)
+ team_id: Mapped[int] = mapped_column(ForeignKey("teams.id"), primary_key=True)
+
+ player: Mapped["Player"] = relationship(back_populates="teams")
+ team: Mapped["Team"] = relationship(back_populates="players")
+
+ player_roles: Mapped[List["PlayerTeamRole"]] = relationship(back_populates="player_team")
+ availability: Mapped[List["PlayerTeamAvailability"]] = relationship(back_populates="player_team")
+
+ team_role: Mapped[TeamRole] = mapped_column(Enum(TeamRole), default=TeamRole.Player)
+ playtime: Mapped[timedelta] = mapped_column(Interval)
+ created_at: Mapped[datetime] = mapped_column(TIMESTAMP, server_default=func.now())
+
+class PlayerTeamRole(db.Model):
+ __tablename__ = "players_teams_roles"
+
+ class Role(enum.Enum):
+ Unknown = 0
+
+ Scout = 1
+ PocketScout = 2
+ FlankScout = 3
+
+ Soldier = 4
+ PocketSoldier = 5
+ Roamer = 6
+
+ Pyro = 7
+ Demoman = 8
+ HeavyWeapons = 9
+ Engineer = 10
+ Medic = 11
+ Sniper = 12
+ Spy = 13
+
+ player_id: Mapped[int] = mapped_column(primary_key=True)
+ team_id: Mapped[int] = mapped_column(primary_key=True)
+
+ player_team: Mapped["PlayerTeam"] = relationship("PlayerTeam", back_populates="player_roles")
+
+ #player: Mapped["Player"] = relationship(back_populates="teams")
+
+ role: Mapped[Role] = mapped_column(Enum(Role))
+ is_main: Mapped[bool] = mapped_column(Boolean)
+
+ __table_args__ = (
+ ForeignKeyConstraint(
+ [player_id, team_id],
+ [PlayerTeam.player_id, PlayerTeam.team_id]
+ ),
+ )
+
+class PlayerTeamAvailability(db.Model):
+ __tablename__ = "players_teams_availability"
+
+ player_id: Mapped[int] = mapped_column(primary_key=True)
+ team_id: Mapped[int] = mapped_column(primary_key=True)
+ start_time: Mapped[datetime] = mapped_column(UtcDateTime, primary_key=True)
+
+ player_team: Mapped["PlayerTeam"] = relationship(
+ "PlayerTeam",back_populates="availability")
+
+ availability: Mapped[int] = mapped_column(Integer, default=2)
+ end_time: Mapped[datetime] = mapped_column(UtcDateTime)
+
+ __table_args__ = (
+ ForeignKeyConstraint(
+ [player_id, team_id],
+ [PlayerTeam.player_id, PlayerTeam.team_id]
+ ),
+ )
+
+class AuthSession(db.Model):
+ __tablename__ = "auth_sessions"
+
+ @staticmethod
+ def gen_cookie_expiration():
+ valid_until = date.today() + timedelta(days=7)
+ return valid_until
+
+ key: Mapped[str] = mapped_column(String(31), primary_key=True)
+ player_id: Mapped[int] = mapped_column(ForeignKey("players.steam_id"))
+ created_at: Mapped[datetime] = mapped_column(TIMESTAMP, server_default=func.now())
+
+ player: Mapped["Player"] = relationship(back_populates="auth_sessions")
+
+def init_db(app: Flask):
+ app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///db.sqlite3"
+ db.init_app(app)
+ migrate.init_app(app, db)
+ return app
diff --git a/backend-flask/pyrightconfig.json b/backend-flask/pyrightconfig.json
new file mode 100644
index 0000000..98e12df
--- /dev/null
+++ b/backend-flask/pyrightconfig.json
@@ -0,0 +1,3 @@
+{
+ "typeCheckingMode": "standard"
+}
diff --git a/backend-flask/requirements.txt b/backend-flask/requirements.txt
new file mode 100644
index 0000000..0db7e48
--- /dev/null
+++ b/backend-flask/requirements.txt
@@ -0,0 +1,15 @@
+flask
+
+Flask-CORS
+
+sqlalchemy
+Flask-SQLAlchemy
+SQLAlchemy-Utc
+
+pydantic
+Flask-Pydantic
+
+alembic
+Flask-Migrate
+
+requests
diff --git a/backend-flask/schedule.py b/backend-flask/schedule.py
new file mode 100644
index 0000000..8f1d5e2
--- /dev/null
+++ b/backend-flask/schedule.py
@@ -0,0 +1,237 @@
+import datetime
+from flask import Blueprint, abort, jsonify, make_response, request
+import pydantic
+from flask_pydantic import validate
+from models import Player, PlayerTeam, PlayerTeamAvailability, PlayerTeamRole, db
+
+from middleware import requires_authentication
+import models
+import utc
+
+
+api_schedule = Blueprint("schedule", __name__, url_prefix="/schedule")
+
+class ViewScheduleForm(pydantic.BaseModel):
+ window_start: datetime.datetime
+ team_id: int
+ window_size_days: int = 7
+
+@api_schedule.get("/")
+@validate(query=ViewScheduleForm)
+@requires_authentication
+def get(query: ViewScheduleForm, *args, **kwargs):
+ window_start = query.window_start
+ window_end = window_start + datetime.timedelta(days=query.window_size_days)
+ player: Player = kwargs["player"]
+
+ availability_regions = db.session.query(
+ PlayerTeamAvailability
+ ).where(
+ PlayerTeamAvailability.player_id == player.steam_id
+ ).where(
+ PlayerTeamAvailability.team_id == query.team_id
+ ).where(
+ PlayerTeamAvailability.start_time.between(window_start, window_end) |
+ PlayerTeamAvailability.end_time.between(window_start, window_end) |
+
+ # handle edge case where someone for some reason might list their
+ # availability spanning more than a week total
+ ((PlayerTeamAvailability.start_time < window_start) &
+ (PlayerTeamAvailability.end_time > window_end))
+ ).all()
+
+ window_size_hours = 24 * query.window_size_days
+ availability = [0] * window_size_hours
+ for region in availability_regions:
+ region: PlayerTeamAvailability
+
+ # this is the start time relative to the window (as timedelta)
+ #relative_start_time = (region.start_time.replace(tzinfo=utc.utc) - window_start)
+ #relative_start_hour = int(relative_start_time.total_seconds() // 3600)
+ #relative_end_time = (region.end_time.replace(tzinfo=utc.utc) - window_start)
+ #relative_end_hour = int(relative_end_time.total_seconds() // 3600)
+
+ relative_start_time = region.start_time - window_start
+ relative_start_hour = int(relative_start_time.total_seconds() // 3600)
+ relative_end_time = region.end_time - window_start
+ relative_end_hour = int(relative_end_time.total_seconds() // 3600)
+
+ i = max(0, relative_start_hour)
+ while i < window_size_hours and i < relative_end_hour:
+ print(i, "=", region.availability)
+ availability[i] = region.availability
+ i += 1
+ return {
+ "availability": availability
+ }
+
+class PutScheduleForm(pydantic.BaseModel):
+ window_start: datetime.datetime
+ window_size_days: int = 7
+ team_id: int
+ availability: list[int]
+
+def find_consecutive_blocks(arr: list[int]) -> list[tuple[int, int, int]]:
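+    """Collapse an hourly availability array into (value, start_hour, end_hour) runs,
+    where end_hour is exclusive and hours with value 0 are skipped."""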
+ blocks: list[tuple[int, int, int]] = []
+ current_block_value = 0
+ current_block_start = 0
+
+ for i in range(len(arr)):
+ if arr[i] != current_block_value:
+ # we find a different value
+ if current_block_value > 0:
+ blocks.append((current_block_value, current_block_start, i))
+ # begin a new block
+ current_block_start = i
+ current_block_value = arr[i]
+
+ if current_block_value > 0:
+ blocks.append((current_block_value, current_block_start, len(arr)))
+
+ return blocks
+
+@api_schedule.put("/")
+@validate(body=PutScheduleForm, get_json_params={})
+@requires_authentication
+def put(body: PutScheduleForm, **kwargs):
+ window_start = body.window_start.replace(tzinfo=utc.utc)
+ window_end = window_start + datetime.timedelta(days=body.window_size_days)
+ player: Player = kwargs["player"]
+ if not player:
+ abort(400)
+
+    if len(body.availability) != 168:
+        abort(400, {
+            "error": "Availability must be of length 168"
+        })
+
+ cur_availability = db.session.query(
+ PlayerTeamAvailability
+ ).where(
+ PlayerTeamAvailability.player_id == player.steam_id
+ ).where(
+ PlayerTeamAvailability.team_id == body.team_id
+ ).where(
+ PlayerTeamAvailability.start_time.between(window_start, window_end) |
+ PlayerTeamAvailability.end_time.between(window_start, window_end)
+ ).order_by(
+ PlayerTeamAvailability.start_time
+ ).all()
+
+ # cut the availability times so that they do not intersect our window
+ if len(cur_availability) > 0:
+ if cur_availability[0].start_time < window_start:
+ if cur_availability[0].end_time > window_end:
+ # if the availability overlaps the entire window, duplicate it
+ # this way, we can trim the start_time of the duplicate
+ cur_availability.append(cur_availability[0])
+ cur_availability[0].end_time = window_start
+ if cur_availability[-1].end_time > window_end:
+ cur_availability[-1].start_time = window_end
+
+ # remove all availability regions strictly inside window
+ i = 0
+ for region in cur_availability[:]:
+ if region.start_time >= window_start and region.end_time <= window_end:
+ print("Deleting", region)
+ db.session.delete(region)
+ cur_availability.pop(i)
+ else:
+ i += 1
+
+ if len(cur_availability) > 2:
+ # this is not supposed to happen
+ db.session.rollback()
+ raise ValueError()
+
+ # create time regions inside our window based on the availability array
+ availability_blocks = []
+
+ for block in find_consecutive_blocks(body.availability):
+ availability_value = block[0]
+ hour_start = block[1]
+ hour_end = block[2]
+
+ abs_start = window_start + datetime.timedelta(hours=hour_start)
+ abs_end = window_start + datetime.timedelta(hours=hour_end)
+
+ print("Create availability from", abs_start, "to", abs_end)
+
+ new_availability = PlayerTeamAvailability()
+ new_availability.availability = availability_value
+ new_availability.start_time = abs_start
+ new_availability.end_time = abs_end
+ new_availability.player_id = player.steam_id
+ new_availability.team_id = body.team_id
+
+ availability_blocks.append(new_availability)
+
+ # merge availability blocks if needed
+ if len(cur_availability) > 0 and len(availability_blocks) > 0:
+ if availability_blocks[0].start_time == cur_availability[0].end_time:
+ cur_availability[0].end_time = availability_blocks[0].end_time
+ availability_blocks.pop(0)
+
+ if len(cur_availability) > 0 and len(availability_blocks) > 0:
+ if availability_blocks[-1].end_time == cur_availability[-1].start_time:
+ cur_availability[-1].start_time = availability_blocks[-1].start_time
+ availability_blocks.pop(-1)
+
+ db.session.add_all(availability_blocks)
+ db.session.commit()
+    return make_response({ }, 200)
+
+class ViewAvailablePlayersForm(pydantic.BaseModel):
+ start_time: datetime.datetime
+ team_id: int
+
+@api_schedule.get("/view-available")
+@validate()
+@requires_authentication
+def view_available(query: ViewAvailablePlayersForm, **kwargs):
+ start_time = query.start_time.replace(tzinfo=utc.utc)
+ player: Player = kwargs["player"]
+
+ #q = (
+ # db.select(PlayerTeamAvailability)
+ # .filter(
+ # (PlayerTeamAvailability.player_id == player.steam_id) &
+ # (PlayerTeamAvailability.team_id == query.team_id) &
+ # (PlayerTeamAvailability.start_time == start_time)
+ # )
+ #)
+
+ #availability: Sequence[PlayerTeamAvailability] = \
+ # db.session.execute(q).scalars().all()
+
+ availability = db.session.query(
+ PlayerTeamAvailability
+ ).where(
+ PlayerTeamAvailability.player_id == player.steam_id
+ ).where(
+ PlayerTeamAvailability.team_id == query.team_id
+ ).where(
+ (PlayerTeamAvailability.start_time <= start_time) &
+ (PlayerTeamAvailability.end_time > start_time)
+ ).all()
+
+ def map_roles_to_json(roles: list[PlayerTeamRole],
+ player_team: PlayerTeam,
+ entry: PlayerTeamAvailability):
+ for role in roles:
+ yield {
+ "steamId": entry.player_id,
+ "username": entry.player_team.player.username,
+ "role": role.role.name,
+ "isMain": role.is_main,
+ "availability": entry.availability,
+ "playtime": int(player_team.playtime.total_seconds()),
+ }
+
+ def map_availability_to_json(entry: PlayerTeamAvailability):
+ player_team = entry.player_team
+ player_roles = player_team.player_roles
+ return list(map_roles_to_json(player_roles, player_team, entry))
+
+ return jsonify(list(map(map_availability_to_json, availability)))
diff --git a/backend-flask/team.py b/backend-flask/team.py
new file mode 100644
index 0000000..ecea56a
--- /dev/null
+++ b/backend-flask/team.py
@@ -0,0 +1,53 @@
+import datetime
+from typing import List
+from flask import Blueprint, jsonify, request
+import pydantic
+from flask_pydantic import validate
+from models import Player, PlayerTeam, Team, db
+from middleware import requires_authentication
+import models
+
+
+api_team = Blueprint("team", __name__, url_prefix="/team")
+
+@api_team.get("/view/")
+@api_team.get("/view/<int:team_id>/")
+@requires_authentication
+def view(team_id = None, **kwargs):
+ player: Player = kwargs["player"]
+
+ q_filter = PlayerTeam.player_id == player.steam_id
+ if team_id is not None:
+ q_filter = q_filter & (PlayerTeam.team_id == team_id)
+
+ q = db.session.query(
+ Team
+ ).join(
+ PlayerTeam
+ ).join(
+ Player
+ ).filter(
+        q_filter
+ )
+
+ def map_player_team_to_player_json(player_team: PlayerTeam):
+ return {
+ "steamId": player_team.player.steam_id,
+ "username": player_team.player.username,
+ }
+
+ def map_team_to_json(team: Team):
+ return {
+ "teamName": team.team_name,
+ "id": team.id,
+ "players": list(map(map_player_team_to_player_json, team.players)),
+ }
+
+ if team_id is None:
+ teams = q.all()
+ return jsonify(list(map(map_team_to_json, teams)))
+ else:
+ team = q.one_or_none()
+ if team:
+ return jsonify(map_team_to_json(team))
+ return jsonify(), 404
diff --git a/backend-flask/utc.py b/backend-flask/utc.py
new file mode 100644
index 0000000..87a748e
--- /dev/null
+++ b/backend-flask/utc.py
@@ -0,0 +1,15 @@
+from datetime import timedelta, tzinfo
+
+delta_zero = timedelta(0)
+
+class UTC(tzinfo):
+ def utcoffset(self, dt):
+ return delta_zero
+
+ def dst(self, dt):
+ return delta_zero
+
+ def tzname(self, dt):
+ return "UTC"
+
+utc = UTC()