From feea5845a880e6ebb48418cf9a917693afdd70d1 Mon Sep 17 00:00:00 2001 From: Untone Date: Sun, 25 Feb 2024 19:27:41 +0300 Subject: [PATCH] dockerfile-update-3 --- Dockerfile | 80 +++++++++++++++++++++++++++++++++++-------- orm/author.py | 4 ++- resolvers/reaction.py | 5 ++- 3 files changed, 73 insertions(+), 16 deletions(-) diff --git a/Dockerfile b/Dockerfile index 096201c1..7af0acc5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,23 +1,75 @@ -FROM python:alpine +# syntax=docker/dockerfile:1 +# Keep this syntax directive! It's used to enable Docker BuildKit + +# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865 + +FROM python:alpine as python-base + + # python +ENV PYTHONUNBUFFERED=1 \ + # prevents python creating .pyc files + PYTHONDONTWRITEBYTECODE=1 \ + \ + # pip + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + \ + # poetry + # https://python-poetry.org/docs/configuration/#using-environment-variables + POETRY_VERSION=1.8.0 \ + # make poetry install to this location + POETRY_HOME="/opt/poetry" \ + # make poetry create the virtual environment in the project's root + # it gets named `.venv` + POETRY_VIRTUALENVS_IN_PROJECT=true \ + # do not ask any interactive question + POETRY_NO_INTERACTION=1 \ + \ + # paths + # this is where our requirements + virtual environment will live + PYSETUP_PATH="/opt/pysetup" \ + VENV_PATH="/opt/pysetup/.venv" + + +# prepend poetry and venv to path +ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" + + +################################ +# BUILDER-BASE +# Used to build deps + create our virtual environment +################################ +FROM python-base as builder # Update package lists and install necessary dependencies RUN apk update && \ - apk add --no-cache build-base icu-data-full curl python3-dev musl-dev postgresql-dev postgresql-client + +# install poetry - respects $POETRY_VERSION & $POETRY_HOME +# The --mount will mount the buildx cache directory to where +# Poetry and Pip store their cache so that they can re-use it +RUN --mount=type=cache,target=/root/.cache \ + curl -sSL https://install.python-poetry.org | python3 - + +# copy project requirement files here to ensure they will be cached. +WORKDIR $PYSETUP_PATH +COPY poetry.lock pyproject.toml ./ + +# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally +RUN --mount=type=cache,target=/root/.cache \ + poetry install --without=dev + + +################################ +# Final image used for runtime +################################ +FROM python-base + +COPY --from=builder $PYSETUP_PATH $PYSETUP_PATH +COPY ./app /app/ -# Set working directory WORKDIR /app -# Install dependencies -RUN poetry config virtualenvs.create false && \ - poetry install --no-dev - -# Copy just the dependency manifests first -COPY poetry.lock pyproject.toml /app/ - -# Copy the rest of the application -COPY . /app - # Expose the port EXPOSE 8000 diff --git a/orm/author.py b/orm/author.py index dba50baf..0e565490 100644 --- a/orm/author.py +++ b/orm/author.py @@ -41,4 +41,6 @@ class Author(Base): updated_at = Column(Integer, nullable=False, default=lambda: int(time.time())) deleted_at = Column(Integer, nullable=True, comment='Deleted at') - search_vector = Column(TSVectorType("name", "slug", "bio", "about", regconfig="pg_catalog.russian")) + search_vector = Column( + TSVectorType('name', 'slug', 'bio', 'about', regconfig='pg_catalog.russian') + ) diff --git a/resolvers/reaction.py b/resolvers/reaction.py index 081b4373..8905aa83 100644 --- a/resolvers/reaction.py +++ b/resolvers/reaction.py @@ -445,7 +445,10 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S # Sort shouts by the `last_comment` field combined_query = ( - union(q1, q2).order_by(desc(text('last_comment_stat'))).limit(limit).offset(offset) + union(q1, q2) + .order_by(desc(text('last_comment_stat'))) + .limit(limit) + .offset(offset) ) results = session.execute(combined_query).scalars()