Skip to content

Commit

Permalink
python 3.11 (#462)
Browse files Browse the repository at this point in the history
* test: graph-tool properly installed using tox-conda (requires conda)

* docker: bullseye,python=3.11,graph-tool=2.58,bigtable/v1.19.0

* fix(meshing): task-queue upgrade

* fix: resolve column filter ambiguity

* fix(meshing): handle new DracoPy.decode_buffer_to_mesh format

* fix(meshing): get_mesh deprecated in zmesh 1.7

* fix(cloudbuild.yaml): Adapt for multistage build

* change cloudbuild

* change docker login

* change docker login t2

* try different login

* deps: switch to pinned dependencies for production

* feat(cloudbuild.yaml): Use Kaniko to enable caching

* fix(app): `traceback.format_exception` changed in py3.10 (`etype` -> positional-only `exc`)

* fix(flask): flask 2.3 requires JSONProvider

* recompiled reqs

* fix np.int deprecation

* fix(meshgen): nonhomogeneous ndarray creation requires dtype=object

* fix(meshgen): no need to transpose seg with zmesh 1.7

* hack(requirements): trick conda into installing PyPI zstandard==0.21.0

* add zstandard pip install

* replace with nico's hack

* trying a different hack

* fix(remesh): ignore bad messages

---------

Co-authored-by: Nico Kemnitz <[email protected]>
Co-authored-by: Akhilesh Halageri <[email protected]>
  • Loading branch information
3 people authored Jan 9, 2024
1 parent 0773e7e commit 53eb720
Show file tree
Hide file tree
Showing 22 changed files with 464 additions and 251 deletions.
127 changes: 114 additions & 13 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -1,23 +1,124 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

build
dist
__pycache__
.pytest_cache
.tox
*.egg-info
*.egg/
*.pyc
*.swp
# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
.idea/*

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.vscode
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

.devcontainer/
*.rdb
# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

# Visual Code
.vscode/

# terraform
.terraform/
*.lock.hcl
*.tfstate
*.tfstate.*
*.tfstate.*


# local dev stuff
.devcontainer/
*.ipynb
*.rdb
/protobuf*

# Git
.git/
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,5 +18,5 @@ jobs:
- name: Build image and run tests
run: |
docker build --tag seunglab/pychunkedgraph:$GITHUB_SHA .
docker run --rm seunglab/pychunkedgraph:$GITHUB_SHA /bin/sh -c "tox -v -- --cov-config .coveragerc --cov=pychunkedgraph && codecov"
docker run --rm seunglab/pychunkedgraph:$GITHUB_SHA /bin/sh -c "pytest --cov-config .coveragerc --cov=pychunkedgraph ./pychunkedgraph/tests && codecov"
71 changes: 67 additions & 4 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,70 @@
# FROM gcr.io/neuromancer-seung-import/pychunkedgraph:graph-tool_dracopy
FROM seunglab/pychunkedgraph:graph-tool_dracopy
ARG PYTHON_VERSION=3.11
ARG BASE_IMAGE=tiangolo/uwsgi-nginx-flask:python${PYTHON_VERSION}


######################################################
# Build Image - PCG dependencies
######################################################
FROM ${BASE_IMAGE} AS pcg-build
ENV PATH="/root/miniconda3/bin:${PATH}"
ENV CONDA_ENV="pychunkedgraph"

# Setup Miniconda
RUN apt-get update && apt-get install build-essential wget -y
RUN wget \
https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh \
&& mkdir /root/.conda \
&& bash Miniconda3-latest-Linux-x86_64.sh -b \
&& rm -f Miniconda3-latest-Linux-x86_64.sh \
&& conda update conda

# Install PCG dependencies - especially graph-tool
# Note: uwsgi has trouble with pip and python3.11, so adding this with conda, too
COPY requirements.txt .
COPY requirements.yml .
COPY requirements-dev.txt .
RUN conda env create -n ${CONDA_ENV} -f requirements.yml

# Shrink conda environment into portable non-conda env
RUN conda install conda-pack -c conda-forge

RUN conda-pack -n ${CONDA_ENV} --ignore-missing-files -o /tmp/env.tar \
&& mkdir -p /app/venv \
&& cd /app/venv \
&& tar xf /tmp/env.tar \
&& rm /tmp/env.tar
RUN /app/venv/bin/conda-unpack


######################################################
# Build Image - Bigtable Emulator (without Google SDK)
######################################################
FROM golang:bullseye as bigtable-emulator-build
RUN mkdir -p /usr/src
WORKDIR /usr/src
ENV GOOGLE_CLOUD_GO_VERSION bigtable/v1.19.0
RUN apt-get update && apt-get install git -y
RUN git clone --depth=1 --branch="$GOOGLE_CLOUD_GO_VERSION" https://github.com/googleapis/google-cloud-go.git . \
&& cd bigtable \
&& go install -v ./cmd/emulator


######################################################
# Production Image
######################################################
FROM ${BASE_IMAGE}
ENV VIRTUAL_ENV=/app/venv
ENV PATH="$VIRTUAL_ENV/bin:$PATH"

COPY --from=pcg-build /app/venv /app/venv
COPY --from=bigtable-emulator-build /go/bin/emulator /app/venv/bin/cbtemulator
COPY override/gcloud /app/venv/bin/gcloud
COPY override/timeout.conf /etc/nginx/conf.d/timeout.conf
COPY override/supervisord.conf /etc/supervisor/conf.d/supervisord.conf
COPY requirements.txt /app
RUN pip install pip==20.2 && pip install --no-cache-dir --upgrade -r requirements.txt
# Hack to get zstandard from PyPI - remove if conda-forge linked lib issue is resolved
RUN pip install --no-cache-dir --no-deps --force-reinstall zstandard==0.21.0
COPY . /app

RUN mkdir -p /home/nginx/.cloudvolume/secrets \
&& chown -R nginx /home/nginx \
&& usermod -d /home/nginx -s /bin/bash nginx
24 changes: 17 additions & 7 deletions cloudbuild.yaml
Original file line number Diff line number Diff line change
@@ -1,17 +1,23 @@
steps:
# Login to Docker Hub
- name: "gcr.io/cloud-builders/docker"
entrypoint: "bash"
args: ["-c", "docker login --username=$$USERNAME --password=$$PASSWORD"]
secretEnv: ["USERNAME", "PASSWORD"]
# - name: 'gcr.io/cloud-builders/docker'
# args: [ 'build', '-t', 'gcr.io/$PROJECT_ID/pychunkedgraph', '.' ]

# Build the final stage image - Kaniko takes care of caching
- name: "gcr.io/kaniko-project/executor:latest"
args:
- "--cache=true"
- "--destination=gcr.io/$PROJECT_ID/pychunkedgraph:$TAG_NAME"
timeout: 1800s

# TODO: can't figure out how to just re-tag and push to Dockerhub
- name: "gcr.io/cloud-builders/docker"
entrypoint: "bash"
args:
- "-c"
- |
docker build -t gcr.io/$PROJECT_ID/pychunkedgraph:$TAG_NAME .
timeout: 600s
args: ["-c", "docker pull gcr.io/$PROJECT_ID/pychunkedgraph:$TAG_NAME"]

# Additional tag for Dockerhub
- name: "gcr.io/cloud-builders/docker"
entrypoint: "bash"
args:
Expand All @@ -20,12 +26,16 @@ steps:
"docker tag gcr.io/$PROJECT_ID/pychunkedgraph:$TAG_NAME $$USERNAME/pychunkedgraph:$TAG_NAME",
]
secretEnv: ["USERNAME"]

# Push the final image to Dockerhub
- name: "gcr.io/cloud-builders/docker"
entrypoint: "bash"
args: ["-c", "docker push $$USERNAME/pychunkedgraph:$TAG_NAME"]
secretEnv: ["USERNAME"]

images:
- "gcr.io/$PROJECT_ID/pychunkedgraph:$TAG_NAME"

availableSecrets:
secretManager:
- versionName: projects/$PROJECT_ID/secrets/docker-password/versions/1
Expand Down
55 changes: 55 additions & 0 deletions override/gcloud
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
#!/app/venv/bin/python

import argparse
import os
import subprocess
import re

# Path where `start` records the emulator's host:port; read back by `env_init`.
CONFIG_ENV = os.path.expanduser("~/.config/gcloud/emulators/bigtable/env.yaml")

def env_init(args):
    """Print a shell `export` line for the Bigtable emulator address.

    Mimics ``gcloud beta emulators bigtable env-init``: reads the
    ``BIGTABLE_EMULATOR_HOST`` value that ``start`` wrote to CONFIG_ENV
    and prints it for eval'ing in a shell. Any failure (missing config,
    no regex match) deliberately falls back to the default address that
    ``start`` also uses, so the command always succeeds.

    :param args: parsed argparse namespace (unused; kept for the
        ``set_defaults(func=...)`` dispatch convention).
    """
    try:
        with open(CONFIG_ENV, "r") as f:
            hostport = re.findall(r"BIGTABLE_EMULATOR_HOST:\s+([:\w]+:\d+)", f.read())[0]
        # Success path printed outside the try: only the read/parse is best-effort.
        print(f"export BIGTABLE_EMULATOR={hostport}")
    except Exception:
        # Deliberate broad catch: this is a best-effort lookup, never an error.
        print("export BIGTABLE_EMULATOR=localhost:9000")

def start(args):
    """Launch cbtemulator detached and record its address for `env-init`."""
    hostport = args.host_port
    # rsplit from the right so an IPv6-ish host containing ':' keeps its port.
    host, port = hostport.rsplit(":", 1)

    # Persist the chosen address where `env_init` expects to find it.
    os.makedirs(os.path.dirname(CONFIG_ENV), exist_ok=True)
    with open(CONFIG_ENV, "w") as cfg:
        cfg.write(f"---\nBIGTABLE_EMULATOR_HOST: {hostport}")

    # start_new_session=True detaches the emulator so it outlives this wrapper.
    subprocess.Popen(
        ["cbtemulator", "-host", host, "-port", port],
        start_new_session=True,
    )

def usage(args):
    """Fallback handler: explain that only a small gcloud subset is supported.

    :param args: parsed argparse namespace (unused; dispatch convention).
    """
    supported = (
        "This is not gcloud. Only supported commands are:\n"
        "- gcloud beta emulators bigtable env-init\n"
        "- gcloud beta emulators bigtable start [--host-port localhost:9000]"
    )
    print(supported)

if __name__ == '__main__':
    # Minimal argparse tree mimicking `gcloud beta emulators bigtable ...`.
    # Any unrecognized/incomplete command falls through to `usage` via the
    # root parser's default func.
    parser_gcloud = argparse.ArgumentParser(prog='gcloud')
    parser_gcloud.set_defaults(func=usage)

    # Walk down the fixed command chain, one subparser level per word.
    parent = parser_gcloud
    for word in ('beta', 'emulators', 'bigtable'):
        parent = parent.add_subparsers().add_parser(word)

    subparser_bigtable = parent.add_subparsers()

    parser_env_init = subparser_bigtable.add_parser('env-init')
    parser_env_init.set_defaults(func=env_init)

    parser_start = subparser_bigtable.add_parser('start')
    parser_start.add_argument('--host-port', default='localhost:9000')
    parser_start.set_defaults(func=start)

    args = parser_gcloud.parse_args()
    args.func(args)


2 changes: 1 addition & 1 deletion override/supervisord.conf
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ nodaemon=true


[program:uwsgi]
command=/usr/local/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini --die-on-term --need-app
command=%(ENV_VIRTUAL_ENV)s/bin/uwsgi --ini /etc/uwsgi/uwsgi.ini --die-on-term --need-app
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
Expand Down
8 changes: 7 additions & 1 deletion pychunkedgraph/app/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import numpy as np
import redis
from flask import Flask
from flask.json.provider import DefaultJSONProvider
from flask.logging import default_handler
from flask_cors import CORS
from rq import Queue
Expand Down Expand Up @@ -45,13 +46,18 @@ def default(self, obj):
return json.JSONEncoder.default(self, obj)


class CustomJSONProvider(DefaultJSONProvider):
def dumps(self, obj, **kwargs):
return super().dumps(obj, default=None, cls=CustomJsonEncoder, **kwargs)


def create_app(test_config=None):
app = Flask(
__name__,
instance_path=get_instance_folder_path(),
instance_relative_config=True,
)
app.json_encoder = CustomJsonEncoder
app.json = CustomJSONProvider(app)

CORS(app, expose_headers="WWW-Authenticate")

Expand Down
6 changes: 3 additions & 3 deletions pychunkedgraph/app/app_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,14 +124,14 @@ def assert_node_prop(prop):
def jsonify_with_kwargs(data, as_response=True, **kwargs):
kwargs.setdefault("separators", (",", ":"))

if current_app.config["JSONIFY_PRETTYPRINT_REGULAR"] or current_app.debug:
if current_app.json.compact == False or current_app.debug:
kwargs["indent"] = 2
kwargs["separators"] = (", ", ": ")

resp = json.dumps(data, **kwargs)
if as_response:
return current_app.response_class(
resp + "\n", mimetype=current_app.config["JSONIFY_MIMETYPE"]
resp + "\n", mimetype=current_app.json.mimetype
)
else:
return resp
Expand Down Expand Up @@ -228,7 +228,7 @@ def ccs(coordinates_nm_):
ccs = [np.array(list(cc)) for cc in nx.connected_components(graph)]
return ccs

coordinates = np.array(coordinates, dtype=np.int)
coordinates = np.array(coordinates, dtype=int)
coordinates_nm = coordinates * cg.meta.resolution
node_ids = np.array(node_ids, dtype=np.uint64)
if len(coordinates.shape) != 2:
Expand Down
Loading

0 comments on commit 53eb720

Please sign in to comment.