assert _instr('RUN', 'echo hello') == 'RUN echo hello'
assert _from('python', '3.11') == 'FROM python:3.11'
assert _from('ubuntu', as_='builder') == 'FROM ubuntu AS builder'
assert _from('alpine') == 'FROM alpine'
assert _run('apt-get update') == 'RUN apt-get update'
r = _run(['apt-get update', 'apt-get install -y curl'])
assert 'apt-get update && ' in r
assert 'apt-get install -y curl' in r
assert _apt_install('curl', 'wget', y=True) == 'RUN apt-get update && apt-get install -y curl wget && rm -rf /var/lib/apt/lists/*'
assert _apt_install('git', clean=False) == 'RUN apt-get update && apt-get install git'
assert _cmd(['python', 'app.py']) == 'CMD ["python", "app.py"]'
assert _cmd('echo hello') == 'CMD echo hello'
assert _copy('.', '/app') == 'COPY . /app'
assert _copy('/build/out', '/app', from_='builder') == 'COPY --from=builder /build/out /app'
assert _copy('app/', '.', link=True) == 'COPY --link app/ .'
assert _copy('/app', '/app', from_='builder', link=True) == 'COPY --from=builder --link /app /app'
assert _workdir('/app') == 'WORKDIR /app'
assert _env('PATH', '/usr/local/bin') == 'ENV PATH=/usr/local/bin'
assert _env('DEBIAN_FRONTEND=noninteractive') == 'ENV DEBIAN_FRONTEND=noninteractive'
assert _expose(8080) == 'EXPOSE 8080'
assert _entrypoint(['python', '-m', 'flask']) == 'ENTRYPOINT ["python", "-m", "flask"]'
assert _arg('VERSION', '1.0') == 'ARG VERSION=1.0'
assert _arg('VERSION') == 'ARG VERSION'
assert _label(version='1.0', maintainer='me') == 'LABEL version="1.0" maintainer="me"'
assert _volume('/data') == 'VOLUME /data'
assert _volume(['/data', '/logs']) == 'VOLUME ["/data", "/logs"]'
assert _shell(['/bin/bash', '-c']) == 'SHELL ["/bin/bash", "-c"]'
assert 'CMD curl' in _healthcheck('curl -f http://localhost/', i='30s')
assert _healthcheck('curl localhost', i='30s', t='10s') == 'HEALTHCHECK --interval=30s --timeout=10s CMD curl localhost'
assert _healthcheck('curl localhost') == 'HEALTHCHECK CMD curl localhost'
assert _on_build(_run('echo triggered')) == 'ONBUILD RUN echo triggered'
core
DockerFile
mk_flags
def mk_flags(
a:VAR_POSITIONAL, short:bool=True, sym:str='=', kw:VAR_KEYWORD
):
Build CLI flag list: single-char kwargs → -k [v], multi-char → --key[=v]. short=False: all keys use --key[=v]
Dockerfile Instructions
instr creates a Dockerfile instruction string from a keyword and value. Factory functions (from_, run_, cmd_, etc.) wrap instr for each Dockerfile keyword, handling formatting details like tag joining, JSON exec form, and multi-command chaining.
Instruction factory functions
Each function maps to a Dockerfile keyword with a trailing _ to avoid clashing with Python builtins.
Dockerfile Builder
The Dockerfile class provides a fluent interface for building Dockerfiles. Start with a base image, chain instruction methods, then render or save.
Each method is one line – it creates an instruction and appends it, returning self for chaining.
parsed = _parse("# comment\nFROM python:3.11\nRUN apt-get update && \\\n apt-get install -y curl\nCOPY . /app")
print(parsed)
assert len(parsed) == 3
assert parsed[0] == 'FROM python:3.11'
assert 'apt-get install -y curl' in parsed[1]
['FROM python:3.11', 'RUN apt-get update && apt-get install -y curl', 'COPY . /app']
Dockerfile
def Dockerfile(
items:NoneType=None, rest:VAR_POSITIONAL, use_list:bool=False, match:NoneType=None
):
Fluent builder for Dockerfiles
df = (Dockerfile().from_('python:3.11-slim')
.run('pip install flask')
.copy('.', '/app')
.workdir('/app')
.expose(5000)
.cmd(['python', 'app.py']))
expected = """FROM python:3.11-slim
RUN pip install flask
COPY . /app
WORKDIR /app
EXPOSE 5000
CMD [\"python\", \"app.py\"]"""
print(df)
assert str(df) == expected
FROM python:3.11-slim
RUN pip install flask
COPY . /app
WORKDIR /app
EXPOSE 5000
CMD ["python", "app.py"]
# run_mount: cache mounts for fast rebuilds
df = (Dockerfile().from_('python:3.12-slim')
.run_mount('pip install -r requirements.txt', target='/root/.cache/pip')
.run_mount('uv sync --frozen', target='/root/.cache/uv')
.run_mount('apt-get install -y curl', type='cache', target='/var/cache/apt'))
s = str(df)
assert "RUN --mount=type=cache,target=/root/.cache/pip pip install -r requirements.txt" in s
assert "RUN --mount=type=cache,target=/root/.cache/uv uv sync --frozen" in s
print(df)
FROM python:3.12-slim
RUN --mount=type=cache,target=/root/.cache/pip pip install -r requirements.txt
RUN --mount=type=cache,target=/root/.cache/uv uv sync --frozen
RUN --mount=type=cache,target=/var/cache/apt apt-get install -y curl
Multi-stage builds work naturally:
df = (Dockerfile().from_('golang:1.21', as_='builder')
.workdir('/src')
.copy('.', '.')
.run('go build -o /app')
.from_('alpine')
.copy('/app', '/app', from_='builder')
.cmd(['/app']))
assert 'FROM golang:1.21 AS builder' in str(df)
assert 'COPY --from=builder /app /app' in str(df)
print(df)
FROM golang:1.21 AS builder
WORKDIR /src
COPY . .
RUN go build -o /app
FROM alpine
COPY --from=builder /app /app
CMD ["/app"]
Multi-command RUN chains with &&:
df = (Dockerfile().from_('ubuntu:22.04').run(['apt-get update', 'apt-get install -y python3', 'rm -rf /var/lib/apt/lists/*']))
print(df)
FROM ubuntu:22.04
RUN apt-get update && apt-get install -y python3 && rm -rf /var/lib/apt/lists/*
Loading from an existing Dockerfile
Use Dockerfile.load() to read an existing Dockerfile. save() returns the Path it wrote to.
import tempfile
tmp = tempfile.mkdtemp()
Path(f'{tmp}/Dockerfile').write_text("# My app\nFROM python:3.11-slim\nRUN apt-get update && \\\n apt-get install -y curl\nCOPY . /app\nCMD [\"python\", \"app.py\"]")
# Load existing Dockerfile
df = Dockerfile.load(f'{tmp}/Dockerfile')
assert len(df) == 4
assert df[0] == 'FROM python:3.11-slim'
# save writes the file and returns self (Dockerfile), not the path
p = f'{tmp}/Dockerfile'
df.save(p)
assert Path(p).exists()
# chain after loading
df2 = df.run('echo hi')
assert len(df2) == 5
print(df)
FROM python:3.11-slim
RUN apt-get update && apt-get install -y curl
COPY . /app
CMD ["python", "app.py"]
Build, Run, Test
These top-level functions wrap the Docker CLI for the common workflow: build an image from a Dockerfile, run a container, and test that a command succeeds inside an image.
Requires Docker daemon
The functions below need a running Docker daemon.
Cli
def Cli(
args:VAR_POSITIONAL, kwargs:VAR_KEYWORD
):
*Base: call builds flags → _run(), getattr dispatches subcommands*
Docker
def Docker(
no_creds:bool=False
):
Unified docker CLI wrapper. no_creds strips credential helpers from config.
drun
def drun(
img:str, detach:bool=False, ports:NoneType=None, name:NoneType=None, remove:bool=False, command:NoneType=None,
check:bool=True
):
Run a container, return container ID (detached) or output. check=True raises on startup failure.
test
def test(
img_or_tag:str, cmd
):
Run cmd in image, return True if exit code 0
Dockerfile.build
def build(
df:Dockerfile, tag:str=None, path:str='.', no_creds:bool=False, fn:str='Dockerfile'
):
Build image from Dockerfile via docker compose build (uses daemon BuildKit, no buildx required).
from fastcore.test import test_fail
# Debug: see what exception drun actually raises
try:
drun('alpine', detach=True, command=['sh', '-c', 'exit 1'], check=True)
print('NO EXCEPTION RAISED')
except Exception as e:
print(f'Exception type: {type(e).__name__}')
print(f'Exception message: {str(e)[:300]}')
Exception type: RuntimeError
Exception message: Container '578084b28be52a324fd50dc429702be1a47132fc89cfe73c9525251755bbfe30' failed to start.
Convenience functions
rmi
def rmi(
nm, force:bool=False
):
Remove a Docker image (docker rmi); force=True adds -f.
rm
def rm(
nm, force:bool=False
):
Remove a container (docker rm); force=True adds -f.
logs
def logs(
nm, n:int=10
):
Show the last n lines of a container's logs (docker logs).
stop
def stop(
nm
):
Stop a running container (docker stop).
images
def images(
):
List Docker images (docker images).
containers
def containers(
all:bool=False
):
List containers (docker ps); all=True includes stopped containers.
Example: FastHTML app with uv
A realistic Dockerfile for a FastHTML app that uses uv for dependency management, installs system packages, and is designed to run with a mounted volume for persistent data.
df = (Dockerfile().from_('python', '3.12-slim')
.apt_install('curl', 'sqlite3', y=True)
.run('pip install uv')
.workdir('/app')
.copy('pyproject.toml', '.')
.run('uv export --no-hashes -o requirements.txt && pip install -r requirements.txt')
.copy('.', '.')
.volume('/app/data')
.expose(5001)
.cmd(['python', 'main.py']))
print(df)
FROM python:3.12-slim
RUN apt-get update && apt-get install -y curl sqlite3 && rm -rf /var/lib/apt/lists/*
RUN pip install uv
WORKDIR /app
COPY pyproject.toml .
RUN uv export --no-hashes -o requirements.txt && pip install -r requirements.txt
COPY . .
VOLUME /app/data
EXPOSE 5001
CMD ["python", "main.py"]
tmp = tempfile.mkdtemp()
df = Dockerfile().from_('alpine').run('echo hello > /greeting.txt').cmd(['cat', '/greeting.txt'])
tag = df.build(tag='fastops-test:hello', path=tmp, no_creds=True)
print(f'Built: {tag}')
out = drun(tag, remove=True)
print(f'Output: {out}')
rmi(tag, force=True)
print('Cleaned up.')
Built: fastops-test:hello
Output: hello
Cleaned up.
End-to-end: FastHTML + FastLite todo app
app_dir = Path(tempfile.mkdtemp()) / 'fasthtml-todo'
app_dir.mkdir()
code = {'main.py' :'''
from json import dumps
from fasthtml.common import *
db = database('data/todos.db')
todos = db.t.todos
if todos not in db.t: todos.create(id=int, title=str, done=bool, pk='id')
Todo = todos.dataclass()
app, rt = fast_app(live=False)
@rt('/')
def get():
items = [Li(f"{'✓' if t.done else '○'} {t.title}", id=f'todo-{t.id}') for t in todos()]
return Titled('Todos',
Ul(*items),
Form(Input(name='title', placeholder='New todo...'), Button('Add'), action='/add', method='post'))
@rt('/add', methods=['post'])
def post(title: str):
todos.insert(Todo(title=title, done=False))
return Redirect('/')
@rt('/api/todos')
def api():
data = [dict(id=t.id, title=t.title, done=t.done) for t in todos()]
return Response(dumps(data), media_type='application/json')
serve(host='0.0.0.0', port=5001)
''', 'requirements.txt':'python-fasthtml\n'}
for k,v in code.items(): Path(app_dir / k).write_text(v)
# Add some todos via POST
def test_post(url='http://localhost:5001'):
for t in ['Buy milk', 'Write docs', 'Ship fastops']: urlread(f'{url}/add', title=t)
# Fetch the JSON API
for t in urljson(f'{url}/api/todos'): print(f" {'✓' if t['done'] else '○'} {t['title']}")
def cleanup(nm, tag):
rm(nm, force=True)
rmi(tag, force=True)
print(f'App dir: {app_dir}')
print('Files:', os.listdir(app_dir))
App dir: /var/folders/kg/9vdw4mdd1fs58svgh4k1qhr09x7dqh/T/tmphf0q8sp4/fasthtml-todo
Files: ['requirements.txt', 'main.py']
df = (Dockerfile()
.from_('python', '3.12-slim')
.workdir('/app')
.copy('requirements.txt', '.')
.run('pip install --no-cache-dir -r requirements.txt')
.copy('.', '.')
.volume('/app/data')
.expose(5001)
.cmd(['python', 'main.py']))
print(df)
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
VOLUME /app/data
EXPOSE 5001
CMD ["python", "main.py"]
import time
from fastcore.net import urlread, urljson
tag = 'fastops-fasthtml:latest'
name = 'fastops-fasthtml-demo'
df.build(tag=tag, path=str(app_dir), no_creds=True)
print(f'Built: {tag}')
for cid in Docker()('ps', '-aq', '--filter', f'ancestor={tag}').splitlines(): rm(cid, force=True)
rm(name, force=True)
cid = drun(tag, detach=True, ports={'5001/tcp': 5001}, name=name, check=True)
print(f'Container: {cid[:12]}')
time.sleep(3)
try: test_post()
except Exception as e:
print(f'Test failed: {e}')
print(f'\nLogs:')
print(logs(name, n=3))
finally: cleanup(name, tag)
print('Cleaned up.')
Built: fastops-fasthtml:latest
Container: bf1125b54374
○ Buy milk
○ Write docs
○ Ship fastops
Cleaned up.
Docker Compose
A builder for
docker-compose.yml files, with a fluent interface for defining services, networks, and volumes. Use up() and down() to run the stack directly from the builder.
Service
A service is a component of a Docker Compose stack, defined by its image or build context, ports, environment variables, volumes, and other configuration. The service() function creates a service definition that can be added to a Compose builder.
service
def service(
image:NoneType=None, build:NoneType=None, ports:NoneType=None, env:NoneType=None, volumes:NoneType=None,
depends_on:NoneType=None, command:NoneType=None, kw:VAR_KEYWORD
):
Create a docker-compose service dict
dict2str
def dict2str(
d:dict, sep:str=':'
):
Convert a dict's items to 'key{sep}value' strings (e.g. {80: 80} → ['80:80']).
d = service(image='nginx', ports={80: 80})
assert d['image'] == 'nginx'
assert d['ports'] == ['80:80']
d = service(image='postgres:15', env={'POSTGRES_PASSWORD': 'secret'}, volumes={'pgdata': '/var/lib/postgresql/data'})
assert d['environment'] == ['POSTGRES_PASSWORD=secret']
assert d['volumes'] == ['pgdata:/var/lib/postgresql/data']
Compose
The Compose class provides a fluent builder for docker-compose files. Chain .svc(), .network(), and .volume() calls, then render with str() or save to disk.
Services are stored as plain dicts. to_dict() just assembles the top-level compose structure.
Compose
def Compose(
items:NoneType=None, rest:VAR_POSITIONAL, use_list:bool=False, match:NoneType=None
):
Fluent builder for docker-compose.yml files
dc = (Compose()
.svc('web', image='nginx', ports={80: 80}, networks=['backend'])
.svc('redis', image='redis:alpine')
.svc('db', image='postgres:15', env={'POSTGRES_PASSWORD': 'secret'},
volumes={'pgdata': '/var/lib/postgresql/data'})
.network('backend').volume('pgdata'))
d = dc.to_dict()
assert 'networks' in d
assert 'volumes' in d
assert 'pgdata' in d['volumes']
print(dc)
services:
web:
image: nginx
ports:
- 80:80
networks:
- backend
redis:
image: redis:alpine
db:
image: postgres:15
environment:
- POSTGRES_PASSWORD=secret
volumes:
- pgdata:/var/lib/postgresql/data
networks:
backend: null
volumes:
pgdata: null
Compose up, test, down, and load from an existing docker-compose.yml
dc = (Compose()
.svc('app',
build='.',
ports={5001: 5001},
restart='unless-stopped',
volumes={'app_data': '/app/data'})
.volume('app_data')).save(app_dir/'docker-compose.yml')
assert (app_dir/'docker-compose.yml').exists()
print((app_dir/'docker-compose.yml').read_text())
services:
app:
ports:
- 5001:5001
build: .
volumes:
- app_data:/app/data
restart: unless-stopped
volumes:
app_data: null
from fastcore.foundation import working_directory
with working_directory(app_dir) as w:
dc.up(no_creds=True)
time.sleep(3)
test_post()
dc.down(v=True,rmi='all',remove_orphans=True)
print('\n Compose load test: \n')
print(Compose.load())
○ Buy milk
○ Write docs
○ Ship fastops
Compose load test:
services:
app:
ports:
- 5001:5001
build: .
volumes:
- app_data:/app/data
restart: unless-stopped
volumes:
app_data: null
App Builders
Two uv strategies available as @patch methods on Dockerfile:
inst_uv() — single-stage: copies uv binary from its official image, syncs deps in place
with_uv(uv_image, image, workdir) — multistage: full uv compile stage → slim runtime, only .venv copied across
packages(*pkgs) wraps apt_install but is a no-op when called with no args, so callers can always write .packages(*listify(pkgs)) without an if pkgs guard.
Dockerfile.packages
def packages(
pkgs:VAR_POSITIONAL
):
Install system packages with apt-get. pkgs can be a list or space-separated string.
Dockerfile.with_uv
def with_uv(
uv_image, image, workdir
):
Multistage uv builder: appends uv builder stage then runtime base onto df
Dockerfile.inst_uv
def inst_uv(
req:bool=False, wd:str='/app'
):
Single-stage uv install: copy uv binary and sync deps. req=True uses requirements.txt instead of pyproject.toml
Framework builders
| Function | Default port | Strategy |
|---|---|---|
python_app |
8000 | multistage uv builder → slim |
fasthtml_app |
5001 | python_app bound to port 5001 |
fastapi_react |
8000 | Node frontend + Python backend |
go_app |
8080 | Go builder → distroless |
rust_app |
8080 | Cargo builder → distroless |
node_app |
3000 | Node slim (or two-stage static serve) |
detect_app |
auto | Sniffs project files, delegates above |
fasthtml_app is python_app partially applied to port=5001 via bind.
detect_app
def detect_app(
path:str='.', multistage:bool=True, kw:VAR_KEYWORD
):
*Naively detect the project type from path and return the appropriate Dockerfile; **kw is passed through to the matched app builder*
node_app
def node_app(
port:int=3000, node_version:str='20', cmd:NoneType=None, build_cmd:str='npm run build', static:bool=False
):
Node.js Dockerfile. static=True does two-stage build → serve dist/; False runs node directly.
rust_app
def rust_app(
port:int=8080, rust_version:str='1', binary:str='app', runtime:str='gcr.io/distroless/static',
features:NoneType=None
):
Two-stage Rust Dockerfile: cargo build --release → distroless runtime
go_app
def go_app(
port:int=8080, go_version:str='1.22', binary:str='app', runtime:str='gcr.io/distroless/static',
cmd:NoneType=None, cgo:bool=False
):
Two-stage Go Dockerfile: go compiler + go mod cache → distroless runtime
fastapi_react
def fastapi_react(
port:int=8000, node_version:str='20', frontend_dir:str='frontend', build_dir:str='dist',
image:str='python:3.13-slim', pkgs:NoneType=None, healthcheck:str='/health', multistage:bool=False,
uv_image:str='ghcr.io/astral-sh/uv:python3.13-bookworm'
):
Two-stage (default) or three-stage (multistage=True) Dockerfile: Node.js frontend + Python/FastAPI backend
python_app
def python_app(
port:int=8000, cmd:NoneType=None, im:str='python:3.13-slim', wd:str='/app', pkgs:NoneType=None,
vols:NoneType=None, multistage:bool=True, uv_image:str='ghcr.io/astral-sh/uv:python3.13-bookworm',
healthcheck:NoneType=None, req:bool=False
):
Python app Dockerfile. multistage=True (default): uv-builder → slim. False: single-stage with uv binary copy. req=True: use requirements.txt instead of pyproject.toml (forces multistage=False).
App builder tests
df = Dockerfile().from_('python:3.13-slim').workdir('/app').inst_uv()
s = str(df)
assert 'COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv' in s
assert 'uv sync --no-dev' in s
assert 'ENV PATH=/app/.venv/bin:$PATH' in s
assert 'COPY uv.lock' not in s
print('inst_uv OK')
df = Dockerfile().with_uv('ghcr.io/astral-sh/uv:python3.13-bookworm', 'python:3.13-slim', '/app')
s = str(df)
assert 'uv:python3.13-bookworm AS builder' in s
assert 'ENV UV_COMPILE_BYTECODE=1' in s and 'ENV UV_LINK_MODE=copy' in s
assert 'FROM python:3.13-slim' in s
print('build_uv OK')
assert len(Dockerfile().from_('alpine').packages()) == 1 # no-op
assert 'apt-get install -y curl' in str(Dockerfile().from_('alpine').packages('curl'))
print('packages OK')
df = Dockerfile().from_('python:3.13-slim').workdir('/app').inst_uv(req=True)
s = str(df)
assert 'COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv' in s
assert 'COPY requirements.txt .' in s
assert 'uv pip install --system -r requirements.txt' in s
assert 'pyproject.toml' not in s and 'uv sync' not in s
assert 'ENV PATH' not in s
print('inst_uv(req=True) OK')
df = python_app(); s = str(df)
assert 'uv:python3.13-bookworm AS builder' in s and 'ENV UV_COMPILE_BYTECODE=1' in s
assert 'COPY --from=builder /app /app' in s and 'ENV PATH=/app/.venv/bin:$PATH' in s
assert 'FROM python:3.13-slim' in s and 'EXPOSE 8000' in s and 'CMD ["python", "main.py"]' in s
print('python_app() multistage OK')
df = python_app(port=5001, multistage=False, pkgs=['libpq-dev', 'curl'], vols=['/app/data'], healthcheck='/health')
s = str(df)
assert 'COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv' in s
assert 'apt-get install -y libpq-dev curl' in s
assert 'ENV PATH=/app/.venv/bin:$PATH' in s
assert 'mkdir -p /app/data' in s and 'HEALTHCHECK' in s and 'EXPOSE 5001' in s
print('python_app() single-stage OK')
df = python_app(multistage=False, req=True); s = str(df)
assert 'COPY requirements.txt .' in s
assert 'uv pip install --system -r requirements.txt' in s
assert 'pyproject.toml' not in s
assert 'ENV PATH' not in s
print('python_app(req=True) OK')
df = fasthtml_app(); s = str(df)
assert 'EXPOSE 5001' in s and 'uv sync --no-dev' in s and 'CMD ["python", "app.py"]' in s
assert 'ENV PATH=/app/.venv/bin:$PATH' in s and 'AS builder' not in s
df2 = fasthtml_app(pkgs=['rclone'], vols=['/app/data'], healthcheck='/health'); s2 = str(df2)
assert 'rclone' in s2 and 'mkdir -p /app/data' in s2 and 'HEALTHCHECK' in s2
print('fasthtml_app() OK')
df = fastapi_react(); s = str(df)
assert s.count('FROM') == 2 and 'FROM node:20-slim AS frontend' in s
assert 'COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv' in s
assert 'COPY --from=frontend /build/dist /app/static' in s and 'uvicorn' in s
print('fastapi_react() 2-stage OK')
df = fastapi_react(multistage=True); s = str(df)
assert s.count('FROM') == 3 and 'uv:python3.13-bookworm AS builder' in s
assert 'COPY --from=builder /app /app' in s and 'ENV PATH=/app/.venv/bin:$PATH' in s
assert 'COPY --from=frontend /build/dist /app/static' in s
print('fastapi_react() 3-stage OK')
df = go_app(); s = str(df)
assert 'FROM golang:1.22-alpine AS builder' in s and 'go mod download' in s
assert 'ENV CGO_ENABLED=0' in s and '-o /app' in s
assert 'FROM gcr.io/distroless/static' in s and 'CMD ["/app"]' in s
df2 = go_app(binary='srv'); s2 = str(df2)
assert '-o /srv' in s2 and 'COPY --from=builder /srv /srv' in s2 and 'CMD ["/srv"]' in s2
print('go_app() OK')
df = rust_app(); s = str(df)
assert 'FROM rust:1-slim-bookworm AS builder' in s and 'cargo build --release' in s
assert 'COPY --from=builder /src/target/release/app /app' in s and 'CMD ["/app"]' in s
df2 = rust_app(binary='myapp', features='postgres'); s2 = str(df2)
assert '--features postgres' in s2 and '/src/target/release/myapp' in s2
print('rust_app() OK')
df = node_app(); s = str(df)
assert 'FROM node:20-slim' in s and 'npm ci' in s and 'EXPOSE 3000' in s and 'CMD ["node", "index.js"]' in s
df2 = node_app(static=True); s2 = str(df2)
assert 'FROM node:20-slim AS builder' in s2 and 'npm run build' in s2
assert 'COPY --from=builder /app/dist .' in s2 and 'serve' in s2
print('node_app() OK')
go_app() OK
rust_app() OK
node_app() OK
def _tmp(files):
d = Path(tempfile.mkdtemp())
for f, c in files.items(): (d/f).write_text(c)
return d
assert 'golang' in str(detect_app(_tmp({'go.mod': 'module x'})))
assert 'rust' in str(detect_app(_tmp({'Cargo.toml': '[package]'}))).lower()
assert 'EXPOSE 3000' in str(detect_app(_tmp({'package.json': '{}'})))
assert 'uvicorn' in str(detect_app(_tmp({'package.json': '{}', 'pyproject.toml': '[project]'})))
assert 'EXPOSE 5001' in str(detect_app(_tmp({'pyproject.toml': '[project]\ndependencies=["python-fasthtml"]'})))
assert 'EXPOSE 8000' in str(detect_app(_tmp({'pyproject.toml': '[project]'})))
s = str(detect_app(_tmp({'requirements.txt': 'flask\n'})))
assert 'uv pip install --system -r requirements.txt' in s and 'AS builder' not in s
try: detect_app(_tmp({'README.md': ''}))
except ValueError: pass
else: raise AssertionError('Expected ValueError')
print('detect_app() OK')
Config and Secrets
Non-secret config
env_set / env_get store plain config (VPS IP, SSH key path, server name) in ~/.config/fastops/.env with mode 0600. Writes are reflected into os.environ immediately so the current process sees changes without a restart.
The file uses the same security model as ~/.aws/credentials, ~/.config/hcloud/cli.toml, and SSH keys — owner-read-only on disk.
env_get
def env_get(
key, path:NoneType=None, default:NoneType=None
):
Read key: os.environ first, then fastops .env file, then default.
env_set
def env_set(
key, value, path:NoneType=None
):
Upsert key=value into fastops .env file and os.environ. Returns True if changed.
tmp = Path(tempfile.mkdtemp()) / '.env'
env_set('_FO_TEST_IP', '1.2.3.4', path=tmp)
assert os.environ['_FO_TEST_IP'] == '1.2.3.4'
assert env_get('_FO_TEST_IP', path=tmp) == '1.2.3.4'
env_set('_FO_TEST_IP', '1.2.3.4', path=tmp)
del os.environ['_FO_TEST_IP']
assert env_get('_FO_TEST_IP', path=tmp) == '1.2.3.4'
assert env_get('_FO_TEST_MISSING', path=tmp, default='fallback') == 'fallback'
env_set('_FO_TEST_IP', '5.6.7.8', path=tmp)
assert env_get('_FO_TEST_IP', path=tmp) == '5.6.7.8'
print('env_set/env_get OK')
Key _FO_TEST_IP not found in /var/folders/kg/9vdw4mdd1fs58svgh4k1qhr09x7dqh/T/tmp2xqc3991/.env.
Key _FO_TEST_MISSING not found in /var/folders/kg/9vdw4mdd1fs58svgh4k1qhr09x7dqh/T/tmp2xqc3991/.env.
env_set/env_get OK
Secrets
secret_set / secret_get store sensitive values (API tokens, tunnel tokens).
| Environment | Backend |
|---|---|
| macOS | OS Keychain via keyring (AES-256, Secure Enclave on Apple Silicon) |
| Headless Linux / CI / VPS | os.environ fallback — inject secrets at boot via cloud-init or systemd EnvironmentFile= |
keyring is an optional dependency. If unavailable (or no keychain daemon running), secret_set silently skips keychain storage and the env var is the source of truth for the current session. Between sessions on a server, secrets must come from the environment.
secret_get
def secret_get(
key, service:str='fastops', default:NoneType=None, path:NoneType=None
):
Read secret: OS keychain → os.environ → default.
secret_set
def secret_set(
key, value, service:str='fastops', save:bool=True, path:NoneType=None
):
Store secret in OS keychain (if available) and os.environ. Silent fallback on headless/VPS.
secret_set('_FO_TEST_TOKEN', 'tok123')
assert os.environ.get('_FO_TEST_TOKEN') == 'tok123'
assert secret_get('_FO_TEST_TOKEN') == 'tok123'
del os.environ['_FO_TEST_TOKEN']
assert secret_get('_FO_TEST_MISSING_XYZ', default='fallback') == 'fallback'
print('secret_set/secret_get OK')
secret_set/secret_get OK
secrets
def secrets(
keys:VAR_POSITIONAL, service:str='fastops', path:NoneType=None
):
Read multiple secrets from keychain/env into a dict. Warns and skips any that are missing.
secret_set('_FO_TEST_A', 'val_a')
secret_set('_FO_TEST_B', 'val_b')
result = secrets('_FO_TEST_A', '_FO_TEST_B', '_FO_TEST_MISSING')
assert result == {'_FO_TEST_A': 'val_a', '_FO_TEST_B': 'val_b'}
print('secrets() OK')
secrets() OK