Initial commit

Sarah 2021-10-17 02:09:38 +02:00
commit c96063470a
No known key found for this signature in database
GPG key ID: 708F7ACE058F0186
14 changed files with 830 additions and 0 deletions

1
.envrc Normal file

@@ -0,0 +1 @@
eval "$(lorri direnv)"

147
.gitignore vendored Normal file

@@ -0,0 +1,147 @@
# Created by https://www.toptal.com/developers/gitignore/api/python
# Edit at https://www.toptal.com/developers/gitignore?templates=python
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# End of https://www.toptal.com/developers/gitignore/api/python
result/

50
nix/sources.json Normal file

@@ -0,0 +1,50 @@
{
    "libnixstore-c": {
        "branch": "master",
        "description": "a brutal attempt at making C bindings for Nix",
        "homepage": null,
        "owner": "andir",
        "repo": "libnixstore-c",
        "rev": "7e680c54ad0bdc38a4f6a6a51e4d6427d8942bb3",
        "sha256": "1a8nxcpp93hly410ni77q1zw5097dpvkszw3h6l5cr25wnb31ngq",
        "type": "tarball",
        "url": "https://github.com/andir/libnixstore-c/archive/7e680c54ad0bdc38a4f6a6a51e4d6427d8942bb3.tar.gz",
        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
    },
    "mach-nix": {
        "branch": "master",
        "description": "Create highly reproducible python environments",
        "homepage": "",
        "owner": "DavHau",
        "repo": "mach-nix",
        "rev": "b56a541af15efd2062ffb9abb69f63dcceafb64d",
        "sha256": "0zdifqdq478q938wm3pwdph8xv9ksk9qvf6s3kckchyzw18x28k0",
        "type": "tarball",
        "url": "https://github.com/DavHau/mach-nix/archive/b56a541af15efd2062ffb9abb69f63dcceafb64d.tar.gz",
        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
    },
    "niv": {
        "branch": "master",
        "description": "Easy dependency management for Nix projects",
        "homepage": "https://github.com/nmattia/niv",
        "owner": "nmattia",
        "repo": "niv",
        "rev": "65a61b147f307d24bfd0a5cd56ce7d7b7cc61d2e",
        "sha256": "17mirpsx5wyw262fpsd6n6m47jcgw8k2bwcp1iwdnrlzy4dhcgqh",
        "type": "tarball",
        "url": "https://github.com/nmattia/niv/archive/65a61b147f307d24bfd0a5cd56ce7d7b7cc61d2e.tar.gz",
        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
    },
    "nixpkgs": {
        "branch": "nixos-unstable",
        "description": "Nix Packages collection",
        "homepage": "",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "2cf9db0e3d45b9d00f16f2836cb1297bcadc475e",
        "sha256": "0sij1a5hlbigwcgx10dkw6mdbjva40wzz4scn0wchv7yyi9ph48l",
        "type": "tarball",
        "url": "https://github.com/NixOS/nixpkgs/archive/2cf9db0e3d45b9d00f16f2836cb1297bcadc475e.tar.gz",
        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
    }
}

174
nix/sources.nix Normal file

@@ -0,0 +1,174 @@
# This file has been generated by Niv.
let
  #
  # The fetchers. fetch_<type> fetches specs of type <type>.
  #
  fetch_file = pkgs: name: spec:
    let
      name' = sanitizeName name + "-src";
    in
      if spec.builtin or true then
        builtins_fetchurl { inherit (spec) url sha256; name = name'; }
      else
        pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
  fetch_tarball = pkgs: name: spec:
    let
      name' = sanitizeName name + "-src";
    in
      if spec.builtin or true then
        builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
      else
        pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
  fetch_git = name: spec:
    let
      ref =
        if spec ? ref then spec.ref else
          if spec ? branch then "refs/heads/${spec.branch}" else
            if spec ? tag then "refs/tags/${spec.tag}" else
              abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
    in
      builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
  fetch_local = spec: spec.path;
  fetch_builtin-tarball = name: throw
    ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
        $ niv modify ${name} -a type=tarball -a builtin=true'';
  fetch_builtin-url = name: throw
    ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
        $ niv modify ${name} -a type=file -a builtin=true'';
  #
  # Various helpers
  #
  # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
  sanitizeName = name:
    (
      concatMapStrings (s: if builtins.isList s then "-" else s)
        (
          builtins.split "[^[:alnum:]+._?=-]+"
            ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
        )
    );
  # The set of packages used when specs are fetched using non-builtins.
  mkPkgs = sources: system:
    let
      sourcesNixpkgs =
        import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
      hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
      hasThisAsNixpkgsPath = <nixpkgs> == ./.;
    in
      if builtins.hasAttr "nixpkgs" sources
      then sourcesNixpkgs
      else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
        import <nixpkgs> {}
      else
        abort
          ''
            Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
            add a package called "nixpkgs" to your sources.json.
          '';
  # The actual fetching function.
  fetch = pkgs: name: spec:
    if ! builtins.hasAttr "type" spec then
      abort "ERROR: niv spec ${name} does not have a 'type' attribute"
    else if spec.type == "file" then fetch_file pkgs name spec
    else if spec.type == "tarball" then fetch_tarball pkgs name spec
    else if spec.type == "git" then fetch_git name spec
    else if spec.type == "local" then fetch_local spec
    else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
    else if spec.type == "builtin-url" then fetch_builtin-url name
    else
      abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
  # If the environment variable NIV_OVERRIDE_${name} is set, then use
  # the path directly as opposed to the fetched source.
  replace = name: drv:
    let
      saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
      ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
    in
      if ersatz == "" then drv else
        # this turns the string into an actual Nix path (for both absolute and
        # relative paths)
        if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
  # Ports of functions for older nix versions
  # a Nix version of mapAttrs if the built-in doesn't exist
  mapAttrs = builtins.mapAttrs or (
    f: set: with builtins;
    listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
  );
  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
  range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
  stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
  # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
  stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
  concatMapStrings = f: list: concatStrings (map f list);
  concatStrings = builtins.concatStringsSep "";
  # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
  optionalAttrs = cond: as: if cond then as else {};
  # fetchTarball version that is compatible between all the versions of Nix
  builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
    let
      inherit (builtins) lessThan nixVersion fetchTarball;
    in
      if lessThan nixVersion "1.12" then
        fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
      else
        fetchTarball attrs;
  # fetchurl version that is compatible between all the versions of Nix
  builtins_fetchurl = { url, name ? null, sha256 }@attrs:
    let
      inherit (builtins) lessThan nixVersion fetchurl;
    in
      if lessThan nixVersion "1.12" then
        fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
      else
        fetchurl attrs;
  # Create the final "sources" from the config
  mkSources = config:
    mapAttrs (
      name: spec:
        if builtins.hasAttr "outPath" spec
        then abort
          "The values in sources.json should not have an 'outPath' attribute"
        else
          spec // { outPath = replace name (fetch config.pkgs name spec); }
    ) config.sources;
  # The "config" used by the fetchers
  mkConfig =
    { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
    , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
    , system ? builtins.currentSystem
    , pkgs ? mkPkgs sources system
    }: rec {
      # The sources, i.e. the attribute set of spec name to spec
      inherit sources;
      # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
      inherit pkgs;
    };
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }

0
peerix/__init__.py Normal file

13
peerix/__main__.py Normal file

@@ -0,0 +1,13 @@
import asyncio
import uvloop
from hypercorn import Config
from hypercorn.asyncio import serve
from peerix.app import app
if __name__ == "__main__":
    uvloop.install()
    config = Config()
    config.bind = ["0.0.0.0:12304"]
    asyncio.run(serve(app, config))

71
peerix/app.py Normal file

@@ -0,0 +1,71 @@
import asyncio
import contextlib
from starlette.requests import Request
from starlette.responses import Response, StreamingResponse
from starlette.applications import Starlette
from peerix.local import local
from peerix.remote import remote
from peerix.prefix import PrefixStore
@contextlib.asynccontextmanager
async def _setup_stores(local_port: int):
    global l_access, r_access
    async with local() as l:
        l_access = PrefixStore("local/nar", l)
        lp = PrefixStore("local", l)
        async with remote(lp, local_port, "0.0.0.0") as r:
            r_access = PrefixStore("remote", r)
            yield
setup_store = _setup_stores(12304)
app = Starlette()
@app.on_event("startup")
async def _setup_stores_init():
    await setup_store.__aenter__()
@app.on_event("shutdown")
async def _setup_stores_deinit():
    await setup_store.__aexit__(None, None, None)
@app.route("/nix-cache-info")
async def cache_info(_: Request) -> Response:
    ci = await l_access.cache_info()
    ci = ci._replace(priority=20)
    return Response(content=ci.dump())
@app.route("/{hash:str}.narinfo")
async def narinfo(req: Request) -> Response:
    if req.client.host != "127.0.0.1":
        return Response(content="Permission denied.", status_code=403)
    ni = await r_access.narinfo(req.path_params["hash"])
    if ni is None:
        return Response(content="Not found", status_code=404)
    return Response(content=ni.dump(), status_code=200, media_type="text/x-nix-narinfo")
@app.route("/local/{hash:str}.narinfo")
async def access_narinfo(req: Request) -> Response:
    ni = await l_access.narinfo(req.path_params["hash"])
    if ni is None:
        return Response(content="Not found", status_code=404)
    return Response(content=ni.dump(), status_code=200, media_type="text/x-nix-narinfo")
@app.route("/local/nar/{path:str}")
async def push_nar(req: Request) -> Response:
    return StreamingResponse(l_access.nar(f"local/nar/{req.path_params['path']}"), media_type="text/plain")
@app.route("/remote/{path:path}")
async def pull_nar(req: Request) -> Response:
    return StreamingResponse(r_access.nar(f"remote/{req.path_params['path']}"), media_type="text/plain")

106
peerix/local.py Normal file

@@ -0,0 +1,106 @@
import typing as t
import contextlib
import subprocess
import tempfile
import asyncio
import shutil
import base64
import sys
import os
import aiohttp
from peerix.store import NarInfo, CacheInfo, Store
nix_serve = shutil.which("nix-serve")
if nix_serve is None:
    raise RuntimeError("nix-serve is not installed.")
nix = shutil.which("nix")
if nix is None:
    raise RuntimeError("nix is not installed.")
assert nix_serve is not None
assert nix is not None
class LocalStore(Store):
    def __init__(self, session: aiohttp.ClientSession):
        self.session = session
        self._cache: t.Optional[CacheInfo] = None
    async def cache_info(self) -> CacheInfo:
        if self._cache is None:
            async with self.session.get("http://_/nix-cache-info") as resp:
                storeDir = ""
                wantMassQuery = -1
                priority = 50
                for line in (await resp.text()).splitlines():
                    k, v = line.split(":", 1)
                    v = v.strip()
                    k = k.strip()
                    if k == "StoreDir":
                        storeDir = v
                    elif k == "WantMassQuery":
                        wantMassQuery = int(v)
                    elif k == "Priority":
                        priority = int(v)
                self._cache = CacheInfo(storeDir, wantMassQuery, priority)
        return self._cache
    async def narinfo(self, hsh: str) -> t.Optional[NarInfo]:
        async with self.session.get(f"http://_/{hsh}.narinfo") as resp:
            if resp.status == 404:
                return None
            info = NarInfo.parse(await resp.text())
            return info._replace(url=base64.b64encode(info.storePath.encode("utf-8")).replace(b"/", b"_").decode("ascii"))
    async def nar(self, sp: str) -> t.AsyncIterable[bytes]:
        path = base64.b64decode(sp.replace("_", "/")).decode("utf-8")
        if not path.startswith((await self.cache_info()).storeDir):
            raise FileNotFoundError()
        process = await asyncio.create_subprocess_exec(
            nix, "dump-path", "--", path,
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
            stdin=subprocess.DEVNULL,
        )
        assert process.stdout is not None
        while not process.stdout.at_eof():
            yield await process.stdout.read(10*1024*1024)
@contextlib.asynccontextmanager
async def local():
    with tempfile.TemporaryDirectory() as tmpdir:
        sock = f"{tmpdir}/server.sock"
        process = await asyncio.create_subprocess_exec(
            nix_serve, "--listen", sock,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.DEVNULL,
            stderr=sys.stderr
        )
        for _ in range(10):
            if os.path.exists(sock):
                break
            await asyncio.sleep(1)
        else:
            raise RuntimeError("Failed to start up local store.")
        try:
            connector = aiohttp.UnixConnector(sock)
            async with aiohttp.ClientSession(connector_owner=True, connector=connector) as session:
                yield LocalStore(session)
        finally:
            process.terminate()

25
peerix/prefix.py Normal file

@@ -0,0 +1,25 @@
import typing as t
from peerix.store import NarInfo, Store
class PrefixStore(Store):
    def __init__(self, prefix: str, backend: Store):
        self.backend = backend
        self.prefix = prefix
    async def cache_info(self):
        return await self.backend.cache_info()
    async def narinfo(self, hsh: str) -> t.Optional[NarInfo]:
        info = await self.backend.narinfo(hsh)
        if info is None:
            return None
        return info._replace(url=f"{self.prefix}/{info.url}")
    async def nar(self, path: str) -> t.AsyncIterable[bytes]:
        if not path.startswith(self.prefix + "/"):
            raise FileNotFoundError("Not found.")
        async for chunk in self.backend.nar(path[len(self.prefix)+1:]):
            yield chunk

108
peerix/remote.py Normal file

@@ -0,0 +1,108 @@
import typing as t
import socket
import asyncio
import contextlib
import aiohttp
from peerix.store import NarInfo, Store
class DiscoveryProtocol(asyncio.DatagramProtocol, Store):
    idx: int
    transport: asyncio.DatagramTransport
    waiters: t.Dict[int, asyncio.Future]
    store: Store
    session: aiohttp.ClientSession
    local_port: int
    def __init__(self, store: Store, session: aiohttp.ClientSession, local_port: int):
        self.idx = 0
        self.waiters = {}
        self.store = store
        self.session = session
        self.local_port = local_port
    def connection_made(self, transport):
        self.transport = transport
    def datagram_received(self, data: bytes, addr: t.Tuple[str, int]) -> None:
        # 1 => Response to a command of mine.
        if data[0] == 1:
            idx = int.from_bytes(data[1:5], "big")
            if idx not in self.waiters:
                return
            self.waiters[idx].set_result((data[5:], addr))
        # 0 => Request from another server.
        elif data[0] == 0:
            asyncio.create_task(self.respond(data, addr))
    def stop(self):
        self.transport.close()
    async def cache_info(self):
        return await self.store.cache_info()
    async def respond(self, data: bytes, addr: t.Tuple[str, int]) -> None:
        hsh = data[5:].decode("utf-8")
        print(f"Got request from {addr[0]}:{addr[1]} for {hsh}")
        narinfo = await self.store.narinfo(hsh)
        if narinfo is None:
            return
        self.transport.sendto(b"\x01" + data[1:5] + self.local_port.to_bytes(4, "big") + narinfo.url.encode("utf-8"), addr)
    async def narinfo(self, hsh: str) -> t.Optional[NarInfo]:
        fut = asyncio.get_running_loop().create_future()
        self.idx = (idx := self.idx) + 1
        self.waiters[idx] = fut
        fut.add_done_callback(lambda _: self.waiters.pop(idx, None))
        print(f"Requesting {hsh} from direct local network.")
        self.transport.sendto(b"".join([b"\x00", idx.to_bytes(4, "big"), hsh.encode("utf-8")]), ("255.255.255.255", self.local_port))
        try:
            data, addr = await asyncio.wait_for(fut, 0.5)
        except asyncio.TimeoutError:
            print(f"No response for {hsh}")
            return None
        port = int.from_bytes(data[0:4], "big")
        url = data[4:].decode("utf-8")
        print(f"{addr[0]}:{addr[1]} responded for {hsh} with http://{addr[0]}:{port}/{url}")
        async with self.session.get(f"http://{addr[0]}:{port}/{url}") as resp:
            if resp.status != 200:
                return None
            info = NarInfo.parse(await resp.text())
            return info._replace(url=f"{addr[0]}/{port}/{info.url}")
    async def nar(self, sp: str) -> t.AsyncIterable[bytes]:
        addr1, addr2, p = sp.split("/", 2)
        async with self.session.get(f"http://{addr1}:{addr2}/{p}") as resp:
            if resp.status != 200:
                raise FileNotFoundError("Not found.")
            content = resp.content
            while not content.at_eof():
                yield await content.readany()
@contextlib.asynccontextmanager
async def remote(store: Store, local_port: int, local_addr: str = "0.0.0.0"):
    protocol: DiscoveryProtocol
    async with aiohttp.ClientSession() as session:
        _, protocol = await asyncio.get_running_loop().create_datagram_endpoint(
            lambda: DiscoveryProtocol(store, session, local_port),
            local_addr=(local_addr, local_port),
            family=socket.AF_INET,
            allow_broadcast=True
        )
        try:
            yield protocol
        finally:
            protocol.stop()

91
peerix/store.py Normal file

@@ -0,0 +1,91 @@
import typing as t
class NarInfo(t.NamedTuple):
    storePath: str
    url: str
    compression: t.Literal["none"]
    narHash: str
    narSize: int
    references: t.Sequence[str]
    deriver: t.Optional[str]
    signatures: t.Sequence[str]
    def dump(self) -> str:
        lines = [
            f"StorePath: {self.storePath}",
            f"URL: {self.url}",
            f"Compression: {self.compression}",
            f"NarHash: {self.narHash}",
            f"NarSize: {self.narSize}"
        ]
        if self.references:
            lines.append(f"References: {' '.join(self.references)}")
        if self.deriver:
            lines.append(f"Deriver: {self.deriver}")
        for sig in self.signatures:
            lines.append(f"Sig: {sig}")
        return "\n".join(lines)
    @classmethod
    def parse(cls, data: str) -> "NarInfo":
        storePath = ""
        url = ""
        compression = "none"
        narHash = ""
        narSize = -1
        references = []
        deriver = None
        signatures = []
        for line in data.splitlines():
            k, v = line.split(":", 1)
            v = v.strip()
            k = k.strip()
            if k == "StorePath":
                storePath = v
            elif k == "URL":
                url = v
            elif k == "Compression" and v == "none":
                compression = v
            elif k == "NarHash":
                narHash = v
            elif k == "NarSize":
                narSize = int(v)
            elif k == "References":
                references = v.split(" ")
            elif k == "Deriver":
                deriver = v
            elif k == "Sig":
                signatures.append(v)
        return NarInfo(storePath, url, compression, narHash, narSize, references, deriver, signatures)
class CacheInfo(t.NamedTuple):
    storeDir: str
    wantMassQuery: int
    priority: int
    def dump(self) -> str:
        return "\n".join((
            f"StoreDir: {self.storeDir}",
            f"WantMassQuery: {self.wantMassQuery}",
            f"Priority: {self.priority}"
        ))
class Store:
    async def cache_info(self) -> CacheInfo:
        raise NotImplementedError()
    async def narinfo(self, hsh: str) -> t.Optional[NarInfo]:
        raise NotImplementedError()
    async def nar(self, url: str) -> t.AsyncIterable[bytes]:
        raise NotImplementedError()

4
requirements.txt Normal file

@@ -0,0 +1,4 @@
aiohttp
uvloop
hypercorn
starlette

21
setup.py Normal file

@@ -0,0 +1,21 @@
# setup.py
from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
ext_modules = [
    Extension(
        name="peerix._nix",
        sources=["peerix/_nix.pyx"],
        language="c++",
        extra_compile_args=["-std=c++17"],
    )
]
ext_modules = cythonize(ext_modules)
setup(
    name="peerix",
    ext_modules=ext_modules,
)

19
shell.nix Normal file

@@ -0,0 +1,19 @@
let
  sources = import ./nix/sources.nix {};
in
{ pkgs ? import sources.nixpkgs {} }:
let
  mach-nix = import sources.mach-nix {
    inherit pkgs;
  };
in
pkgs.mkShell {
  buildInputs = with pkgs; [
    nix-serve
    niv
    (mach-nix.mkPython {
      python = "python39";
      requirements = (builtins.readFile ./requirements.txt) + "\nipython";
    })
  ];
}