Compare commits

...

6 Commits

Author      SHA1        Message                                         Date
Anonymous   b6a13cf1ff  move to subdir (Pages build/deploy cancelled)   2024-06-09 21:16:11 +08:00
Junlin Liu  ad4fca6006  Create jekyll-gh-pages.yml                      2024-06-09 21:15:24 +08:00
Anonymous   d63f28ba3a  fix                                             2024-06-09 21:00:36 +08:00
Anonymous   19c63bba60  add test suite                                  2024-06-09 20:59:04 +08:00
Anonymous   2c84cffa55  lint                                            2024-06-09 20:42:40 +08:00
Anonymous   b9f978b20a  lint                                            2024-06-09 20:42:30 +08:00
25 changed files with 421 additions and 57 deletions

.github/workflows/jekyll-gh-pages.yml (new file, vendored, 51 lines)

@@ -0,0 +1,51 @@
```yaml
# Sample workflow for building and deploying a Jekyll site to GitHub Pages
name: Deploy Jekyll with GitHub Pages dependencies preinstalled

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["master"]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Build job
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Pages
        uses: actions/configure-pages@v5
      - name: Build with Jekyll
        uses: actions/jekyll-build-pages@v1
        with:
          source: ./
          destination: ./_site
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3

  # Deployment job
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
```


@@ -28,18 +28,11 @@ LightMirrors is an open-source caching mirror service for accelerating package downloads.
 Because access from within China to overseas package sources is slow, and DockerHub in particular lacks domestic mirrors,
 we deploy a mirror site locally to speed up network access and save external bandwidth.
----
 ## Quick Start
-### Prerequisites
-- docker + docker-compose.
-- A domain name, with an A record for `*.yourdomain` pointing to your server's IP.
-- `*.local.homeinfra.org` resolves to `127.0.0.1` by default and can be used directly for local testing.
-- A proxy server (if necessary).
-> If you need HTTPS, add an HTTP gateway (such as Caddy) in front; see the later sections.
-Run the following commands to start LightMirrors:
+Run the following commands to try out LightMirrors:
 ```bash
@@ -61,6 +54,18 @@ pip3 download -i http://torch.local.homeinfra.org/whl/ torch --trusted-host torc
 ### Deployment
+### Prerequisites
+- docker + docker-compose.
+- A domain name, with an A record for `*.yourdomain` pointing to your server's IP.
+- `*.local.homeinfra.org` resolves to `127.0.0.1` by default and can be used directly for local testing.
+- A proxy server (if necessary).
+> If you need HTTPS, add an HTTP gateway (such as Caddy) in front; see the later sections.
+> **For the DockerHub mirror we strongly recommend enabling HTTPS.**
 Edit the `.env` file and set the following parameters:
 - `BASE_DOMAIN`: the base domain, e.g. `local.homeinfra.org`; the mirror sites are then reachable under `*.local.homeinfra.org`.


@@ -1,9 +1,10 @@
 services:
   lightmirrors:
     image: lightmirrors/mirrors
-    build: ./mirrors
+    build:
+      context: ./src
     volumes:
-      - ./mirrors:/app
+      - ./src/:/app
       - ./data/cache:/app/cache
     env_file:
       - .env
@@ -29,4 +30,5 @@ services:
     restart: unless-stopped
 networks:
   app:
+    name: lightmirrors_app
     driver: bridge


@@ -8,7 +8,6 @@ ADD https://hub.gitmirror.com/https://github.com/mayswind/AriaNg/releases/downlo
 RUN unzip AriaNg-1.3.7.zip && rm AriaNg-1.3.7.zip
 ADD requirements.txt /app/requirements.txt
 RUN pip install -r /app/requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple/
@@ -17,6 +16,6 @@ WORKDIR /app
 EXPOSE 8080
-CMD ["python", "server.py"]
+CMD ["python", "mirrorsrun/server.py"]

src/Makefile (new file, 8 lines)

@@ -0,0 +1,8 @@
```makefile
.PHONY: check lint

check:
	black . --exclude .venv --check
	flake8 . --exclude=.venv
	mypy . --check-untyped-defs --exclude .venv
lint:
	black . --exclude .venv
```


@@ -4,15 +4,16 @@ import uuid
 import httpx

-from config import RPC_SECRET, ARIA2_RPC_URL
+from mirrorsrun.config import RPC_SECRET, ARIA2_RPC_URL

 logger = logging.getLogger(__name__)


 async def send_request(method, params=None):
+    request_id = uuid.uuid4().hex
     payload = {
         "jsonrpc": "2.0",
-        "id": uuid.uuid4().hex,
+        "id": request_id,
         "method": method,
         "params": [f"token:{RPC_SECRET}"] + (params or []),
     }
@@ -22,13 +23,12 @@ async def send_request(method, params=None):
         mounts={"all://": httpx.AsyncHTTPTransport()}
     ) as client:
         response = await client.post(ARIA2_RPC_URL, json=payload)
-        logger.info(
-            f"aria2 request: {method} {params} -> {response.status_code} {response.text}"
-        )
         try:
             return response.json()
         except json.JSONDecodeError as e:
-            logger.warning(f"aria2 response: {response.status_code} {response.text}")
+            logger.warning(
+                f"aria2 request failed, response: {response.status_code} {response.text}"
+            )
             raise e
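For context, a minimal usage sketch of the helper above (not part of the diff; the URL and download options are illustrative, and `aria2.addUri` is a standard aria2 JSON-RPC method):

```python
import asyncio

from mirrorsrun.aria2_api import send_request


async def main():
    # Queue a download through aria2; the helper prepends the RPC token and
    # assigns a fresh request id to the JSON-RPC payload.
    result = await send_request(
        "aria2.addUri",
        [["https://example.com/some-large-file.bin"], {"dir": "/app/cache"}],
    )
    print(result)


asyncio.run(main())
```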


@@ -8,5 +8,5 @@ SCHEME = os.environ.get("SCHEME", None)
 assert SCHEME in ["http", "https"]
 CACHE_DIR = os.environ.get("CACHE_DIR", "/app/cache/")
-EXTERNAL_HOST_ARIA2 = f"aria2." + BASE_DOMAIN
+EXTERNAL_HOST_ARIA2 = f"aria2.{BASE_DOMAIN}"
 EXTERNAL_URL_ARIA2 = f"{SCHEME}://{EXTERNAL_HOST_ARIA2}/aria2/index.html"


@@ -51,9 +51,9 @@ async def direct_proxy(
     res_headers = upstream_response.headers
-    cl = res_headers.pop("content-length", None)
-    ce = res_headers.pop("content-encoding", None)
+    res_headers.pop("content-length", None)
+    res_headers.pop("content-encoding", None)
-    # print(target_url, cl, ce)
     content = upstream_response.content
     response = Response(
         headers=res_headers,
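As background for why those two headers are dropped (an aside, not in the diff): httpx transparently decodes compressed bodies, so the upstream `content-length`/`content-encoding` no longer describe the bytes being returned, and the framework should recompute them. A small sketch, assuming the npm registry URL used elsewhere in this repo:

```python
# Aside (assumption, not part of the diff): httpx decodes gzip'd bodies, so the
# upstream headers describe the wire format, not the decoded content we re-send.
import httpx

resp = httpx.get("https://registry.npmjs.org/react", headers={"accept-encoding": "gzip"})
print(resp.headers.get("content-encoding"))  # likely "gzip", as sent by upstream
print(len(resp.content))                     # length of the *decoded* body, not the wire length
```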


@@ -11,9 +11,9 @@ from starlette.requests import Request
 from starlette.responses import Response
 from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR, HTTP_504_GATEWAY_TIMEOUT

-from aria2_api import add_download
-from config import CACHE_DIR, EXTERNAL_URL_ARIA2
+from mirrorsrun.aria2_api import add_download
+from mirrorsrun.config import CACHE_DIR, EXTERNAL_URL_ARIA2
 from typing import Optional, Callable

 logger = logging.getLogger(__name__)


@@ -1,3 +1,7 @@
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.dirname(__file__)))
 import base64
 import signal
 import urllib.parse
@@ -10,17 +14,17 @@ from starlette.requests import Request
 from starlette.responses import RedirectResponse, Response
 from starlette.staticfiles import StaticFiles

-from config import (
+from mirrorsrun.config import (
     BASE_DOMAIN,
     RPC_SECRET,
     EXTERNAL_URL_ARIA2,
     EXTERNAL_HOST_ARIA2,
     SCHEME,
 )
-from sites.docker import docker
-from sites.npm import npm
-from sites.pypi import pypi
-from sites.torch import torch
+from mirrorsrun.sites.docker import docker
+from mirrorsrun.sites.npm import npm
+from mirrorsrun.sites.pypi import pypi
+from mirrorsrun.sites.torch import torch

 app = FastAPI()



@@ -1,5 +1,6 @@
 import base64
 import json
+import logging
 import re
 import time
 from typing import Dict
@@ -8,38 +9,49 @@ import httpx
 from starlette.requests import Request
 from starlette.responses import Response

-from proxy.file_cache import try_file_based_cache
-from proxy.direct import direct_proxy
+from mirrorsrun.proxy.direct import direct_proxy
+from mirrorsrun.proxy.file_cache import try_file_based_cache
+
+logger = logging.getLogger(__name__)

 BASE_URL = "https://registry-1.docker.io"

-cached_token: Dict[str, str] = {}
+
+class CachedToken:
+    token: str
+    exp: int
+
+    def __init__(self, token, exp):
+        self.token = token
+        self.exp = exp
+
+
+cached_tokens: Dict[str, CachedToken] = {}

 # https://github.com/opencontainers/distribution-spec/blob/main/spec.md
-name_regex = "[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*(\/[a-z0-9]+((\.|_|__|-+)[a-z0-9]+)*)*"
+name_regex = "[a-z0-9]+((.|_|__|-+)[a-z0-9]+)*(/[a-z0-9]+((.|_|__|-+)[a-z0-9]+)*)*"
 reference_regex = "[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}"


 def try_extract_image_name(path):
-    pattern = rf"^/v2/(.*)/([a-zA-Z]+)/(.*)$"
+    pattern = r"^/v2/(.*)/([a-zA-Z]+)/(.*)$"
     match = re.search(pattern, path)
     if match:
         assert len(match.groups()) == 3
-        name, operation, reference = match.groups()
+        name, resource, reference = match.groups()
         assert re.match(name_regex, name)
         assert re.match(reference_regex, reference)
-        assert operation in ["manifests", "blobs"]
-        return name, operation, reference
+        assert resource in ["manifests", "blobs", "tags"]
+        return name, resource, reference

     return None, None, None


 def get_docker_token(name):
-    cached = cached_token.get(name, {})
-    exp = cached.get("exp", 0)
-    if exp > time.time():
-        return cached.get("token", 0)
+    cached = cached_tokens.get(name, None)
+    if cached and cached.exp > time.time():
+        return cached.token

     url = "https://auth.docker.io/token"
     params = {
@@ -61,7 +73,7 @@ def get_docker_token(name):
     assert payload["iss"] == "auth.docker.io"
     assert len(payload["access"]) > 0

-    cached_token[name] = {"exp": payload["exp"], "token": token}
+    cached_tokens[name] = CachedToken(exp=payload["exp"], token=token)

     return token

@@ -75,8 +87,6 @@ def inject_token(name: str, req: Request, httpx_req: httpx.Request):
 async def post_process(request: Request, response: Response):
     if response.status_code == 307:
         location = response.headers["location"]
-        # TODO: logger
-        print("[redirect]", location)
         return await try_file_based_cache(request, location)

     return response

@@ -84,7 +94,6 @@ async def post_process(request: Request, response: Response):
 async def docker(request: Request):
     path = request.url.path

-    print("[request]", request.method, request.url)
     if not path.startswith("/v2/"):
         return Response(content="Not Found", status_code=404)

@@ -92,7 +101,7 @@ async def docker(request: Request):
         return Response(content="OK")
     # return await direct_proxy(request, BASE_URL + '/v2/')

-    name, operation, reference = try_extract_image_name(path)
+    name, resource, reference = try_extract_image_name(path)
     if not name:
         return Response(content="404 Not Found", status_code=404)

@@ -101,10 +110,9 @@ async def docker(request: Request):
     if "/" not in name:
         name = f"library/{name}"

-    target_url = BASE_URL + f"/v2/{name}/{operation}/{reference}"
-    # logger
-    print("[PARSED]", path, name, operation, reference, target_url)
+    target_url = BASE_URL + f"/v2/{name}/{resource}/{reference}"
+    logger.info(f"got docker request, {path=} {name=} {resource=} {reference=} {target_url=}")

     return await direct_proxy(
         request,
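As a quick illustration (not part of the diff) of how the renamed parser behaves with the regexes above:

```python
# Hypothetical check of try_extract_image_name from mirrorsrun/sites/docker.py.
from mirrorsrun.sites.docker import try_extract_image_name

print(try_extract_image_name("/v2/library/nginx/manifests/latest"))
# ('library/nginx', 'manifests', 'latest')

print(try_extract_image_name("/v2/alpine/blobs/sha256:abc123"))
# ('alpine', 'blobs', 'sha256:abc123'); docker() later rewrites "alpine" to "library/alpine"
```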


@@ -1,6 +1,6 @@
 from starlette.requests import Request

-from proxy.direct import direct_proxy
+from mirrorsrun.proxy.direct import direct_proxy

 BASE_URL = "https://registry.npmjs.org/"


@@ -3,8 +3,8 @@ import re
 from starlette.requests import Request
 from starlette.responses import Response

-from proxy.direct import direct_proxy
-from proxy.file_cache import try_file_based_cache
+from mirrorsrun.proxy.direct import direct_proxy
+from mirrorsrun.proxy.file_cache import try_file_based_cache

 pypi_file_base_url = "https://files.pythonhosted.org"
 pypi_base_url = "https://pypi.org"


@@ -1,8 +1,8 @@
 from starlette.requests import Request
 from starlette.responses import Response

-from proxy.file_cache import try_file_based_cache
-from proxy.direct import direct_proxy
+from mirrorsrun.proxy.file_cache import try_file_based_cache
+from mirrorsrun.proxy.direct import direct_proxy

 BASE_URL = "https://download.pytorch.org"


@@ -33,6 +33,52 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (>=0.23)"]
[[package]]
name = "black"
version = "24.3.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
{file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"},
{file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"},
{file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"},
{file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"},
{file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"},
{file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"},
{file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"},
{file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"},
{file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"},
{file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"},
{file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"},
{file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"},
{file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"},
{file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"},
{file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"},
{file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"},
{file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"},
{file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"},
{file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"},
{file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"},
{file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"},
{file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"},
]
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "certifi"
version = "2024.2.2"
@@ -102,6 +148,22 @@ typing-extensions = ">=4.8.0"
[package.extras]
all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
[[package]]
name = "flake8"
version = "7.0.0"
description = "the modular source code checker: pep8 pyflakes and co"
optional = false
python-versions = ">=3.8.1"
files = [
{file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"},
{file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"},
]
[package.dependencies]
mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.11.0,<2.12.0"
pyflakes = ">=3.2.0,<3.3.0"
[[package]]
name = "h11"
version = "0.14.0"
@@ -169,6 +231,123 @@ files = [
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
]
[[package]]
name = "mccabe"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
optional = false
python-versions = ">=3.6"
files = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
[[package]]
name = "mypy"
version = "1.9.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"},
{file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"},
{file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"},
{file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"},
{file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"},
{file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"},
{file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"},
{file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"},
{file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"},
{file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"},
{file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"},
{file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"},
{file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"},
{file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"},
{file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"},
{file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"},
{file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"},
{file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"},
{file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"},
{file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"},
{file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"},
{file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"},
{file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"},
{file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"},
{file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"},
{file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"},
{file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"},
]
[package.dependencies]
mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=4.1.0"
[package.extras]
dmypy = ["psutil (>=4.0)"]
install-types = ["pip"]
mypyc = ["setuptools (>=50)"]
reports = ["lxml"]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "packaging"
version = "24.0"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
{file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
]
[[package]]
name = "pathspec"
version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
name = "platformdirs"
version = "4.2.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.8"
files = [
{file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
{file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
]
[package.extras]
docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
[[package]]
name = "pycodestyle"
version = "2.11.1"
description = "Python style guide checker"
optional = false
python-versions = ">=3.8"
files = [
{file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"},
{file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"},
]
[[package]]
name = "pydantic"
version = "2.6.0"
@@ -279,6 +458,17 @@ files = [
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pyflakes"
version = "3.2.0"
description = "passive checker of Python programs"
optional = false
python-versions = ">=3.8"
files = [
{file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
{file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
]
[[package]]
name = "sniffio"
version = "1.3.0"
@@ -308,6 +498,17 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "typing-extensions"
version = "4.9.0"
@@ -341,4 +542,4 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)",
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
-content-hash = "3f85c224110f9432dab85aaffc90d271b06475dfb7ab97e89d8817d2afeab223"
+content-hash = "95fe42214f7987d04a55af59e60128b0a3e224b1b2abbdf962053703379a7160"


@@ -12,6 +12,11 @@ uvicorn = "^0.27.0.post1"
 httpx = "^0.26.0"

+[tool.poetry.group.dev.dependencies]
+flake8 = "^7.0.0"
+black = "^24.3.0"
+mypy = "^1.9.0"
+
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

src/setup.cfg (new file, 2 lines)

@@ -0,0 +1,2 @@
```ini
[flake8]
max-line-length = 99
```

test/Dockerfile (new file, 7 lines)

@@ -0,0 +1,7 @@
```dockerfile
FROM alpine:3
RUN apk update && apk add python3 py3-pip docker-cli
WORKDIR /app
CMD tail -f /dev/null
```

test/docker-compose.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
```yaml
services:
  test:
    image: lightmirrors/test
    build:
      context: .
      dockerfile: Dockerfile
    volumes:
      - .:/app
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - lightmirrors_app
    external_links:
      - lightmirrors:aria2.local.homeinfra.org
      - lightmirrors:docker.local.homeinfra.org
      - lightmirrors:pypi.local.homeinfra.org
      - lightmirrors:torch.local.homeinfra.org
      - lightmirrors:npm.local.homeinfra.org
      - lightmirrors:ubuntu.local.homeinfra.org
      - lightmirrors:debian.local.homeinfra.org
      - lightmirrors:proxy.local.homeinfra.org
      - lightmirrors:github.local.homeinfra.org
      - lightmirrors:alpine.local.homeinfra.org

networks:
  lightmirrors_app:
    external: true
```

test/mirrors_test.py (new file, 20 lines)

@@ -0,0 +1,20 @@
```python
import unittest

from utils import call

PYPI_HOST = "pypi.local.homeinfra.org"
PYPI_INDEX = f"http://{PYPI_HOST}/simple"

TORCH_HOST = "torch.local.homeinfra.org"
TORCH_INDEX = f"http://{TORCH_HOST}/whl"


class TestPypi(unittest.TestCase):
    def test_pypi_http(self):
        call(f"pip download -i {PYPI_INDEX} django --trusted-host {PYPI_HOST} --dest /tmp/pypi/")

    def test_torch_http(self):
        call(f"pip download -i {TORCH_INDEX} tqdm --trusted-host {TORCH_HOST} --dest /tmp/torch/")

    def test_docker_pull(self):
        call(f"docker pull docker.local.homeinfra.org/alpine:3.12")
```
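A minimal sketch (assumed invocation, not in the diff) for running this suite from inside the test container, where the `*.local.homeinfra.org` hostnames resolve through the compose `external_links`:

```python
# run_tests.py (hypothetical helper): discover and run test/mirrors_test.py with unittest.
import unittest

if __name__ == "__main__":
    suite = unittest.defaultTestLoader.discover(".", pattern="mirrors_test.py")
    unittest.TextTestRunner(verbosity=2).run(suite)
```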

test/utils.py (new file, 27 lines)

@@ -0,0 +1,27 @@
```python
import os
import subprocess
from pathlib import Path

test_dir = Path(__file__).parent
root_dir = Path(__file__).parent.parent


def call(cmd):
    print(f">> {cmd}")
    p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    assert p.returncode == 0, f"Error: {stderr.decode()}"
    print(">>", stdout.decode())
    return stdout.decode(), stderr.decode()


class SetupMirrors():
    def __enter__(self):
        os.chdir(root_dir)
        call("docker-compose up -d")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        call("docker-compose down")
        os.chdir(test_dir)
        return False
```
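A hypothetical end-to-end sketch built on the helpers above (the pip command mirrors `mirrors_test.py`; not part of the diff):

```python
from utils import SetupMirrors, call

# Bring the LightMirrors stack up, exercise the pypi mirror once, then tear it down.
with SetupMirrors():
    call(
        "pip download -i http://pypi.local.homeinfra.org/simple django "
        "--trusted-host pypi.local.homeinfra.org --dest /tmp/pypi/"
    )
```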