bandersnatch package

Package contents

Submodules

bandersnatch.configuration module

Module containing classes to access the bandersnatch configuration file

class bandersnatch.configuration.BandersnatchConfig(*args: Any, **kwargs: Any)[source]

Bases: object

SHOWN_DEPRECATIONS = False
check_for_deprecations() → None[source]
load_configuration() → None[source]

Read the configuration from a configuration file
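
A minimal usage sketch (assuming the singleton exposes its parsed ConfigParser as a "config" attribute, which is how the rest of this reference consumes it):

   from bandersnatch.configuration import BandersnatchConfig

   # BandersnatchConfig is backed by the Singleton metaclass below, so
   # constructing it repeatedly returns the same instance.
   config = BandersnatchConfig().config
   print(config.get("mirror", "directory"))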

class bandersnatch.configuration.SetConfigValues(json_save, root_uri, diff_file_path, diff_append_epoch, digest_name, storage_backend_name, cleanup, release_files_save, compare_method)[source]

Bases: tuple

cleanup: bool

Alias for field number 6

compare_method: str

Alias for field number 8

diff_append_epoch: bool

Alias for field number 3

diff_file_path: str

Alias for field number 2

digest_name: str

Alias for field number 4

json_save: bool

Alias for field number 0

release_files_save: bool

Alias for field number 7

root_uri: str

Alias for field number 1

storage_backend_name: str

Alias for field number 5

class bandersnatch.configuration.Singleton[source]

Bases: type

bandersnatch.configuration.validate_config_values(config: configparser.ConfigParser) → bandersnatch.configuration.SetConfigValues[source]
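
For illustration, validate_config_values can be driven with a hand-built ConfigParser; the config file name below is hypothetical, and the required options are whatever your [mirror] section defines:

   import configparser
   from bandersnatch.configuration import validate_config_values

   parser = configparser.ConfigParser()
   parser.read("bandersnatch.conf")  # hypothetical config file path

   values = validate_config_values(parser)
   # SetConfigValues is a named tuple, so fields are available by name:
   print(values.storage_backend_name, values.compare_method)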

bandersnatch.delete module

async bandersnatch.delete.delete_packages(config: configparser.ConfigParser, args: argparse.Namespace, master: bandersnatch.master.Master) → int[source]
async bandersnatch.delete.delete_path(blob_path: pathlib.Path, dry_run: bool = False) → int[source]

bandersnatch.filter module

Blocklist management

class bandersnatch.filter.Filter(*args: Any, **kwargs: Any)[source]

Bases: object

Base Filter class

property allowlist
property blocklist
check_match(**kwargs: Any) → bool[source]

Check if the plugin matches based on the arguments provided.

Returns

True if the values match a filter rule, False otherwise

Return type

bool

deprecated_name: str = ''
filter(metadata: dict) → bool[source]

Check if the plugin matches based on the package’s metadata.

Returns

True if the values match a filter rule, False otherwise

Return type

bool

initialize_plugin() → None[source]

Code to initialize the plugin

name = 'filter'
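
A sketch of a custom plugin built on this base class; the class and module names are hypothetical, and registration through one of the bandersnatch_filter_plugins entry-point groups (listed under LoadedFilters below) is assumed:

   from bandersnatch.filter import FilterProjectPlugin

   class ExampleProjectFilter(FilterProjectPlugin):  # hypothetical plugin
       name = "example_project_filter"

       def initialize_plugin(self) -> None:
           # Read any plugin-specific settings here.
           pass

       def filter(self, metadata: dict) -> bool:
           # Per the base class docstring: True if the values match a
           # filter rule, False otherwise.
           return metadata.get("info", {}).get("name") == "example-project"
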
class bandersnatch.filter.FilterMetadataPlugin(*args: Any, **kwargs: Any)[source]

Bases: bandersnatch.filter.Filter

Plugin that blocks sync operations for an entire project based on info fields.

name = 'metadata_plugin'
class bandersnatch.filter.FilterProjectPlugin(*args: Any, **kwargs: Any)[source]

Bases: bandersnatch.filter.Filter

Plugin that blocks sync operations for an entire project

name = 'project_plugin'
class bandersnatch.filter.FilterReleaseFilePlugin(*args: Any, **kwargs: Any)[source]

Bases: bandersnatch.filter.Filter

Plugin that modifies the download of specific release or dist files

name = 'release_file_plugin'
class bandersnatch.filter.FilterReleasePlugin(*args: Any, **kwargs: Any)[source]

Bases: bandersnatch.filter.Filter

Plugin that modifies the download of specific releases or dist files

name = 'release_plugin'
class bandersnatch.filter.LoadedFilters(load_all: bool = False)[source]

Bases: object

A class to load all of the enabled filters

ENTRYPOINT_GROUPS = ['bandersnatch_filter_plugins.v2.project', 'bandersnatch_filter_plugins.v2.metadata', 'bandersnatch_filter_plugins.v2.release', 'bandersnatch_filter_plugins.v2.release_file']
filter_metadata_plugins() → List[bandersnatch.filter.Filter][source]

Load and return the metadata filtering plugin objects

Returns

List of objects derived from the bandersnatch.filter.Filter class

Return type

list of bandersnatch.filter.Filter

filter_project_plugins() → List[bandersnatch.filter.Filter][source]

Load and return the project filtering plugin objects

Returns

List of objects derived from the bandersnatch.filter.Filter class

Return type

list of bandersnatch.filter.Filter

filter_release_file_plugins() → List[bandersnatch.filter.Filter][source]

Load and return the release file filtering plugin objects

Returns

List of objects derived from the bandersnatch.filter.Filter class

Return type

list of bandersnatch.filter.Filter

filter_release_plugins() → List[bandersnatch.filter.Filter][source]

Load and return the release filtering plugin objects

Returns

List of objects derived from the bandersnatch.filter.Filter class

Return type

list of bandersnatch.filter.Filter
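
A sketch of consuming the loaded plugin lists (this assumes the bandersnatch configuration has already been loaded, since plugins read their settings from it):

   from bandersnatch.filter import LoadedFilters

   filters = LoadedFilters(load_all=True)
   for plugin in filters.filter_release_plugins():
       print(plugin.name)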

bandersnatch.log module

bandersnatch.log.setup_logging(args: Any) → logging.StreamHandler[source]

bandersnatch.main module

async bandersnatch.main.async_main(args: argparse.Namespace, config: configparser.ConfigParser) → int[source]
bandersnatch.main.main(loop: Optional[asyncio.events.AbstractEventLoop] = None) → int[source]

bandersnatch.master module

class bandersnatch.master.Master(url: str, timeout: float = 10.0, global_timeout: Optional[float] = 18000.0)[source]

Bases: object

async all_packages() → Dict[str, int][source]
async changed_packages(last_serial: int) → Dict[str, int][source]
async check_for_stale_cache(path: str, required_serial: Optional[int], got_serial: Optional[int]) → None[source]
get(path: str, required_serial: Optional[int], **kw: Any) → AsyncGenerator[aiohttp.client_reqrep.ClientResponse, None][source]
async get_package_metadata(package_name: str, serial: int = 0) → Any[source]
async rpc(method_name: str, serial: int = 0) → Any[source]
async url_fetch(url: str, file_path: pathlib.Path, executor: Optional[Union[concurrent.futures.process.ProcessPoolExecutor, concurrent.futures.thread.ThreadPoolExecutor]] = None, chunk_size: int = 65536) → None[source]
property xmlrpc_url
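
A sketch of querying PyPI through Master; using it as an async context manager (so it can manage its HTTP session) is an assumption based on how the mirror module drives it:

   import asyncio
   from bandersnatch.master import Master

   async def show_package_count() -> None:
       async with Master("https://pypi.org") as master:
           packages = await master.all_packages()
           print(f"{len(packages)} packages on PyPI")

   asyncio.run(show_package_count())
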
exception bandersnatch.master.StalePage[source]

Bases: Exception

We got a page back from PyPI that doesn’t meet our expected serial.

exception bandersnatch.master.XmlRpcError[source]

Bases: aiohttp.client_exceptions.ClientError

Issue getting package listing from PyPI Repository

bandersnatch.mirror module

class bandersnatch.mirror.BandersnatchMirror(homedir: pathlib.Path, master: bandersnatch.master.Master, storage_backend: Optional[str] = None, stop_on_error: bool = False, workers: int = 3, hash_index: bool = False, json_save: bool = False, digest_name: Optional[str] = None, root_uri: Optional[str] = None, keep_index_versions: int = 0, diff_file: Optional[Union[pathlib.Path, str]] = None, diff_append_epoch: bool = False, diff_full_path: Optional[Union[pathlib.Path, str]] = None, flock_timeout: int = 1, diff_file_list: Optional[List] = None, *, cleanup: bool = False, release_files_save: bool = True, compare_method: Optional[str] = None)[source]

Bases: bandersnatch.mirror.Mirror

async cleanup_non_pep_503_paths(package: bandersnatch.package.Package) → None[source]

Before 4.0 we used to store backwards-compatible named directories for older versions of pip. This function checks for them and cleans them up.

async determine_packages_to_sync() → None[source]

Update self.packages_to_sync to contain the packages that need to be synced.

async download_file(url: str, file_size: str, upload_time: datetime.datetime, sha256sum: str, chunk_size: int = 65536) → Optional[pathlib.Path][source]
errors = False
finalize_sync() → None[source]
find_package_indexes_in_dir(simple_dir: pathlib.Path) → List[str][source]

Given a directory that contains simple package indexes, return a sorted list of normalized package names. This presumes every directory within it is a simple package index directory.

find_target_serial() → int[source]
gen_data_requires_python(release: Dict) → str[source]
generate_simple_page(package: bandersnatch.package.Package) → str[source]
property generationfile
get_simple_dirs(simple_dir: pathlib.Path) → List[pathlib.Path][source]

Return a list of simple index directories that should be searched for package indexes when compiling the main index page.

json_file(package_name: str) → pathlib.Path[source]
need_index_sync = True
need_wrapup = False
on_error(exception: BaseException, **kwargs: Dict) → None[source]
async process_package(package: bandersnatch.package.Package) → None[source]
record_finished_package(name: str) → None[source]
save_json_metadata(package_info: Dict, name: str) → bool[source]

Take the JSON metadata we just fetched and save it to disk

simple_directory(package: bandersnatch.package.Package) → pathlib.Path[source]
property statusfile
sync_index_page() → None[source]
async sync_release_files(package: bandersnatch.package.Package) → None[source]

Purge and download files, returning the files removed and added

sync_simple_page(package: bandersnatch.package.Package) → None[source]
property todolist
property webdir
wrapup_successful_sync() → None[source]
class bandersnatch.mirror.Mirror(master: bandersnatch.master.Master, workers: int = 3)[source]

Bases: object

async determine_packages_to_sync() → None[source]

Update self.packages_to_sync to contain the packages that need to be synced.

finalize_sync() → None[source]
now = None
on_error(exception: BaseException, **kwargs: Dict) → None[source]
async package_syncer(idx: int) → None[source]
packages_to_sync: Dict[str, Union[int, str]] = {}
async process_package(package: bandersnatch.package.Package) → None[source]
async sync_packages() → None[source]
synced_serial: Optional[int] = 0
async synchronize(specific_packages: Optional[List[str]] = None) → Dict[str, Set[str]][source]
target_serial: Optional[int] = None
async bandersnatch.mirror.mirror(config: configparser.ConfigParser, specific_packages: Optional[List[str]] = None) → int[source]
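
A sketch of invoking a full mirror pass programmatically; the config file path is hypothetical, and normally main() performs this wiring:

   import asyncio
   import configparser
   from bandersnatch.mirror import mirror

   config = configparser.ConfigParser()
   config.read("bandersnatch.conf")  # hypothetical config file path

   # mirror() returns an int; interpreting it as a process exit code is
   # an assumption based on its use from the command line entry point.
   exit_code = asyncio.run(mirror(config))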

bandersnatch.package module

class bandersnatch.package.Package(name: str, serial: int = 0)[source]

Bases: object

filter_all_releases(release_filters: List[Filter]) → bool[source]

Filter releases, removing those that fail the filters

filter_all_releases_files(release_file_filters: List[Filter]) → bool[source]

Filter release files and remove empty releases after doing so.

filter_metadata(metadata_filters: List[Filter]) → bool[source]

Run the metadata filtering plugins

property info
property last_serial
property metadata
property release_files
property releases
async update_metadata(master: Master, attempts: int = 3) → None[source]
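
A sketch of fetching one package's metadata, reusing the async-context-manager usage of Master assumed above:

   import asyncio
   from bandersnatch.master import Master
   from bandersnatch.package import Package

   async def inspect(name: str) -> None:
       async with Master("https://pypi.org") as master:
           package = Package(name, serial=0)
           await package.update_metadata(master, attempts=3)
           print(package.last_serial, len(package.releases))

   asyncio.run(inspect("requests"))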

bandersnatch.storage module

Storage management

class bandersnatch.storage.Storage(*args: Any, config: Optional[configparser.ConfigParser] = None, **kwargs: Any)[source]

Bases: object

Base Storage class

PATH_BACKEND

alias of pathlib.Path

static canonicalize_package(name: str) → str[source]
compare_files(file1: Union[pathlib.Path, str], file2: Union[pathlib.Path, str]) → bool[source]

Compare two files and determine whether they contain the same data. Return True if they match

copy_file(source: Union[pathlib.Path, str], dest: Union[pathlib.Path, str]) → None[source]

Copy a file from source to dest

delete(path: Union[pathlib.Path, str], dry_run: bool = False) → int[source]

Delete the provided path.

delete_file(path: Union[pathlib.Path, str], dry_run: bool = False) → int[source]

Delete the provided path, recursively if necessary.

property directory
exists(path: Union[pathlib.Path, str]) → bool[source]

Check whether the provided path exists

find(root: Union[pathlib.Path, str], dirs: bool = True) → str[source]

A test helper simulating ‘find’.

Iterates over directories and filenames, given as relative paths to the root.

get_file_size(path: Union[pathlib.Path, str]) → int[source]

Get the size of a given path in bytes

get_flock_path() → Union[pathlib.Path, str][source]
get_hash(path: Union[pathlib.Path, str], function: str = 'sha256') → str[source]

Get the hash digest of a given path (sha256 by default)

get_json_paths(name: str) → Sequence[Union[pathlib.Path, str]][source]
get_lock(path: str) → filelock.BaseFileLock[source]

Retrieve the appropriate FileLock backend for this storage plugin

Parameters

path (str) – The path to use for locking

Returns

A FileLock backend for obtaining locks

Return type

filelock.BaseFileLock
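
A sketch of taking the mirror-wide lock (assuming the configured backend is available; filelock's BaseFileLock supports the context-manager protocol):

   from bandersnatch.storage import storage_backend_plugins

   storage = next(iter(storage_backend_plugins()))
   with storage.get_lock(str(storage.get_flock_path())):
       pass  # mutate the mirror while holding the lock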

get_upload_time(path: Union[pathlib.Path, str]) → datetime.datetime[source]

Get the upload time of a given path

hash_file(path: Union[pathlib.Path, str], function: str = 'sha256') → str[source]
initialize_plugin() → None[source]

Code to initialize the plugin

is_dir(path: Union[pathlib.Path, str]) → bool[source]

Check whether the provided path is a directory.

is_file(path: Union[pathlib.Path, str]) → bool[source]

Check whether the provided path is a file.

iter_dir(path: Union[pathlib.Path, str]) → Generator[Union[pathlib.Path, str], None, None][source]

Iterate over the path, returning the sub-paths

mkdir(path: Union[pathlib.Path, str], exist_ok: bool = False, parents: bool = False) → None[source]

Create the provided directory

move_file(source: Union[pathlib.Path, str], dest: Union[pathlib.Path, str]) → None[source]

Move a file from source to dest

name = 'storage'
open_file(path: Union[pathlib.Path, str], text: bool = True) → Generator[IO, None, None][source]

Yield a file context to iterate over. If text is false, the file is opened in binary ('rb') mode.

read_file(path: Union[pathlib.Path, str], text: bool = True, encoding: str = 'utf-8', errors: Optional[str] = None) → Union[str, bytes][source]

Return the contents of the provided path. If text is false, the file is read in binary ('rb') mode.

rewrite(filepath: Union[pathlib.Path, str], mode: str = 'w', **kw: Any) → Generator[IO, None, None][source]

Rewrite an existing file atomically, so that programs running in parallel do not hit race conditions while reading it.

rmdir(path: Union[pathlib.Path, str], recurse: bool = False, force: bool = False, ignore_errors: bool = False, dry_run: bool = False) → int[source]

Remove the directory. If recurse is True, allow removing empty children. If force is True, remove contents destructively.

set_upload_time(path: Union[pathlib.Path, str], time: datetime.datetime) → None[source]

Set the upload time of a given path

symlink(source: Union[pathlib.Path, str], dest: Union[pathlib.Path, str]) → None[source]

Create a symlink at dest that points back at source

update_safe(filename: Union[pathlib.Path, str], **kw: Any) → Generator[IO, None, None][source]

Rewrite a file atomically.

Clients are allowed to delete the tmpfile to signal that they don’t want to have it updated.

write_file(path: Union[pathlib.Path, str], contents: Union[str, bytes]) → None[source]

Write data to the provided path. If contents is a string, the file is opened and written in text ('w') mode with utf-8 encoding; if bytes are supplied, it is written in binary ('wb') mode.

class bandersnatch.storage.StoragePlugin(*args: Any, config: Optional[configparser.ConfigParser] = None, **kwargs: Any)[source]

Bases: bandersnatch.storage.Storage

Plugin that provides a storage backend for bandersnatch

flock_path: Union[pathlib.Path, str]
name = 'storage_plugin'
bandersnatch.storage.load_storage_plugins(entrypoint_group: str, enabled_plugin: Optional[str] = None, config: Optional[configparser.ConfigParser] = None, clear_cache: bool = False) → Set[bandersnatch.storage.Storage][source]

Load all storage plugins that are registered with pkg_resources

Parameters
  • entrypoint_group (str) – The entrypoint group name to load plugins from

  • enabled_plugin (str) – The optional enabled storage plugin to search for

  • config (configparser.ConfigParser) – The optional configparser instance to pass in

  • clear_cache (bool) – Whether to clear the plugin cache

Returns

A set of objects derived from the Storage class

Return type

Set of Storage

bandersnatch.storage.storage_backend_plugins(backend: Optional[str] = 'filesystem', config: Optional[configparser.ConfigParser] = None, clear_cache: bool = False) → Iterable[bandersnatch.storage.Storage][source]

Load and return the storage backend plugin objects

Parameters
  • backend (str) – The optional enabled storage plugin to search for

  • config (configparser.ConfigParser) – The optional configparser instance to pass in

  • clear_cache (bool) – Whether to clear the plugin cache

Returns

List of objects derived from the bandersnatch.storage.Storage class

Return type

list of bandersnatch.storage.Storage
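
A sketch of obtaining the configured storage backend (the "filesystem" backend ships with bandersnatch, so it is assumed to be present):

   from bandersnatch.storage import storage_backend_plugins

   backend = next(iter(storage_backend_plugins(backend="filesystem")))
   print(backend.name)
   print(backend.exists("/srv/pypi/web"))  # hypothetical mirror path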

bandersnatch.utils module

bandersnatch.utils.bandersnatch_safe_name(name: str) → str[source]

Convert an arbitrary string to a standard distribution name. Any runs of non-alphanumeric/. characters are replaced with a single '-'.

  • This was copied from pkg_resources (part of setuptools)

bandersnatch also lowercases the returned name
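
For example (outputs inferred from the rule above, not taken from the test suite):

   >>> from bandersnatch.utils import bandersnatch_safe_name
   >>> bandersnatch_safe_name("Django_REST framework")
   'django-rest-framework'
   >>> bandersnatch_safe_name("zope.interface")
   'zope.interface'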

bandersnatch.utils.convert_url_to_path(url: str) → str[source]
bandersnatch.utils.find(root: Union[pathlib.Path, str], dirs: bool = True) → str[source]

A test helper simulating ‘find’.

Iterates over directories and filenames, given as relative paths to the root.

bandersnatch.utils.hash(path: pathlib.Path, function: str = 'sha256') → str[source]
bandersnatch.utils.make_time_stamp() → str[source]

Helper function that returns a timestamp suitable for use in a filename on any OS

bandersnatch.utils.recursive_find_files(files: Set[pathlib.Path], base_dir: pathlib.Path) → None[source]
bandersnatch.utils.rewrite(filepath: Union[str, pathlib.Path], mode: str = 'w', **kw: Any) → Generator[IO, None, None][source]

Rewrite an existing file atomically, so that programs running in parallel do not hit race conditions while reading it.
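
A sketch of its use as a context manager (the file name is hypothetical); the new content becomes visible only once the context exits:

   from bandersnatch.utils import rewrite

   with rewrite("web/simple/index.html") as f:
       f.write("<html>...</html>")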

bandersnatch.utils.unlink_parent_dir(path: pathlib.Path) → None[source]

Remove a file and, if its parent directory is then empty, remove the directory as well

bandersnatch.utils.user_agent() → str[source]

bandersnatch.verify module

async bandersnatch.verify.delete_unowned_files(mirror_base: pathlib.Path, executor: concurrent.futures.thread.ThreadPoolExecutor, all_package_files: List[pathlib.Path], dry_run: bool) → int[source]
async bandersnatch.verify.get_latest_json(master: bandersnatch.master.Master, json_path: pathlib.Path, config: configparser.ConfigParser, executor: Optional[concurrent.futures.thread.ThreadPoolExecutor] = None, delete_removed_packages: bool = False) → None[source]
async bandersnatch.verify.metadata_verify(config: configparser.ConfigParser, args: argparse.Namespace) → int[source]

Crawl all saved JSON metadata (or fetch it online) to check that we have all packages; if deletion is enabled, generate a diff of unowned files

bandersnatch.verify.on_error(stop_on_error: bool, exception: BaseException, package: str) → None[source]
async bandersnatch.verify.verify(master: bandersnatch.master.Master, config: configparser.ConfigParser, json_file: str, mirror_base_path: pathlib.Path, all_package_files: List[pathlib.Path], args: argparse.Namespace, executor: Optional[concurrent.futures.thread.ThreadPoolExecutor] = None, releases_key: str = 'releases') → None[source]
async bandersnatch.verify.verify_producer(master: bandersnatch.master.Master, config: configparser.ConfigParser, all_package_files: List[pathlib.Path], mirror_base_path: pathlib.Path, json_files: List[str], args: argparse.Namespace, executor: Optional[concurrent.futures.thread.ThreadPoolExecutor] = None) → None[source]