diff --git a/Dockerfile--std-all.tmpl b/Dockerfile--std-all.tmpl index d19f52a6..0cdaf54b 100644 --- a/Dockerfile--std-all.tmpl +++ b/Dockerfile--std-all.tmpl @@ -12,6 +12,9 @@ ENV PYTHON_VERSION=3 # --------------------------------------------- final FROM base2_with_python-${PYTHON_VERSION} as final +# git is required to fetch external packages +RUN apk add --no-cache git + #RUN apk add --no-cache mc # Full version of "ps" command diff --git a/Dockerfile--std.tmpl b/Dockerfile--std.tmpl index 67aa30b4..778b700b 100644 --- a/Dockerfile--std.tmpl +++ b/Dockerfile--std.tmpl @@ -12,6 +12,9 @@ ENV PYTHON_VERSION=3 # --------------------------------------------- final FROM base2_with_python-${PYTHON_VERSION} as final +# git is required to fetch external packages +RUN apk add --no-cache git + ENV LANG=C.UTF-8 ADD . /pg/testgres diff --git a/Dockerfile--ubuntu_24_04.tmpl b/Dockerfile--ubuntu_24_04.tmpl index 7a559776..42fef4e1 100644 --- a/Dockerfile--ubuntu_24_04.tmpl +++ b/Dockerfile--ubuntu_24_04.tmpl @@ -47,6 +47,9 @@ ENV PYTHON_VERSION=3 # --------------------------------------------- final FROM base2_with_python-${PYTHON_VERSION} as final +# git is required to fetch external packages +RUN apt install -y git + ADD . /pg/testgres WORKDIR /pg/testgres RUN chown -R postgres /pg diff --git a/docs/Makefile b/docs/Makefile index f33f6be0..d6818981 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -17,4 +17,5 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile + @pip install --force-reinstall .. @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 688a850f..f231cd90 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -14,11 +14,19 @@ # import os import sys -sys.path.insert(0, os.path.abspath('../..')) +import testgres + +assert testgres.__path__ is not None +assert len(testgres.__path__) == 1 +assert type(testgres.__path__[0]) == str # noqa: E721 +p = os.path.dirname(testgres.__path__[0]) +assert type(p) == str # noqa: E721 +sys.path.insert(0, os.path.abspath(p)) # -- Project information ----------------------------------------------------- project = u'testgres' +package_name = u'testgres' copyright = u'2016-2023, Postgres Professional' author = u'Postgres Professional' @@ -55,7 +63,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files.
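The setup.py hunk below declares the extracted operations layer as a PEP 508 direct reference ("name @ git+URL"), which is why each Dockerfile above installs git before pip runs. A minimal sketch of the pattern, assuming the `testgres.os_ops` repository is pip-installable and exposes a distribution of the same name:

```python
# Sketch of a PEP 508 direct reference in setup.py.
# Assumption: https://github.com/postgrespro/testgres.os_ops provides a
# distribution named "testgres.os_ops"; pip shells out to git to clone it,
# so git must be present in the build image.
from setuptools import setup

setup(
    name="example",
    version="0.1.0",
    install_requires=[
        "testgres.os_ops @ git+https://github.com/postgrespro/testgres.os_ops.git",
    ],
)
```

Note that PyPI rejects distributions whose requirements contain direct URL references, so this form works for source installs (`pip install .`) but not for a package published to PyPI.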
diff --git a/setup.py b/setup.py index 4ed2445e..e8f41c30 100755 --- a/setup.py +++ b/setup.py @@ -22,6 +22,10 @@ if sys.version_info < (3, 3): install_requires.append("ipaddress") +install_requires.append( + "testgres.os_ops @ git+https://github.com/postgrespro/testgres.os_ops.git" +) + # Get contents of README file with open('README.md', 'r') as f: readme = f.read() @@ -29,7 +33,8 @@ setup( version='1.11.1', name='testgres', - packages=['testgres', 'testgres.operations', 'testgres.impl'], + packages=['testgres', 'testgres.impl'], + package_dir={"testgres": "src"}, description='Testing utility for PostgreSQL and its extensions', url='https://github.com/postgrespro/testgres', long_description=readme, diff --git a/testgres/__init__.py b/src/__init__.py similarity index 90% rename from testgres/__init__.py rename to src/__init__.py index 555784bf..bebd6878 100644 --- a/testgres/__init__.py +++ b/src/__init__.py @@ -51,9 +51,9 @@ from .config import testgres_config -from .operations.os_ops import OsOperations, ConnectionParams -from .operations.local_ops import LocalOperations -from .operations.remote_ops import RemoteOperations +from testgres.operations.os_ops import OsOperations, ConnectionParams +from testgres.operations.local_ops import LocalOperations +from testgres.operations.remote_ops import RemoteOperations __all__ = [ "get_new_node", diff --git a/testgres/api.py b/src/api.py similarity index 100% rename from testgres/api.py rename to src/api.py diff --git a/testgres/backup.py b/src/backup.py similarity index 99% rename from testgres/backup.py rename to src/backup.py index 1d8454c3..06e6ef2d 100644 --- a/testgres/backup.py +++ b/src/backup.py @@ -13,7 +13,7 @@ from .exceptions import BackupException -from .operations.os_ops import OsOperations +from testgres.operations.os_ops import OsOperations from .utils import \ get_bin_path2, \ diff --git a/testgres/cache.py b/src/cache.py similarity index 96% rename from testgres/cache.py rename to src/cache.py index e323c5d1..95ae0a94 100644 --- a/testgres/cache.py +++ b/src/cache.py @@ -16,8 +16,8 @@ get_bin_path2, \ execute_utility2 -from .operations.local_ops import LocalOperations -from .operations.os_ops import OsOperations +from testgres.operations.local_ops import LocalOperations +from testgres.operations.os_ops import OsOperations def cached_initdb(data_dir, logfile=None, params=None, os_ops: OsOperations = None, bin_path=None, cached=True): diff --git a/testgres/config.py b/src/config.py similarity index 97% rename from testgres/config.py rename to src/config.py index 55d52426..1d09ccb8 100644 --- a/testgres/config.py +++ b/src/config.py @@ -9,8 +9,8 @@ from contextlib import contextmanager from .consts import TMP_CACHE -from .operations.os_ops import OsOperations -from .operations.local_ops import LocalOperations +from testgres.operations.os_ops import OsOperations +from testgres.operations.local_ops import LocalOperations log_level = os.getenv('LOGGING_LEVEL', 'WARNING').upper() log_format = os.getenv('LOGGING_FORMAT', '%(asctime)s - %(levelname)s - %(message)s') diff --git a/testgres/connection.py b/src/connection.py similarity index 100% rename from testgres/connection.py rename to src/connection.py diff --git a/testgres/consts.py b/src/consts.py similarity index 100% rename from testgres/consts.py rename to src/consts.py diff --git a/testgres/decorators.py b/src/decorators.py similarity index 100% rename from testgres/decorators.py rename to src/decorators.py diff --git 
a/testgres/defaults.py b/src/defaults.py similarity index 100% rename from testgres/defaults.py rename to src/defaults.py diff --git a/testgres/enums.py b/src/enums.py similarity index 100% rename from testgres/enums.py rename to src/enums.py diff --git a/src/exceptions.py b/src/exceptions.py new file mode 100644 index 00000000..b4aad645 --- /dev/null +++ b/src/exceptions.py @@ -0,0 +1,71 @@ +# coding: utf-8 + +import six + +from testgres.operations.exceptions import TestgresException +from testgres.operations.exceptions import ExecUtilException +from testgres.operations.exceptions import InvalidOperationException + + +class PortForException(TestgresException): + pass + + +@six.python_2_unicode_compatible +class QueryException(TestgresException): + def __init__(self, message=None, query=None): + super(QueryException, self).__init__(message) + + self.message = message + self.query = query + + def __str__(self): + msg = [] + + if self.message: + msg.append(self.message) + + if self.query: + msg.append(u'Query: {}'.format(self.query)) + + return six.text_type('\n').join(msg) + + +class TimeoutException(QueryException): + pass + + +class CatchUpException(QueryException): + pass + + +@six.python_2_unicode_compatible +class StartNodeException(TestgresException): + def __init__(self, message=None, files=None): + super(StartNodeException, self).__init__(message) + + self.message = message + self.files = files + + def __str__(self): + msg = [] + + if self.message: + msg.append(self.message) + + for f, lines in self.files or []: + msg.append(u'{}\n----\n{}\n'.format(f, lines)) + + return six.text_type('\n').join(msg) + + +class InitNodeException(TestgresException): + pass + + +class BackupException(TestgresException): + pass + + +assert ExecUtilException.__name__ == "ExecUtilException" +assert InvalidOperationException.__name__ == "InvalidOperationException" diff --git a/testgres/impl/port_manager__generic.py b/src/impl/port_manager__generic.py similarity index 98% rename from testgres/impl/port_manager__generic.py rename to src/impl/port_manager__generic.py index 567ff265..6c156992 100755 --- a/testgres/impl/port_manager__generic.py +++ b/src/impl/port_manager__generic.py @@ -1,4 +1,4 @@ -from ..operations.os_ops import OsOperations +from testgres.operations.os_ops import OsOperations from ..port_manager import PortManager from ..exceptions import PortForException diff --git a/testgres/impl/port_manager__this_host.py b/src/impl/port_manager__this_host.py similarity index 100% rename from testgres/impl/port_manager__this_host.py rename to src/impl/port_manager__this_host.py diff --git a/testgres/logger.py b/src/logger.py similarity index 100% rename from testgres/logger.py rename to src/logger.py diff --git a/testgres/node.py b/src/node.py similarity index 99% rename from testgres/node.py rename to src/node.py index 60d9e305..be9408be 100644 --- a/testgres/node.py +++ b/src/node.py @@ -104,9 +104,9 @@ from .backup import NodeBackup -from .operations.os_ops import ConnectionParams -from .operations.os_ops import OsOperations -from .operations.local_ops import LocalOperations +from testgres.operations.os_ops import ConnectionParams +from testgres.operations.os_ops import OsOperations +from testgres.operations.local_ops import LocalOperations InternalError = pglib.InternalError ProgrammingError = pglib.ProgrammingError diff --git a/testgres/node_app.py b/src/node_app.py similarity index 100% rename from testgres/node_app.py rename to src/node_app.py diff --git a/testgres/port_manager.py 
b/src/port_manager.py similarity index 100% rename from testgres/port_manager.py rename to src/port_manager.py diff --git a/testgres/pubsub.py b/src/pubsub.py similarity index 100% rename from testgres/pubsub.py rename to src/pubsub.py diff --git a/testgres/standby.py b/src/standby.py similarity index 100% rename from testgres/standby.py rename to src/standby.py diff --git a/testgres/utils.py b/src/utils.py similarity index 97% rename from testgres/utils.py rename to src/utils.py index 7ad4e536..c04b4fd3 100644 --- a/testgres/utils.py +++ b/src/utils.py @@ -15,10 +15,10 @@ from .exceptions import ExecUtilException from .config import testgres_config as tconf -from .operations.os_ops import OsOperations -from .operations.remote_ops import RemoteOperations -from .operations.local_ops import LocalOperations -from .operations.helpers import Helpers as OsHelpers +from testgres.operations.os_ops import OsOperations +from testgres.operations.remote_ops import RemoteOperations +from testgres.operations.local_ops import LocalOperations +from testgres.operations.helpers import Helpers as OsHelpers from .impl.port_manager__generic import PortManager__Generic diff --git a/testgres/operations/__init__.py b/testgres/operations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/testgres/operations/helpers.py b/testgres/operations/helpers.py deleted file mode 100644 index ebbf0f73..00000000 --- a/testgres/operations/helpers.py +++ /dev/null @@ -1,55 +0,0 @@ -import locale - - -class Helpers: - @staticmethod - def _make_get_default_encoding_func(): - # locale.getencoding is added in Python 3.11 - if hasattr(locale, 'getencoding'): - return locale.getencoding - - # It must exist - return locale.getpreferredencoding - - # Prepared pointer on function to get a name of system codepage - _get_default_encoding_func = _make_get_default_encoding_func.__func__() - - @staticmethod - def GetDefaultEncoding(): - # - # Original idea/source was: - # - # def os_ops.get_default_encoding(): - # if not hasattr(locale, 'getencoding'): - # locale.getencoding = locale.getpreferredencoding - # return locale.getencoding() or 'UTF-8' - # - - assert __class__._get_default_encoding_func is not None - - r = __class__._get_default_encoding_func() - - if r: - assert r is not None - assert type(r) == str # noqa: E721 - assert r != "" - return r - - # Is it an unexpected situation? - return 'UTF-8' - - @staticmethod - def PrepareProcessInput(input, encoding): - if not input: - return None - - if type(input) == str: # noqa: E721 - if encoding is None: - return input.encode(__class__.GetDefaultEncoding()) - - assert type(encoding) == str # noqa: E721 - return input.encode(encoding) - - # It is expected! 
- assert type(input) == bytes # noqa: E721 - return input diff --git a/testgres/operations/local_ops.py b/testgres/operations/local_ops.py deleted file mode 100644 index 4ec92cb9..00000000 --- a/testgres/operations/local_ops.py +++ /dev/null @@ -1,601 +0,0 @@ -from __future__ import annotations - -import getpass -import logging -import os -import shutil -import stat -import subprocess -import tempfile -import time -import socket - -import psutil -import typing -import threading -import copy - -from ..exceptions import ExecUtilException -from ..exceptions import InvalidOperationException -from .os_ops import ConnectionParams, OsOperations, get_default_encoding -from .raise_error import RaiseError -from .helpers import Helpers - -try: - from shutil import which as find_executable - from shutil import rmtree -except ImportError: - from distutils.spawn import find_executable - from distutils import rmtree - -CMD_TIMEOUT_SEC = 60 - - -class LocalOperations(OsOperations): - sm_dummy_conn_params = ConnectionParams() - sm_single_instance: OsOperations = None - sm_single_instance_guard = threading.Lock() - - # TODO: make it read-only - conn_params: ConnectionParams - host: str - ssh_key: typing.Optional[str] - remote: bool - username: str - - def __init__(self, conn_params=None): - super().__init__() - - if conn_params is __class__.sm_dummy_conn_params: - return - - if conn_params is None: - conn_params = ConnectionParams() - - self.conn_params = conn_params - self.host = conn_params.host - self.ssh_key = None - self.remote = False - self.username = conn_params.username or getpass.getuser() - - @staticmethod - def get_single_instance() -> OsOperations: - assert __class__ == LocalOperations - assert __class__.sm_single_instance_guard is not None - - if __class__.sm_single_instance is not None: - assert type(__class__.sm_single_instance) == __class__ # noqa: E721 - return __class__.sm_single_instance - - with __class__.sm_single_instance_guard: - if __class__.sm_single_instance is None: - __class__.sm_single_instance = __class__() - assert __class__.sm_single_instance is not None - assert type(__class__.sm_single_instance) == __class__ # noqa: E721 - return __class__.sm_single_instance - - def create_clone(self) -> LocalOperations: - clone = __class__(__class__.sm_dummy_conn_params) - clone.conn_params = copy.copy(self.conn_params) - clone.host = self.host - clone.ssh_key = self.ssh_key - clone.remote = self.remote - clone.username = self.username - return clone - - @staticmethod - def _process_output(encoding, temp_file_path): - """Process the output of a command from a temporary file.""" - with open(temp_file_path, 'rb') as temp_file: - output = temp_file.read() - if encoding: - output = output.decode(encoding) - return output, None # In Windows stderr writing in stdout - - def _run_command__nt( - self, cmd, shell, input, stdin, stdout, stderr, get_process, timeout, encoding, - exec_env: typing.Optional[dict], - cwd: typing.Optional[str], - ): - assert exec_env is None or type(exec_env) == dict # noqa: E721 - assert cwd is None or type(cwd) == str # noqa: E721 - - # TODO: why don't we use the data from input? 
- - extParams: typing.Dict[str, str] = dict() - - if exec_env is None: - pass - elif len(exec_env) == 0: - pass - else: - env = os.environ.copy() - assert type(env) == dict # noqa: E721 - for v in exec_env.items(): - assert type(v) == tuple # noqa: E721 - assert len(v) == 2 - assert type(v[0]) == str # noqa: E721 - assert v[0] != "" - - if v[1] is None: - env.pop(v[0], None) - else: - assert type(v[1]) == str # noqa: E721 - env[v[0]] = v[1] - - extParams["env"] = env - - with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as temp_file: - stdout = temp_file - stderr = subprocess.STDOUT - process = subprocess.Popen( - cmd, - shell=shell, - stdin=stdin or subprocess.PIPE if input is not None else None, - stdout=stdout, - stderr=stderr, - cwd=cwd, - **extParams, - ) - if get_process: - return process, None, None - temp_file_path = temp_file.name - - # Wait process finished - process.wait() - - output, error = self._process_output(encoding, temp_file_path) - return process, output, error - - def _run_command__generic( - self, cmd, shell, input, stdin, stdout, stderr, get_process, timeout, encoding, - exec_env: typing.Optional[dict], - cwd: typing.Optional[str], - ): - assert exec_env is None or type(exec_env) == dict # noqa: E721 - assert cwd is None or type(cwd) == str # noqa: E721 - - input_prepared = None - if not get_process: - input_prepared = Helpers.PrepareProcessInput(input, encoding) # throw - - assert input_prepared is None or (type(input_prepared) == bytes) # noqa: E721 - - extParams: typing.Dict[str, str] = dict() - - if exec_env is None: - pass - elif len(exec_env) == 0: - pass - else: - env = os.environ.copy() - assert type(env) == dict # noqa: E721 - for v in exec_env.items(): - assert type(v) == tuple # noqa: E721 - assert len(v) == 2 - assert type(v[0]) == str # noqa: E721 - assert v[0] != "" - - if v[1] is None: - env.pop(v[0], None) - else: - assert type(v[1]) == str # noqa: E721 - env[v[0]] = v[1] - - extParams["env"] = env - - process = subprocess.Popen( - cmd, - shell=shell, - stdin=stdin or subprocess.PIPE if input is not None else None, - stdout=stdout or subprocess.PIPE, - stderr=stderr or subprocess.PIPE, - cwd=cwd, - **extParams - ) - assert not (process is None) - if get_process: - return process, None, None - try: - output, error = process.communicate(input=input_prepared, timeout=timeout) - except subprocess.TimeoutExpired: - process.kill() - raise ExecUtilException("Command timed out after {} seconds.".format(timeout)) - - assert type(output) == bytes # noqa: E721 - assert type(error) == bytes # noqa: E721 - - if encoding: - output = output.decode(encoding) - error = error.decode(encoding) - return process, output, error - - def _run_command( - self, cmd, shell, input, stdin, stdout, stderr, get_process, timeout, encoding, - exec_env: typing.Optional[dict], - cwd: typing.Optional[str], - ): - """Execute a command and return the process and its output.""" - - assert exec_env is None or type(exec_env) == dict # noqa: E721 - assert cwd is None or type(cwd) == str # noqa: E721 - - if os.name == 'nt' and stdout is None: # Windows - method = __class__._run_command__nt - else: # Other OS - method = __class__._run_command__generic - - return method(self, cmd, shell, input, stdin, stdout, stderr, get_process, timeout, encoding, exec_env, cwd) - - def exec_command( - self, cmd, wait_exit=False, verbose=False, expect_error=False, encoding=None, shell=False, - text=False, input=None, stdin=None, stdout=None, stderr=None, get_process=False, timeout=None, - 
ignore_errors=False, - exec_env: typing.Optional[dict] = None, - cwd: typing.Optional[str] = None - ): - """ - Execute a command in a subprocess and handle the output based on the provided parameters. - """ - assert type(expect_error) == bool # noqa: E721 - assert type(ignore_errors) == bool # noqa: E721 - assert exec_env is None or type(exec_env) == dict # noqa: E721 - assert cwd is None or type(cwd) == str # noqa: E721 - - process, output, error = self._run_command( - cmd, shell, input, stdin, stdout, stderr, get_process, timeout, encoding, - exec_env, - cwd - ) - - if get_process: - return process - - if expect_error: - if process.returncode == 0: - raise InvalidOperationException("We expected an execution error.") - elif ignore_errors: - pass - elif process.returncode == 0: - pass - else: - assert not expect_error - assert not ignore_errors - assert process.returncode != 0 - RaiseError.UtilityExitedWithNonZeroCode( - cmd=cmd, - exit_code=process.returncode, - msg_arg=error or output, - error=error, - out=output) - - if verbose: - return process.returncode, output, error - - return output - - def build_path(self, a: str, *parts: str) -> str: - assert a is not None - assert parts is not None - assert type(a) == str # noqa: E721 - assert type(parts) == tuple # noqa: E721 - return os.path.join(a, *parts) - - # Environment setup - def environ(self, var_name): - return os.environ.get(var_name) - - def cwd(self): - return os.getcwd() - - def find_executable(self, executable): - return find_executable(executable) - - def is_executable(self, file): - # Check if the file is executable - assert stat.S_IXUSR != 0 - return (os.stat(file).st_mode & stat.S_IXUSR) == stat.S_IXUSR - - def set_env(self, var_name, var_val): - # Check if the directory is already in PATH - os.environ[var_name] = var_val - - def get_name(self): - return os.name - - # Work with dirs - def makedirs(self, path, remove_existing=False): - if remove_existing: - shutil.rmtree(path, ignore_errors=True) - try: - os.makedirs(path) - except FileExistsError: - pass - - def makedir(self, path: str): - assert type(path) == str # noqa: E721 - os.mkdir(path) - - # [2025-02-03] Old name of parameter attempts is "retries". - def rmdirs(self, path, ignore_errors=True, attempts=3, delay=1): - """ - Removes a directory and its contents, retrying on failure. - - :param path: Path to the directory. - :param ignore_errors: If True, ignore errors. - :param retries: Number of attempts to remove the directory. - :param delay: Delay between attempts in seconds. - """ - assert type(path) == str # noqa: E721 - assert type(ignore_errors) == bool # noqa: E721 - assert type(attempts) == int # noqa: E721 - assert type(delay) == int or type(delay) == float # noqa: E721 - assert attempts > 0 - assert delay >= 0 - - a = 0 - while True: - assert a < attempts - a += 1 - try: - rmtree(path) - except FileNotFoundError: - pass - except Exception as e: - if a < attempts: - errMsg = "Failed to remove directory {0} on attempt {1} ({2}): {3}".format( - path, a, type(e).__name__, e - ) - logging.warning(errMsg) - time.sleep(delay) - continue - - assert a == attempts - if not ignore_errors: - raise - - return False - - # OK! 
- return True - - def rmdir(self, path: str): - assert type(path) == str # noqa: E721 - os.rmdir(path) - - def listdir(self, path): - return os.listdir(path) - - def path_exists(self, path): - return os.path.exists(path) - - @property - def pathsep(self): - os_name = self.get_name() - if os_name == "posix": - pathsep = ":" - elif os_name == "nt": - pathsep = ";" - else: - raise Exception("Unsupported operating system: {}".format(os_name)) - return pathsep - - def mkdtemp(self, prefix=None): - return tempfile.mkdtemp(prefix='{}'.format(prefix)) - - def mkstemp(self, prefix=None): - fd, filename = tempfile.mkstemp(prefix=prefix) - os.close(fd) # Close the file descriptor immediately after creating the file - return filename - - def copytree(self, src, dst): - return shutil.copytree(src, dst) - - # Work with files - def write(self, filename, data, truncate=False, binary=False, read_and_write=False): - """ - Write data to a file locally - Args: - filename: The file path where the data will be written. - data: The data to be written to the file. - truncate: If True, the file will be truncated before writing ('w' option); - if False (default), data will be appended ('a' option). - binary: If True, the data will be written in binary mode ('b' option); - if False (default), the data will be written in text mode. - read_and_write: If True, the file will be opened with read and write permissions ('+' option); - if False (default), only write permission will be used. - """ - if isinstance(data, bytes) or isinstance(data, list) and all(isinstance(item, bytes) for item in data): - binary = True - - mode = "w" if truncate else "a" - - if read_and_write: - mode += "+" - - # If it is a bytes str or list - if binary: - mode += "b" - - assert type(mode) == str # noqa: E721 - assert mode != "" - - with open(filename, mode) as file: - if isinstance(data, list): - data2 = [__class__._prepare_line_to_write(s, binary) for s in data] - file.writelines(data2) - else: - data2 = __class__._prepare_data_to_write(data, binary) - file.write(data2) - - @staticmethod - def _prepare_line_to_write(data, binary): - data = __class__._prepare_data_to_write(data, binary) - - if binary: - assert type(data) == bytes # noqa: E721 - return data.rstrip(b'\n') + b'\n' - - assert type(data) == str # noqa: E721 - return data.rstrip('\n') + '\n' - - @staticmethod - def _prepare_data_to_write(data, binary): - if isinstance(data, bytes): - return data if binary else data.decode() - - if isinstance(data, str): - return data if not binary else data.encode() - - raise InvalidOperationException("Unknown type of data type [{0}].".format(type(data).__name__)) - - def touch(self, filename): - """ - Create a new file or update the access and modification times of an existing file. - Args: - filename (str): The name of the file to touch. - - This method behaves as the 'touch' command in Unix. It's equivalent to calling 'touch filename' in the shell. - """ - # cross-python touch(). It is vulnerable to races, but who cares? 
- with open(filename, "a"): - os.utime(filename, None) - - def read(self, filename, encoding=None, binary=False): - assert type(filename) == str # noqa: E721 - assert encoding is None or type(encoding) == str # noqa: E721 - assert type(binary) == bool # noqa: E721 - - if binary: - if encoding is not None: - raise InvalidOperationException("Enconding is not allowed for read binary operation") - - return self._read__binary(filename) - - # python behavior - assert (None or "abc") == "abc" - assert ("" or "abc") == "abc" - - return self._read__text_with_encoding(filename, encoding or get_default_encoding()) - - def _read__text_with_encoding(self, filename, encoding): - assert type(filename) == str # noqa: E721 - assert type(encoding) == str # noqa: E721 - with open(filename, mode='r', encoding=encoding) as file: # open in a text mode - content = file.read() - assert type(content) == str # noqa: E721 - return content - - def _read__binary(self, filename): - assert type(filename) == str # noqa: E721 - with open(filename, 'rb') as file: # open in a binary mode - content = file.read() - assert type(content) == bytes # noqa: E721 - return content - - def readlines(self, filename, num_lines=0, binary=False, encoding=None): - """ - Read lines from a local file. - If num_lines is greater than 0, only the last num_lines lines will be read. - """ - assert type(num_lines) == int # noqa: E721 - assert type(filename) == str # noqa: E721 - assert type(binary) == bool # noqa: E721 - assert encoding is None or type(encoding) == str # noqa: E721 - assert num_lines >= 0 - - if binary: - assert encoding is None - pass - elif encoding is None: - encoding = get_default_encoding() - assert type(encoding) == str # noqa: E721 - else: - assert type(encoding) == str # noqa: E721 - pass - - mode = 'rb' if binary else 'r' - if num_lines == 0: - with open(filename, mode, encoding=encoding) as file: # open in binary mode - return file.readlines() - else: - bufsize = 8192 - buffers = 1 - - with open(filename, mode, encoding=encoding) as file: # open in binary mode - file.seek(0, os.SEEK_END) - end_pos = file.tell() - - while True: - offset = max(0, end_pos - bufsize * buffers) - file.seek(offset, os.SEEK_SET) - pos = file.tell() - lines = file.readlines() - cur_lines = len(lines) - - if cur_lines >= num_lines or pos == 0: - return lines[-num_lines:] # get last num_lines from lines - - buffers = int( - buffers * max(2, int(num_lines / max(cur_lines, 1))) - ) # Adjust buffer size - - def read_binary(self, filename, offset): - assert type(filename) == str # noqa: E721 - assert type(offset) == int # noqa: E721 - - if offset < 0: - raise ValueError("Negative 'offset' is not supported.") - - with open(filename, 'rb') as file: # open in a binary mode - file.seek(offset, os.SEEK_SET) - r = file.read() - assert type(r) == bytes # noqa: E721 - return r - - def isfile(self, remote_file): - return os.path.isfile(remote_file) - - def isdir(self, dirname): - return os.path.isdir(dirname) - - def get_file_size(self, filename): - assert filename is not None - assert type(filename) == str # noqa: E721 - return os.path.getsize(filename) - - def remove_file(self, filename): - return os.remove(filename) - - # Processes control - def kill(self, pid, signal, expect_error=False): - # Kill the process - cmd = "kill -{} {}".format(signal, pid) - return self.exec_command(cmd, expect_error=expect_error) - - def get_pid(self): - # Get current process id - return os.getpid() - - def get_process_children(self, pid): - assert type(pid) == int # noqa: 
E721 - return psutil.Process(pid).children() - - def is_port_free(self, number: int) -> bool: - assert type(number) == int # noqa: E721 - assert number >= 0 - assert number <= 65535 # OK? - - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - try: - s.bind(("", number)) - return True - except OSError: - return False - - def get_tempdir(self) -> str: - r = tempfile.gettempdir() - assert r is not None - assert type(r) == str # noqa: E721 - assert os.path.exists(r) - return r diff --git a/testgres/operations/os_ops.py b/testgres/operations/os_ops.py deleted file mode 100644 index 46422269..00000000 --- a/testgres/operations/os_ops.py +++ /dev/null @@ -1,145 +0,0 @@ -from __future__ import annotations - -import locale - - -class ConnectionParams: - def __init__(self, host='127.0.0.1', port=None, ssh_key=None, username=None): - self.host = host - self.port = port - self.ssh_key = ssh_key - self.username = username - - -def get_default_encoding(): - if not hasattr(locale, 'getencoding'): - locale.getencoding = locale.getpreferredencoding - return locale.getencoding() or 'UTF-8' - - -class OsOperations: - def __init__(self): - pass - - def create_clone(self) -> OsOperations: - raise NotImplementedError() - - # Command execution - def exec_command(self, cmd, **kwargs): - raise NotImplementedError() - - def build_path(self, a: str, *parts: str) -> str: - assert a is not None - assert parts is not None - assert type(a) == str # noqa: E721 - assert type(parts) == tuple # noqa: E721 - raise NotImplementedError() - - # Environment setup - def environ(self, var_name): - raise NotImplementedError() - - def cwd(self): - raise NotImplementedError() - - def find_executable(self, executable): - raise NotImplementedError() - - def is_executable(self, file): - # Check if the file is executable - raise NotImplementedError() - - def set_env(self, var_name, var_val): - # Check if the directory is already in PATH - raise NotImplementedError() - - def get_user(self): - return self.username - - def get_name(self): - raise NotImplementedError() - - # Work with dirs - def makedirs(self, path, remove_existing=False): - raise NotImplementedError() - - def makedir(self, path: str): - assert type(path) == str # noqa: E721 - raise NotImplementedError() - - def rmdirs(self, path, ignore_errors=True): - raise NotImplementedError() - - def rmdir(self, path: str): - assert type(path) == str # noqa: E721 - raise NotImplementedError() - - def listdir(self, path): - raise NotImplementedError() - - def path_exists(self, path): - raise NotImplementedError() - - @property - def pathsep(self): - raise NotImplementedError() - - def mkdtemp(self, prefix=None): - raise NotImplementedError() - - def mkstemp(self, prefix=None): - raise NotImplementedError() - - def copytree(self, src, dst): - raise NotImplementedError() - - # Work with files - def write(self, filename, data, truncate=False, binary=False, read_and_write=False): - raise NotImplementedError() - - def touch(self, filename): - raise NotImplementedError() - - def read(self, filename, encoding, binary): - raise NotImplementedError() - - def readlines(self, filename): - raise NotImplementedError() - - def read_binary(self, filename, offset): - assert type(filename) == str # noqa: E721 - assert type(offset) == int # noqa: E721 - assert offset >= 0 - raise NotImplementedError() - - def isfile(self, remote_file): - raise NotImplementedError() - - def isdir(self, dirname): - raise NotImplementedError() - - def get_file_size(self, filename): - raise NotImplementedError() - 
- def remove_file(self, filename): - assert type(filename) == str # noqa: E721 - raise NotImplementedError() - - # Processes control - def kill(self, pid, signal): - # Kill the process - raise NotImplementedError() - - def get_pid(self): - # Get current process id - raise NotImplementedError() - - def get_process_children(self, pid): - raise NotImplementedError() - - def is_port_free(self, number: int): - assert type(number) == int # noqa: E721 - raise NotImplementedError() - - def get_tempdir(self) -> str: - raise NotImplementedError() diff --git a/testgres/operations/raise_error.py b/testgres/operations/raise_error.py deleted file mode 100644 index 0d14be5a..00000000 --- a/testgres/operations/raise_error.py +++ /dev/null @@ -1,57 +0,0 @@ -from ..exceptions import ExecUtilException -from .helpers import Helpers - - -class RaiseError: - @staticmethod - def UtilityExitedWithNonZeroCode(cmd, exit_code, msg_arg, error, out): - assert type(exit_code) == int # noqa: E721 - - msg_arg_s = __class__._TranslateDataIntoString(msg_arg) - assert type(msg_arg_s) == str # noqa: E721 - - msg_arg_s = msg_arg_s.strip() - if msg_arg_s == "": - msg_arg_s = "#no_error_message" - - message = "Utility exited with non-zero code (" + str(exit_code) + "). Error: `" + msg_arg_s + "`" - raise ExecUtilException( - message=message, - command=cmd, - exit_code=exit_code, - out=out, - error=error) - - @staticmethod - def CommandExecutionError(cmd, exit_code, message, error, out): - assert type(exit_code) == int # noqa: E721 - assert type(message) == str # noqa: E721 - assert message != "" - - raise ExecUtilException( - message=message, - command=cmd, - exit_code=exit_code, - out=out, - error=error) - - @staticmethod - def _TranslateDataIntoString(data): - if data is None: - return "" - - if type(data) == bytes: # noqa: E721 - return __class__._TranslateDataIntoString__FromBinary(data) - - return str(data) - - @staticmethod - def _TranslateDataIntoString__FromBinary(data): - assert type(data) == bytes # noqa: E721 - - try: - return data.decode(Helpers.GetDefaultEncoding()) - except UnicodeDecodeError: - pass - - return "#cannot_decode_text" diff --git a/testgres/operations/remote_ops.py b/testgres/operations/remote_ops.py deleted file mode 100644 index 4cdb158e..00000000 --- a/testgres/operations/remote_ops.py +++ /dev/null @@ -1,874 +0,0 @@ -from __future__ import annotations - -import getpass -import os -import posixpath -import platform -import subprocess -import tempfile -import io -import logging -import typing -import copy -import re - -from ..exceptions import ExecUtilException -from ..exceptions import InvalidOperationException -from .os_ops import OsOperations, ConnectionParams, get_default_encoding -from .raise_error import RaiseError -from .helpers import Helpers - -error_markers = [b'error', b'Permission denied', b'fatal', b'No such file or directory'] - - -class PsUtilProcessProxy: - def __init__(self, ssh, pid): - assert isinstance(ssh, RemoteOperations) - assert type(pid) == int # noqa: E721 - self.ssh = ssh - self.pid = pid - - def kill(self): - assert isinstance(self.ssh, RemoteOperations) - assert type(self.pid) == int # noqa: E721 - command = ["kill", str(self.pid)] - self.ssh.exec_command(command, encoding=get_default_encoding()) - - def cmdline(self): - assert isinstance(self.ssh, RemoteOperations) - assert type(self.pid) == int # noqa: E721 - command = ["ps", "-p", str(self.pid), "-o", "cmd", "--no-headers"] - output = self.ssh.exec_command(command, encoding=get_default_encoding()) - assert 
type(output) == str # noqa: E721 - cmdline = output.strip() - # TODO: This code work wrong if command line contains quoted values. Yes? - return cmdline.split() - - -class RemoteOperations(OsOperations): - sm_dummy_conn_params = ConnectionParams() - - conn_params: ConnectionParams - host: str - port: int - ssh_key: str - ssh_args: list - remote: bool - username: str - ssh_dest: str - - def __init__(self, conn_params: ConnectionParams): - if not platform.system().lower() == "linux": - raise EnvironmentError("Remote operations are supported only on Linux!") - - if conn_params is None: - raise ValueError("Argument 'conn_params' is None.") - - super().__init__() - - if conn_params is __class__.sm_dummy_conn_params: - return - - self.conn_params = conn_params - self.host = conn_params.host - self.port = conn_params.port - self.ssh_key = conn_params.ssh_key - self.ssh_args = [] - if self.ssh_key: - self.ssh_args += ["-i", self.ssh_key] - if self.port: - self.ssh_args += ["-p", self.port] - self.remote = True - self.username = conn_params.username or getpass.getuser() - self.ssh_dest = f"{self.username}@{self.host}" if conn_params.username else self.host - - def __enter__(self): - return self - - def create_clone(self) -> RemoteOperations: - clone = __class__(__class__.sm_dummy_conn_params) - clone.conn_params = copy.copy(self.conn_params) - clone.host = self.host - clone.port = self.port - clone.ssh_key = self.ssh_key - clone.ssh_args = copy.copy(self.ssh_args) - clone.remote = self.remote - clone.username = self.username - clone.ssh_dest = self.ssh_dest - return clone - - def exec_command( - self, cmd, wait_exit=False, verbose=False, expect_error=False, - encoding=None, shell=True, text=False, input=None, stdin=None, stdout=None, - stderr=None, get_process=None, timeout=None, ignore_errors=False, - exec_env: typing.Optional[dict] = None, - cwd: typing.Optional[str] = None - ): - """ - Execute a command in the SSH session. - Args: - - cmd (str): The command to be executed. 
- """ - assert type(expect_error) == bool # noqa: E721 - assert type(ignore_errors) == bool # noqa: E721 - assert exec_env is None or type(exec_env) == dict # noqa: E721 - assert cwd is None or type(cwd) == str # noqa: E721 - - input_prepared = None - if not get_process: - input_prepared = Helpers.PrepareProcessInput(input, encoding) # throw - - assert input_prepared is None or (type(input_prepared) == bytes) # noqa: E721 - - cmds = [] - - if cwd is not None: - assert type(cwd) == str # noqa: E721 - cmds.append(__class__._build_cmdline(["cd", cwd])) - - cmds.append(__class__._build_cmdline(cmd, exec_env)) - - assert len(cmds) >= 1 - - cmdline = ";".join(cmds) - assert type(cmdline) == str # noqa: E721 - assert cmdline != "" - - ssh_cmd = ['ssh', self.ssh_dest] + self.ssh_args + [cmdline] - - process = subprocess.Popen(ssh_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - assert not (process is None) - if get_process: - return process - - try: - output, error = process.communicate(input=input_prepared, timeout=timeout) - except subprocess.TimeoutExpired: - process.kill() - raise ExecUtilException("Command timed out after {} seconds.".format(timeout)) - - assert type(output) == bytes # noqa: E721 - assert type(error) == bytes # noqa: E721 - - if encoding: - output = output.decode(encoding) - error = error.decode(encoding) - - if expect_error: - if process.returncode == 0: - raise InvalidOperationException("We expected an execution error.") - elif ignore_errors: - pass - elif process.returncode == 0: - pass - else: - assert not expect_error - assert not ignore_errors - assert process.returncode != 0 - RaiseError.UtilityExitedWithNonZeroCode( - cmd=cmd, - exit_code=process.returncode, - msg_arg=error, - error=error, - out=output) - - if verbose: - return process.returncode, output, error - - return output - - def build_path(self, a: str, *parts: str) -> str: - assert a is not None - assert parts is not None - assert type(a) == str # noqa: E721 - assert type(parts) == tuple # noqa: E721 - return __class__._build_path(a, *parts) - - # Environment setup - def environ(self, var_name: str) -> str: - """ - Get the value of an environment variable. - Args: - - var_name (str): The name of the environment variable. - """ - cmd = "echo ${}".format(var_name) - return self.exec_command(cmd, encoding=get_default_encoding()).strip() - - def cwd(self): - cmd = 'pwd' - return self.exec_command(cmd, encoding=get_default_encoding()).rstrip() - - def find_executable(self, executable): - search_paths = self.environ("PATH") - if not search_paths: - return None - - search_paths = search_paths.split(self.pathsep) - for path in search_paths: - remote_file = __class__._build_path(path, executable) - if self.isfile(remote_file): - return remote_file - - return None - - def is_executable(self, file): - # Check if the file is executable - command = ["test", "-x", file] - - exit_status, output, error = self.exec_command(cmd=command, encoding=get_default_encoding(), ignore_errors=True, verbose=True) - - assert type(output) == str # noqa: E721 - assert type(error) == str # noqa: E721 - - if exit_status == 0: - return True - - if exit_status == 1: - return False - - errMsg = "Test operation returns an unknown result code: {0}. 
File name is [{1}].".format( - exit_status, - file) - - RaiseError.CommandExecutionError( - cmd=command, - exit_code=exit_status, - message=errMsg, - error=error, - out=output - ) - - def set_env(self, var_name: str, var_val: str): - """ - Set the value of an environment variable. - Args: - - var_name (str): The name of the environment variable. - - var_val (str): The value to be set for the environment variable. - """ - return self.exec_command("export {}={}".format(var_name, var_val)) - - def get_name(self): - cmd = 'python3 -c "import os; print(os.name)"' - return self.exec_command(cmd, encoding=get_default_encoding()).strip() - - # Work with dirs - def makedirs(self, path, remove_existing=False): - """ - Create a directory in the remote server. - Args: - - path (str): The path to the directory to be created. - - remove_existing (bool): If True, the existing directory at the path will be removed. - """ - if remove_existing: - cmd = "rm -rf {} && mkdir -p {}".format(path, path) - else: - cmd = "mkdir -p {}".format(path) - try: - exit_status, result, error = self.exec_command(cmd, verbose=True) - except ExecUtilException as e: - raise Exception("Couldn't create dir {} because of error {}".format(path, e.message)) - if exit_status != 0: - raise Exception("Couldn't create dir {} because of error {}".format(path, error)) - return result - - def makedir(self, path: str): - assert type(path) == str # noqa: E721 - cmd = ["mkdir", path] - self.exec_command(cmd) - - def rmdirs(self, path, ignore_errors=True): - """ - Remove a directory in the remote server. - Args: - - path (str): The path to the directory to be removed. - - ignore_errors (bool): If True, do not raise error if directory does not exist. - """ - assert type(path) == str # noqa: E721 - assert type(ignore_errors) == bool # noqa: E721 - - # ENOENT = 2 - No such file or directory - # ENOTDIR = 20 - Not a directory - - cmd1 = [ - "if", "[", "-d", path, "]", ";", - "then", "rm", "-rf", path, ";", - "elif", "[", "-e", path, "]", ";", - "then", "{", "echo", "cannot remove '" + path + "': it is not a directory", ">&2", ";", "exit", "20", ";", "}", ";", - "else", "{", "echo", "directory '" + path + "' does not exist", ">&2", ";", "exit", "2", ";", "}", ";", - "fi" - ] - - cmd2 = ["sh", "-c", subprocess.list2cmdline(cmd1)] - - try: - self.exec_command(cmd2, encoding=Helpers.GetDefaultEncoding()) - except ExecUtilException as e: - if e.exit_code == 2: # No such file or directory - return True - - if not ignore_errors: - raise - - errMsg = "Failed to remove directory {0} ({1}): {2}".format( - path, type(e).__name__, e - ) - logging.warning(errMsg) - return False - return True - - def rmdir(self, path: str): - assert type(path) == str # noqa: E721 - cmd = ["rmdir", path] - self.exec_command(cmd) - - def listdir(self, path): - """ - List all files and directories in a directory. - Args: - path (str): The path to the directory. 
- """ - command = ["ls", path] - output = self.exec_command(cmd=command, encoding=get_default_encoding()) - assert type(output) == str # noqa: E721 - result = output.splitlines() - assert type(result) == list # noqa: E721 - return result - - def path_exists(self, path): - command = ["test", "-e", path] - - exit_status, output, error = self.exec_command(cmd=command, encoding=get_default_encoding(), ignore_errors=True, verbose=True) - - assert type(output) == str # noqa: E721 - assert type(error) == str # noqa: E721 - - if exit_status == 0: - return True - - if exit_status == 1: - return False - - errMsg = "Test operation returns an unknown result code: {0}. Path is [{1}].".format( - exit_status, - path) - - RaiseError.CommandExecutionError( - cmd=command, - exit_code=exit_status, - message=errMsg, - error=error, - out=output - ) - - @property - def pathsep(self): - os_name = self.get_name() - if os_name == "posix": - pathsep = ":" - elif os_name == "nt": - pathsep = ";" - else: - raise Exception("Unsupported operating system: {}".format(os_name)) - return pathsep - - def mkdtemp(self, prefix=None): - """ - Creates a temporary directory in the remote server. - Args: - - prefix (str): The prefix of the temporary directory name. - """ - if prefix: - command = ["mktemp", "-d", "-t", prefix + "XXXXXX"] - else: - command = ["mktemp", "-d"] - - exec_exitcode, exec_output, exec_error = self.exec_command(command, verbose=True, encoding=get_default_encoding(), ignore_errors=True) - - assert type(exec_exitcode) == int # noqa: E721 - assert type(exec_output) == str # noqa: E721 - assert type(exec_error) == str # noqa: E721 - - if exec_exitcode != 0: - RaiseError.CommandExecutionError( - cmd=command, - exit_code=exec_exitcode, - message="Could not create temporary directory.", - error=exec_error, - out=exec_output) - - temp_dir = exec_output.strip() - return temp_dir - - def mkstemp(self, prefix=None): - """ - Creates a temporary file in the remote server. - Args: - - prefix (str): The prefix of the temporary directory name. 
- """ - if prefix: - command = ["mktemp", "-t", prefix + "XXXXXX"] - else: - command = ["mktemp"] - - exec_exitcode, exec_output, exec_error = self.exec_command(command, verbose=True, encoding=get_default_encoding(), ignore_errors=True) - - assert type(exec_exitcode) == int # noqa: E721 - assert type(exec_output) == str # noqa: E721 - assert type(exec_error) == str # noqa: E721 - - if exec_exitcode != 0: - RaiseError.CommandExecutionError( - cmd=command, - exit_code=exec_exitcode, - message="Could not create temporary file.", - error=exec_error, - out=exec_output) - - temp_file = exec_output.strip() - return temp_file - - def copytree(self, src, dst): - if not os.path.isabs(dst): - dst = __class__._build_path('~', dst) - if self.isdir(dst): - raise FileExistsError("Directory {} already exists.".format(dst)) - return self.exec_command("cp -r {} {}".format(src, dst)) - - # Work with files - def write(self, filename, data, truncate=False, binary=False, read_and_write=False, encoding=None): - if not encoding: - encoding = get_default_encoding() - mode = "wb" if binary else "w" - - with tempfile.NamedTemporaryFile(mode=mode, delete=False) as tmp_file: - # For scp the port is specified by a "-P" option - scp_args = ['-P' if x == '-p' else x for x in self.ssh_args] - - if not truncate: - scp_cmd = ['scp'] + scp_args + [f"{self.ssh_dest}:{filename}", tmp_file.name] - subprocess.run(scp_cmd, check=False) # The file might not exist yet - tmp_file.seek(0, os.SEEK_END) - - if isinstance(data, list): - data2 = [__class__._prepare_line_to_write(s, binary, encoding) for s in data] - tmp_file.writelines(data2) - else: - data2 = __class__._prepare_data_to_write(data, binary, encoding) - tmp_file.write(data2) - - tmp_file.flush() - scp_cmd = ['scp'] + scp_args + [tmp_file.name, f"{self.ssh_dest}:{filename}"] - subprocess.run(scp_cmd, check=True) - - remote_directory = os.path.dirname(filename) - mkdir_cmd = ['ssh'] + self.ssh_args + [self.ssh_dest, f"mkdir -p {remote_directory}"] - subprocess.run(mkdir_cmd, check=True) - - os.remove(tmp_file.name) - - @staticmethod - def _prepare_line_to_write(data, binary, encoding): - data = __class__._prepare_data_to_write(data, binary, encoding) - - if binary: - assert type(data) == bytes # noqa: E721 - return data.rstrip(b'\n') + b'\n' - - assert type(data) == str # noqa: E721 - return data.rstrip('\n') + '\n' - - @staticmethod - def _prepare_data_to_write(data, binary, encoding): - if isinstance(data, bytes): - return data if binary else data.decode(encoding) - - if isinstance(data, str): - return data if not binary else data.encode(encoding) - - raise InvalidOperationException("Unknown type of data type [{0}].".format(type(data).__name__)) - - def touch(self, filename): - """ - Create a new file or update the access and modification times of an existing file on the remote server. - - Args: - filename (str): The name of the file to touch. - - This method behaves as the 'touch' command in Unix. It's equivalent to calling 'touch filename' in the shell. 
- """ - self.exec_command("touch {}".format(filename)) - - def read(self, filename, binary=False, encoding=None): - assert type(filename) == str # noqa: E721 - assert encoding is None or type(encoding) == str # noqa: E721 - assert type(binary) == bool # noqa: E721 - - if binary: - if encoding is not None: - raise InvalidOperationException("Enconding is not allowed for read binary operation") - - return self._read__binary(filename) - - # python behavior - assert (None or "abc") == "abc" - assert ("" or "abc") == "abc" - - return self._read__text_with_encoding(filename, encoding or get_default_encoding()) - - def _read__text_with_encoding(self, filename, encoding): - assert type(filename) == str # noqa: E721 - assert type(encoding) == str # noqa: E721 - content = self._read__binary(filename) - assert type(content) == bytes # noqa: E721 - buf0 = io.BytesIO(content) - buf1 = io.TextIOWrapper(buf0, encoding=encoding) - content_s = buf1.read() - assert type(content_s) == str # noqa: E721 - return content_s - - def _read__binary(self, filename): - assert type(filename) == str # noqa: E721 - cmd = ["cat", filename] - content = self.exec_command(cmd) - assert type(content) == bytes # noqa: E721 - return content - - def readlines(self, filename, num_lines=0, binary=False, encoding=None): - assert type(num_lines) == int # noqa: E721 - assert type(filename) == str # noqa: E721 - assert type(binary) == bool # noqa: E721 - assert encoding is None or type(encoding) == str # noqa: E721 - - if num_lines > 0: - cmd = ["tail", "-n", str(num_lines), filename] - else: - cmd = ["cat", filename] - - if binary: - assert encoding is None - pass - elif encoding is None: - encoding = get_default_encoding() - assert type(encoding) == str # noqa: E721 - else: - assert type(encoding) == str # noqa: E721 - pass - - result = self.exec_command(cmd, encoding=encoding) - assert result is not None - - if binary: - assert type(result) == bytes # noqa: E721 - lines = result.splitlines() - else: - assert type(result) == str # noqa: E721 - lines = result.splitlines() - - assert type(lines) == list # noqa: E721 - return lines - - def read_binary(self, filename, offset): - assert type(filename) == str # noqa: E721 - assert type(offset) == int # noqa: E721 - - if offset < 0: - raise ValueError("Negative 'offset' is not supported.") - - cmd = ["tail", "-c", "+{}".format(offset + 1), filename] - r = self.exec_command(cmd) - assert type(r) == bytes # noqa: E721 - return r - - def isfile(self, remote_file): - stdout = self.exec_command("test -f {}; echo $?".format(remote_file)) - result = int(stdout.strip()) - return result == 0 - - def isdir(self, dirname): - cmd = "if [ -d {} ]; then echo True; else echo False; fi".format(dirname) - response = self.exec_command(cmd) - return response.strip() == b"True" - - def get_file_size(self, filename): - C_ERR_SRC = "RemoteOpertions::get_file_size" - - assert filename is not None - assert type(filename) == str # noqa: E721 - cmd = ["du", "-b", filename] - - s = self.exec_command(cmd, encoding=get_default_encoding()) - assert type(s) == str # noqa: E721 - - if len(s) == 0: - raise Exception( - "[BUG CHECK] Can't get size of file [{2}]. Remote operation returned an empty string. Check point [{0}][{1}].".format( - C_ERR_SRC, - "#001", - filename - ) - ) - - i = 0 - - while i < len(s) and s[i].isdigit(): - assert s[i] >= '0' - assert s[i] <= '9' - i += 1 - - if i == 0: - raise Exception( - "[BUG CHECK] Can't get size of file [{2}]. Remote operation returned a bad formatted string. 
Check point [{0}][{1}].".format( - C_ERR_SRC, - "#002", - filename - ) - ) - - if i == len(s): - raise Exception( - "[BUG CHECK] Can't get size of file [{2}]. Remote operation returned a bad formatted string. Check point [{0}][{1}].".format( - C_ERR_SRC, - "#003", - filename - ) - ) - - if not s[i].isspace(): - raise Exception( - "[BUG CHECK] Can't get size of file [{2}]. Remote operation returned a bad formatted string. Check point [{0}][{1}].".format( - C_ERR_SRC, - "#004", - filename - ) - ) - - r = 0 - - for i2 in range(0, i): - ch = s[i2] - assert ch >= '0' - assert ch <= '9' - # Here is needed to check overflow or that it is a human-valid result? - r = (r * 10) + ord(ch) - ord('0') - - return r - - def remove_file(self, filename): - cmd = "rm {}".format(filename) - return self.exec_command(cmd) - - # Processes control - def kill(self, pid, signal): - # Kill the process - cmd = "kill -{} {}".format(signal, pid) - return self.exec_command(cmd) - - def get_pid(self): - # Get current process id - return int(self.exec_command("echo $$", encoding=get_default_encoding())) - - def get_process_children(self, pid): - assert type(pid) == int # noqa: E721 - command = ["ssh"] + self.ssh_args + [self.ssh_dest, "pgrep", "-P", str(pid)] - - result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - - if result.returncode == 0: - children = result.stdout.strip().splitlines() - return [PsUtilProcessProxy(self, int(child_pid.strip())) for child_pid in children] - - raise ExecUtilException(f"Error in getting process children. Error: {result.stderr}") - - def is_port_free(self, number: int) -> bool: - assert type(number) == int # noqa: E721 - assert number >= 0 - assert number <= 65535 # OK? - - # grep -q returns 0 if a listening socket on that port is found - port_hex = format(number, '04X') - - # sl local_address rem_address st tx_queue rx_queue tr tm->when retrnsmt ... - # 137: 0A01A8C0:EC08 1DA2A959:01BB 01 00000000:00000000 02:00000000 00000000 ... 
- C_REGEXP = r"^\s*[0-9]+:\s*[0-9a-fA-F]{8}:" + re.escape(port_hex) + r"\s+[0-9a-fA-F]{8}:[0-9a-fA-F]{4}\s+" - - # Search /proc/net/tcp for any entry with this port - # NOTE: grep requires quote string with regular expression - # TODO: added a support for tcp/ip v6 - grep_cmd_s = "grep -q -E \"" + C_REGEXP + "\" /proc/net/tcp" - - cmd = [ - "/bin/bash", - "-c", - grep_cmd_s, - ] - - exit_status, output, error = self.exec_command( - cmd=cmd, - encoding=get_default_encoding(), - ignore_errors=True, - verbose=True - ) - - # grep exit 0 -> port is busy - if exit_status == 0: - return False - - # grep exit 1 -> port is free - if exit_status == 1: - return True - - # any other code is an unexpected error - errMsg = f"grep returned unexpected exit code: {exit_status}" - raise RaiseError.CommandExecutionError( - cmd=cmd, - exit_code=exit_status, - message=errMsg, - error=error, - out=output - ) - - def get_tempdir(self) -> str: - command = ["mktemp", "-u", "-d"] - - exec_exitcode, exec_output, exec_error = self.exec_command( - command, - verbose=True, - encoding=get_default_encoding(), - ignore_errors=True - ) - - assert type(exec_exitcode) == int # noqa: E721 - assert type(exec_output) == str # noqa: E721 - assert type(exec_error) == str # noqa: E721 - - if exec_exitcode != 0: - RaiseError.CommandExecutionError( - cmd=command, - exit_code=exec_exitcode, - message="Could not detect a temporary directory.", - error=exec_error, - out=exec_output) - - temp_subdir = exec_output.strip() - assert type(temp_subdir) == str # noqa: E721 - temp_dir = os.path.dirname(temp_subdir) - assert type(temp_dir) == str # noqa: E721 - return temp_dir - - @staticmethod - def _is_port_free__process_0(error: str) -> bool: - assert type(error) == str # noqa: E721 - # - # Example of error text: - # "Connection to localhost (127.0.0.1) 1024 port [tcp/*] succeeded!\n" - # - # May be here is needed to check error message? - # - return False - - @staticmethod - def _is_port_free__process_1(error: str) -> bool: - assert type(error) == str # noqa: E721 - # May be here is needed to check error message? 
- - def get_tempdir(self) -> str: - command = ["mktemp", "-u", "-d"] - - exec_exitcode, exec_output, exec_error = self.exec_command( - command, - verbose=True, - encoding=get_default_encoding(), - ignore_errors=True - ) - - assert type(exec_exitcode) == int # noqa: E721 - assert type(exec_output) == str # noqa: E721 - assert type(exec_error) == str # noqa: E721 - - if exec_exitcode != 0: - RaiseError.CommandExecutionError( - cmd=command, - exit_code=exec_exitcode, - message="Could not detect a temporary directory.", - error=exec_error, - out=exec_output) - - temp_subdir = exec_output.strip() - assert type(temp_subdir) == str # noqa: E721 - temp_dir = os.path.dirname(temp_subdir) - assert type(temp_dir) == str # noqa: E721 - return temp_dir - - @staticmethod - def _is_port_free__process_0(error: str) -> bool: - assert type(error) == str # noqa: E721 - # - # Example of error text: - # "Connection to localhost (127.0.0.1) 1024 port [tcp/*] succeeded!\n" - # - # Maybe the error message should be checked here? - # - return False - - @staticmethod - def _is_port_free__process_1(error: str) -> bool: - assert type(error) == str # noqa: E721 - # Maybe the error message should be checked here? - return True - - @staticmethod - def _build_cmdline(cmd, exec_env: typing.Dict = None) -> str: - cmd_items = __class__._create_exec_env_list(exec_env) - - assert type(cmd_items) == list # noqa: E721 - - cmd_items.append(__class__._ensure_cmdline(cmd)) - - cmdline = ';'.join(cmd_items) - assert type(cmdline) == str # noqa: E721 - return cmdline - - @staticmethod - def _ensure_cmdline(cmd) -> str: - if type(cmd) == str: # noqa: E721 - cmd_s = cmd - elif type(cmd) == list: # noqa: E721 - cmd_s = subprocess.list2cmdline(cmd) - else: - raise ValueError("Invalid 'cmd' argument type - {0}".format(type(cmd).__name__)) - - assert type(cmd_s) == str # noqa: E721 - return cmd_s - - @staticmethod - def _create_exec_env_list(exec_env: typing.Dict) -> typing.List[str]: - env: typing.Dict[str, str] = dict() - - # ---------------------------------- SYSTEM ENV - for envvar in os.environ.items(): - if __class__._does_put_envvar_into_exec_cmd(envvar[0]): - env[envvar[0]] = envvar[1] - - # ---------------------------------- EXEC (LOCAL) ENV - if exec_env is None: - pass - else: - for envvar in exec_env.items(): - assert type(envvar) == tuple # noqa: E721 - assert len(envvar) == 2 - assert type(envvar[0]) == str # noqa: E721 - env[envvar[0]] = envvar[1] - - # ---------------------------------- FINAL BUILD - result: typing.List[str] = list() - for envvar in env.items(): - assert type(envvar) == tuple # noqa: E721 - assert len(envvar) == 2 - assert type(envvar[0]) == str # noqa: E721 - - if envvar[1] is None: - result.append("unset " + envvar[0]) - else: - assert type(envvar[1]) == str # noqa: E721 - qvalue = __class__._quote_envvar(envvar[1]) - assert type(qvalue) == str # noqa: E721 - result.append(envvar[0] + "=" + qvalue) - continue - - return result - - sm_envs_for_exec_cmd = ["LANG", "LANGUAGE"] - - @staticmethod - def _does_put_envvar_into_exec_cmd(name: str) -> bool: - assert type(name) == str # noqa: E721 - name = name.upper() - if name.startswith("LC_"): - return True - if name in __class__.sm_envs_for_exec_cmd: - return True - return False - - @staticmethod - def _quote_envvar(value: str) -> str: - assert type(value) == str # noqa: E721 - result = "\"" - for ch in value: - if ch == "\"": - result += "\\\"" - elif ch == "\\": - result += "\\\\" - else: - result += ch - result += "\"" - return result - - @staticmethod - def _build_path(a: str, *parts: str) -> str: - assert a is not None - assert parts is not None - assert type(a) == str # noqa: E721 - assert type(parts) == tuple # noqa: E721 - return posixpath.join(a, *parts) - - -def normalize_error(error): - if isinstance(error, bytes): - return error.decode() - return error
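A quick illustration of the command-line builders above: _create_exec_env_list emits `NAME="value"` or `unset NAME` items that _build_cmdline joins with ';' in front of the command, and _quote_envvar's character loop is equivalent to two str.replace passes. A sketch with a hypothetical helper name:

```python
def quote_envvar(value: str) -> str:
    # Same rule as _quote_envvar: wrap in double quotes and
    # backslash-escape embedded '"' and '\' characters.
    return '"' + value.replace("\\", "\\\\").replace('"', '\\"') + '"'

# A built command line then looks roughly like:
#   unset LANG;MYVAR="a \"b\"";echo hi
assert quote_envvar('a "b"') == '"a \\"b\\""'
```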
diff --git a/testgres/plugins/__init__.py b/testgres/plugins/__init__.py deleted file mode 100644 index 824eadc6..00000000 --- a/testgres/plugins/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from .pg_probackup2.pg_probackup2.gdb import GDBobj -from .pg_probackup2.pg_probackup2.app import ProbackupApp, ProbackupException -from .pg_probackup2.pg_probackup2.init_helpers import init_params -from .pg_probackup2.pg_probackup2.storage.fs_backup import FSTestBackupDir - -__all__ = [ - "ProbackupApp", "ProbackupException", "init_params", "FSTestBackupDir", "GDBobj" -] diff --git a/testgres/plugins/pg_probackup2/README.md b/testgres/plugins/pg_probackup2/README.md deleted file mode 100644 index fb996c44..00000000 --- a/testgres/plugins/pg_probackup2/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Deprecated -The project was moved to https://github.com/postgrespro/testgres-pg_probackup2 -Use the new repository for commits. - -# testgres - pg_probackup2 - -Control and testing utility for [pg_probackup2](https://github.com/postgrespro/pg_probackup). Python 3.5+ is supported. - - -## Installation - -To install `testgres`, run: - -``` -pip install testgres-pg_probackup -``` - -We encourage you to use `virtualenv` for your testing environment. -The package requires testgres~=1.9.3. - -## Usage - -### Environment variables - -| Variable | Required | Default value | Description | -| - | - | - | - | -| PGPROBACKUP_TMP_DIR | No | tests/tmp_dirs | The root of the temporary directory hierarchy where tests store data and logs. Relative paths start from the current working directory. | -| PG_PROBACKUP_TEST_BACKUP_DIR_PREFIX | No | Temporary test hierarchy | Prefix of the test backup directories. Must be an absolute path. Use this variable to store test backups in a location other than the temporary test hierarchy. | - -See [Testgres](https://github.com/postgrespro/testgres/tree/master#environment) on how to configure a custom Postgres installation using `PG_CONFIG` and `PG_BIN` environment variables. - -### Examples - -Here is an example of what you can do with `testgres-pg_probackup2`: - -```python -# You can see the full script here: plugins/pg_probackup2/pg_probackup2/tests/basic_test.py -def test_full_backup(self): - # Setting up a simple test node - node = self.pg_node.make_simple('node', pg_options={"fsync": "off", "synchronous_commit": "off"}) - - # Initialize and configure Probackup - self.pb.init() - self.pb.add_instance('node', node) - self.pb.set_archiving('node', node) - - # Start the node and initialize pgbench - node.slow_start() - node.pgbench_init(scale=100, no_vacuum=True) - - # Perform backup and validation - backup_id = self.pb.backup_node('node', node) - out = self.pb.validate('node', backup_id) - - # Check if the backup is valid - self.assertIn(f"INFO: Backup {backup_id} is valid", out) -``` - -## Authors - -[Postgres Professional](https://postgrespro.ru/about) diff --git a/testgres/plugins/pg_probackup2/__init__.py b/testgres/plugins/pg_probackup2/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/__init__.py b/testgres/plugins/pg_probackup2/pg_probackup2/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/app.py b/testgres/plugins/pg_probackup2/pg_probackup2/app.py deleted file mode 100644 index 2b87b48f..00000000 --- a/testgres/plugins/pg_probackup2/pg_probackup2/app.py +++ /dev/null @@ -1,906 +0,0 @@ -import contextlib -import importlib -import json -import logging -import os -import re -import subprocess -import threading -import time -import unittest - -import testgres - -from .storage.fs_backup import TestBackupDir, FSTestBackupDir -from .gdb import GDBobj -from .init_helpers import init_params - -warning = """ -Wrong split in show_pb -Original Header: -{header} -Original Body: -{body} -Split Header -{header_split} -Split Body -{body_split} -""" - - -class ProbackupException(Exception): - def __init__(self, message, cmd): - self.message = message - self.cmd = cmd - - def __str__(self): - return '\n ERROR: {0}\n CMD: {1}'.format(repr(self.message), self.cmd) - - -# Local backup control -fs_backup_class = FSTestBackupDir - - -class ProbackupApp: - - def __init__(self, test_class:
unittest.TestCase, - pg_node, pb_log_path, test_env, auto_compress_alg, backup_dir, probackup_path=None): - self.process = None - self.test_class = test_class - self.pg_node = pg_node - self.pb_log_path = pb_log_path - self.test_env = test_env - self.auto_compress_alg = auto_compress_alg - self.backup_dir = backup_dir - self.probackup_path = probackup_path or init_params.probackup_path - self.probackup_old_path = init_params.probackup_old_path - self.remote = init_params.remote - self.wal_tree_enabled = init_params.wal_tree_enabled - self.verbose = init_params.verbose - self.archive_compress = init_params.archive_compress - self.test_class.output = None - self.execution_time = None - self.valgrind_sup_path = init_params.valgrind_sup_path - - def form_daemon_process(self, cmdline, env): - def stream_output(stream: subprocess.PIPE) -> None: - try: - for line in iter(stream.readline, ''): - print(line) - self.test_class.output += line - finally: - stream.close() - - self.process = subprocess.Popen( - cmdline, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - env=env - ) - logging.info(f"Process started in background with PID: {self.process.pid}") - - if self.process.stdout and self.process.stderr: - stdout_thread = threading.Thread(target=stream_output, args=(self.process.stdout,), daemon=True) - stderr_thread = threading.Thread(target=stream_output, args=(self.process.stderr,), daemon=True) - - stdout_thread.start() - stderr_thread.start() - - return self.process.pid - - # ---- Start run function ---- # - def run(self, command, gdb=False, old_binary=False, return_id=True, env=None, - skip_log_directory=False, expect_error=False, use_backup_dir=True, daemonize=False): - """ - Run pg_probackup - backup_dir: target directory for making backup - command: commandline options - expect_error: option for ignoring errors and getting error message as a result of running the function - gdb: when True it returns GDBObj(), when tuple('suspend', port) it runs probackup - in suspended gdb mode with attachable gdb port, for local debugging - """ - command = self._add_backup_dir_to_cmd(command, use_backup_dir) - # Old bin or regular one - binary_path = self._get_binary_path(old_binary) - - if not env: - env = self.test_env - # Add additional options if needed - command, strcommand = self._add_options(command, skip_log_directory) - - self.test_class.cmd = f"{binary_path} {strcommand}" - if self.verbose: - print(self.test_class.cmd) - - cmdline = self._form_cmdline(binary_path, command) - - if gdb is True: - # general test flow for using GDBObj - return GDBobj(cmdline, self.test_class) - - return self._execute_command(cmdline, env, command, gdb, expect_error, return_id, daemonize) - - def _add_backup_dir_to_cmd(self, command: list, use_backup_dir: TestBackupDir): - if isinstance(use_backup_dir, TestBackupDir): - return [command[0], *use_backup_dir.pb_args, *command[1:]] - elif use_backup_dir: - return [command[0], *self.backup_dir.pb_args, *command[1:]] - else: - return [command[0], *self.backup_dir.pb_args[2:], *command[1:]] - - def _get_binary_path(self, old_binary): - if old_binary: - if not self.probackup_old_path: - logging.error('PGPROBACKUPBIN_OLD is not set') - exit(1) - return self.probackup_old_path - return self.probackup_path - - def _add_options(self, command: list, skip_log_directory: bool): - strcommand = ' '.join(str(p) for p in command) - - if '--log-level-file' in strcommand and \ - '--log-directory' not in strcommand and \ - not skip_log_directory: - command += 
['--log-directory=' + self.pb_log_path] - strcommand += ' ' + command[-1] - - if 'pglz' in strcommand and \ - ' -j' not in strcommand and \ - '--thread' not in strcommand: - command += ['-j', '1'] - strcommand += ' -j 1' - - return command, strcommand - - def _form_cmdline(self, binary_path, command): - cmdline = [binary_path, *command] - - if self.valgrind_sup_path and command[0] != "--version": - os.makedirs(self.pb_log_path, exist_ok=True) - if self.valgrind_sup_path and not os.path.isfile(self.valgrind_sup_path): - raise FileNotFoundError(f"PG_PROBACKUP_VALGRIND_SUP should contain path to valgrind suppression file, " - f"but found: {self.valgrind_sup_path}") - valgrind_cmd = [ - "valgrind", - "--gen-suppressions=all", - "--leak-check=full", - "--show-reachable=yes", - "--error-limit=no", - "--show-leak-kinds=all", - "--errors-for-leak-kinds=all", - "--error-exitcode=0", - f"--log-file={os.path.join(self.pb_log_path, f'valgrind-{command[0]}-%p.log')}", - f"--suppressions={self.valgrind_sup_path}", - "--" - ] - cmdline = valgrind_cmd + cmdline - - return cmdline - - def _execute_command(self, cmdline, env, command, gdb, expect_error, return_id, daemonize): - try: - if isinstance(gdb, tuple) and gdb[0] == 'suspend': - gdb_port = gdb[1] - cmdline = ['gdbserver'] + ['localhost:' + str(gdb_port)] + cmdline - logging.warning("pg_probackup gdb suspended, waiting for gdb connection on localhost:{0}".format(gdb_port)) - - # Execute command - start_time = time.time() - if daemonize: - return self.form_daemon_process(cmdline, env) - else: - self.test_class.output = subprocess.check_output( - cmdline, - stderr=subprocess.STDOUT, - env=env - ).decode('utf-8', errors='replace') - end_time = time.time() - self.execution_time = end_time - start_time - - if command[0] == 'backup' and return_id: - result = self.get_backup_id() - else: - result = self.test_class.output - if expect_error is True: - assert False, f"Exception was expected, but run finished successfully with result: `{result}`\n" \ - f"CMD: {self.test_class.cmd}" - elif expect_error: - assert False, f"Exception was expected {expect_error}, but run finished successfully with result: `{result}`\n" \ - f"CMD: {self.test_class.cmd}" - return result - except subprocess.CalledProcessError as e: - self.test_class.output = e.output.decode('utf-8').replace("\r", "") - if expect_error: - return self.test_class.output - else: - raise ProbackupException(self.test_class.output, self.test_class.cmd) - # ---- End run function ---- # - - def get_backup_id(self): - if init_params.major_version > 2: - pattern = re.compile(r"Backup (.*) completed successfully.") - for line in self.test_class.output.splitlines(): - match = pattern.search(line) - if match: - return match.group(1) - else: - for line in self.test_class.output.splitlines(): - if 'INFO: Backup' in line and 'completed' in line: - return line.split()[2] - return None
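get_backup_id above scrapes the backup identifier from pg_probackup's console output. A self-contained illustration of the modern (major_version > 2) branch, with an invented log line:

```python
import re

# Illustrative console line; real IDs are assigned by pg_probackup.
output = "INFO: Backup SAMPLE1 completed successfully."

pattern = re.compile(r"Backup (.*) completed successfully.")
backup_id = None
for line in output.splitlines():
    match = pattern.search(line)
    if match:
        backup_id = match.group(1)
        break
assert backup_id == "SAMPLE1"
```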
- - def init(self, options=None, old_binary=False, skip_log_directory=False, expect_error=False, use_backup_dir=True): - if options is None: - options = [] - return self.run(['init'] + options, - old_binary=old_binary, - skip_log_directory=skip_log_directory, - expect_error=expect_error, - use_backup_dir=use_backup_dir - ) - - def add_instance(self, instance, node, old_binary=False, options=None, expect_error=False, datname=False): - if options is None: - options = [] - - if not datname: - datname = 'postgres' - - cmd = [ - 'add-instance', - '--instance={0}'.format(instance), - '-D', node.data_dir, - '--pgport', '%i' % node.port, - '--pgdatabase', datname - ] - - # don't forget to kill old_binary after remote ssh release - if self.remote and not old_binary: - options = options + [ - '--remote-proto=ssh', - '--remote-host=localhost'] - - if self.wal_tree_enabled: - options = options + ['--wal-tree'] - - return self.run(cmd + options, old_binary=old_binary, expect_error=expect_error) - - def set_config(self, instance, old_binary=False, options=None, expect_error=False): - if options is None: - options = [] - cmd = [ - 'set-config', - '--instance={0}'.format(instance), - ] - - return self.run(cmd + options, old_binary=old_binary, expect_error=expect_error) - - def set_backup(self, instance, backup_id=False, - old_binary=False, options=None, expect_error=False): - if options is None: - options = [] - cmd = [ - 'set-backup', - ] - - if instance: - cmd = cmd + ['--instance={0}'.format(instance)] - - if backup_id: - cmd = cmd + ['-i', backup_id] - - return self.run(cmd + options, old_binary=old_binary, expect_error=expect_error) - - def del_instance(self, instance, options=None, old_binary=False, expect_error=False): - if options is None: - options = [] - cmd = ['del-instance', '--instance={0}'.format(instance)] + options - return self.run(cmd, - old_binary=old_binary, - expect_error=expect_error) - - def backup_node( - self, instance, node, data_dir=False, - backup_type='full', datname=False, options=None, - gdb=False, - old_binary=False, return_id=True, no_remote=False, - env=None, - expect_error=False, - sync=False - ): - if options is None: - options = [] - if not node and not data_dir: - logging.error('You must provide either node or data_dir for backup') - exit(1) - - if not datname: - datname = 'postgres' - - cmd_list = [ - 'backup', - '--instance={0}'.format(instance), - # "-D", pgdata, - '-p', '%i' % node.port, - '-d', datname - ] - - if data_dir: - cmd_list += ['-D', self._node_dir(data_dir)] - - # don't forget to kill old_binary after remote ssh release - if self.remote and not old_binary and not no_remote: - options = options + [ - '--remote-proto=ssh', - '--remote-host=localhost'] - - if self.auto_compress_alg and '--compress' in options and \ - self.archive_compress and self.archive_compress != 'zlib': - options = [o if o != '--compress' else f'--compress-algorithm={self.archive_compress}' - for o in options] - - if backup_type: - cmd_list += ['-b', backup_type] - - if not (old_binary or sync): - cmd_list += ['--no-sync'] - - return self.run(cmd_list + options, gdb, old_binary, return_id, env=env, - expect_error=expect_error) - - def backup_replica_node(self, instance, node, data_dir=False, *, - master, backup_type='full', datname=False, - options=None, env=None): - """ - Try to reliably run a backup on a replica by switching WAL on the master - while pg_probackup is waiting for the archived WAL segment - """ - if options is None: - options = [] - assert '--stream' not in options or backup_type == 'page', \ - "backup_replica_node should be used with one of archive-mode or " \ - "page-stream mode" - - options = options.copy() - if not any('--log-level-file' in x for x in options): - options.append('--log-level-file=INFO') - - gdb = self.backup_node( - instance, node, data_dir, - backup_type=backup_type, - datname=datname, - options=options, - env=env, - gdb=True) - gdb.set_breakpoint('wait_wal_lsn') - # we need to break on wait_wal_lsn in pg_stop_backup - gdb.run_until_break() - if backup_type == 'page': - self.switch_wal_segment(master) - if '--stream' not in options: - gdb.continue_execution_until_break() - 
self.switch_wal_segment(master) - gdb.continue_execution_until_exit() - - output = self.read_pb_log() - self.unlink_pg_log() - parsed_output = re.compile(r'Backup \S+ completed').search(output) - assert parsed_output, f"Expected: `Backup 'backup_id' completed`, but found `{output}`" - backup_id = parsed_output[0].split(' ')[1] - return (backup_id, output) - - def checkdb_node( - self, use_backup_dir=False, instance=False, data_dir=False, - options=None, gdb=False, old_binary=False, - skip_log_directory=False, - expect_error=False - ): - if options is None: - options = [] - cmd_list = ["checkdb"] - - if instance: - cmd_list += ["--instance={0}".format(instance)] - - if data_dir: - cmd_list += ["-D", self._node_dir(data_dir)] - - return self.run(cmd_list + options, gdb, old_binary, - skip_log_directory=skip_log_directory, expect_error=expect_error, - use_backup_dir=use_backup_dir) - - def merge_backup( - self, instance, backup_id, - gdb=False, old_binary=False, options=None, expect_error=False): - if options is None: - options = [] - cmd_list = [ - 'merge', - '--instance={0}'.format(instance), - '-i', backup_id - ] - - return self.run(cmd_list + options, gdb, old_binary, expect_error=expect_error) - - def restore_node( - self, instance, node=None, restore_dir=None, - backup_id=None, old_binary=False, options=None, - gdb=False, - expect_error=False, - sync=False - ): - if options is None: - options = [] - if node: - if isinstance(node, str): - data_dir = node - else: - data_dir = node.data_dir - elif restore_dir: - data_dir = self._node_dir(restore_dir) - else: - raise ValueError("You must provide ether node or base_dir for backup") - - cmd_list = [ - 'restore', - '-D', data_dir, - '--instance={0}'.format(instance) - ] - - # don`t forget to kill old_binary after remote ssh release - if self.remote and not old_binary: - options = options + [ - '--remote-proto=ssh', - '--remote-host=localhost'] - - if backup_id: - cmd_list += ['-i', backup_id] - - if not (old_binary or sync): - cmd_list += ['--no-sync'] - - return self.run(cmd_list + options, gdb=gdb, old_binary=old_binary, expect_error=expect_error) - - def catchup_node( - self, - backup_mode, source_pgdata, destination_node, - options=None, - remote_host='localhost', - remote_port=None, - expect_error=False, - gdb=False - ): - - if options is None: - options = [] - cmd_list = [ - 'catchup', - '--backup-mode={0}'.format(backup_mode), - '--source-pgdata={0}'.format(source_pgdata), - '--destination-pgdata={0}'.format(destination_node.data_dir) - ] - if self.remote: - cmd_list += ['--remote-proto=ssh', f'--remote-host={remote_host}'] - if remote_port: - cmd_list.append(f'--remote-port={remote_port}') - if self.verbose: - cmd_list += [ - '--log-level-file=VERBOSE', - '--log-directory={0}'.format(destination_node.logs_dir) - ] - - return self.run(cmd_list + options, gdb=gdb, expect_error=expect_error, use_backup_dir=False) - - def show( - self, instance=None, backup_id=None, - options=None, as_text=False, as_json=True, old_binary=False, - env=None, - expect_error=False, - gdb=False - ): - - if options is None: - options = [] - backup_list = [] - specific_record = {} - cmd_list = [ - 'show', - ] - if instance: - cmd_list += ['--instance={0}'.format(instance)] - - if backup_id: - cmd_list += ['-i', backup_id] - - # AHTUNG, WARNING will break json parsing - if as_json: - cmd_list += ['--format=json', '--log-level-console=error'] - - if as_text: - # You should print it when calling as_text=true - return self.run(cmd_list + options, 
old_binary=old_binary, env=env, - expect_error=expect_error, gdb=gdb) - - # get show result as list of lines - if as_json: - text_json = str(self.run(cmd_list + options, old_binary=old_binary, env=env, - expect_error=expect_error, gdb=gdb)) - try: - if expect_error: - return text_json - data = json.loads(text_json) - except ValueError: - assert False, f"Couldn't parse {text_json} as json. " \ - f"Check that you don't have additional messages inside the log or use 'as_text=True'" - - for instance_data in data: - # find specific instance if requested - if instance and instance_data['instance'] != instance: - continue - - for backup in reversed(instance_data['backups']): - # find specific backup if requested - if backup_id: - if backup['id'] == backup_id: - return backup - else: - backup_list.append(backup) - - if backup_id is not None: - assert False, "Failed to find backup with ID: {0}".format(backup_id) - - return backup_list - else: - show_splitted = self.run(cmd_list + options, old_binary=old_binary, env=env, - expect_error=expect_error).splitlines() - if instance is not None and backup_id is None: - # cut header(ID, Mode, etc) from show as single string - header = show_splitted[1:2][0] - # cut backup records from show as single list - # with string for every backup record - body = show_splitted[3:] - # inverse list so oldest record come first - body = body[::-1] - # split string in list with string for every header element - header_split = re.split(' +', header) - # Remove empty items - for i in header_split: - if i == '': - header_split.remove(i) - continue - header_split = [ - header_element.rstrip() for header_element in header_split - ] - for backup_record in body: - backup_record = backup_record.rstrip() - # split list with str for every backup record element - backup_record_split = re.split(' +', backup_record) - # Remove empty items - for i in backup_record_split: - if i == '': - backup_record_split.remove(i) - if len(header_split) != len(backup_record_split): - logging.error(warning.format( - header=header, body=body, - header_split=header_split, - body_split=backup_record_split) - ) - exit(1) - new_dict = dict(zip(header_split, backup_record_split)) - backup_list.append(new_dict) - return backup_list - else: - # cut out empty lines and lines started with # - # and other garbage then reconstruct it as dictionary - # print show_splitted - sanitized_show = [item for item in show_splitted if item] - sanitized_show = [ - item for item in sanitized_show if not item.startswith('#') - ] - # print sanitized_show - for line in sanitized_show: - name, var = line.partition(' = ')[::2] - var = var.strip('"') - var = var.strip("'") - specific_record[name.strip()] = var - - if not specific_record: - assert False, "Failed to find backup with ID: {0}".format(backup_id) - - return specific_record - - def show_archive( - self, instance=None, options=None, - as_text=False, as_json=True, old_binary=False, - tli=0, - expect_error=False - ): - if options is None: - options = [] - cmd_list = [ - 'show', - '--archive', - ] - if instance: - cmd_list += ['--instance={0}'.format(instance)] - - # AHTUNG, WARNING will break json parsing - if as_json: - cmd_list += ['--format=json', '--log-level-console=error'] - - if as_text: - # You should print it when calling as_text=true - return self.run(cmd_list + options, old_binary=old_binary, expect_error=expect_error) - - if as_json: - if as_text: - data = self.run(cmd_list + options, old_binary=old_binary, expect_error=expect_error) - else: - data = 
json.loads(self.run(cmd_list + options, old_binary=old_binary, expect_error=expect_error)) - - if instance: - instance_timelines = None - for instance_name in data: - if instance_name['instance'] == instance: - instance_timelines = instance_name['timelines'] - break - - if tli > 0: - for timeline in instance_timelines: - if timeline['tli'] == tli: - return timeline - - return {} - - if instance_timelines: - return instance_timelines - - return data - else: - show_splitted = self.run(cmd_list + options, old_binary=old_binary, - expect_error=expect_error).splitlines() - logging.error(show_splitted) - exit(1) - - def validate( - self, instance=None, backup_id=None, - options=None, old_binary=False, gdb=False, expect_error=False - ): - if options is None: - options = [] - cmd_list = [ - 'validate', - ] - if instance: - cmd_list += ['--instance={0}'.format(instance)] - if backup_id: - cmd_list += ['-i', backup_id] - - return self.run(cmd_list + options, old_binary=old_binary, gdb=gdb, - expect_error=expect_error) - - def delete( - self, instance, backup_id=None, - options=None, old_binary=False, gdb=False, expect_error=False): - if options is None: - options = [] - cmd_list = [ - 'delete', - ] - - cmd_list += ['--instance={0}'.format(instance)] - if backup_id: - cmd_list += ['-i', backup_id] - - return self.run(cmd_list + options, old_binary=old_binary, gdb=gdb, - expect_error=expect_error) - - def delete_expired( - self, instance, options=None, old_binary=False, expect_error=False): - if options is None: - options = [] - cmd_list = [ - 'delete', - '--instance={0}'.format(instance) - ] - return self.run(cmd_list + options, old_binary=old_binary, expect_error=expect_error) - - def show_config(self, instance, old_binary=False, expect_error=False, gdb=False): - out_dict = {} - cmd_list = [ - 'show-config', - '--instance={0}'.format(instance) - ] - - res = self.run(cmd_list, old_binary=old_binary, expect_error=expect_error, gdb=gdb).splitlines() - for line in res: - if not line.startswith('#'): - name, var = line.partition(' = ')[::2] - out_dict[name] = var - return out_dict - - def run_binary(self, command, asynchronous=False, env=None): - - if not env: - env = self.test_env - - if self.verbose: - print([' '.join(map(str, command))]) - try: - if asynchronous: - return subprocess.Popen( - command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env - ) - else: - self.test_class.output = subprocess.check_output( - command, - stderr=subprocess.STDOUT, - env=env - ).decode('utf-8') - return self.test_class.output - except subprocess.CalledProcessError as e: - raise ProbackupException(e.output.decode('utf-8'), command) - - def _node_dir(self, base_dir): - return os.path.join(self.pg_node.test_path, base_dir) - - def set_archiving( - self, instance, node, replica=False, - overwrite=False, compress=True, old_binary=False, - log_level=False, archive_timeout=False, - custom_archive_command=None): - - # parse postgresql.auto.conf - options = {} - if replica: - options['archive_mode'] = 'always' - options['hot_standby'] = 'on' - else: - options['archive_mode'] = 'on' - - if custom_archive_command is None: - archive_command = " ".join([f'"{init_params.probackup_path}"', - 'archive-push', *self.backup_dir.pb_args]) - if os.name == 'posix': - # Dash produces a core dump when it gets a SIGQUIT from its - # child process so replace the shell with pg_probackup - archive_command = 'exec ' + archive_command - elif os.name == "nt": - archive_command = archive_command.replace("\\", 
"\\\\") - archive_command += f' --instance={instance}' - - # don`t forget to kill old_binary after remote ssh release - if init_params.remote and not old_binary: - archive_command += ' --remote-proto=ssh --remote-host=localhost' - - if init_params.archive_compress and compress: - archive_command += ' --compress-algorithm=' + init_params.archive_compress - - if overwrite: - archive_command += ' --overwrite' - - if init_params.major_version > 2: - archive_command += ' --log-level-console=trace' - else: - archive_command += ' --log-level-console=VERBOSE' - archive_command += ' -j 5' - archive_command += ' --batch-size 10' - - archive_command += ' --no-sync' - - if archive_timeout: - archive_command += f' --archive-timeout={archive_timeout}' - - if os.name == 'posix': - archive_command += ' --wal-file-path=%p --wal-file-name=%f' - - elif os.name == 'nt': - archive_command += ' --wal-file-path="%p" --wal-file-name="%f"' - - if log_level: - archive_command += f' --log-level-console={log_level}' - else: # custom_archive_command is not None - archive_command = custom_archive_command - options['archive_command'] = archive_command - - node.set_auto_conf(options) - - def switch_wal_segment(self, node, sleep_seconds=1, and_tx=False): - """ - Execute pg_switch_wal() in given node - - Args: - node: an instance of PostgresNode or NodeConnection class - """ - if isinstance(node, testgres.PostgresNode): - with node.connect('postgres') as con: - if and_tx: - con.execute('select txid_current()') - lsn = con.execute('select pg_switch_wal()')[0][0] - else: - lsn = node.execute('select pg_switch_wal()')[0][0] - - if sleep_seconds > 0: - time.sleep(sleep_seconds) - return lsn - - @contextlib.contextmanager - def switch_wal_after(self, node, seconds, and_tx=True): - tm = threading.Timer(seconds, self.switch_wal_segment, [node, 0, and_tx]) - tm.start() - try: - yield - finally: - tm.cancel() - tm.join() - - def read_pb_log(self): - with open(os.path.join(self.pb_log_path, 'pg_probackup.log')) as fl: - return fl.read() - - def unlink_pg_log(self): - os.unlink(os.path.join(self.pb_log_path, 'pg_probackup.log')) - - def load_backup_class(fs_type): - fs_type = os.environ.get('PROBACKUP_FS_TYPE') - implementation = f"{__package__}.fs_backup.FSTestBackupDir" - if fs_type: - implementation = fs_type - - logging.info("Using ", implementation) - module_name, class_name = implementation.rsplit(sep='.', maxsplit=1) - - module = importlib.import_module(module_name) - - return getattr(module, class_name) - - def archive_push(self, instance, node, wal_file_name, wal_file_path=None, options=None, expect_error=False): - if options is None: - options = [] - cmd = [ - 'archive-push', - '--instance={0}'.format(instance), - '--wal-file-name={0}'.format(wal_file_name), - ] - if wal_file_path is None: - cmd = cmd + ['--wal-file-path={0}'.format(os.path.join(node.data_dir, 'pg_wal'))] - else: - cmd = cmd + ['--wal-file-path={0}'.format(wal_file_path)] - return self.run(cmd + options, expect_error=expect_error) - - def archive_get(self, instance, wal_file_name, wal_file_path, options=None, expect_error=False): - if options is None: - options = [] - cmd = [ - 'archive-get', - '--instance={0}'.format(instance), - '--wal-file-name={0}'.format(wal_file_name), - '--wal-file-path={0}'.format(wal_file_path), - ] - return self.run(cmd + options, expect_error=expect_error) - - def maintain( - self, instance=None, backup_id=None, - options=None, old_binary=False, gdb=False, expect_error=False - ): - if options is None: - options = [] - cmd_list = 
[ - 'maintain', - ] - if instance: - cmd_list += ['--instance={0}'.format(instance)] - if backup_id: - cmd_list += ['-i', backup_id] - - return self.run(cmd_list + options, old_binary=old_binary, gdb=gdb, - expect_error=expect_error) - - def build_backup_dir(self, backup='backup'): - return fs_backup_class(rel_path=self.rel_path, backup=backup) diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/gdb.py b/testgres/plugins/pg_probackup2/pg_probackup2/gdb.py deleted file mode 100644 index b7ca549e..00000000 --- a/testgres/plugins/pg_probackup2/pg_probackup2/gdb.py +++ /dev/null @@ -1,341 +0,0 @@ -import functools -import os -import subprocess -import sys -import unittest -from time import sleep - - -class GdbException(Exception): - def __init__(self, message="False"): - self.message = message - - def __str__(self): - return '\n ERROR: {0}\n'.format(repr(self.message)) - - -class GDBobj: - _gdb_enabled = False - _gdb_ok = False - _gdb_ptrace_ok = False - - def __init__(self, cmd, env, attach=False): - self.verbose = env.verbose - self.output = '' - self._did_quit = False - self.has_breakpoint = False - - # Check gdb flag is set up - if not hasattr(env, "_gdb_decorated") or not env._gdb_decorated: - raise GdbException("Test should be decorated with @needs_gdb") - if not self._gdb_enabled: - raise GdbException("No `PGPROBACKUP_GDB=on` is set.") - if not self._gdb_ok: - if not self._gdb_ptrace_ok: - raise GdbException("set /proc/sys/kernel/yama/ptrace_scope to 0" - " to run GDB tests") - raise GdbException("No gdb usage possible.") - - # Check gdb presence - try: - gdb_version, _ = subprocess.Popen( - ['gdb', '--version'], - stdout=subprocess.PIPE - ).communicate() - except OSError: - raise GdbException("Couldn't find gdb on the path") - - self.base_cmd = [ - 'gdb', - '--interpreter', - 'mi2', - ] - - if attach: - self.cmd = self.base_cmd + ['--pid'] + cmd - else: - self.cmd = self.base_cmd + ['--args'] + cmd - - if self.verbose: - print([' '.join(map(str, self.cmd))]) - - self.proc = subprocess.Popen( - self.cmd, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - bufsize=0, - text=True, - errors='replace', - ) - self.gdb_pid = self.proc.pid - - while True: - line = self.get_line() - - if 'No such process' in line: - raise GdbException(line) - - if not line.startswith('(gdb)'): - pass - else: - break - - def __del__(self): - if not self._did_quit and hasattr(self, "proc"): - try: - self.quit() - except subprocess.TimeoutExpired: - self.kill() - - def get_line(self): - line = self.proc.stdout.readline() - self.output += line - return line - - def kill(self): - self._did_quit = True - self.proc.kill() - self.proc.wait(3) - self.proc.stdin.close() - self.proc.stdout.close() - - def terminate_subprocess(self): - self._execute('kill') - - def set_breakpoint(self, location): - - result = self._execute('break ' + location) - self.has_breakpoint = True - for line in result: - if line.startswith('~"Breakpoint'): - return - - elif line.startswith('=breakpoint-created'): - return - - elif line.startswith('^error'): # or line.startswith('(gdb)'): - break - - elif line.startswith('&"break'): - pass - - elif line.startswith('&"Function'): - raise GdbException(line) - - elif line.startswith('&"No line'): - raise GdbException(line) - - elif line.startswith('~"Make breakpoint pending on future shared'): - raise GdbException(line) - - raise GdbException( - 'Failed to set breakpoint.\n Output:\n {0}'.format(result) - ) - - def remove_all_breakpoints(self): - if not 
self.has_breakpoint: - return - - result = self._execute('delete') - self.has_breakpoint = False - for line in result: - - if line.startswith('^done'): - return - - raise GdbException( - 'Failed to remove breakpoints.\n Output:\n {0}'.format(result) - ) - - def run_until_break(self): - result = self._execute('run', False) - for line in result: - if line.startswith('*stopped,reason="breakpoint-hit"'): - return - raise GdbException( - 'Failed to run until breakpoint.\n' - ) - - def continue_execution_until_running(self): - result = self._execute('continue') - - for line in result: - if line.startswith('*running') or line.startswith('^running'): - return - if line.startswith('*stopped,reason="breakpoint-hit"'): - continue - if line.startswith('*stopped,reason="exited-normally"'): - continue - - raise GdbException( - 'Failed to continue execution until running.\n' - ) - - def signal(self, sig): - if 'KILL' in sig: - self.remove_all_breakpoints() - self._execute(f'signal {sig}') - - def continue_execution_until_exit(self): - self.remove_all_breakpoints() - result = self._execute('continue', False) - - for line in result: - if line.startswith('*running'): - continue - if line.startswith('*stopped,reason="breakpoint-hit"'): - continue - if line.startswith('*stopped,reason="exited') or line == '*stopped\n': - self.quit() - return - - raise GdbException( - 'Failed to continue execution until exit.\n' - ) - - def continue_execution_until_error(self): - self.remove_all_breakpoints() - result = self._execute('continue', False) - - for line in result: - if line.startswith('^error'): - return - if line.startswith('*stopped,reason="exited'): - return - if line.startswith( - '*stopped,reason="signal-received",signal-name="SIGABRT"'): - return - - raise GdbException( - 'Failed to continue execution until error.\n') - - def continue_execution_until_break(self, ignore_count=0): - if ignore_count > 0: - result = self._execute( - 'continue ' + str(ignore_count), - False - ) - else: - result = self._execute('continue', False) - - for line in result: - if line.startswith('*stopped,reason="breakpoint-hit"'): - return - if line.startswith('*stopped,reason="exited-normally"'): - break - - raise GdbException( - 'Failed to continue execution until break.\n') - - def show_backtrace(self): - return self._execute("backtrace", running=False) - - def stopped_in_breakpoint(self): - while True: - line = self.get_line() - if self.verbose: - print(line) - if line.startswith('*stopped,reason="breakpoint-hit"'): - return True - - def detach(self): - if not self._did_quit: - self._execute('detach') - - def quit(self): - if not self._did_quit: - self._did_quit = True - self.proc.terminate() - self.proc.wait(3) - self.proc.stdin.close() - self.proc.stdout.close() - - # use for breakpoint, run, continue - def _execute(self, cmd, running=True): - output = [] - self.proc.stdin.flush() - self.proc.stdin.write(cmd + '\n') - self.proc.stdin.flush() - sleep(1) - - # look for the command we just sent - while True: - line = self.get_line() - if self.verbose: - print(repr(line)) - - if cmd not in line: - continue - else: - break - - while True: - line = self.get_line() - output += [line] - if self.verbose: - print(repr(line)) - if line.startswith('^done') or line.startswith('*stopped'): - break - if line.startswith('^error'): - break - if running and (line.startswith('*running') or line.startswith('^running')): - # if running and line.startswith('*running'): - break - return output
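GDBobj drives gdb through the MI2 machine interface over pipes, dispatching on response prefixes such as ^done, ^error, *running and *stopped. A minimal standalone sketch of that interaction (assumes a system gdb is installed; the /bin/true target and line buffering are our choices, not the plugin's):

```python
import subprocess

# Launch gdb in machine-interface mode, as GDBobj.__init__ does.
proc = subprocess.Popen(
    ["gdb", "--interpreter", "mi2", "--args", "/bin/true"],
    stdin=subprocess.PIPE, stdout=subprocess.PIPE,
    text=True, bufsize=1, errors="replace",
)
proc.stdin.write("-gdb-exit\n")
proc.stdin.flush()
for line in proc.stdout:
    # MI output: '~"..."' console text, '^done'/'^exit' results, '(gdb)' prompts.
    if line.startswith("^exit"):
        break
proc.wait()
```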
- - -def _set_gdb(self): - test_env = os.environ.copy() - self._gdb_enabled = test_env.get('PGPROBACKUP_GDB') == 'ON' - self._gdb_ok = self._gdb_enabled - if not self._gdb_enabled or sys.platform != 'linux': - return - try: - with open('/proc/sys/kernel/yama/ptrace_scope') as f: - ptrace = f.read() - except FileNotFoundError: - self._gdb_ptrace_ok = True - return - self._gdb_ptrace_ok = int(ptrace) == 0 - self._gdb_ok = self._gdb_ok and self._gdb_ptrace_ok - - -def _check_gdb_flag_or_skip_test(): - if not GDBobj._gdb_enabled: - return ("skip", - "Specify PGPROBACKUP_GDB and build without " - "optimizations to run this test" - ) - if GDBobj._gdb_ok: - return None - if not GDBobj._gdb_ptrace_ok: - return ("fail", "set /proc/sys/kernel/yama/ptrace_scope to 0" - " to run GDB tests") - else: - return ("fail", "use of gdb is not possible") - - -def needs_gdb(func): - check = _check_gdb_flag_or_skip_test() - if not check: - @functools.wraps(func) - def ok_wrapped(self): - self._gdb_decorated = True - func(self) - - return ok_wrapped - reason = check[1] - if check[0] == "skip": - return unittest.skip(reason)(func) - elif check[0] == "fail": - @functools.wraps(func) - def fail_wrapper(self): - self.fail(reason) - - return fail_wrapper - else: - raise Exception("Wrong action {0}".format(check)) - - -_set_gdb(GDBobj) diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/init_helpers.py b/testgres/plugins/pg_probackup2/pg_probackup2/init_helpers.py deleted file mode 100644 index 9c62dcf1..00000000 --- a/testgres/plugins/pg_probackup2/pg_probackup2/init_helpers.py +++ /dev/null @@ -1,228 +0,0 @@ -import logging -from functools import reduce -import getpass -import os -import re -import shutil -import subprocess -import sys -import testgres - -try: - import lz4.frame # noqa: F401 - - HAVE_LZ4 = True -except ImportError as e: - HAVE_LZ4 = False - LZ4_error = e - -try: - import zstd # noqa: F401 - - HAVE_ZSTD = True -except ImportError as e: - HAVE_ZSTD = False - ZSTD_error = e - -delete_logs = os.getenv('KEEP_LOGS') not in ['1', 'y', 'Y'] - -try: - testgres.configure_testgres( - cache_initdb=False, - cached_initdb_dir=False, - node_cleanup_full=delete_logs) -except Exception as e: - logging.warning("Can't configure testgres: {0}".format(e)) - - -class Init(object): - def __init__(self): - if '-v' in sys.argv or '--verbose' in sys.argv: - self.verbose = True - else: - self.verbose = False - - self._pg_config = testgres.get_pg_config() - self.is_enterprise = self._pg_config.get('PGPRO_EDITION', None) == 'enterprise' - self.is_shardman = self._pg_config.get('PGPRO_EDITION', None) == 'shardman' - self.is_pgpro = 'PGPRO_EDITION' in self._pg_config - self.is_nls_enabled = 'enable-nls' in self._pg_config['CONFIGURE'] - self.is_lz4_enabled = '-llz4' in self._pg_config['LIBS'] - version = self._pg_config['VERSION'].rstrip('develalphabetapre') - parts = [*version.split(' ')[1].split('.'), '0', '0'][:3] - parts[0] = re.match(r'\d+', parts[0]).group() - self.pg_config_version = reduce(lambda v, x: v * 100 + int(x), parts, 0) - - os.environ['LANGUAGE'] = 'en' # set default locale language to en.
All messages will use this locale - test_env = os.environ.copy() - envs_list = [ - 'LANGUAGE', - 'LC_ALL', - 'PGCONNECT_TIMEOUT', - 'PGDATA', - 'PGDATABASE', - 'PGHOSTADDR', - 'PGREQUIRESSL', - 'PGSERVICE', - 'PGSSLMODE', - 'PGUSER', - 'PGPORT', - 'PGHOST' - ] - - for e in envs_list: - test_env.pop(e, None) - - test_env['LC_MESSAGES'] = 'C' - test_env['LC_TIME'] = 'C' - self._test_env = test_env - - # Get the directory from which the script was executed - self.source_path = os.getcwd() - tmp_path = test_env.get('PGPROBACKUP_TMP_DIR') - if tmp_path and os.path.isabs(tmp_path): - self.tmp_path = tmp_path - else: - self.tmp_path = os.path.abspath( - os.path.join(self.source_path, tmp_path or os.path.join('tests', 'tmp_dirs')) - ) - - os.makedirs(self.tmp_path, exist_ok=True) - - self.username = getpass.getuser() - - self.probackup_path = None - if 'PGPROBACKUPBIN' in test_env: - if shutil.which(test_env["PGPROBACKUPBIN"]): - self.probackup_path = test_env["PGPROBACKUPBIN"] - else: - if self.verbose: - print('PGPROBACKUPBIN is not an executable file') - - if not self.probackup_path: - probackup_path_tmp = os.path.join( - testgres.get_pg_config()['BINDIR'], 'pg_probackup') - - if os.path.isfile(probackup_path_tmp): - if not os.access(probackup_path_tmp, os.X_OK): - logging.warning('{0} is not an executable file'.format( - probackup_path_tmp)) - else: - self.probackup_path = probackup_path_tmp - - if not self.probackup_path: - probackup_path_tmp = self.source_path - - if os.path.isfile(probackup_path_tmp): - if not os.access(probackup_path_tmp, os.X_OK): - logging.warning('{0} is not an executable file'.format( - probackup_path_tmp)) - else: - self.probackup_path = probackup_path_tmp - - if not self.probackup_path: - raise Exception('pg_probackup binary is not found') - - if os.name == 'posix': - self.EXTERNAL_DIRECTORY_DELIMITER = ':' - os.environ['PATH'] = os.path.dirname( - self.probackup_path) + ':' + os.environ['PATH'] - - elif os.name == 'nt': - self.EXTERNAL_DIRECTORY_DELIMITER = ';' - os.environ['PATH'] = os.path.dirname( - self.probackup_path) + ';' + os.environ['PATH'] - - self.probackup_old_path = None - if 'PGPROBACKUPBIN_OLD' in test_env: - if (os.path.isfile(test_env['PGPROBACKUPBIN_OLD']) and os.access(test_env['PGPROBACKUPBIN_OLD'], os.X_OK)): - self.probackup_old_path = test_env['PGPROBACKUPBIN_OLD'] - else: - if self.verbose: - print('PGPROBACKUPBIN_OLD is not an executable file') - - self.probackup_version = None - self.old_probackup_version = None - - probackup_version_output = subprocess.check_output( - [self.probackup_path, "--version"], - stderr=subprocess.STDOUT, - ).decode('utf-8') - match = re.search(r"\d+\.\d+\.\d+", - probackup_version_output) - self.probackup_version = match.group(0) if match else None - match = re.search(r"\(compressions: ([^)]*)\)", probackup_version_output) - compressions = match.group(1) if match else None - if compressions: - self.probackup_compressions = {s.strip() for s in compressions.split(',')} - else: - self.probackup_compressions = [] - - if self.probackup_old_path: - old_probackup_version_output = subprocess.check_output( - [self.probackup_old_path, "--version"], - stderr=subprocess.STDOUT, - ).decode('utf-8') - match = re.search(r"\d+\.\d+\.\d+", - old_probackup_version_output) - self.old_probackup_version = match.group(0) if match else None - - self.remote = test_env.get('PGPROBACKUP_SSH_REMOTE', None) == 'ON' - self.ptrack = test_env.get('PG_PROBACKUP_PTRACK', None) == 'ON' and self.pg_config_version >= 110000 - self.wal_tree_enabled 
= test_env.get('PG_PROBACKUP_WAL_TREE_ENABLED', None) == 'ON' - - self.bckp_source = test_env.get('PG_PROBACKUP_SOURCE', 'pro').lower() - if self.bckp_source not in ('base', 'direct', 'pro'): - raise Exception("Wrong PG_PROBACKUP_SOURCE value. Available options: base|direct|pro") - - self.paranoia = test_env.get('PG_PROBACKUP_PARANOIA', None) == 'ON' - env_compress = test_env.get('ARCHIVE_COMPRESSION', None) - if env_compress: - env_compress = env_compress.lower() - if env_compress in ('on', 'zlib'): - self.compress_suffix = '.gz' - self.archive_compress = 'zlib' - elif env_compress == 'lz4': - if not HAVE_LZ4: - raise LZ4_error - if 'lz4' not in self.probackup_compressions: - raise Exception("pg_probackup is not compiled with lz4 support") - self.compress_suffix = '.lz4' - self.archive_compress = 'lz4' - elif env_compress == 'zstd': - if not HAVE_ZSTD: - raise ZSTD_error - if 'zstd' not in self.probackup_compressions: - raise Exception("pg_probackup is not compiled with zstd support") - self.compress_suffix = '.zst' - self.archive_compress = 'zstd' - else: - self.compress_suffix = '' - self.archive_compress = False - - cfs_compress = test_env.get('PG_PROBACKUP_CFS_COMPRESS', None) - if cfs_compress: - self.cfs_compress = cfs_compress.lower() - else: - self.cfs_compress = self.archive_compress - - os.environ["PGAPPNAME"] = "pg_probackup" - self.delete_logs = delete_logs - - if self.probackup_version.split('.')[0].isdigit(): - self.major_version = int(self.probackup_version.split('.')[0]) - else: - raise Exception('Can\'t process pg_probackup version \"{}\": the major version is expected to be a number'.format(self.probackup_version)) - - self.valgrind_sup_path = test_env.get('PG_PROBACKUP_VALGRIND_SUP', None) - - def test_env(self): - return self._test_env.copy() - - -try: - init_params = Init() -except Exception as e: - logging.error(str(e)) - logging.warning("testgres.plugins.probackup2.init_params is set to None.") - init_params = None diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/storage/__init__.py b/testgres/plugins/pg_probackup2/pg_probackup2/storage/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/storage/fs_backup.py b/testgres/plugins/pg_probackup2/pg_probackup2/storage/fs_backup.py deleted file mode 100644 index 6c9d1463..00000000 --- a/testgres/plugins/pg_probackup2/pg_probackup2/storage/fs_backup.py +++ /dev/null @@ -1,104 +0,0 @@ -""" -Utilities for accessing pg_probackup backup data on file system. -""" -import os -import shutil - -from ..init_helpers import init_params - - -class TestBackupDir: - - def list_instance_backups(self, instance): - raise NotImplementedError() - - def list_files(self, sub_dir, recursive=False): - raise NotImplementedError() - - def list_dirs(self, sub_dir): - raise NotImplementedError() - - def read_file(self, sub_path, *, text=True): - raise NotImplementedError() - - def write_file(self, sub_path, data, *, text=True): - raise NotImplementedError() - - def cleanup(self): - raise NotImplementedError() - - def remove_file(self, sub_path): - raise NotImplementedError() - - def remove_dir(self, sub_path): - raise NotImplementedError() - - def exists(self, sub_path): - raise NotImplementedError() - - -class FSTestBackupDir(TestBackupDir): - is_file_based = True - - """ Backup directory. 
Usually created by running pg_probackup init -B """ - - def __init__(self, *, rel_path, backup): - backup_prefix = os.environ.get('PG_PROBACKUP_TEST_BACKUP_DIR_PREFIX') - if backup_prefix and not os.path.isabs(backup_prefix): - raise Exception(f"PG_PROBACKUP_TEST_BACKUP_DIR_PREFIX must be an absolute path, current value: {backup_prefix}") - self.path = os.path.join(backup_prefix or init_params.tmp_path, rel_path, backup) - self.pb_args = ('-B', self.path) - - def list_instance_backups(self, instance): - full_path = os.path.join(self.path, 'backups', instance) - return sorted((x for x in os.listdir(full_path) - if os.path.isfile(os.path.join(full_path, x, 'backup.control')))) - - def list_files(self, sub_dir, recursive=False): - full_path = os.path.join(self.path, sub_dir) - if not recursive: - return [f for f in os.listdir(full_path) - if os.path.isfile(os.path.join(full_path, f))] - files = [] - for rootdir, dirs, files_in_dir in os.walk(full_path): - rootdir = rootdir[len(self.path) + 1:] - files.extend(os.path.join(rootdir, file) for file in files_in_dir) - return files - - def list_dirs(self, sub_dir): - full_path = os.path.join(self.path, sub_dir) - return [f for f in os.listdir(full_path) - if os.path.isdir(os.path.join(full_path, f))] - - def read_file(self, sub_path, *, text=True): - full_path = os.path.join(self.path, sub_path) - with open(full_path, 'r' if text else 'rb') as fin: - return fin.read() - - def write_file(self, sub_path, data, *, text=True): - full_path = os.path.join(self.path, sub_path) - with open(full_path, 'w' if text else 'wb') as fout: - fout.write(data) - - def cleanup(self): - shutil.rmtree(self.path, ignore_errors=True) - - def remove_file(self, sub_path): - os.remove(os.path.join(self.path, sub_path)) - - def remove_dir(self, sub_path): - full_path = os.path.join(self.path, sub_path) - shutil.rmtree(full_path, ignore_errors=True) - - def exists(self, sub_path): - full_path = os.path.join(self.path, sub_path) - return os.path.exists(full_path) - - def __str__(self): - return self.path - - def __repr__(self): - return "FSTestBackupDir" + str(self.path) - - def __fspath__(self): - return self.path diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/tests/__init__.py b/testgres/plugins/pg_probackup2/pg_probackup2/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/testgres/plugins/pg_probackup2/pg_probackup2/tests/test_basic.py b/testgres/plugins/pg_probackup2/pg_probackup2/tests/test_basic.py deleted file mode 100644 index 2540ddb0..00000000 --- a/testgres/plugins/pg_probackup2/pg_probackup2/tests/test_basic.py +++ /dev/null @@ -1,104 +0,0 @@ -from __future__ import annotations - -import os -import shutil -import pytest - -import testgres -from ...pg_probackup2.app import ProbackupApp -from ...pg_probackup2.init_helpers import Init, init_params -from ..storage.fs_backup import FSTestBackupDir - - -class ProbackupTest: - pg_node: testgres.NodeApp - - @staticmethod - def probackup_is_available() -> bool: - p = os.environ.get("PGPROBACKUPBIN") - - if p is None: - return False - - if not os.path.exists(p): - return False - - return True - - @pytest.fixture(autouse=True, scope="function") - def implicit_fixture(self, request: pytest.FixtureRequest): - assert isinstance(request, pytest.FixtureRequest) - self.helper__setUp(request) - yield - self.helper__tearDown() - - def helper__setUp(self, request: pytest.FixtureRequest): - assert isinstance(request, pytest.FixtureRequest) - - self.helper__setup_test_environment(request) - 
self.helper__setup_test_paths() - self.helper__setup_backup_dir() - self.helper__setup_probackup() - - def helper__setup_test_environment(self, request: pytest.FixtureRequest): - assert isinstance(request, pytest.FixtureRequest) - - self.output = None - self.cmd = None - self.nodes_to_cleanup = [] - self.module_name, self.fname = request.node.cls.__name__, request.node.name - self.test_env = Init().test_env() - - def helper__setup_test_paths(self): - self.rel_path = os.path.join(self.module_name, self.fname) - self.test_path = os.path.join(init_params.tmp_path, self.rel_path) - os.makedirs(self.test_path, exist_ok=True) - self.pb_log_path = os.path.join(self.test_path, "pb_log") - - def helper__setup_backup_dir(self): - self.backup_dir = self.helper__build_backup_dir('backup') - self.backup_dir.cleanup() - - def helper__setup_probackup(self): - self.pg_node = testgres.NodeApp(self.test_path, self.nodes_to_cleanup) - self.pb = ProbackupApp(self, self.pg_node, self.pb_log_path, self.test_env, - auto_compress_alg='zlib', backup_dir=self.backup_dir) - - def helper__tearDown(self): - if os.path.exists(self.test_path): - shutil.rmtree(self.test_path) - - def helper__build_backup_dir(self, backup='backup'): - return FSTestBackupDir(rel_path=self.rel_path, backup=backup) - - -@pytest.mark.skipif(not ProbackupTest.probackup_is_available(), reason="Check that PGPROBACKUPBIN is defined and is valid.") -class TestBasic(ProbackupTest): - def test_full_backup(self): - assert self.pg_node is not None - assert type(self.pg_node) == testgres.NodeApp # noqa: E721 - assert self.pb is not None - assert type(self.pb) == ProbackupApp # noqa: E721 - - # Setting up a simple test node - node = self.pg_node.make_simple('node', pg_options={"fsync": "off", "synchronous_commit": "off"}) - - assert node is not None - assert type(node) == testgres.PostgresNode # noqa: E721 - - with node: - # Initialize and configure Probackup - self.pb.init() - self.pb.add_instance('node', node) - self.pb.set_archiving('node', node) - - # Start the node and initialize pgbench - node.slow_start() - node.pgbench_init(scale=100, no_vacuum=True) - - # Perform backup and validation - backup_id = self.pb.backup_node('node', node) - out = self.pb.validate('node', backup_id) - - # Check if the backup is valid - assert f"INFO: Backup {backup_id} is valid" in out diff --git a/testgres/plugins/pg_probackup2/setup.py b/testgres/plugins/pg_probackup2/setup.py deleted file mode 100644 index b9b0067e..00000000 --- a/testgres/plugins/pg_probackup2/setup.py +++ /dev/null @@ -1,18 +0,0 @@ -try: - from setuptools import setup -except ImportError: - from distutils.core import setup - -setup( - version='0.1.1', - name='testgres_pg_probackup2', - packages=['pg_probackup2', 'pg_probackup2.storage'], - description='Plugin for testgres that manages pg_probackup2', - url='https://github.com/postgrespro/testgres', - long_description_content_type='text/markdown', - license='PostgreSQL', - author='Postgres Professional', - author_email='testgres@postgrespro.ru', - keywords=['pg_probackup', 'testing', 'testgres'], - install_requires=['testgres>=1.9.2'] -) diff --git a/tests/helpers/global_data.py b/tests/helpers/global_data.py index f3df41a3..5c3f7a46 100644 --- a/tests/helpers/global_data.py +++ b/tests/helpers/global_data.py @@ -3,9 +3,9 @@ from testgres.operations.local_ops import LocalOperations from testgres.operations.remote_ops import RemoteOperations -from testgres.node import PortManager -from testgres.node import PortManager__ThisHost 
-from testgres.node import PortManager__Generic +from src.node import PortManager +from src.node import PortManager__ThisHost +from src.node import PortManager__Generic import os diff --git a/tests/requirements.txt b/tests/requirements.txt index f4974514..0fdcae51 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -3,3 +3,4 @@ pytest pytest-xdist psycopg2 six +git+https://github.com/postgrespro/testgres.os_ops.git diff --git a/tests/test_config.py b/tests/test_config.py index a80a11f1..3969b2c2 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,9 +1,9 @@ -from testgres import TestgresConfig -from testgres import configure_testgres -from testgres import scoped_config -from testgres import pop_config +from src import TestgresConfig +from src import configure_testgres +from src import scoped_config +from src import pop_config -import testgres +import src as testgres import pytest diff --git a/tests/test_os_ops_common.py b/tests/test_os_ops_common.py index d3c85753..bf2dce76 100644 --- a/tests/test_os_ops_common.py +++ b/tests/test_os_ops_common.py @@ -15,8 +15,8 @@ import typing import uuid -from testgres import InvalidOperationException -from testgres import ExecUtilException +from src import InvalidOperationException +from src import ExecUtilException from concurrent.futures import ThreadPoolExecutor from concurrent.futures import Future as ThreadFuture diff --git a/tests/test_os_ops_remote.py b/tests/test_os_ops_remote.py index 65830218..8be98da5 100755 --- a/tests/test_os_ops_remote.py +++ b/tests/test_os_ops_remote.py @@ -3,7 +3,7 @@ from .helpers.global_data import OsOpsDescrs from .helpers.global_data import OsOperations -from testgres import ExecUtilException +from src import ExecUtilException import os import pytest diff --git a/tests/test_testgres_common.py b/tests/test_testgres_common.py index a7ddbb27..e308a95e 100644 --- a/tests/test_testgres_common.py +++ b/tests/test_testgres_common.py @@ -5,32 +5,32 @@ from .helpers.global_data import OsOperations from .helpers.global_data import PortManager -from testgres.node import PgVer -from testgres.node import PostgresNode -from testgres.node import PostgresNodeLogReader -from testgres.node import PostgresNodeUtils -from testgres.utils import get_pg_version2 -from testgres.utils import file_tail -from testgres.utils import get_bin_path2 -from testgres import ProcessType -from testgres import NodeStatus -from testgres import IsolationLevel -from testgres import NodeApp +from src.node import PgVer +from src.node import PostgresNode +from src.node import PostgresNodeLogReader +from src.node import PostgresNodeUtils +from src.utils import get_pg_version2 +from src.utils import file_tail +from src.utils import get_bin_path2 +from src import ProcessType +from src import NodeStatus +from src import IsolationLevel +from src import NodeApp # New name prevents to collect test-functions in TestgresException and fixes # the problem with pytest warning. 
-from testgres import TestgresException as testgres_TestgresException - -from testgres import InitNodeException -from testgres import StartNodeException -from testgres import QueryException -from testgres import ExecUtilException -from testgres import TimeoutException -from testgres import InvalidOperationException -from testgres import BackupException -from testgres import ProgrammingError -from testgres import scoped_config -from testgres import First, Any +from src import TestgresException as testgres_TestgresException + +from src import InitNodeException +from src import StartNodeException +from src import QueryException +from src import ExecUtilException +from src import TimeoutException +from src import InvalidOperationException +from src import BackupException +from src import ProgrammingError +from src import scoped_config +from src import First, Any from contextlib import contextmanager diff --git a/tests/test_testgres_local.py b/tests/test_testgres_local.py index 63e5f37e..6018188e 100644 --- a/tests/test_testgres_local.py +++ b/tests/test_testgres_local.py @@ -7,21 +7,21 @@ import platform import logging -import testgres +import src as testgres -from testgres import StartNodeException -from testgres import ExecUtilException -from testgres import NodeApp -from testgres import scoped_config -from testgres import get_new_node -from testgres import get_bin_path -from testgres import get_pg_config -from testgres import get_pg_version +from src import StartNodeException +from src import ExecUtilException +from src import NodeApp +from src import scoped_config +from src import get_new_node +from src import get_bin_path +from src import get_pg_config +from src import get_pg_version # NOTE: those are ugly imports -from testgres.utils import bound_ports -from testgres.utils import PgVer -from testgres.node import ProcessProxy +from src.utils import bound_ports +from src.utils import PgVer +from src.node import ProcessProxy def pg_version_ge(version): diff --git a/tests/test_testgres_remote.py b/tests/test_testgres_remote.py index 6a8d068b..fc533559 100755 --- a/tests/test_testgres_remote.py +++ b/tests/test_testgres_remote.py @@ -7,16 +7,16 @@ from .helpers.global_data import PostgresNodeService from .helpers.global_data import PostgresNodeServices -import testgres +import src as testgres -from testgres.exceptions import InitNodeException -from testgres.exceptions import ExecUtilException +from src.exceptions import InitNodeException +from src.exceptions import ExecUtilException -from testgres.config import scoped_config -from testgres.config import testgres_config +from src.config import scoped_config +from src.config import testgres_config -from testgres import get_bin_path -from testgres import get_pg_config +from src import get_bin_path +from src import get_pg_config # NOTE: those are ugly imports diff --git a/tests/test_utils.py b/tests/test_utils.py index 39e9dda0..9bb233b2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -2,9 +2,9 @@ from .helpers.global_data import OsOpsDescrs from .helpers.global_data import OsOperations -from testgres.utils import parse_pg_version -from testgres.utils import get_pg_config2 -from testgres import scoped_config +from src.utils import parse_pg_version +from src.utils import get_pg_config2 +from src import scoped_config import pytest import typing
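One closing observation on the test-suite hunks above: modules that used the testgres name pervasively can switch with a single aliased import, leaving every call site untouched. A minimal sketch, assuming the relocated src package exposes the same public API:

```python
# Alias the relocated package under its old name.
import src as testgres

# Existing call sites keep working unchanged, e.g.:
with testgres.get_new_node() as node:
    node.init().start()
    print(node.execute('select 1'))
```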