working example #78

Open

wants to merge 33 commits into master from postgres-DB-handler

Commits (33)
45d8c1e
working example
Dec 15, 2023
2df836c
Fix bug returning a tuple instead of string
Dec 20, 2023
0b3685f
fix connection string
Dec 20, 2023
da66314
feat: allow and default to psycopg instead of psycopg2
jo-bitsch May 7, 2024
45d29b6
Refactor handler.py: Add destructors for db handlers in order to
Jun 4, 2024
05b5f7f
Refactor handler.py: Add destructors for db handlers in order to
Jun 4, 2024
01f0546
Handle upstream divergence caused by amended commit (just style chang…
Jun 4, 2024
b54b245
Refactor handler.py: Remove duplicate destructor in PostgresDBHandler.
Jun 4, 2024
02e5b4a
Fix: Fix 3 issues: with-context, db-close and env-id problem
Jun 10, 2024
b08232a
Feature: Add Bitbucket integration in collect_ci_info()
Jun 10, 2024
6cd3a90
Update documentation to explain usage of PostgreSQL Handler
Jun 11, 2024
a481510
Update changelog.rst and add this feature.
Jun 11, 2024
4256986
Fix: Add close() function for proper closing of PostgreSQL connection
Jun 11, 2024
1520ac1
Add tests for session and database implementations (dbhandlers)
Jun 11, 2024
550d043
Merge branch 'master' into postgres-DB-handler
Jun 13, 2024
b675b77
Add Flake8 and Ruff linting hints.
Jun 18, 2024
0638d39
Fix bug in test_monitor_session.py introduced in last commit.
Jun 18, 2024
0c137a5
Fix linting hints round 2.
Jun 18, 2024
c48a4eb
Update requirements.txt to include psycopg module
Jun 18, 2024
ec046b9
Fix python interpreter compatibility issues (import annotations future)
Jun 18, 2024
af2e9e3
Fix python interpreter compatibility issues (import annotations future)
Jun 18, 2024
a31d7fa
Fix missing equal signs in requirements.dev.txt
Jun 18, 2024
1e43666
Merge branch 'postgres-DB-handler' of github.com:einhundert/pytest-mo…
Jun 18, 2024
f5788df
Add mock module to requirements.dev.txt
Jun 18, 2024
482d8d8
Add updated gitlab-ci.yml to include PostgreSQL container for testing
Jun 18, 2024
3aca81b
Fix wrong formatting in gitlab-ci.yml
Jun 18, 2024
ed4c67e
Update .circleci/config.yml in order to provide postgresdb container
Jun 18, 2024
31ed8ab
Fix circleci config errors
Jun 18, 2024
50a627e
Fix circleci config file part 2
Jun 18, 2024
3ca589b
Merge branch 'postgres-DB-handler' of github.com:einhundert/pytest-mo…
Jun 18, 2024
d6bb03d
Remove unneeded pytester fixture from test_monitor_handler.py
Jun 18, 2024
653d746
Add docstrings to new tests.
Jun 18, 2024
4edc0ba
Add workaround fix for faulty memory_profiler module
Jul 15, 2024
25 changes: 21 additions & 4 deletions .circleci/config.yml
@@ -2,7 +2,8 @@ version: 2.1

aliases:
docker-image: &image
- image: mambaorg/micromamba
image: mambaorg/micromamba

filter-pr-only: &PR-only
branches:
ignore:
@@ -216,13 +217,28 @@ workflows:

jobs:
lint:
docker: *image
docker:
- *image
steps:
- make-env:
use_specific_requirements_file: requirements.dev.txt
- lint-project
build:
docker: *image
docker:
- *image
- image: circleci/postgres:12-alpine
environment:
POSTGRES_DB: postgres
POSTGRES_USER: postgres
POSTGRES_PASSWORD: testing_db
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
PYTEST_MONITOR_DB_NAME: postgres
PYTEST_MONITOR_DB_USER: postgres
PYTEST_MONITOR_DB_PASSWORD: testing_db
PYTEST_MONITOR_DB_HOST: localhost
PYTEST_MONITOR_DB_PORT: 5432

parameters:
python:
type: string
@@ -235,7 +251,8 @@
pytest: << parameters.pytest >>
- test-project
publish:
docker: *image
docker:
- *image
steps:
- make-env:
extra_deps: twine setuptools build
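
For reference, the following is a minimal connectivity check against the PostgreSQL sidecar configured above. It is a sketch, assuming the PYTEST_MONITOR_DB_* variables are exported exactly as in the build job environment; the check_postgres_sidecar helper is illustrative and not part of this PR. It uses the same psycopg/psycopg2 fallback the handler relies on.

import os

try:
    import psycopg  # psycopg 3, the driver this PR prefers
except ImportError:
    import psycopg2 as psycopg  # fall back to psycopg2 if psycopg 3 is absent


def check_postgres_sidecar():
    """Connect with the CI-provided credentials and run a trivial query."""
    dsn = (
        f"dbname='{os.environ['PYTEST_MONITOR_DB_NAME']}' "
        f"user='{os.environ['PYTEST_MONITOR_DB_USER']}' "
        f"password='{os.environ['PYTEST_MONITOR_DB_PASSWORD']}' "
        f"host='{os.environ['PYTEST_MONITOR_DB_HOST']}' "
        f"port='{os.environ['PYTEST_MONITOR_DB_PORT']}'"
    )
    cnx = psycopg.connect(dsn)
    try:
        cur = cnx.cursor()
        cur.execute("SELECT 1")
        assert cur.fetchone() == (1,)
    finally:
        cnx.close()


if __name__ == "__main__":
    check_postgres_sidecar()
    print("PostgreSQL sidecar is reachable")

Running such a check before the test step makes sidecar startup problems fail fast instead of surfacing as opaque handler errors.
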
15 changes: 15 additions & 0 deletions .gitlab-ci.yml
@@ -1,5 +1,20 @@
image: continuumio/miniconda

variables:
- POSTGRES_DB: postgres
- POSTGRES_USER: postgres
- POSTGRES_PASSWORD: testing_db
- POSTGRES_HOST: localhost
- POSTGRES_PORT: 5432
- PYTEST_MONITOR_DB_NAME: postgres
- PYTEST_MONITOR_DB_USER: postgres
- PYTEST_MONITOR_DB_PASSWORD: testing_db
- PYTEST_MONITOR_DB_HOST: localhost
- PYTEST_MONITOR_DB_PORT: 5432

services:
- name: postgres:16

stages:
- test
- deploy
3 changes: 2 additions & 1 deletion AUTHORS
@@ -3,4 +3,5 @@ Project developed and lead by Jean-Sébastine Dieu.
Contributors include:
- Raymond Gauthier (jraygauthier) added Python 3.5 support.
- Kyle Altendorf (altendky) fixed bugs on session teardown
- Hannes Engelhardt (veritogen) added Bitbucket CI support.
- Hannes Engelhardt (veritogen) added Bitbucket CI support and Postgres DB Handler.
- Lucas Haupt (lhpt2) added Postgres DB Handler.
29 changes: 29 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,29 @@
# Use postgres/example user/password credentials
services:

db:
image: postgres
restart: always
# set shared memory limit when using docker-compose
shm_size: 128mb
# or set shared memory limit when deploy via swarm stack
#volumes:
# - type: tmpfs
# target: /dev/shm
# tmpfs:
# size: 134217728 # 128*2^20 bytes = 128Mb
environment:
POSTGRES_PASSWORD: testing_db
ports:
- 5432:5432
command: [ "postgres", "-c", "log_statement=all" ]
logging:
driver: "json-file"
options:
max-size: "50m"

adminer:
image: adminer
restart: always
ports:
- 8080:8080
2 changes: 1 addition & 1 deletion docs/sources/changelog.rst
@@ -3,8 +3,8 @@ Changelog
=========

* :release:`to be discussed`
* :feature:`#77` Add a PostgreSQL backend implementation to optionally use a PostgreSQL database for test metric logging.
* :feature:`#75` Automatically gather CI build information for Bitbucket CI.

* :release:`1.6.6 <2023-05-06>`
* :bug:`#64` Prepare version 1.7.0 of pytest-monitor. Last version to support Python <= 3.7 and all pytest <= 5.*
* :bug:`#0` Improve and fix some CI issues, notably one that may cause python to not be the requested one but a more recent one.
2 changes: 1 addition & 1 deletion docs/sources/introduction.rst
@@ -28,4 +28,4 @@ Extending your application with new features, or fixing its bugs, might have an
Usage
-----

Simply run pytest as usual: pytest-monitor is active by default as soon as it is installed. After running your first session, a .pymon sqlite database will be accessible in the directory where pytest was run.
Simply run pytest as usual: pytest-monitor is active by default as soon as it is installed. After running your first session, a .pymon SQLite database (or, optionally, another backend such as PostgreSQL) will be accessible in the directory where pytest was run.
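
As a rough illustration of what that .pymon file contains once at least one session has been recorded, the sketch below reads the slowest items back with the standard sqlite3 module. Column names follow the TEST_METRICS schema created in pytest_monitor/handler.py; the slowest_tests helper is purely illustrative.

import sqlite3


def slowest_tests(db_path=".pymon", limit=5):
    """Return the slowest recorded test items as (item, total_time, mem_usage) rows."""
    cnx = sqlite3.connect(db_path)
    try:
        return cnx.execute(
            "SELECT ITEM, TOTAL_TIME, MEM_USAGE FROM TEST_METRICS "
            "ORDER BY TOTAL_TIME DESC LIMIT ?",
            (limit,),
        ).fetchall()
    finally:
        cnx.close()


if __name__ == "__main__":
    for item, total_time, mem_usage in slowest_tests():
        print(f"{item}: {total_time:.3f}s, {mem_usage:.1f} MB")
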
20 changes: 20 additions & 0 deletions docs/sources/operating.rst
@@ -19,6 +19,26 @@ You are free to override the name of this database by setting the `--db` option:
pytest --db /path/to/your/monitor/database


A PostgreSQL backend is also available. Pass the `--use-postgres` option to store results in a
PostgreSQL database instead of the default SQLite file:

.. code-block:: shell

pytest --use-postgres

The connection parameters are set by the following environment variables:

PYTEST_MONITOR_DB_HOST
The hostname of the instance running the PostgreSQL server.
PYTEST_MONITOR_DB_PORT
The port the PostgreSQL server listens on.
PYTEST_MONITOR_DB_NAME
The name of the database to connect to.
PYTEST_MONITOR_DB_USER
The name of the user to log into the database as.
PYTEST_MONITOR_DB_PASSWORD
The password to log into the database.

You can also send your test results to a monitor server (under development at this time) in order to centralize
your Metrics and Execution Context (see below):

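
The same backend can also be enabled from Python rather than the shell. A minimal sketch, assuming a PostgreSQL instance reachable with the credentials used elsewhere in this PR (postgres / testing_db on localhost:5432): it exports the variables documented above and hands --use-postgres to pytest.main.

import os

import pytest

os.environ.update(
    {
        "PYTEST_MONITOR_DB_NAME": "postgres",
        "PYTEST_MONITOR_DB_USER": "postgres",
        "PYTEST_MONITOR_DB_PASSWORD": "testing_db",
        "PYTEST_MONITOR_DB_HOST": "localhost",
        "PYTEST_MONITOR_DB_PORT": "5432",
    }
)

# Equivalent to running `pytest --use-postgres` in a shell with the variables above.
exit_code = pytest.main(["--use-postgres"])
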
6 changes: 6 additions & 0 deletions pyproject.toml
@@ -65,6 +65,12 @@ dev = [
"flake8-pyproject==1.2.3",
"pre-commit==3.3.3"
]
psycopg = [
"psycopg"
]
psycopg2 = [
"psycopg2"
]

[tool.flake8]
max-line-length = 120
200 changes: 198 additions & 2 deletions pytest_monitor/handler.py
@@ -1,12 +1,24 @@
import os
import sqlite3

try:
import psycopg
except ImportError:
import psycopg2 as psycopg

class DBHandler:

class SqliteDBHandler:
def __init__(self, db_path):
self.__db = db_path
self.__cnx = sqlite3.connect(self.__db) if db_path else None
self.prepare()

def close(self):
self.__cnx.close()

def __del__(self):
self.__cnx.close()

def query(self, what, bind_to, many=False):
cursor = self.__cnx.cursor()
cursor.execute(what, bind_to)
@@ -15,7 +27,8 @@ def query(self, what, bind_to, many=False):
def insert_session(self, h, run_date, scm_id, description):
with self.__cnx:
self.__cnx.execute(
"insert into TEST_SESSIONS(SESSION_H, RUN_DATE, SCM_ID, RUN_DESCRIPTION)" " values (?,?,?,?)",
"insert into TEST_SESSIONS(SESSION_H, RUN_DATE, SCM_ID, RUN_DESCRIPTION)"
" values (?,?,?,?)",
(h, run_date, scm_id, description),
)

@@ -131,3 +144,186 @@ def prepare(self):
"""
)
self.__cnx.commit()

def get_env_id(self, env_hash):
query_result = self.query(
"SELECT ENV_H FROM EXECUTION_CONTEXTS WHERE ENV_H= ?", (env_hash,)
)
return query_result[0] if query_result else None


class PostgresDBHandler:
def __init__(self):
self.__db = os.getenv("PYTEST_MONITOR_DB_NAME")
if not self.__db:
raise Exception(
"Please provide the postgres db name using the PYTEST_MONITOR_DB_NAME environment variable."
)
self.__user = os.getenv("PYTEST_MONITOR_DB_USER")
if not self.__user:
raise Exception(
"Please provide the postgres user name using the PYTEST_MONITOR_DB_USER environment variable."
)
self.__password = os.getenv("PYTEST_MONITOR_DB_PASSWORD")
if not self.__password:
raise Exception(
"Please provide the postgres user password using the PYTEST_MONITOR_DB_PASSWORD environment variable."
)
self.__host = os.getenv("PYTEST_MONITOR_DB_HOST")
if not self.__host:
raise Exception(
"Please provide the postgres hostname using the PYTEST_MONITOR_DB_HOST environment variable."
)
self.__port = os.getenv("PYTEST_MONITOR_DB_PORT")
if not self.__port:
raise Exception(
"Please provide the postgres port using the PYTEST_MONITOR_DB_PORT environment variable."
)
self.__cnx = self.connect()
self.prepare()

def __del__(self):
self.__cnx.close()

def close(self):
self.__cnx.close()

def connect(self):
connection_string = (
f"dbname='{self.__db}' user='{self.__user}' password='{self.__password}' "
+ f"host='{self.__host}' port='{self.__port}'"
)
return psycopg.connect(connection_string)

def query(self, what, bind_to, many=False):
cursor = self.__cnx.cursor()
cursor.execute(what, bind_to)
return cursor.fetchall() if many else cursor.fetchone()

def insert_session(self, h, run_date, scm_id, description):
self.__cnx.cursor().execute(
"insert into TEST_SESSIONS(SESSION_H, RUN_DATE, SCM_ID, RUN_DESCRIPTION)"
" values (%s,%s,%s,%s)",
(h, run_date, scm_id, description),
)
self.__cnx.commit()

def insert_metric(
self,
session_id,
env_id,
item_start_date,
item,
item_path,
item_variant,
item_loc,
kind,
component,
total_time,
user_time,
kernel_time,
cpu_usage,
mem_usage,
):
self.__cnx.cursor().execute(
"insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM,"
"ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME,"
"USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE) "
"values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
(
session_id,
env_id,
item_start_date,
item,
item_path,
item_variant,
item_loc,
kind,
component,
total_time,
user_time,
kernel_time,
cpu_usage,
mem_usage,
),
)
self.__cnx.commit()

def insert_execution_context(self, exc_context):
self.__cnx.cursor().execute(
"insert into EXECUTION_CONTEXTS(CPU_COUNT,CPU_FREQUENCY_MHZ,CPU_TYPE,CPU_VENDOR,"
"RAM_TOTAL_MB,MACHINE_NODE,MACHINE_TYPE,MACHINE_ARCH,SYSTEM_INFO,"
"PYTHON_INFO,ENV_H) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
(
exc_context.cpu_count,
exc_context.cpu_frequency,
exc_context.cpu_type,
exc_context.cpu_vendor,
exc_context.ram_total,
exc_context.fqdn,
exc_context.machine,
exc_context.architecture,
exc_context.system_info,
exc_context.python_info,
exc_context.compute_hash(),
),
)
self.__cnx.commit()

def prepare(self):
cursor = self.__cnx.cursor()
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS TEST_SESSIONS(
SESSION_H varchar(64) primary key not null unique, -- Session identifier
RUN_DATE varchar(64), -- Date of test run
SCM_ID varchar(128), -- SCM change id
RUN_DESCRIPTION json
);"""
)
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS EXECUTION_CONTEXTS (
ENV_H varchar(64) primary key not null unique,
CPU_COUNT integer,
CPU_FREQUENCY_MHZ integer,
CPU_TYPE varchar(64),
CPU_VENDOR varchar(256),
RAM_TOTAL_MB integer,
MACHINE_NODE varchar(512),
MACHINE_TYPE varchar(32),
MACHINE_ARCH varchar(16),
SYSTEM_INFO varchar(256),
PYTHON_INFO varchar(512)
);
"""
)
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS TEST_METRICS (
SESSION_H varchar(64), -- Session identifier
ENV_H varchar(64), -- Environment description identifier
ITEM_START_TIME varchar(64), -- Effective start time of the test
ITEM_PATH varchar(4096), -- Path of the item, following Python import specification
ITEM varchar(2048), -- Name of the item
ITEM_VARIANT varchar(2048), -- Optional parametrization of an item.
ITEM_FS_LOC varchar(2048), -- Relative path from pytest invocation directory to the item's module.
KIND varchar(64), -- Package, Module or function
COMPONENT varchar(512) NULL, -- Tested component if any
TOTAL_TIME float, -- Total time spent running the item
USER_TIME float, -- time spent in user space
KERNEL_TIME float, -- time spent in kernel space
CPU_USAGE float, -- cpu usage
MEM_USAGE float, -- Max resident memory used.
FOREIGN KEY (ENV_H) REFERENCES EXECUTION_CONTEXTS(ENV_H),
FOREIGN KEY (SESSION_H) REFERENCES TEST_SESSIONS(SESSION_H)
);"""
)

self.__cnx.commit()

def get_env_id(self, env_hash):
query_result = self.query(
"select ENV_H from EXECUTION_CONTEXTS where ENV_H = %s", (env_hash,)
)
return query_result[0] if query_result else None
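
A minimal usage sketch of the PostgresDBHandler added above, assuming a reachable PostgreSQL instance with the credentials used in this PR's CI configuration; the import path mirrors the file location pytest_monitor/handler.py.

import os

from pytest_monitor.handler import PostgresDBHandler

# Connection parameters; the values below match the CI setup in this PR.
os.environ.setdefault("PYTEST_MONITOR_DB_NAME", "postgres")
os.environ.setdefault("PYTEST_MONITOR_DB_USER", "postgres")
os.environ.setdefault("PYTEST_MONITOR_DB_PASSWORD", "testing_db")
os.environ.setdefault("PYTEST_MONITOR_DB_HOST", "localhost")
os.environ.setdefault("PYTEST_MONITOR_DB_PORT", "5432")

handler = PostgresDBHandler()        # connects and creates the tables via prepare()
print(handler.get_env_id("0" * 64))  # None until an execution context with that hash is inserted
handler.close()                      # close explicitly rather than relying on __del__

Calling close() explicitly matters here: the class also closes the connection in __del__, but relying on interpreter shutdown order for that is fragile.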