diff --git a/.github/workflows/python_actions.yml b/.github/workflows/python_actions.yml index 9ca637d..e54b4d8 100644 --- a/.github/workflows/python_actions.yml +++ b/.github/workflows/python_actions.yml @@ -11,13 +11,12 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: '3.10' - name: Install dependencies run: | - python -m pip install --upgrade wheel setuptools==57 pip - pip install -U -r requirements.txt - pip install -U -r dev-requirements.txt + python -m pip install "pip==24" setuptools==57.5.0 wheel + pip install ".[dev]" - name: Test with pytest run: | diff --git a/.gitignore b/.gitignore index cdda8fe..24598b8 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,9 @@ python docker-compose* Dockerfile scripts/pytest/* +build/ +*.egg-info/ +__pycache__/ +.pytest_cache/ +.tox/ +dist/ diff --git a/README.md b/README.md index 411db06..843b45f 100644 --- a/README.md +++ b/README.md @@ -19,14 +19,20 @@ To load and enter the VM: `vagrant up && vagrant ssh` ### tests -Run the tests using `py.test`: +Run the tests using `pytest`: ```bash -docker run --name some-postgres -e POSTGRES_USER="postgres" POSTGRES_PASSWORD="postgres" -p 5432:5432 --name postgres -virtualenv python +docker run -d --name postgres -e POSTGRES_USER="postgres" -e POSTGRES_PASSWORD="postgres" -p 5432:5432 postgres:12.6 + +# Setup environment +python3 -m venv python source python/bin/activate -pip install -r requirements.txt -pip install -r dev-requirements.txt -py.tests biblib/tests/ + +# Install with legacy build support (requires pip 24 and specific setuptools) +python -m pip install "pip==24" setuptools==57.5.0 wheel +pip install -e ".[dev]" + +# Run tests +pytest ``` ### Layout @@ -42,33 +48,50 @@ All tests have been written top down, or in a Test-Driven Development approach, ### Running Biblib Locally To run a version of Biblib locally, a postgres database needs to be created and properly formatted for use with Biblib. 
This can be done with a local postgres instance or in a docker container using the following commands. -`config.py` must also be copied to `local_config.py` and the environment variables must be adjusted to reflect the local environment. +`local_config.py` should be created in `biblib/` and the environment variables must be adjusted to reflect the local environment. + ```bash -docker run -d -e POSTGRES_USER="postgres" -e POSTGRES_PASSWORD="postgres" -p 5432:5432 --name postgres postgres:12.6 -docker exec -it postgres bash -c "psql -c \"CREATE ROLE biblib_service WITH LOGIN PASSWORD 'biblib_service';\"" -docker exec -it postgres bash -c "psql -c \"CREATE DATABASE biblib_service;\"" -docker exec -it postgres bash -c "psql -c \"GRANT CREATE ON DATABASE biblib_service TO biblib_service;\"" +# Setup database +docker run -d -e POSTGRES_USER="postgres" -e POSTGRES_PASSWORD="postgres" -p 5432:5432 --name postgres postgres:12.6 +docker exec -it postgres psql -U postgres -c "CREATE ROLE biblib_service WITH LOGIN PASSWORD 'biblib_service';" +docker exec -it postgres psql -U postgres -c "CREATE DATABASE biblib_service;" +docker exec -it postgres psql -U postgres -c "GRANT ALL PRIVILEGES ON DATABASE biblib_service TO biblib_service;" ``` -Once the database has been created, `alembic` can be used to upgrade the database to the correct alembic revision -```bash -#In order for alembic to have access to the models metadata, the biblib-service directory must be added to the PYTHONPATH +# Run migrations +# In order for alembic to have access to the models metadata, the biblib-service directory must be added to the PYTHONPATH export PYTHONPATH=$(pwd):$PYTHONPATH +python biblib/manage.py syncdb # This will sync users and can be used to initialize schema via alembic indirectly or directly: alembic upgrade head ``` -A new revision can be created by doing the following: +A test version of the microservice can then be deployed using: ```bash -#In order for alembic to have access to the 
models metadata, the biblib-service directory must be added to the PYTHONPATH -export PYTHONPATH=$(pwd):$PYTHONPATH -alembic revision -m "" --autogenerate +export FLASK_APP=biblib/app.py +flask run --port 4000 +``` +or via the legacy entrypoint: +```bash +python wsgi.py ``` -A test version of the microservice can then be deployed using +### Database versioning + +Database versioning is managed using Alembic. You can upgrade to the latest revision or downgrade to a previous one using the following commands: + ```bash -python3 wsgi.py +# Upgrade to latest revision +alembic upgrade head + +# Downgrade revision +alembic downgrade + +# Create a new revision +alembic revision --autogenerate -m "revision description" ``` +New revisions of libraries and notes are created automatically by `sqlalchemy-continuum` whenever a record is updated and committed to the database. + ## deployment The only thing to take care of when making a deployment is the migration of the backend database. Libraries uses specific features of PostgreSQL, such as `UUID` and `JSON`-store, so you should think carefully if you wish to change the backend. 
**The use of `flask-migrate` for database migrations has been replaced by directly calling `alembic`.** diff --git a/config.py b/biblib/config.py similarity index 100% rename from config.py rename to biblib/config.py diff --git a/biblib/manage.py b/biblib/manage.py index e7f9d44..206c629 100644 --- a/biblib/manage.py +++ b/biblib/manage.py @@ -1,37 +1,21 @@ -""" -Alembic migration management file -""" -from datetime import datetime -from dateutil.relativedelta import relativedelta import os import sys -PROJECT_HOME = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..')) -sys.path.append(PROJECT_HOME) -from flask import current_app -from flask_script import Manager, Command, Option -from flask_migrate import Migrate, MigrateCommand -from biblib.models import Base, User, Permissions, Library, Notes +import click from biblib.app import create_app -from sqlalchemy import create_engine, desc -from sqlalchemy.orm import sessionmaker, scoped_session +from biblib.models import User, Permissions, Library, Notes +from flask import current_app +from datetime import datetime +from dateutil.relativedelta import relativedelta import sqlalchemy_continuum -# Load the app with the factory -app = create_app() - -class DeleteStaleUsers(Command): +class DeleteStaleUsers: """ Compares the users that exist within the API to those within the - microservice and deletes any stale users that no longer exist. The logic - also takes care of the associated permissions and libraries depending on - the cascade that has been implemented. + microservice and deletes any stale users that no longer exist. 
""" - @staticmethod - def run(app=app): - """ - Carries out the deletion of the stale content - """ + def run(self, app=None): + if app is None: + app = create_app() with app.app_context(): with current_app.session_scope() as session: # Obtain the list of API users @@ -39,109 +23,149 @@ def run(app=app): result = session.execute(postgres_search_text).fetchall() list_of_api_users = [int(r[0]) for r in result] - # Loop through every use in the service database + # Loop through every user in the service database removal_list = [] for service_user in session.query(User).all(): if service_user.absolute_uid not in list_of_api_users: try: # Obtain the libraries that should be deleted - permissions = session.query(Permissions).filter(Permissions.user_id == service_user.id).all() - libraries = [session.query(Library).filter(Library.id == permission.library_id).one() for permission in permissions if permission.permissions['owner']] + permissions = session.query(Permissions).filter( + Permissions.user_id == service_user.id + ).all() + + libraries = [ + session.query(Library).filter(Library.id == permission.library_id).one() + for permission in permissions if permission.permissions['owner'] + ] # Delete all the libraries found # By cascade this should delete all the permissions - d = [session.delete(library) for library in libraries] - p = [session.delete(permission) for permission in permissions] - d_len = len(d) - + for library in libraries: + session.delete(library) + for permission in permissions: + session.delete(permission) + session.delete(service_user) session.commit() + + d_len = len(libraries) current_app.logger.info('Removed stale user: {} and {} libraries'.format(service_user, d_len)) + removal_list.append(service_user) - + except Exception as error: current_app.logger.info('Problem with database, could not remove user {}: {}' .format(service_user, error)) session.rollback() + current_app.logger.info('Deleted {} stale users: {}'.format(len(removal_list), 
removal_list)) -class DeleteObsoleteVersionsTime(Command): +class DeleteObsoleteVersionsTime: """ Clears obsolete library and notes versions older than chosen time. """ - @staticmethod - def run(app=app, n_years=None): - """ - Carries out the deletion of older versions - """ + def run(self, n_years=None, app=None): + if app is None: + app = create_app() with app.app_context(): - - if not n_years: n_years = current_app.config.get('REVISION_TIME', 7) + if not n_years: + n_years = current_app.config.get('REVISION_TIME', 7) with current_app.session_scope() as session: - # Obtain a list of all versions older than 1 year. + # Obtain a list of all versions older than chosen time. LibraryVersion = sqlalchemy_continuum.version_class(Library) NotesVersion = sqlalchemy_continuum.version_class(Notes) + current_date = datetime.now() current_offset = current_date - relativedelta(years=n_years) - try: - library_results = session.query(LibraryVersion).filter(LibraryVersion.date_last_modified n_revisions: + obsolete = revisions[:-n_revisions] + for r in obsolete: + session.delete(r) + + session.commit() + + current_app.logger.info('Removed {} obsolete revisions for entity: {}'.format(len(obsolete), entity.id)) + + except Exception as error: + current_app.logger.info('Problem with the database, could not remove revisions for entity {}: {}' + .format(entity, error)) + session.rollback() -# Setup the command line arguments using Flask-Script -manager = Manager(app) -manager.add_command('syncdb', DeleteStaleUsers()) -manager.add_command('clean_versions_time', DeleteObsoleteVersionsTime()) -manager.add_command('clean_versions_number', DeleteObsoleteVersionsNumber()) + with current_app.session_scope() as session: + limit_revisions(session, Library, n_revisions) + limit_revisions(session, Notes, n_revisions) + +# CLI part for backward compatibility running as script +@click.group() +def manager(): + """Management script for the Biblib service.""" + pass + +@manager.command() +def 
syncdb(): + """Compares microservice users to API users and deletes stale users.""" + DeleteStaleUsers().run() + +@manager.command(name='clean_versions_time') +@click.option('--years', default=None, type=int, help='Number of years to keep') +def clean_versions_time(years): + """Clears obsolete revisions older than chosen time.""" + DeleteObsoleteVersionsTime().run(n_years=years) + +@manager.command(name='clean_versions_number') +@click.option('--revisions', default=None, type=int, help='Maximum revisions to keep') +def clean_versions_number(revisions): + """Limits number of revisions saved per entity.""" + DeleteObsoleteVersionsNumber().run(n_revisions=revisions) if __name__ == '__main__': - manager.run() + manager() + diff --git a/biblib/models.py b/biblib/models.py index 9ef0f6c..ec0633f 100644 --- a/biblib/models.py +++ b/biblib/models.py @@ -36,6 +36,7 @@ class GUID(TypeDecorator): as Flask cannot serialise UUIDs correctly. """ + cache_ok = True # Refers to the class of type being decorated impl = CHAR diff --git a/biblib/tests/base.py b/biblib/tests/base.py index 4bf563b..5ee4208 100644 --- a/biblib/tests/base.py +++ b/biblib/tests/base.py @@ -173,16 +173,20 @@ def request_callback(request, uri, headers): if self.kwargs.get('solr_docs'): docs = self.kwargs['solr_docs'] - elif self.kwargs.get('canonical_bibcode'): + elif 'canonical_bibcode' in self.kwargs: if not self.kwargs.get('invalid'): docs = [] canonical_bibcodes = self.kwargs.get('canonical_bibcode') + if not isinstance(canonical_bibcodes, list): + canonical_bibcodes = list(canonical_bibcodes) for i in range(self.page*self.page_size, min(len(canonical_bibcodes), (self.page + 1)*self.page_size)): docs.append({'bibcode': canonical_bibcodes[i]}) else: #This treats every other odd bibcode as valid. 
docs = [] canonical_bibcodes = self.kwargs.get('canonical_bibcode') + if not isinstance(canonical_bibcodes, list): + canonical_bibcodes = list(canonical_bibcodes) i = self.page*self.page_size while len(docs) < min(len(canonical_bibcodes), (self.page + 1)*self.page_size) and i < len(canonical_bibcodes): if i%4-1 == 0: @@ -240,10 +244,10 @@ def __exit__(self, etype, value, traceback): :param traceback: the traceback for the exit :return: no return """ - #adding this allows for checking pagination calls. - return self.page HTTPretty.reset() HTTPretty.disable() + #adding this allows for checking pagination calls. + return self.page class MockSolrQueryService(MockADSWSAPI): @@ -273,9 +277,11 @@ def request_callback(request, uri, headers): """ if not self.kwargs.get('invalid'): #Sets all generated bibcodes as valid - if self.kwargs.get('canonical_bibcode'): + if 'canonical_bibcode' in self.kwargs: docs = [] canonical_bibcodes = self.kwargs.get('canonical_bibcode') + if not isinstance(canonical_bibcodes, list): + canonical_bibcodes = list(canonical_bibcodes) for i in range(len(canonical_bibcodes)): docs.append({'bibcode': canonical_bibcodes[i]}) input_query ="identifier:("+" OR ".join(canonical_bibcodes)+")" @@ -295,12 +301,14 @@ def request_callback(request, uri, headers): } else: - if self.kwargs.get('canonical_bibcode'): + if 'canonical_bibcode' in self.kwargs: docs = [] canonical_bibcodes = self.kwargs.get('canonical_bibcode') - #Sets all odd indexed bibcodes as valid, all other bibcodes are invalid. + if not isinstance(canonical_bibcodes, list): + canonical_bibcodes = list(canonical_bibcodes) + #Sets all odd indexed bibcodes as valid, all other bibcodes are invalid. 
for i in range(len(canonical_bibcodes)): - if i%2-1 == 0: + if i % 2 == 1: docs.append({'bibcode': canonical_bibcodes[i]}) input_query ="identifier:("+" OR ".join(canonical_bibcodes)+")" params = { @@ -544,7 +552,7 @@ def request_callback(request, uri, headers): HTTPretty.register_uri( HTTPretty.GET, - re.compile('{0}/\w+'.format( + re.compile(r'{0}/\w+'.format( current_app.config['BIBLIB_USER_EMAIL_ADSWS_API_URL']) ), body=request_callback, diff --git a/biblib/tests/functional_tests/test_bb_and_classic_user_epic.py b/biblib/tests/functional_tests/test_bb_and_classic_user_epic.py index 68680ca..bb6cc74 100644 --- a/biblib/tests/functional_tests/test_bb_and_classic_user_epic.py +++ b/biblib/tests/functional_tests/test_bb_and_classic_user_epic.py @@ -14,7 +14,7 @@ from biblib.tests.base import TestCaseDatabase, MockEmailService, \ MockSolrBigqueryService, MockEndPoint, MockClassicService from biblib.views.http_errors import NO_CLASSIC_ACCOUNT -from config import BIBLIB_CLASSIC_SERVICE_URL +from biblib.config import BIBLIB_CLASSIC_SERVICE_URL class TestBBClassicUserEpic(TestCaseDatabase): diff --git a/biblib/tests/functional_tests/test_classic_user_epic.py b/biblib/tests/functional_tests/test_classic_user_epic.py index 1690a99..5b819ad 100644 --- a/biblib/tests/functional_tests/test_classic_user_epic.py +++ b/biblib/tests/functional_tests/test_classic_user_epic.py @@ -14,7 +14,7 @@ from biblib.tests.base import TestCaseDatabase, MockEmailService, \ MockSolrBigqueryService, MockEndPoint, MockClassicService from biblib.views.http_errors import NO_CLASSIC_ACCOUNT -from config import BIBLIB_CLASSIC_SERVICE_URL +from biblib.config import BIBLIB_CLASSIC_SERVICE_URL @urlmatch(netloc=r'(.*\.)?{}.*'.format(BIBLIB_CLASSIC_SERVICE_URL)) diff --git a/biblib/tests/unit_tests/test_manage.py b/biblib/tests/unit_tests/test_manage.py index f1f7d0c..c47a00f 100644 --- a/biblib/tests/unit_tests/test_manage.py +++ b/biblib/tests/unit_tests/test_manage.py @@ -301,8 +301,8 @@ def 
test_delete_obsolete_versions_number(self): NotesVersion = sqlalchemy_continuum.version_class(Notes) notes = session.query(Notes).all() notes_revision_lengths = [] - for notes in notes: - revisions = session.query(NotesVersion).filter_by(id=notes.id).all() + for note in notes: + revisions = session.query(NotesVersion).filter_by(id=note.id).all() notes_revision_lengths.append(len(revisions)) self.assertEqual(notes_revision_lengths, [2, 2]) # Now run the obsolete deletion @@ -312,8 +312,7 @@ def test_delete_obsolete_versions_number(self): libraries = [session.query(Library).filter(Library.id == permission.library_id).one() for permission in permissions if permission.permissions['owner']] LibraryVersion = sqlalchemy_continuum.version_class(Library) updated_revision_lengths = [] - - + #confirm most recent remaining revision matches current state of library for library in libraries: updated_revisions = session.query(LibraryVersion).filter_by(id=library.id).all() @@ -541,5 +540,3 @@ def test_delete_obsolete_versions_time(self): if __name__ == '__main__': unittest.main(verbosity=2) - - diff --git a/biblib/tests/unit_tests/test_views.py b/biblib/tests/unit_tests/test_views.py index 188ffd0..1238ca7 100644 --- a/biblib/tests/unit_tests/test_views.py +++ b/biblib/tests/unit_tests/test_views.py @@ -376,9 +376,13 @@ def test_user_can_retrieve_rows_number_of_libraries(self): number_of_libs = 100 for i in range(number_of_libs): stub_library = LibraryShop() + # Ensure name is unique by appending the index to avoid Faker name collisions + library_data = stub_library.user_view_post_data.copy() + library_data['name'] = '{0} {1}'.format(library_data['name'], i) + self.user_view.create_library( service_uid=user.id, - library_data=stub_library.user_view_post_data + library_data=library_data ) # Get the library created @@ -410,9 +414,13 @@ def test_user_can_retrieve_libraries_from_start(self): number_of_libs = 100 for i in range(number_of_libs): stub_library = LibraryShop() + # Ensure 
name is unique by appending the index to avoid Faker name collisions + library_data = stub_library.user_view_post_data.copy() + library_data['name'] = '{0} {1}'.format(library_data['name'], i) + self.user_view.create_library( service_uid=user.id, - library_data=stub_library.user_view_post_data + library_data=library_data ) with MockEmailService(self.stub_user, end_type='uid'): libraries_full = self.user_view.get_libraries( @@ -452,9 +460,13 @@ def test_user_can_retrieve_all_libraries_by_paging(self): number_of_libs = 100 for i in range(number_of_libs): stub_library = LibraryShop() + # Ensure name is unique by appending the index to avoid Faker name collisions + library_data = stub_library.user_view_post_data.copy() + library_data['name'] = '{0} {1}'.format(library_data['name'], i) + self.user_view.create_library( service_uid=user.id, - library_data=stub_library.user_view_post_data + library_data=library_data ) with MockEmailService(self.stub_user, end_type='uid'): libraries_full = self.user_view.get_libraries( @@ -467,15 +479,18 @@ def test_user_can_retrieve_all_libraries_by_paging(self): libraries = [] total_libraries = 0 with MockEmailService(self.stub_user, end_type='uid'): - for start in range(number_of_libs): + for start in range(0, number_of_libs, 10): curr_libraries = self.user_view.get_libraries( service_uid=user.id, - absolute_uid=user.absolute_uid, start=start*10, + absolute_uid=user.absolute_uid, start=start, rows=10 ) libraries += curr_libraries['libraries'] total_libraries = curr_libraries['count'] self.assertEqual(total_libraries, 100) + # Sort by id to ensure order-independent comparison + libraries_full.sort(key=lambda x: x['id']) + libraries.sort(key=lambda x: x['id']) self.assertEqual(libraries_full, libraries) def test_user_can_retrieve_library_when_uid_does_not_exist(self): diff --git a/biblib/tests/unit_tests/test_webservices.py b/biblib/tests/unit_tests/test_webservices.py index f66303c..3343a2f 100644 --- 
a/biblib/tests/unit_tests/test_webservices.py +++ b/biblib/tests/unit_tests/test_webservices.py @@ -1179,10 +1179,10 @@ def test_add_invalid_document_to_library(self): self.assertEqual(response.status_code, 400) - # Check the library was created and documents exist + # Check the library was created and documents do not exist (it should be empty) url = url_for('libraryview', library=library_id) with MockSolrBigqueryService( - canonical_bibcode=stub_library.bibcode) as BQ, \ + canonical_bibcode=[]) as BQ, \ MockEmailService(stub_user, end_type='uid') as ES: response = self.client.get( url, @@ -1190,8 +1190,7 @@ def test_add_invalid_document_to_library(self): ) self.assertEqual(response.status_code, 200, response) - self.assertNotEqual(stub_library.get_bibcodes(), - response.json['documents']) + self.assertEqual(response.json['documents'], []) def test_add_some_invalid_documents_to_library(self): """ @@ -1236,15 +1235,17 @@ def test_add_some_invalid_documents_to_library(self): # Check the library was created and documents exist url = url_for('libraryview', library=library_id) + # We only expect the second bibcode to be in the library + valid_bibcodes = [json.loads(stub_library.document_view_post_data_json('add')).get('bibcode')[1]] with MockSolrBigqueryService( - canonical_bibcode=stub_library.bibcode) as BQ, \ + canonical_bibcode=valid_bibcodes) as BQ, \ MockEmailService(stub_user, end_type='uid') as ES: response = self.client.get( url, headers=stub_user.headers ) #Check that the expected bibcode and only the expected bibcode is in the libary. - self.assertIn(json.loads(stub_library.document_view_post_data_json('add')).get('bibcode')[1], response.json['documents']) + self.assertIn(valid_bibcodes[0], response.json['documents']) self.assertNotIn(json.loads(stub_library.document_view_post_data_json('add')).get('bibcode')[0], response.json['documents']) #Check that the library makes sense. 
@@ -1417,7 +1418,7 @@ def test_timestamp_sort_returns_correct_order(self): url = url_for('libraryview', library=library_id) with MockSolrBigqueryService( - canonical_bibcode=stub_library.bibcode) as BQ, \ + canonical_bibcode=full_bibcodes) as BQ, \ MockEmailService(stub_user, end_type='uid') as ES: response = self.client.get( url, @@ -1433,7 +1434,7 @@ def test_timestamp_sort_returns_correct_order(self): response.json['documents']) with MockSolrBigqueryService( - canonical_bibcode=stub_library.bibcode) as BQ, \ + canonical_bibcode=full_bibcodes) as BQ, \ MockEmailService(stub_user, end_type='uid') as ES: response = self.client.get( url, diff --git a/dev-requirements.txt b/dev-requirements.txt deleted file mode 100644 index 99994b8..0000000 --- a/dev-requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -Flask-Testing==0.8.1 -httpretty==0.9.7 -testing.postgresql==1.3.0 -pytest==6.2.1 -pytest-cache==1.0 -pytest-cov==2.10.1 -pytest-pep8==1.0.6 -coveralls==2.2.0 -fake-factory==0.5.3 -factory-boy==2.6.0 -freezegun==1.2.2 -httmock==1.2.3 -mock==1.3.0 - diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..33e898d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,58 @@ +[project] +name = "biblib" +version = "0.0.1" +description = "ADS Library Service" +authors = [{ name = "ADS Team", email = "adshelp@cfa.harvard.edu" }] +license = { text = "MIT" } +readme = "README.md" +packages = ["biblib"] +dependencies = [ + "adsmutils @ git+https://github.com/adsabs/ADSMicroserviceUtils.git@v1.3.0", + "alembic==1.12.0", + "psycopg2-binary==2.9.9", + "sqlalchemy-continuum==1.3.6", + "Flask-Mail==0.9.1", + "Flask-Email==1.4.4", + "Jinja2==3.1.2", + "markupsafe==2.1.3", + "itsdangerous==2.1.2", + "werkzeug==2.3.8" +] + +[project.optional-dependencies] +dev = [ + "Flask-Testing==0.8.1", + "httpretty==1.1.4", + "testing.postgresql==1.3.0", + "pytest==6.2.5", + "pytest-cov==3.0.0", + "Faker==22.0.0", + "factory-boy==3.3.0", + "freezegun==1.4.0", + "httmock==1.4.0", + 
"mock==4.0.3", + "flake8==4.0.1", + "black==22.3.0", + "isort==5.12.0", + "coveralls==3.3.1" +] + +[build-system] +requires = [ + "setuptools==57.5.0", + "wheel", + "flit_core >=3.2,<4", + "ppsetuptools==2.0.2" +] +build-backend = "flit_core.buildapi" + +[tool.pytest.ini_options] +addopts = "--cov=biblib --cov-report=term-missing" +testpaths = ["biblib/tests"] + +[tool.black] +line-length = 88 +target-version = ['py310'] + +[tool.isort] +profile = "black" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 6400285..0000000 --- a/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -addopts = --cov=biblib --cov-report=term-missing -testpaths = biblib/tests \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 1411b05..0000000 --- a/requirements.txt +++ /dev/null @@ -1,12 +0,0 @@ -git+https://github.com/adsabs/ADSMicroserviceUtils.git@v1.1.9 -Flask-Migrate==2.0.2 -Flask-Script==2.0.5 -alembic==1.5.3 -psycopg2==2.8.6 -sqlalchemy-continuum==1.3.12 -Flask-Mail==0.9.1 -Flask-Email==1.4.4 -Jinja2==2.11.3 -markupsafe<=2.0.1 -itsdangerous<=2.0.1 -werkzeug<=2.0.3 diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/cronjob.sh b/scripts/cronjob.sh index 52a9723..a2ce491 100644 --- a/scripts/cronjob.sh +++ b/scripts/cronjob.sh @@ -1,3 +1,3 @@ * 1 * * * /usr/bin/python /biblib/biblib/manage.py syncdb >> /tmp/biblib_delete_stale_users.log * 1 * * * /usr/bin/python /biblib/biblib/manage.py clean_versions_number >> /tmp/biblib_revision_deletion.log -* 1 * * * /usr/bin/python /biblib/biblib/manage.py clean_versions_time >> /tmp/biblib_revision_deletion.log \ No newline at end of file +* 1 * * * /usr/bin/python /biblib/biblib/manage.py clean_versions_time >> /tmp/biblib_revision_deletion.log diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..dfa7e12 --- /dev/null +++ b/setup.py @@ -0,0 +1,52 @@ +# mimic presence of 'setup.py' - by 
reading config from pyproject.toml + +# we are using ppsetuptools to read values and pass them to setup() +# however we also have to deal with idiosyncrasies of ppsetuptools +# and of flit (which doesn't want to allow any entry inside +# [project.entry-points.console_scripts]) + +import inspect +import os + +# important to import before importing ppsetuptools +import setuptools +import toml + +orig_setup = setuptools.setup + + +def monkey(*args, **kwargs): + del kwargs["license_files"] + del kwargs["keywords"] + + try: + caller_directory = os.path.abspath(os.path.dirname(inspect.stack()[1].filename)) + if not os.path.exists(os.path.join(caller_directory, "pyproject.toml")): + raise + except: # noqa: E722 + caller_directory = "." + + with open(os.path.join(caller_directory, "pyproject.toml"), "r") as pptoml: + pyproject_toml = pptoml.read() + if isinstance(pyproject_toml, bytes): + pyproject_toml = pyproject_toml.decode("utf-8") + + data = toml.loads(pyproject_toml) + + if "xsetup" in data: + for key, value in data["xsetup"].items(): + if key not in kwargs or not kwargs[key]: + kwargs[key] = value + + print("monkey patched setuptools, going to call setup() with those kwargs:") + print("\n".join([str(x) for x in sorted(kwargs.items())])) + + orig_setup(*args, **kwargs) + # raise ("To see values; for testing purposes") + + +setuptools.setup = monkey +import ppsetuptools # noqa: E402 + +ppsetuptools.setup() 