Skip to content

Commit

Permalink
Merge pull request #278 from GSA/feature/db-migrations
Browse files Browse the repository at this point in the history
Feature/db migrations: Updates to formatter, file handler, logging, and db structure to agree with new standard.
  • Loading branch information
gsa-bri authored May 23, 2023
2 parents c50bf29 + c7cb2ff commit 9be7bed
Show file tree
Hide file tree
Showing 18 changed files with 1,113 additions and 162 deletions.
2 changes: 1 addition & 1 deletion .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,6 @@ docs/
*.md
env/
venv/
.venv/
.venv*
crontab-test
.vscode/
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ celerybeat-schedule

# Environments
.env
.venv
.venv*
env/
venv/
ENV/
Expand Down
167 changes: 167 additions & 0 deletions alembic/dev/78823e9293e9_match_local_migrations.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
"""Match local migrations
Revision ID: 78823e9293e9
Revises:
Create Date: 2023-05-18 11:45:28.896923
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# Revision identifiers, used by Alembic to locate and order this migration.
revision = '78823e9293e9'
down_revision = None  # base migration: no parent revision
branch_labels = None
depends_on = None


def upgrade():
    """Bring the dev database in line with the local model definitions.

    Auto-generated by Alembic, then hand-adjusted.  Tightens NOT NULL
    constraints, adds server-side JSONB/boolean defaults, adds the
    ``eitLikelihood`` and ``active`` columns to Predictions, and indexes
    survey responses by solicitation number.
    """
    # Agencies
    op.alter_column('Agencies', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)

    # Predictions
    op.add_column('Predictions', sa.Column('eitLikelihood', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
    op.add_column('Predictions', sa.Column('active', sa.Boolean(), server_default=sa.text('true'), nullable=True))
    op.alter_column('Predictions', 'title',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column('Predictions', 'solNum',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column('Predictions', 'noticeType',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column('Predictions', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    # New rows default to an empty JSON history.
    op.alter_column('Predictions', 'history', server_default=sa.text("'[]'::jsonb"))

    # Name is None: relies on the metadata naming convention to generate the
    # constraint name (e.g. uq_Predictions_solNum) — confirm convention is set.
    op.create_unique_constraint(None, 'Predictions', ['solNum'])
    op.drop_column('Predictions', 'feedback')
    op.drop_column('Predictions', 'category_list')

    # Surveys
    op.alter_column('Surveys', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)

    # Users
    op.alter_column('Users', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)

    # Agency alias
    op.alter_column('agency_alias', 'agency_id',
                    existing_type=sa.INTEGER(),
                    nullable=False)
    op.alter_column('agency_alias', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    server_default=sa.func.now(),
                    nullable=False)
    op.alter_column('agency_alias', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)

    # Notice type
    # NOTE(review): `default=` is a Python-side default and emits no DDL;
    # adding a NOT NULL column without a server_default fails on a non-empty
    # table — confirm notice_type is empty or switch to server_default.
    op.add_column('notice_type', sa.Column('createdAt', sa.DateTime(), default=sa.func.now(), nullable=False))
    # `onupdate` is likewise Python-side only (applies via the ORM, not the DB).
    op.add_column('notice_type', sa.Column('updatedAt', sa.DateTime(), nullable=True, onupdate=sa.func.now()))

    # Survey responses
    op.alter_column('survey_responses', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False,
                    existing_server_default=sa.text('now()'))
    op.create_index(op.f('ix_survey_responses_solNum'), 'survey_responses', ['solNum'], unique=False)

    # Survey responses archive
    op.alter_column('survey_responses_archive', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    server_default=sa.func.now(),
                    nullable=False)
    op.alter_column('survey_responses_archive', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=True)
    op.alter_column('survey_responses_archive', 'response', server_default=sa.text("'[]'::jsonb"))

    # Solicitations
    # Name is None: generated from the metadata naming convention (see above).
    op.create_unique_constraint(None, 'solicitations', ['solNum'])
    op.alter_column('solicitations', 'history', server_default=sa.text("'[]'::jsonb"))
    op.alter_column('solicitations', 'action', server_default=sa.text("'[]'::jsonb"))
    op.alter_column('solicitations', 'predictions', server_default=sa.text("'{\"value\": \"red\", \"history\": []}'::jsonb"))
    op.alter_column('solicitations', 'compliant', server_default=sa.text("0"))
    op.alter_column('solicitations', 'active', server_default=sa.text("true"))
    op.alter_column('solicitations', 'na_flag', server_default=sa.text("false"))
    # NOTE(review): column name is 'updateAt' (not 'updatedAt') — confirm this
    # matches the actual schema and is not a typo.
    op.alter_column('solicitations', 'updateAt', nullable=True)

    # Attachment
    op.alter_column('attachment', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    server_default=sa.func.now(),
                    nullable=False)

    # Notice
    op.alter_column('notice', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    server_default=sa.func.now(),
                    nullable=False)

def downgrade():
    """Revert the schema changes applied by :func:`upgrade`.

    Auto-generated by Alembic, then hand-adjusted.  Restores the previous
    nullability, re-adds the dropped Predictions columns (data is NOT
    recovered), and drops the unique constraints and index added above.
    """
    op.alter_column('survey_responses_archive', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=False)
    op.drop_index(op.f('ix_survey_responses_solNum'), table_name='survey_responses')
    op.alter_column('survey_responses', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True,
                    existing_server_default=sa.text('now()'))
    # Fixed: drop_constraint requires an explicit name (None is only valid
    # when creating under a naming convention).  op.f() yields the name the
    # convention generated in upgrade(); matches the prod migration's naming.
    op.drop_constraint(op.f('uq_solicitations_solNum'), 'solicitations', type_='unique')
    op.drop_column('notice_type', 'updatedAt')
    op.drop_column('notice_type', 'createdAt')
    # NOTE(review): upgrade() never touches attachment.filename — this revert
    # has no counterpart above; confirm it belongs in this migration.
    op.alter_column('attachment', 'filename',
                    existing_type=sa.TEXT(),
                    nullable=False)
    op.alter_column('agency_alias', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.alter_column('agency_alias', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.alter_column('agency_alias', 'agency_id',
                    existing_type=sa.INTEGER(),
                    nullable=True)
    op.alter_column('Users', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.alter_column('Surveys', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    # Re-added with NULL contents only — the original column data is gone.
    op.add_column('Predictions', sa.Column('category_list', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True))
    op.add_column('Predictions', sa.Column('feedback', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True))
    # Fixed: explicit convention-generated name instead of None (see above).
    op.drop_constraint(op.f('uq_Predictions_solNum'), 'Predictions', type_='unique')
    op.alter_column('Predictions', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)
    op.alter_column('Predictions', 'noticeType',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column('Predictions', 'solNum',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column('Predictions', 'title',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.drop_column('Predictions', 'active')
    op.drop_column('Predictions', 'eitLikelihood')
    op.alter_column('Agencies', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    # ### end Alembic commands ###
1 change: 1 addition & 0 deletions alembic/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
config.set_main_option('sqlalchemy.url',
get_db_url())
from fbo_scraper.db import db

target_metadata = db.Base.metadata


Expand Down
143 changes: 143 additions & 0 deletions alembic/prod/06c9149baecd_prod_db_migration_matching.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,143 @@
"""prod db migration matching
Revision ID: 06c9149baecd
Revises:
Create Date: 2023-05-22 15:30:25.932722
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# Revision identifiers, used by Alembic to locate and order this migration.
revision = '06c9149baecd'
down_revision = None  # base migration: no parent revision
branch_labels = None
depends_on = None


def upgrade():
    """Bring the production database in line with the local model definitions.

    Auto-generated by Alembic, then hand-adjusted.  Drops an obsolete backup
    table, normalizes nullability, and renames unique constraints/indexes to
    the metadata naming convention (``uq_<table>_<col>`` / ``ix_<table>_<col>``).
    """
    # WARNING: destructive — downgrade() recreates the table structure only;
    # the archived rows themselves are not recoverable.
    op.drop_table('solicitations_pre_dla_update_oct_2021')
    op.alter_column('Agencies', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=True)
    op.alter_column('Predictions', 'noticeType',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column('Predictions', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    # Replace the legacy constraint name with the convention-generated one.
    op.drop_constraint('uniqueSolNum', 'Predictions', type_='unique')
    op.create_unique_constraint(op.f('uq_Predictions_solNum'), 'Predictions', ['solNum'])
    op.alter_column('Surveys', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)
    op.alter_column('Users', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=True)
    op.alter_column('agency_alias', 'agency_id',
                    existing_type=sa.INTEGER(),
                    nullable=True)
    op.alter_column('notice_type', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False,
                    existing_server_default=sa.text('now()'))
    op.alter_column('solicitations', 'solNum',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column('solicitations', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False,
                    existing_server_default=sa.text('now()'))
    # Same rename-to-convention pattern for the solicitations constraint.
    op.drop_constraint('solicitations_solNum_key', 'solicitations', type_='unique')
    op.create_unique_constraint(op.f('uq_solicitations_solNum'), 'solicitations', ['solNum'])
    op.alter_column('survey_responses', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False,
                    existing_server_default=sa.text('CURRENT_TIMESTAMP'))
    # Rename the legacy feedback index to the convention-generated name.
    op.drop_index('ix_feedback_solNum', table_name='survey_responses')
    op.create_index(op.f('ix_survey_responses_solNum'), 'survey_responses', ['solNum'], unique=False)
    op.alter_column('survey_responses_archive', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False,
                    existing_server_default=sa.text('CURRENT_TIMESTAMP'))
    # ### end Alembic commands ###


def downgrade():
    """Revert the schema changes applied by :func:`upgrade`.

    Auto-generated by Alembic, then hand-adjusted.  Restores the legacy
    constraint/index names and recreates the dropped backup table's
    structure (its data is NOT recovered).
    """
    op.alter_column('survey_responses_archive', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True,
                    existing_server_default=sa.text('CURRENT_TIMESTAMP'))
    op.drop_index(op.f('ix_survey_responses_solNum'), table_name='survey_responses')
    op.create_index('ix_feedback_solNum', 'survey_responses', ['solNum'], unique=False)
    op.alter_column('survey_responses', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True,
                    existing_server_default=sa.text('CURRENT_TIMESTAMP'))
    # Restore the legacy (pre-convention) constraint names.
    op.drop_constraint(op.f('uq_solicitations_solNum'), 'solicitations', type_='unique')
    op.create_unique_constraint('solicitations_solNum_key', 'solicitations', ['solNum'])
    op.alter_column('solicitations', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True,
                    existing_server_default=sa.text('now()'))
    op.alter_column('solicitations', 'solNum',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column('notice_type', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True,
                    existing_server_default=sa.text('now()'))
    op.alter_column('agency_alias', 'agency_id',
                    existing_type=sa.INTEGER(),
                    nullable=False)
    op.alter_column('Users', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=False)
    op.alter_column('Surveys', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False)
    op.drop_constraint(op.f('uq_Predictions_solNum'), 'Predictions', type_='unique')
    op.create_unique_constraint('uniqueSolNum', 'Predictions', ['solNum'])
    op.alter_column('Predictions', 'createdAt',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=True)
    op.alter_column('Predictions', 'noticeType',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column('Agencies', 'updatedAt',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    nullable=False)
    # Recreate the backup table's structure only; the dropped rows are gone.
    op.create_table('solicitations_pre_dla_update_oct_2021',
                    sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('solNum', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('active', sa.BOOLEAN(), autoincrement=False, nullable=True),
                    sa.Column('updatedAt', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
                    sa.Column('createdAt', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
                    sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('url', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('agency', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('numDocs', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('notice_type_id', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('noticeType', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
                    sa.Column('office', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('na_flag', sa.BOOLEAN(), autoincrement=False, nullable=True),
                    sa.Column('category_list', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
                    sa.Column('undetermined', sa.BOOLEAN(), autoincrement=False, nullable=True),
                    sa.Column('history', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
                    sa.Column('action', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
                    sa.Column('actionDate', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
                    sa.Column('actionStatus', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('contactInfo', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
                    sa.Column('parseStatus', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
                    sa.Column('predictions', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
                    sa.Column('reviewRec', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('searchText', sa.VARCHAR(), autoincrement=False, nullable=True),
                    sa.Column('compliant', sa.INTEGER(), autoincrement=False, nullable=True),
                    sa.Column('noticeData', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
                    sa.Column('agency_id', sa.INTEGER(), autoincrement=False, nullable=True)
                    )
    # ### end Alembic commands ###
Loading

0 comments on commit 9be7bed

Please sign in to comment.