diff --git a/README.md b/README.md index 4e5b5f65..df8a2bbb 100644 --- a/README.md +++ b/README.md @@ -96,8 +96,8 @@ components of RESC. ### VCS Scanner Worker flow diagram The flow diagram below shows the different stages that a VCS Scanner Worker goes through and the choices it is confronted with to -come to the desired result. It first picks up a branch from the queue where it is decided, in conjunction with user input, what type of scan to run. -If it is a base scan, a full scan of all commits will be carried out to look for secrets. Possible findings are stored inside the database along with the last scanned commit hash of the branch. An incremental scan, where the branch was scanned earlier, only looks at the commits that were made after the last scanned commit hash. The process of finding secrets and storing them in the database is similar as previously described. +come to the desired result. It first picks up a repository from the queue where it is decided, in conjunction with user input, what type of scan to run. +If it is a base scan, a full scan of all commits will be carried out to look for secrets. Possible findings are stored inside the database along with the last scanned commit hash of the repository. An incremental scan, where the repository was scanned earlier, only looks at the commits that were made after the last scanned commit hash. The process of finding secrets and storing them in the database is similar to that previously described. 
![product-screenshot!](images/RESC_Scan_Flow_Diagram.png) diff --git a/components/resc-backend/.coveragerc b/components/resc-backend/.coveragerc index 02b7cdc0..76f5bd72 100644 --- a/components/resc-backend/.coveragerc +++ b/components/resc-backend/.coveragerc @@ -1,5 +1,5 @@ [report] -fail_under=61 +fail_under=60 exclude_lines = pragma: no cover if __name__ == .__main__.: diff --git a/components/resc-backend/alembic/versions/44ac9602612b_remove_branch.py b/components/resc-backend/alembic/versions/44ac9602612b_remove_branch.py new file mode 100644 index 00000000..ff0cc054 --- /dev/null +++ b/components/resc-backend/alembic/versions/44ac9602612b_remove_branch.py @@ -0,0 +1,73 @@ +"""remove branch + +Revision ID: 44ac9602612b +Revises: 8dd0f349b5ad +Create Date: 2023-06-27 10:03:22.197295 + +""" +import logging +import sys + +from alembic import op +import sqlalchemy as sa + +from sqlalchemy.engine import Inspector + +# revision identifiers, used by Alembic. +revision = '44ac9602612b' +down_revision = '8dd0f349b5ad' +branch_labels = None +depends_on = None + +# Logger +logger = logging.getLogger() + + +def upgrade(): + inspector = Inspector.from_engine(op.get_bind()) + + # add column repository_id to scan and finding + op.add_column('finding', sa.Column('repository_id', sa.Integer(), nullable=True)) + op.add_column('scan', sa.Column('repository_id', sa.Integer(), nullable=True)) + # Fill it with corresponding contents + op.execute("update finding " + "set finding.repository_id = branch.repository_id " + "from branch " + "where branch.id = finding.branch_id") + op.execute("update scan " + "set scan.repository_id = branch.repository_id " + "from branch " + "where branch.id = scan.branch_id") + # make repository_id not nullable + op.alter_column('finding', 'repository_id', existing_type=sa.Integer(), nullable=False) + op.alter_column('scan', 'repository_id', existing_type=sa.Integer(), nullable=False) + # Add foreign key constraint from scan and finding to repository + 
op.create_foreign_key('fk_finding_repository_id', 'finding', 'repository', ['repository_id'], ['id']) + op.create_foreign_key('fk_scan_repository_id', 'scan', 'repository', ['repository_id'], ['id']) + # Update unique constraint in finding with repository_id instead of branch_id + op.drop_constraint('uc_finding_per_branch', 'finding', type_='unique') + op.create_unique_constraint('uc_finding_per_repository', 'finding', + ['commit_id', 'repository_id', 'rule_name', 'file_path', 'line_number', + 'column_start', 'column_end']) + # Drop column branch_id from finding and scan + op.drop_constraint(get_foreign_key_name(inspector, 'finding', 'branch'), 'finding', type_='foreignkey') + op.drop_column('finding', 'branch_id') + op.drop_constraint(get_foreign_key_name(inspector, 'scan', 'branch'), 'scan', type_='foreignkey') + op.drop_column('scan', 'branch_id') + # Drop table branch + op.drop_table('branch') + + +def downgrade(): + # Unable to make a reliable downgrade here as there would not be enough information in the database to restore the + # branch table and re-link the finding and scan tables to it. 
Meaning that all findings would be invalidated + pass + + +def get_foreign_key_name(inspector: Inspector, table_name: str, reference_table: str): + foreign_keys = inspector.get_foreign_keys(table_name=table_name) + for foreign_key in foreign_keys: + if foreign_key["referred_table"] == reference_table: + return foreign_key["name"] + logger.error(f"Unable to find foreign key name for {table_name} referencing {reference_table}") + sys.exit(-1) diff --git a/components/resc-backend/src/resc_backend/constants.py b/components/resc-backend/src/resc_backend/constants.py index 009012b0..97e81ed5 100644 --- a/components/resc-backend/src/resc_backend/constants.py +++ b/components/resc-backend/src/resc_backend/constants.py @@ -8,7 +8,6 @@ # RWS: RESC Web Service RWS_VERSION_PREFIX = "/resc/v1" RWS_ROUTE_REPOSITORIES = "/repositories" -RWS_ROUTE_BRANCHES = "/branches" RWS_ROUTE_SCANS = "/scans" RWS_ROUTE_LAST_SCAN = "/last-scan" RWS_ROUTE_FINDINGS = "/findings" @@ -42,7 +41,6 @@ RWS_ROUTE_HEALTH = "/health" REPOSITORIES_TAG = "resc-repositories" -BRANCHES_TAG = "resc-branches" SCANS_TAG = "resc-scans" FINDINGS_TAG = "resc-findings" RULES_TAG = "resc-rules" diff --git a/components/resc-backend/src/resc_backend/db/model/__init__.py b/components/resc-backend/src/resc_backend/db/model/__init__.py index aa792ce4..a9edf716 100644 --- a/components/resc-backend/src/resc_backend/db/model/__init__.py +++ b/components/resc-backend/src/resc_backend/db/model/__init__.py @@ -13,7 +13,6 @@ # First Party from resc_backend.db.model.audit import DBaudit -from resc_backend.db.model.branch import DBbranch from resc_backend.db.model.finding import DBfinding from resc_backend.db.model.repository import DBrepository from resc_backend.db.model.rule import DBrule diff --git a/components/resc-backend/src/resc_backend/db/model/branch.py b/components/resc-backend/src/resc_backend/db/model/branch.py deleted file mode 100644 index 45be4480..00000000 --- 
a/components/resc-backend/src/resc_backend/db/model/branch.py +++ /dev/null @@ -1,32 +0,0 @@ -# Third Party -from sqlalchemy import Column, ForeignKey, Integer, String, UniqueConstraint - -# First Party -from resc_backend.db.model import Base -from resc_backend.resc_web_service.schema.branch import Branch - - -class DBbranch(Base): - __tablename__ = "branch" - id_ = Column("id", Integer, primary_key=True) - repository_id = Column(Integer, ForeignKey("repository.id"), nullable=False) - branch_id = Column(String(200), nullable=False) - branch_name = Column(String(200), nullable=False) - latest_commit = Column(String(100), nullable=False) - __table_args__ = (UniqueConstraint("branch_id", "repository_id", name="unique_branch_id_per_repository"),) - - def __init__(self, repository_id, branch_id, branch_name, latest_commit): - self.branch_id = branch_id - self.repository_id = repository_id - self.branch_name = branch_name - self.latest_commit = latest_commit - - @staticmethod - def create_from_branch(branch: Branch, repository_id: int): - db_branch = DBbranch( - repository_id=repository_id, - branch_id=branch.branch_id, - branch_name=branch.branch_name, - latest_commit=branch.latest_commit - ) - return db_branch diff --git a/components/resc-backend/src/resc_backend/db/model/finding.py b/components/resc-backend/src/resc_backend/db/model/finding.py index c694c2ae..c90852c9 100644 --- a/components/resc-backend/src/resc_backend/db/model/finding.py +++ b/components/resc-backend/src/resc_backend/db/model/finding.py @@ -12,7 +12,7 @@ class DBfinding(Base): __tablename__ = "finding" id_ = Column("id", Integer, primary_key=True) - branch_id = Column(Integer, ForeignKey("branch.id"), nullable=False) + repository_id = Column(Integer, ForeignKey("repository.id"), nullable=False) rule_name = Column(String(400), nullable=False) file_path = Column(String(500), nullable=False) line_number = Column(Integer, nullable=False) @@ -25,11 +25,11 @@ class DBfinding(Base): email = 
Column(String(100)) event_sent_on = Column(DateTime, nullable=True) - __table_args__ = (UniqueConstraint("commit_id", "branch_id", "rule_name", "file_path", "line_number", - "column_start", "column_end", name="uc_finding_per_branch"),) + __table_args__ = (UniqueConstraint("commit_id", "repository_id", "rule_name", "file_path", "line_number", + "column_start", "column_end", name="uc_finding_per_repository"),) def __init__(self, rule_name, file_path, line_number, commit_id, commit_message, commit_timestamp, author, - email, event_sent_on, branch_id, column_start, column_end): + email, event_sent_on, repository_id, column_start, column_end): self.email = email self.author = author self.commit_timestamp = commit_timestamp @@ -39,7 +39,7 @@ def __init__(self, rule_name, file_path, line_number, commit_id, commit_message, self.file_path = file_path self.rule_name = rule_name self.event_sent_on = event_sent_on - self.branch_id = branch_id + self.repository_id = repository_id self.column_start = column_start self.column_end = column_end @@ -55,7 +55,7 @@ def create_from_finding(finding): commit_timestamp=finding.commit_timestamp, author=finding.author, event_sent_on=finding.event_sent_on, - branch_id=finding.branch_id, + repository_id=finding.repository_id, column_start=finding.column_start, column_end=finding.column_end ) diff --git a/components/resc-backend/src/resc_backend/db/model/scan.py b/components/resc-backend/src/resc_backend/db/model/scan.py index 423a30cd..b9ac0f81 100644 --- a/components/resc-backend/src/resc_backend/db/model/scan.py +++ b/components/resc-backend/src/resc_backend/db/model/scan.py @@ -10,22 +10,22 @@ from resc_backend.db.model.rule_pack import DBrulePack from resc_backend.resc_web_service.schema.scan_type import ScanType -BRANCH_ID = "branch.id" +REPOSITORY_ID = "repository.id" class DBscan(Base): __tablename__ = "scan" id_ = Column("id", Integer, primary_key=True) - branch_id = Column(Integer, ForeignKey(BRANCH_ID)) + repository_id = 
Column(Integer, ForeignKey(REPOSITORY_ID)) rule_pack = Column(String(100), ForeignKey(DBrulePack.version), nullable=False) scan_type = Column(Enum(ScanType), default=ScanType.BASE, server_default=BASE_SCAN, nullable=False) last_scanned_commit = Column(String(100), nullable=False) timestamp = Column(DateTime, nullable=False, default=datetime.utcnow) increment_number = Column(Integer, server_default=text("0"), default=0, nullable=False) - def __init__(self, branch_id: int, scan_type: ScanType, last_scanned_commit: str, timestamp: datetime, + def __init__(self, repository_id: int, scan_type: ScanType, last_scanned_commit: str, timestamp: datetime, increment_number: int, rule_pack: str): - self.branch_id = branch_id + self.repository_id = repository_id self.scan_type = scan_type self.last_scanned_commit = last_scanned_commit self.timestamp = timestamp @@ -34,13 +34,13 @@ def __init__(self, branch_id: int, scan_type: ScanType, last_scanned_commit: str @staticmethod def create_from_metadata(timestamp: datetime, scan_type: ScanType, last_scanned_commit: str, increment_number: int, - rule_pack: str, branch_id: int): + rule_pack: str, repository_id: int): db_scan = DBscan( timestamp=timestamp, scan_type=scan_type, last_scanned_commit=last_scanned_commit, increment_number=increment_number, rule_pack=rule_pack, - branch_id=branch_id + repository_id=repository_id ) return db_scan diff --git a/components/resc-backend/src/resc_backend/helpers/git_operation.py b/components/resc-backend/src/resc_backend/helpers/git_operation.py index 82c96bff..be23d267 100644 --- a/components/resc-backend/src/resc_backend/helpers/git_operation.py +++ b/components/resc-backend/src/resc_backend/helpers/git_operation.py @@ -12,7 +12,6 @@ def clone_repository(repository_url: str, - branch_name: str, repo_clone_path: str, username: str = "", personal_access_token: str = ""): @@ -20,8 +19,6 @@ def clone_repository(repository_url: str, Clones the given repository :param repository_url: Repository url to 
clone - :param branch_name: - Branch name of the repository url to clone :param repo_clone_path: Path where to clone the repository :param username: @@ -36,5 +33,5 @@ def clone_repository(repository_url: str, repo_clone_url = f"https://{personal_access_token}@{url}" logger.debug(f"username is not specified, so cloning the repository with only personal access token: {url}") - Repo.clone_from(repo_clone_url, repo_clone_path, branch=branch_name) - logger.debug(f"Repository {repository_url}:{branch_name} cloned successfully") + Repo.clone_from(repo_clone_url, repo_clone_path) + logger.debug(f"Repository {repository_url} cloned successfully") diff --git a/components/resc-backend/src/resc_backend/resc_web_service/api.py b/components/resc-backend/src/resc_backend/resc_web_service/api.py index 33383ad2..ddbdab70 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/api.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/api.py @@ -20,7 +20,6 @@ requires_no_auth ) from resc_backend.resc_web_service.endpoints import ( - branches, common, detailed_findings, findings, @@ -92,7 +91,6 @@ def generate_logger_config(log_file_path, debug=True): {"name": "resc-rules", "description": "Manage rule information"}, {"name": "resc-rule-packs", "description": "Manage rule pack information"}, {"name": "resc-repositories", "description": "Manage repository information"}, - {"name": "resc-branches", "description": "Manage branch information"}, {"name": "resc-scans", "description": "Manage scan information"}, {"name": "resc-findings", "description": "Manage findings information"}, {"name": "resc-vcs-instances", "description": "Manage vcs instance information"}, @@ -120,7 +118,6 @@ def generate_logger_config(log_file_path, debug=True): app.include_router(health.router, prefix=RWS_VERSION_PREFIX) app.include_router(common.router, prefix=RWS_VERSION_PREFIX) -app.include_router(branches.router, prefix=RWS_VERSION_PREFIX) app.include_router(rules.router, 
prefix=RWS_VERSION_PREFIX) app.include_router(rule_packs.router, prefix=RWS_VERSION_PREFIX) app.include_router(findings.router, prefix=RWS_VERSION_PREFIX) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/crud/branch.py b/components/resc-backend/src/resc_backend/resc_web_service/crud/branch.py deleted file mode 100644 index e8ed5054..00000000 --- a/components/resc-backend/src/resc_backend/resc_web_service/crud/branch.py +++ /dev/null @@ -1,192 +0,0 @@ -# Third Party -from sqlalchemy import func -from sqlalchemy.orm import Session - -# First Party -from resc_backend.constants import DEFAULT_RECORDS_PER_PAGE_LIMIT, MAX_RECORDS_PER_PAGE_LIMIT -from resc_backend.db import model -from resc_backend.resc_web_service.crud import finding as finding_crud -from resc_backend.resc_web_service.crud import scan as scan_crud -from resc_backend.resc_web_service.crud import scan_finding as scan_finding_crud -from resc_backend.resc_web_service.schema import branch as branch_schema - - -def get_branches(db_connection: Session, skip: int = 0, - limit: int = DEFAULT_RECORDS_PER_PAGE_LIMIT): - limit_val = MAX_RECORDS_PER_PAGE_LIMIT if limit > MAX_RECORDS_PER_PAGE_LIMIT else limit - branches = db_connection.query(model.DBbranch).order_by( - model.branch.DBbranch.id_).offset(skip).limit(limit_val).all() - return branches - - -def get_branches_for_repository(db_connection: Session, repository_id: int, skip: int = 0, - limit: int = DEFAULT_RECORDS_PER_PAGE_LIMIT) -> [model.DBbranch]: - """ - Retrieve all branch child objects of a repository object from the database - :param db_connection: - Session of the database connection - :param repository_id: - id of the parent repository object of which to retrieve branch objects - :param skip: - integer amount of records to skip to support pagination - :param limit: - integer amount of records to return, to support pagination - :return: [DBbranch] - The output will contain a list of DBbranch type objects, - or an empty list if no 
branch was found for the given repository_id - """ - limit_val = MAX_RECORDS_PER_PAGE_LIMIT if limit > MAX_RECORDS_PER_PAGE_LIMIT else limit - branches = db_connection.query(model.DBbranch) \ - .filter(model.DBbranch.repository_id == repository_id) \ - .order_by(model.branch.DBbranch.id_).offset(skip).limit(limit_val).all() - return branches - - -def get_branches_count(db_connection: Session) -> int: - """ - Retrieve count of branches records - :param db_connection: - Session of the database connection - :return: total_count - count of branches - """ - total_count = db_connection.query(func.count(model.DBbranch.id_)).scalar() - return total_count - - -def get_branches_count_for_repository(db_connection: Session, repository_id: int) -> int: - """ - Retrieve count of finding records of a given scan - :param db_connection: - Session of the database connection - :param repository_id: - id of the repository_id object for which to retrieve the count of branches - :return: total_count - count of branches - """ - total_count = db_connection.query(func.count(model.DBbranch.id_)) \ - .filter(model.DBbranch.repository_id == repository_id) \ - .scalar() - return total_count - - -def get_branch(db_connection: Session, branch_id: int): - branch = db_connection.query(model.DBbranch) \ - .filter(model.branch.DBbranch.id_ == branch_id).first() - return branch - - -def update_branch(db_connection: Session, branch_id: int, branch: branch_schema.BranchCreate): - db_branch = db_connection.query(model.DBbranch).filter_by(id_=branch_id).first() - - db_branch.branch_name = branch.branch_name - db_branch.latest_commit = branch.latest_commit - - db_connection.commit() - db_connection.refresh(db_branch) - return db_branch - - -def create_branch(db_connection: Session, branch: branch_schema.BranchCreate): - db_branch = model.branch.DBbranch( - repository_id=branch.repository_id, - branch_id=branch.branch_id, - branch_name=branch.branch_name, - latest_commit=branch.latest_commit - ) - 
db_connection.add(db_branch) - db_connection.commit() - db_connection.refresh(db_branch) - return db_branch - - -def create_branch_if_not_exists(db_connection: Session, branch: branch_schema.BranchCreate): - # Query the database to see if the branch object exists based on the unique constraint parameters - db_select_branch = db_connection.query(model.DBbranch) \ - .filter(model.DBbranch.branch_id == branch.branch_id, - model.DBbranch.repository_id == branch.repository_id).first() - if db_select_branch is not None: - return db_select_branch - - # Create non-existing branch object - return create_branch(db_connection, branch) - - -def get_findings_metadata_by_branch_id(db_connection: Session, branch_id: int): - """ - Retrieves the finding metadata for a branch id from the database with most recent scan information - :param db_connection: - Session of the database connection - :param branch_id: - id of the branch for which findings metadata to be retrieved - :return: findings_metadata - findings_metadata containing the count for each status - """ - - latest_scan = scan_crud.get_latest_scan_for_branch(db_connection, branch_id=branch_id) - - if latest_scan is not None: - findings_metadata = scan_crud.get_branch_findings_metadata_for_latest_scan( - db_connection, branch_id=latest_scan.branch_id, scan_timestamp=latest_scan.timestamp) - else: - findings_metadata = { - "true_positive": 0, - "false_positive": 0, - "not_analyzed": 0, - "under_review": 0, - "clarification_required": 0, - "total_findings_count": 0 - } - - return findings_metadata - - -def delete_branch(db_connection: Session, branch_id: int, delete_related: bool = False): - """ - Delete a branch object - :param db_connection: - Session of the database connection - :param branch_id: - id of the branch to be deleted - :param delete_related: - if related records need to be deleted - """ - if delete_related: - scan_finding_crud.delete_scan_finding_by_branch_id(db_connection, branch_id=branch_id) - 
finding_crud.delete_findings_by_branch_id(db_connection, branch_id=branch_id) - scan_crud.delete_scans_by_branch_id(db_connection, branch_id=branch_id) - db_connection.query(model.DBbranch) \ - .filter(model.branch.DBbranch.id_ == branch_id) \ - .delete(synchronize_session=False) - db_connection.commit() - - -def delete_branches_by_repository_id(db_connection: Session, repository_id: int): - """ - Delete branches for a given repository - :param db_connection: - Session of the database connection - :param repository_id: - id of the repository - """ - db_connection.query(model.DBbranch) \ - .filter(model.repository.DBrepository.id_ == model.branch.DBbranch.repository_id, - model.repository.DBrepository.id_ == repository_id) \ - .delete(synchronize_session=False) - db_connection.commit() - - -def delete_branches_by_vcs_instance_id(db_connection: Session, vcs_instance_id: int): - """ - Delete branches for a given vcs instance - :param db_connection: - Session of the database connection - :param vcs_instance_id: - id of the vcs instance - """ - db_connection.query(model.DBbranch) \ - .filter(model.repository.DBrepository.id_ == model.branch.DBbranch.repository_id, - model.repository.DBrepository.vcs_instance == model.vcs_instance.DBVcsInstance.id_, - model.vcs_instance.DBVcsInstance.id_ == vcs_instance_id) \ - .delete(synchronize_session=False) - db_connection.commit() diff --git a/components/resc-backend/src/resc_backend/resc_web_service/crud/detailed_finding.py b/components/resc-backend/src/resc_backend/resc_web_service/crud/detailed_finding.py index 0d8c0f81..d4aed0f7 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/crud/detailed_finding.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/crud/detailed_finding.py @@ -33,13 +33,13 @@ def get_detailed_findings(db_connection: Session, findings_filter: FindingsFilte The output will contain a list of DetailedFindingRead objects, or an empty list if no finding was found for the given 
findings_filter """ - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) if findings_filter.rule_pack_versions: max_base_scan_subquery = max_base_scan_subquery.filter( model.DBscan.rule_pack.in_(findings_filter.rule_pack_versions)) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() # subquery to select latest audit ids of findings max_audit_subquery = db_connection.query(model.DBaudit.finding_id, @@ -78,7 +78,6 @@ def get_detailed_findings(db_connection: Session, findings_filter: FindingsFilte model.DBfinding.event_sent_on, model.DBscan.timestamp, model.DBscan.id_.label("scan_id"), - model.DBbranch.branch_name, model.DBscan.last_scanned_commit, model.DBVcsInstance.provider_type.label("vcs_provider"), model.DBrepository.project_key, @@ -90,13 +89,12 @@ def get_detailed_findings(db_connection: Session, findings_filter: FindingsFilte query = query.join(model.DBscan, and_(model.DBscanFinding.scan_id == model.DBscan.id_, model.DBscan.id_.in_(findings_filter.scan_ids))) else: - query = query.join(max_base_scan_subquery, model.DBfinding.branch_id == max_base_scan_subquery.c.branch_id) + query = query.join(max_base_scan_subquery, + model.DBfinding.repository_id == max_base_scan_subquery.c.repository_id) query = query.join(model.DBscan, and_(model.DBscanFinding.scan_id == model.DBscan.id_, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) - query = query.join(model.DBbranch, - model.branch.DBbranch.id_ == model.finding.DBfinding.branch_id) \ - .join(model.DBrepository, - model.repository.DBrepository.id_ == model.branch.DBbranch.repository_id) \ + query = 
query.join(model.DBrepository, + model.repository.DBrepository.id_ == model.finding.DBfinding.repository_id) \ .join(model.DBVcsInstance, model.vcs_instance.DBVcsInstance.id_ == model.repository.DBrepository.vcs_instance) query = query.join(max_audit_subquery, max_audit_subquery.c.finding_id == model.finding.DBfinding.id_, @@ -123,8 +121,6 @@ def get_detailed_findings(db_connection: Session, findings_filter: FindingsFilte else: query = query.filter(model.finding.DBfinding.event_sent_on.is_(None)) - if findings_filter.branch_name: - query = query.filter(model.DBbranch.branch_name == findings_filter.branch_name) if findings_filter.repository_name: query = query.filter(model.DBrepository.repository_name == findings_filter.repository_name) if findings_filter.vcs_providers and findings_filter.vcs_providers is not None: @@ -162,13 +158,13 @@ def get_detailed_findings_count(db_connection: Session, findings_filter: Finding func.max(model.DBaudit.id_).label("audit_id")) \ .group_by(model.DBaudit.finding_id).subquery() - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) if findings_filter.rule_pack_versions: max_base_scan_subquery = max_base_scan_subquery.filter( model.DBscan.rule_pack.in_(findings_filter.rule_pack_versions)) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() rule_tag_subquery = db_connection.query(model.DBruleTag.rule_id) \ .join(model.DBtag, model.DBruleTag.tag_id == model.DBtag.id_) @@ -189,14 +185,13 @@ def get_detailed_findings_count(db_connection: Session, findings_filter: Finding query = query.join(model.DBscan, and_(model.DBscanFinding.scan_id == model.DBscan.id_, 
model.DBscan.id_.in_(findings_filter.scan_ids))) else: - query = query.join(max_base_scan_subquery, model.DBfinding.branch_id == max_base_scan_subquery.c.branch_id) + query = query.join(max_base_scan_subquery, + model.DBfinding.repository_id == max_base_scan_subquery.c.repository_id) query = query.join(model.DBscan, and_(model.DBscanFinding.scan_id == model.DBscan.id_, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) - query = query.join(model.DBbranch, - model.branch.DBbranch.id_ == model.finding.DBfinding.branch_id) \ - .join(model.DBrepository, - model.repository.DBrepository.id_ == model.branch.DBbranch.repository_id) \ + query = query.join(model.DBrepository, + model.repository.DBrepository.id_ == model.finding.DBfinding.repository_id) \ .join(model.DBVcsInstance, model.vcs_instance.DBVcsInstance.id_ == model.repository.DBrepository.vcs_instance) query = query.join(max_audit_subquery, max_audit_subquery.c.finding_id == model.finding.DBfinding.id_, @@ -223,8 +218,6 @@ def get_detailed_findings_count(db_connection: Session, findings_filter: Finding else: query = query.filter(model.finding.DBfinding.event_sent_on.is_(None)) - if findings_filter.branch_name: - query = query.filter(model.DBbranch.branch_name == findings_filter.branch_name) if findings_filter.repository_name: query = query.filter(model.DBrepository.repository_name == findings_filter.repository_name) if findings_filter.vcs_providers and findings_filter.vcs_providers is not None: @@ -282,7 +275,6 @@ def get_detailed_finding(db_connection: Session, finding_id: int) -> detailed_fi model.DBscan.rule_pack, model.DBscan.timestamp, scan_id, - model.DBbranch.branch_name, model.DBscan.last_scanned_commit, model.DBVcsInstance.provider_type.label("vcs_provider"), model.DBrepository.project_key, @@ -291,10 +283,8 @@ def get_detailed_finding(db_connection: Session, finding_id: int) -> detailed_fi ).join(max_scan_subquery, model.finding.DBfinding.id_ == max_scan_subquery.c.finding_id) \ 
.join(model.DBscan, model.scan.DBscan.id_ == max_scan_subquery.c.scan_id) \ - .join(model.DBbranch, - model.branch.DBbranch.id_ == model.scan.DBscan.branch_id) \ .join(model.DBrepository, - model.repository.DBrepository.id_ == model.branch.DBbranch.repository_id) \ + model.repository.DBrepository.id_ == model.scan.DBscan.repository_id) \ .join(model.DBVcsInstance, model.vcs_instance.DBVcsInstance.id_ == model.repository.DBrepository.vcs_instance) \ .join(max_audit_subquery, max_audit_subquery.c.finding_id == model.finding.DBfinding.id_, diff --git a/components/resc-backend/src/resc_backend/resc_web_service/crud/finding.py b/components/resc-backend/src/resc_backend/resc_web_service/crud/finding.py index fcee3a1a..03bf3bec 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/crud/finding.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/crud/finding.py @@ -39,31 +39,32 @@ def create_findings(db_connection: Session, findings: List[finding_schema.Findin if len(findings) < 1: # Function is called with an empty list of findings return [] - branch_id = findings[0].branch_id + repository_id = findings[0].repository_id - # get a list of known / registered findings for this branch - db_branch_findings = db_connection.query(model.DBfinding).filter(model.DBfinding.branch_id == branch_id).all() + # get a list of known / registered findings for this repository + db_repository_findings = db_connection.query(model.DBfinding).\ + filter(model.DBfinding.repository_id == repository_id).all() # Compare new findings list with findings in the db new_findings = findings[:] db_findings = [] for finding in findings: - for branch_finding in db_branch_findings: + for repository_finding in db_repository_findings: # Compare based on the unique key in the findings table - if branch_finding.commit_id == finding.commit_id and \ - branch_finding.rule_name == finding.rule_name and \ - branch_finding.file_path == finding.file_path and \ - branch_finding.line_number 
== finding.line_number and \ - branch_finding.column_start == finding.column_start and \ - branch_finding.column_end == finding.column_end: + if repository_finding.commit_id == finding.commit_id and \ + repository_finding.rule_name == finding.rule_name and \ + repository_finding.file_path == finding.file_path and \ + repository_finding.line_number == finding.line_number and \ + repository_finding.column_start == finding.column_start and \ + repository_finding.column_end == finding.column_end: # Store the already known finding - db_findings.append(branch_finding) - # Remove from the db_branch_findings to increase performance for the next loop - db_branch_findings.remove(branch_finding) + db_findings.append(repository_finding) + # Remove from the db_repository_findings to increase performance for the next loop + db_repository_findings.remove(repository_finding) # Remove from the to be created findings new_findings.remove(finding) break - logger.info(f"create_findings branch {branch_id}, Requested: {len(findings)}. " + logger.info(f"create_findings repository {repository_id}, Requested: {len(findings)}. " f"New findings: {len(new_findings)}. 
Already in db: {len(db_findings)}") db_create_findings = [] @@ -175,7 +176,7 @@ def get_total_findings_count(db_connection: Session, findings_filter: FindingsFi model.audit.DBaudit.id_ == max_audit_subquery.c.audit_id), isouter=True) if (findings_filter.vcs_providers and findings_filter.vcs_providers is not None) \ - or findings_filter.project_name or findings_filter.branch_name \ + or findings_filter.project_name \ or findings_filter.repository_name or findings_filter.start_date_time \ or findings_filter.end_date_time: total_count_query = total_count_query \ @@ -183,10 +184,8 @@ def get_total_findings_count(db_connection: Session, findings_filter: FindingsFi model.scan_finding.DBscanFinding.finding_id == model.finding.DBfinding.id_) \ .join(model.DBscan, model.scan.DBscan.id_ == model.scan_finding.DBscanFinding.scan_id) \ - .join(model.DBbranch, - model.branch.DBbranch.id_ == model.scan.DBscan.branch_id) \ .join(model.DBrepository, - model.repository.DBrepository.id_ == model.branch.DBbranch.repository_id) \ + model.repository.DBrepository.id_ == model.scan.DBscan.repository_id) \ .join(model.DBVcsInstance, model.vcs_instance.DBVcsInstance.id_ == model.repository.DBrepository.vcs_instance) @@ -196,8 +195,6 @@ def get_total_findings_count(db_connection: Session, findings_filter: FindingsFi if findings_filter.end_date_time: total_count_query = total_count_query.filter(model.scan.DBscan.timestamp <= findings_filter.end_date_time) - if findings_filter.branch_name: - total_count_query = total_count_query.filter(model.DBbranch.branch_name == findings_filter.branch_name) if findings_filter.repository_name: total_count_query = total_count_query.filter( model.DBrepository.repository_name == findings_filter.repository_name) @@ -283,10 +280,8 @@ def get_distinct_rules_from_findings(db_connection: Session, scan_id: int = -1, model.scan_finding.DBscanFinding.finding_id == model.finding.DBfinding.id_) \ .join(model.DBscan, model.scan.DBscan.id_ == 
model.scan_finding.DBscanFinding.scan_id) \ - .join(model.DBbranch, - model.branch.DBbranch.id_ == model.scan.DBscan.branch_id) \ .join(model.DBrepository, - model.repository.DBrepository.id_ == model.branch.DBbranch.repository_id) \ + model.repository.DBrepository.id_ == model.scan.DBscan.repository_id) \ .join(model.DBVcsInstance, model.vcs_instance.DBVcsInstance.id_ == model.repository.DBrepository.vcs_instance) if finding_statuses: @@ -404,19 +399,19 @@ def get_rule_findings_count_by_status(db_connection: Session, rule_pack_versions model.DBaudit.status, func.count(model.DBfinding.id_)) - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) if rule_pack_versions: max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.rule_pack.in_(rule_pack_versions)) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() max_audit_subquery = db_connection.query(model.DBaudit.finding_id, func.max(model.DBaudit.id_).label("audit_id")) \ .group_by(model.DBaudit.finding_id).subquery() query = query.join(model.DBscanFinding, model.DBfinding.id_ == model.DBscanFinding.finding_id) - query = query.join(max_base_scan_subquery, model.DBfinding.branch_id == max_base_scan_subquery.c.branch_id) + query = query.join(max_base_scan_subquery, model.DBfinding.repository_id == max_base_scan_subquery.c.repository_id) query = query.join(model.DBscan, and_(model.DBscanFinding.scan_id == model.DBscan.id_, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) if rule_tags: @@ -600,20 +595,6 @@ def delete_finding(db_connection: Session, finding_id: int, delete_related: bool db_connection.commit() -def 
delete_findings_by_branch_id(db_connection: Session, branch_id: int): - """ - Delete findings for a given branch - :param db_connection: - Session of the database connection - :param branch_id: - id of the branch - """ - db_connection.query(model.DBfinding) \ - .filter(model.finding.DBfinding.branch_id == branch_id) \ - .delete(synchronize_session=False) - db_connection.commit() - - def delete_findings_by_repository_id(db_connection: Session, repository_id: int): """ Delete findings for a given repository @@ -623,9 +604,7 @@ def delete_findings_by_repository_id(db_connection: Session, repository_id: int) id of the repository """ db_connection.query(model.DBfinding) \ - .filter(model.finding.DBfinding.branch_id == model.branch.DBbranch.id_, - model.branch.DBbranch.repository_id == model.repository.DBrepository.id_, - model.repository.DBrepository.id_ == repository_id) \ + .filter(model.finding.DBfinding.repository_id == repository_id) \ .delete(synchronize_session=False) db_connection.commit() @@ -639,8 +618,7 @@ def delete_findings_by_vcs_instance_id(db_connection: Session, vcs_instance_id: id of the vcs instance """ db_connection.query(model.DBfinding) \ - .filter(model.finding.DBfinding.branch_id == model.branch.DBbranch.id_, - model.branch.DBbranch.repository_id == model.repository.DBrepository.id_, + .filter(model.finding.DBfinding.repository_id == model.repository.DBrepository.id_, model.repository.DBrepository.vcs_instance == model.vcs_instance.DBVcsInstance.id_, model.vcs_instance.DBVcsInstance.id_ == vcs_instance_id) \ .delete(synchronize_session=False) @@ -673,8 +651,7 @@ def get_finding_audit_status_count_over_time(db_connection: Session, status: Fin .group_by(model.DBaudit.finding_id).subquery() query = query.join(max_audit_subquery, max_audit_subquery.c.audit_id == model.DBaudit.id_) query = query.join(model.DBfinding, model.DBfinding.id_ == model.DBaudit.finding_id) - query = query.join(model.DBbranch, model.DBbranch.id_ == model.DBfinding.branch_id) - 
query = query.join(model.DBrepository, model.DBrepository.id_ == model.DBbranch.repository_id) + query = query.join(model.DBrepository, model.DBrepository.id_ == model.DBfinding.repository_id) query = query.join(model.DBVcsInstance, model.DBVcsInstance.id_ == model.DBrepository.vcs_instance) query = query.filter(model.DBaudit.status == status) query = query.group_by(model.DBVcsInstance.provider_type) @@ -706,15 +683,15 @@ def get_finding_count_by_vcs_provider_over_time(db_connection: Session, weeks: i func.count(model.DBfinding.id_).label("finding_count") ) max_base_scan = db_connection.query(func.max(model.DBscan.id_).label("scan_id"), - model.DBscan.branch_id) \ + model.DBscan.repository_id) \ .filter(extract('year', model.DBscan.timestamp) == extract('year', last_nth_week_date_time)) \ .filter(extract('week', model.DBscan.timestamp) <= extract('week', last_nth_week_date_time)) \ .filter(model.DBscan.scan_type == ScanType.BASE) \ - .group_by(model.DBscan.branch_id).subquery() + .group_by(model.DBscan.repository_id).subquery() query = query.join(model.DBscanFinding, model.DBfinding.id_ == model.DBscanFinding.finding_id) query = query.join(model.DBscan, model.DBscan.id_ == model.DBscanFinding.scan_id) - query = query.join(max_base_scan, and_(max_base_scan.c.branch_id == model.DBscan.branch_id, + query = query.join(max_base_scan, and_(max_base_scan.c.repository_id == model.DBscan.repository_id, or_(model.DBscan.id_ == max_base_scan.c.scan_id, (and_(model.DBscan.id_ > max_base_scan.c.scan_id, model.DBscan.scan_type == ScanType.INCREMENTAL, @@ -725,8 +702,7 @@ def get_finding_count_by_vcs_provider_over_time(db_connection: Session, weeks: i ) ) ) - query = query.join(model.DBbranch, model.DBbranch.id_ == model.DBscan.branch_id) - query = query.join(model.DBrepository, model.DBrepository.id_ == model.DBbranch.repository_id) + query = query.join(model.DBrepository, model.DBrepository.id_ == model.DBscan.repository_id) query = query.join(model.DBVcsInstance, 
model.DBVcsInstance.id_ == model.DBrepository.vcs_instance) query = query.group_by(model.DBVcsInstance.provider_type) @@ -757,11 +733,11 @@ def get_un_triaged_finding_count_by_vcs_provider_over_time(db_connection: Sessio func.count(model.DBfinding.id_).label("finding_count") ) max_base_scan = db_connection.query(func.max(model.DBscan.id_).label("scan_id"), - model.DBscan.branch_id) \ + model.DBscan.repository_id) \ .filter(extract('year', model.DBscan.timestamp) == extract('year', last_nth_week_date_time)) \ .filter(extract('week', model.DBscan.timestamp) <= extract('week', last_nth_week_date_time)) \ .filter(model.DBscan.scan_type == ScanType.BASE) \ - .group_by(model.DBscan.branch_id).subquery() + .group_by(model.DBscan.repository_id).subquery() max_audit_subquery = db_connection.query(model.DBaudit.finding_id, func.max(model.DBaudit.id_).label("audit_id")) \ @@ -771,7 +747,7 @@ def get_un_triaged_finding_count_by_vcs_provider_over_time(db_connection: Sessio query = query.join(model.DBscanFinding, model.DBfinding.id_ == model.DBscanFinding.finding_id) query = query.join(model.DBscan, model.DBscan.id_ == model.DBscanFinding.scan_id) - query = query.join(max_base_scan, and_(max_base_scan.c.branch_id == model.DBscan.branch_id, + query = query.join(max_base_scan, and_(max_base_scan.c.repository_id == model.DBscan.repository_id, or_(model.DBscan.id_ == max_base_scan.c.scan_id, (and_(model.DBscan.id_ > max_base_scan.c.scan_id, model.DBscan.scan_type == ScanType.INCREMENTAL, @@ -782,8 +758,7 @@ def get_un_triaged_finding_count_by_vcs_provider_over_time(db_connection: Sessio ) ) ) - query = query.join(model.DBbranch, model.DBbranch.id_ == model.DBscan.branch_id) - query = query.join(model.DBrepository, model.DBrepository.id_ == model.DBbranch.repository_id) + query = query.join(model.DBrepository, model.DBrepository.id_ == model.DBscan.repository_id) query = query.join(model.DBVcsInstance, model.DBVcsInstance.id_ == model.DBrepository.vcs_instance) query = 
query.join(max_audit_subquery, max_audit_subquery.c.finding_id == model.finding.DBfinding.id_, diff --git a/components/resc-backend/src/resc_backend/resc_web_service/crud/repository.py b/components/resc-backend/src/resc_backend/resc_web_service/crud/repository.py index 15f8bf39..25e959b7 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/crud/repository.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/crud/repository.py @@ -5,7 +5,6 @@ # First Party from resc_backend.constants import DEFAULT_RECORDS_PER_PAGE_LIMIT, MAX_RECORDS_PER_PAGE_LIMIT from resc_backend.db import model -from resc_backend.resc_web_service.crud import branch as branch_crud from resc_backend.resc_web_service.crud import finding as finding_crud from resc_backend.resc_web_service.crud import scan as scan_crud from resc_backend.resc_web_service.crud import scan_finding as scan_finding_crud @@ -38,6 +37,12 @@ def get_repositories(db_connection: Session, vcs_providers: [VCSProviders] = Non list of DBrepository objects """ limit_val = MAX_RECORDS_PER_PAGE_LIMIT if limit > MAX_RECORDS_PER_PAGE_LIMIT else limit + + # Get the latest scan for repository + repo_last_scan_sub_query = db_connection.query(model.DBscan.repository_id, + func.max(model.DBscan.timestamp).label("max_timestamp")) + repo_last_scan_sub_query = repo_last_scan_sub_query.group_by(model.DBscan.repository_id).subquery() + query = db_connection.query( model.DBrepository.id_, model.DBrepository.project_key, @@ -45,25 +50,32 @@ def get_repositories(db_connection: Session, vcs_providers: [VCSProviders] = Non model.DBrepository.repository_name, model.DBrepository.repository_url, model.DBrepository.vcs_instance, - model.DBVcsInstance.provider_type) \ - .join(model.DBVcsInstance, - model.vcs_instance.DBVcsInstance.id_ == model.repository.DBrepository.vcs_instance) + model.DBVcsInstance.provider_type, + func.coalesce(model.DBscan.id_, None).label('last_scan_id'), + func.coalesce(model.DBscan.timestamp, 
None).label('last_scan_timestamp')) + query = query.join(model.DBVcsInstance, + model.vcs_instance.DBVcsInstance.id_ == model.repository.DBrepository.vcs_instance) + query = query.join(repo_last_scan_sub_query, + model.repository.DBrepository.id_ == repo_last_scan_sub_query.c.repository_id, isouter=True) + query = query.join(model.DBscan, + and_(model.scan.DBscan.repository_id == model.repository.DBrepository.id_, + model.scan.DBscan.timestamp == repo_last_scan_sub_query.c.max_timestamp), isouter=True) if only_if_has_findings: - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() sub_query = db_connection.query(model.DBrepository.id_) - sub_query = sub_query.join(model.DBbranch, model.DBbranch.repository_id == model.DBrepository.id_) - sub_query = sub_query.join(max_base_scan_subquery, model.DBbranch.id_ == max_base_scan_subquery.c.branch_id) - sub_query = sub_query.join(model.DBscan, and_(model.DBbranch.id_ == model.DBscan.branch_id, + sub_query = sub_query.join(max_base_scan_subquery, + model.DBrepository.id_ == max_base_scan_subquery.c.repository_id) + sub_query = sub_query.join(model.DBscan, and_(model.DBrepository.id_ == model.DBscan.repository_id, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) sub_query = sub_query.join(model.DBscanFinding, model.DBscan.id_ == model.DBscanFinding.scan_id) sub_query = sub_query.distinct() - # Filter on repositories that are in the branches selection + # Filter on repositories that are in the selection query = query.filter(model.DBrepository.id_.in_(sub_query)) if vcs_providers 
and vcs_providers is not None: @@ -100,20 +112,20 @@ def get_repositories_count(db_connection: Session, vcs_providers: [VCSProviders] query = db_connection.query(func.count(model.DBrepository.id_)) if only_if_has_findings: - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() sub_query = db_connection.query(model.DBrepository.id_) - sub_query = sub_query.join(model.DBbranch, model.DBbranch.repository_id == model.DBrepository.id_) - sub_query = sub_query.join(max_base_scan_subquery, model.DBbranch.id_ == max_base_scan_subquery.c.branch_id) - sub_query = sub_query.join(model.DBscan, and_(model.DBbranch.id_ == model.DBscan.branch_id, + sub_query = sub_query.join(max_base_scan_subquery, + model.DBrepository.id_ == max_base_scan_subquery.c.repository_id) + sub_query = sub_query.join(model.DBscan, and_(model.DBrepository.id_ == model.DBscan.repository_id, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) sub_query = sub_query.join(model.DBscanFinding, model.DBscan.id_ == model.DBscanFinding.scan_id) sub_query = sub_query.distinct() - # Filter on repositories that are in the branches selection + # Filter on repositories that are in the selection query = query.filter(model.DBrepository.id_.in_(sub_query)) if vcs_providers and vcs_providers is not None: @@ -196,14 +208,13 @@ def get_distinct_projects(db_connection: Session, vcs_providers: [VCSProviders] query = db_connection.query(model.DBrepository.project_key) if only_if_has_findings: - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + 
max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() - query = query.join(model.DBbranch, model.DBbranch.repository_id == model.DBrepository.id_) - query = query.join(max_base_scan_subquery, model.DBbranch.id_ == max_base_scan_subquery.c.branch_id) - query = query.join(model.DBscan, and_(model.DBbranch.id_ == model.DBscan.branch_id, + query = query.join(max_base_scan_subquery, model.DBrepository.id_ == max_base_scan_subquery.c.repository_id) + query = query.join(model.DBscan, and_(model.DBrepository.id_ == model.DBscan.repository_id, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) query = query.join(model.DBscanFinding, model.DBscan.id_ == model.DBscanFinding.scan_id) @@ -237,14 +248,13 @@ def get_distinct_repositories(db_connection: Session, vcs_providers: [VCSProvide query = db_connection.query(model.DBrepository.repository_name) if only_if_has_findings: - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() - query = query.join(model.DBbranch, model.DBbranch.repository_id == model.DBrepository.id_) - query = query.join(max_base_scan_subquery, model.DBbranch.id_ == max_base_scan_subquery.c.branch_id) - query = query.join(model.DBscan, and_(model.DBbranch.id_ 
== model.DBscan.branch_id, + query = query.join(max_base_scan_subquery, model.DBrepository.id_ == max_base_scan_subquery.c.repository_id) + query = query.join(model.DBscan, and_(model.DBrepository.id_ == model.DBscan.repository_id, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) query = query.join(model.DBscanFinding, model.DBscan.id_ == model.DBscanFinding.scan_id) @@ -274,18 +284,17 @@ def get_findings_metadata_by_repository_id(db_connection: Session, repository_id model.DBaudit.status, func.count(model.DBscanFinding.finding_id)) - max_base_scan_subquery = db_connection.query(model.DBscan.branch_id, + max_base_scan_subquery = db_connection.query(model.DBscan.repository_id, func.max(model.DBscan.id_).label("latest_base_scan_id")) max_base_scan_subquery = max_base_scan_subquery.filter(model.DBscan.scan_type == ScanType.BASE) - max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.branch_id).subquery() + max_base_scan_subquery = max_base_scan_subquery.group_by(model.DBscan.repository_id).subquery() max_audit_subquery = db_connection.query(model.DBaudit.finding_id, func.max(model.DBaudit.id_).label("audit_id")) \ .group_by(model.DBaudit.finding_id).subquery() - query = query.join(model.DBbranch, model.DBbranch.repository_id == model.DBrepository.id_) - query = query.join(max_base_scan_subquery, model.DBbranch.id_ == max_base_scan_subquery.c.branch_id) - query = query.join(model.DBscan, and_(model.DBbranch.id_ == model.DBscan.branch_id, + query = query.join(max_base_scan_subquery, model.DBrepository.id_ == max_base_scan_subquery.c.repository_id) + query = query.join(model.DBscan, and_(model.DBrepository.id_ == model.DBscan.repository_id, model.DBscan.id_ >= max_base_scan_subquery.c.latest_base_scan_id)) query = query.join(model.DBscanFinding, model.DBscan.id_ == model.DBscanFinding.scan_id) query = query.join(max_audit_subquery, max_audit_subquery.c.finding_id == model.DBscanFinding.finding_id, @@ -335,7 +344,6 @@ def 
delete_repository(db_connection: Session, repository_id: int, delete_related scan_finding_crud.delete_scan_finding_by_repository_id(db_connection, repository_id=repository_id) finding_crud.delete_findings_by_repository_id(db_connection, repository_id=repository_id) scan_crud.delete_scans_by_repository_id(db_connection, repository_id=repository_id) - branch_crud.delete_branches_by_repository_id(db_connection, repository_id=repository_id) db_connection.query(model.DBrepository) \ .filter(model.repository.DBrepository.id_ == repository_id) \ .delete(synchronize_session=False) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/crud/scan.py b/components/resc-backend/src/resc_backend/resc_web_service/crud/scan.py index 801b9d70..7259be21 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/crud/scan.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/crud/scan.py @@ -22,35 +22,35 @@ def get_scan(db_connection: Session, scan_id: int) -> model.DBscan: return scan -def get_latest_scan_for_branch(db_connection: Session, branch_id: int) -> model.DBscan: +def get_latest_scan_for_repository(db_connection: Session, repository_id: int) -> model.DBscan: """ - Retrieve the most recent scan of a given branch object + Retrieve the most recent scan of a given repository object :param db_connection: Session of the database connection - :param branch_id: - id of the branch object for which to retrieve the most recent scan + :param repository_id: + id of the repository object for which to retrieve the most recent scan :return: scan - scan object having the most recent timestamp for a given branch object + scan object having the most recent timestamp for a given repository object """ subquery = (db_connection.query(func.max(model.DBscan.timestamp).label("max_time")) - .filter(model.scan.DBscan.branch_id == branch_id)).subquery() + .filter(model.scan.DBscan.repository_id == repository_id)).subquery() scan = 
db_connection.query(model.DBscan) \ .join(subquery, and_(model.DBscan.timestamp == subquery.c.max_time)) \ - .filter(model.scan.DBscan.branch_id == branch_id).first() + .filter(model.scan.DBscan.repository_id == repository_id).first() return scan def get_scans(db_connection: Session, skip: int = 0, - limit: int = DEFAULT_RECORDS_PER_PAGE_LIMIT, branch_id: int = -1) -> List[model.DBscan]: + limit: int = DEFAULT_RECORDS_PER_PAGE_LIMIT, repository_id: int = -1) -> List[model.DBscan]: """ - Retrieve the scan records, ordered by scan_id and optionally filtered by branch + Retrieve the scan records, ordered by scan_id and optionally filtered by repository_id :param db_connection: Session of the database connection - :param branch_id: - optional int filtering the branch for which to retrieve scans + :param repository_id: + optional int filtering the repository for which to retrieve scans :param skip: integer amount of records to skip to support pagination :param limit: @@ -61,27 +61,27 @@ def get_scans(db_connection: Session, skip: int = 0, limit_val = MAX_RECORDS_PER_PAGE_LIMIT if limit > MAX_RECORDS_PER_PAGE_LIMIT else limit query = db_connection.query(model.DBscan) - if branch_id > 0: - query = query.filter(model.DBscan.branch_id == branch_id) + if repository_id > 0: + query = query.filter(model.DBscan.repository_id == repository_id) scans = query.order_by(model.scan.DBscan.id_).offset(skip).limit(limit_val).all() return scans -def get_scans_count(db_connection: Session, branch_id: int = -1) -> int: +def get_scans_count(db_connection: Session, repository_id: int = -1) -> int: """ Retrieve count of scan records optionally filtered by VCS provider :param db_connection: Session of the database connection - :param branch_id: - optional int filtering the branch for which to retrieve scans + :param repository_id: + optional int filtering the repository for which to retrieve scans :return: total_count count of scans """ query = 
db_connection.query(func.count(model.DBscan.id_)) - if branch_id > 0: - query = query.filter(model.DBscan.branch_id == branch_id) + if repository_id > 0: + query = query.filter(model.DBscan.repository_id == repository_id) total_count = query.scalar() return total_count @@ -103,7 +103,7 @@ def create_scan(db_connection: Session, scan: scan_schema.ScanCreate) -> model.D db_scan = model.scan.DBscan( scan_type=scan.scan_type, last_scanned_commit=scan.last_scanned_commit, - branch_id=scan.branch_id, + repository_id=scan.repository_id, timestamp=scan.timestamp, increment_number=scan.increment_number, rule_pack=scan.rule_pack @@ -114,14 +114,14 @@ def create_scan(db_connection: Session, scan: scan_schema.ScanCreate) -> model.D return db_scan -def get_branch_findings_metadata_for_latest_scan(db_connection: Session, branch_id: int, - scan_timestamp: datetime): +def get_repository_findings_metadata_for_latest_scan(db_connection: Session, repository_id: int, + scan_timestamp: datetime): """ - Retrieves the finding metadata for latest scan of a branch from the database + Retrieves the finding metadata for latest scan of a repository from the database :param db_connection: Session of the database connection - :param branch_id: - branch id of the latest scan + :param repository_id: + repository id of the latest scan :param scan_timestamp: timestamp of the latest scan :return: findings_metadata @@ -129,7 +129,7 @@ def get_branch_findings_metadata_for_latest_scan(db_connection: Session, branch_ """ scan_ids_latest_to_base = [] scans = get_scans(db_connection=db_connection, - branch_id=branch_id, limit=1000000) + repository_id=repository_id, limit=1000000) scans.sort(key=lambda x: x.timestamp, reverse=True) for scan in scans: if scan.timestamp <= scan_timestamp: @@ -156,8 +156,9 @@ def get_branch_findings_metadata_for_latest_scan(db_connection: Session, branch_ if finding_status == FindingStatus.CLARIFICATION_REQUIRED: clarification_required_count = count - total_findings_count = 
true_positive_count + false_positive_count + not_analyzed_count \ - + under_review_count + clarification_required_count + total_findings_count = \ + true_positive_count + false_positive_count + not_analyzed_count + under_review_count + \ + clarification_required_count findings_metadata = { "true_positive": true_positive_count, @@ -171,28 +172,28 @@ def get_branch_findings_metadata_for_latest_scan(db_connection: Session, branch_ return findings_metadata -def delete_branch_findings_not_linked_to_any_scan(db_connection: Session, branch_id: int): +def delete_repository_findings_not_linked_to_any_scan(db_connection: Session, repository_id: int): """ - Delete findings for a given branch which are not linked to any scans + Delete findings for a given repository which are not linked to any scans :param db_connection: Session of the database connection - :param branch_id: - id of the branch + :param repository_id: + id of the repository """ sub_query = db_connection.query(model.DBscanFinding.finding_id).distinct() db_connection.query(model.DBfinding) \ - .filter(model.finding.DBfinding.id_.not_in(sub_query), model.finding.DBfinding.branch_id == branch_id) \ + .filter(model.finding.DBfinding.id_.not_in(sub_query), model.finding.DBfinding.repository_id == repository_id) \ .delete(synchronize_session=False) db_connection.commit() -def delete_scan(db_connection: Session, branch_id: int, scan_id: int, delete_related: bool = False): +def delete_scan(db_connection: Session, repository_id: int, scan_id: int, delete_related: bool = False): """ Delete a scan object :param db_connection: Session of the database connection - :param branch_id: - branch id for which findings will be deleted which are not linked to any scans + :param repository_id: + repository_id for which findings will be deleted which are not linked to any scans :param scan_id: id of the scan to be deleted :param delete_related: @@ -204,21 +205,7 @@ def delete_scan(db_connection: Session, branch_id: int, scan_id: int, 
delete_rel .filter(model.scan.DBscan.id_ == scan_id) \ .delete(synchronize_session=False) db_connection.commit() - delete_branch_findings_not_linked_to_any_scan(db_connection, branch_id=branch_id) - - -def delete_scans_by_branch_id(db_connection: Session, branch_id: int): - """ - Delete scans for a given branch - :param db_connection: - Session of the database connection - :param branch_id: - id of the branch - """ - db_connection.query(model.DBscan) \ - .filter(model.scan.DBscan.branch_id == branch_id) \ - .delete(synchronize_session=False) - db_connection.commit() + delete_repository_findings_not_linked_to_any_scan(db_connection, repository_id=repository_id) def delete_scans_by_repository_id(db_connection: Session, repository_id: int): @@ -230,9 +217,7 @@ def delete_scans_by_repository_id(db_connection: Session, repository_id: int): id of the repository """ db_connection.query(model.DBscan) \ - .filter(model.scan.DBscan.branch_id == model.branch.DBbranch.id_, - model.branch.DBbranch.repository_id == model.repository.DBrepository.id_, - model.repository.DBrepository.id_ == repository_id) \ + .filter(model.scan.DBscan.repository_id == repository_id) \ .delete(synchronize_session=False) db_connection.commit() @@ -246,8 +231,7 @@ def delete_scans_by_vcs_instance_id(db_connection: Session, vcs_instance_id: int id of the vcs instance """ db_connection.query(model.DBscan) \ - .filter(model.scan.DBscan.branch_id == model.branch.DBbranch.id_, - model.branch.DBbranch.repository_id == model.repository.DBrepository.id_, + .filter(model.scan.DBscan.repository_id == model.repository.DBrepository.id_, model.repository.DBrepository.vcs_instance == model.vcs_instance.DBVcsInstance.id_, model.vcs_instance.DBVcsInstance.id_ == vcs_instance_id) \ .delete(synchronize_session=False) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/crud/scan_finding.py b/components/resc-backend/src/resc_backend/resc_web_service/crud/scan_finding.py index 36299062..5773bb52 100644 
--- a/components/resc-backend/src/resc_backend/resc_web_service/crud/scan_finding.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/crud/scan_finding.py @@ -51,23 +51,6 @@ def delete_scan_finding(db_connection: Session, finding_id: int = None, scan_id: db_connection.commit() -def delete_scan_finding_by_branch_id(db_connection: Session, branch_id: int): - """ - Delete scan findings for a given branch - :param db_connection: - Session of the database connection - :param branch_id: - id of the branch - """ - db_connection.query(model.DBscanFinding) \ - .filter(model.scan_finding.DBscanFinding.scan_id == model.scan.DBscan.id_, - model.scan_finding.DBscanFinding.finding_id == model.finding.DBfinding.id_, - model.scan.DBscan.branch_id == model.finding.DBfinding.branch_id, - model.scan.DBscan.branch_id == branch_id) \ - .delete(synchronize_session=False) - db_connection.commit() - - def delete_scan_finding_by_repository_id(db_connection: Session, repository_id: int): """ Delete scan findings for a given repository @@ -79,9 +62,8 @@ def delete_scan_finding_by_repository_id(db_connection: Session, repository_id: db_connection.query(model.DBscanFinding) \ .filter(model.scan_finding.DBscanFinding.scan_id == model.scan.DBscan.id_, model.scan_finding.DBscanFinding.finding_id == model.finding.DBfinding.id_, - model.scan.DBscan.branch_id == model.branch.DBbranch.id_, - model.branch.DBbranch.repository_id == model.repository.DBrepository.id_, - model.repository.DBrepository.id_ == repository_id) \ + model.scan.DBscan.repository_id == model.finding.DBfinding.repository_id, + model.scan.DBscan.repository_id == repository_id) \ .delete(synchronize_session=False) db_connection.commit() @@ -97,8 +79,7 @@ def delete_scan_finding_by_vcs_instance_id(db_connection: Session, vcs_instance_ db_connection.query(model.DBscanFinding) \ .filter(model.scan_finding.DBscanFinding.scan_id == model.scan.DBscan.id_, model.scan_finding.DBscanFinding.finding_id == 
model.finding.DBfinding.id_, - model.scan.DBscan.branch_id == model.branch.DBbranch.id_, - model.branch.DBbranch.repository_id == model.repository.DBrepository.id_, + model.scan.DBscan.repository_id == model.repository.DBrepository.id_, model.repository.DBrepository.vcs_instance == model.vcs_instance.DBVcsInstance.id_, model.vcs_instance.DBVcsInstance.id_ == vcs_instance_id) \ .delete(synchronize_session=False) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/crud/vcs_instance.py b/components/resc-backend/src/resc_backend/resc_web_service/crud/vcs_instance.py index 7e5018f6..35901d68 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/crud/vcs_instance.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/crud/vcs_instance.py @@ -9,7 +9,6 @@ # First Party from resc_backend.constants import DEFAULT_RECORDS_PER_PAGE_LIMIT, MAX_RECORDS_PER_PAGE_LIMIT from resc_backend.db import model -from resc_backend.resc_web_service.crud import branch as branch_crud from resc_backend.resc_web_service.crud import finding as finding_crud from resc_backend.resc_web_service.crud import repository as repository_crud from resc_backend.resc_web_service.crud import scan as scan_crud @@ -147,7 +146,6 @@ def delete_vcs_instance(db_connection: Session, vcs_instance_id: int, delete_rel scan_finding_crud.delete_scan_finding_by_vcs_instance_id(db_connection, vcs_instance_id=vcs_instance_id) finding_crud.delete_findings_by_vcs_instance_id(db_connection, vcs_instance_id=vcs_instance_id) scan_crud.delete_scans_by_vcs_instance_id(db_connection, vcs_instance_id=vcs_instance_id) - branch_crud.delete_branches_by_vcs_instance_id(db_connection, vcs_instance_id=vcs_instance_id) repository_crud.delete_repositories_by_vcs_instance_id(db_connection, vcs_instance_id=vcs_instance_id) db_vcs_instance = db_connection.query(model.DBVcsInstance).filter_by(id_=vcs_instance_id).first() db_connection.delete(db_vcs_instance) diff --git 
a/components/resc-backend/src/resc_backend/resc_web_service/dependencies.py b/components/resc-backend/src/resc_backend/resc_web_service/dependencies.py index 7a21b63c..1fe54955 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/dependencies.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/dependencies.py @@ -15,7 +15,7 @@ # First Party from resc_backend.constants import RESC_OPERATOR_ROLE from resc_backend.db.connection import Session, engine -from resc_backend.db.model import DBbranch, DBfinding, DBrepository, DBrule, DBscan, DBscanFinding +from resc_backend.db.model import DBfinding, DBrepository, DBrule, DBscan, DBscanFinding security = HTTPBearer() logger = logging.getLogger(__name__) @@ -109,7 +109,6 @@ def get_db_connection(): @retry(wait=wait_exponential(multiplier=1, min=2, max=10), stop=stop_after_attempt(100)) def check_db_initialized(): required_table_names = [ - DBbranch.__tablename__, DBfinding.__tablename__, DBrepository.__tablename__, DBrule.__tablename__, diff --git a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/branches.py b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/branches.py deleted file mode 100644 index 36c96189..00000000 --- a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/branches.py +++ /dev/null @@ -1,254 +0,0 @@ -# Third Party -from fastapi import APIRouter, Depends, HTTPException, Query, status - -# First Party -from resc_backend.constants import ( - BRANCHES_TAG, - DEFAULT_RECORDS_PER_PAGE_LIMIT, - ERROR_MESSAGE_500, - ERROR_MESSAGE_503, - RWS_ROUTE_BRANCHES, - RWS_ROUTE_FINDINGS_METADATA, - RWS_ROUTE_LAST_SCAN, - RWS_ROUTE_SCANS -) -from resc_backend.db.connection import Session -from resc_backend.resc_web_service.crud import branch as branch_crud -from resc_backend.resc_web_service.crud import scan as scan_crud -from resc_backend.resc_web_service.dependencies import get_db_connection -from 
resc_backend.resc_web_service.helpers.resc_swagger_models import Model404 -from resc_backend.resc_web_service.schema import branch as branch_schema -from resc_backend.resc_web_service.schema import scan as scan_schema -from resc_backend.resc_web_service.schema.finding_count_model import FindingCountModel -from resc_backend.resc_web_service.schema.pagination_model import PaginationModel - -router = APIRouter(prefix=f"{RWS_ROUTE_BRANCHES}", tags=[BRANCHES_TAG]) - - -@router.get("", - response_model=PaginationModel[branch_schema.BranchRead], - summary="Get branches", - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Retrieve all branches"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def get_all_branches(skip: int = Query(default=0, ge=0), - limit: int = Query(default=DEFAULT_RECORDS_PER_PAGE_LIMIT, ge=1), - db_connection: Session = Depends(get_db_connection)) \ - -> PaginationModel[branch_schema.BranchRead]: - """ - Retrieve all branch objects paginated - - - **db_connection**: Session of the database connection - - **skip**: Integer amount of records to skip to support pagination - - **limit**: Integer amount of records to return, to support pagination - - **return**: [BranchRead] - The output will contain a PaginationModel containing the list of BranchRead type objects, - or an empty list if no branch was found - """ - branches = branch_crud.get_branches(db_connection, skip=skip, limit=limit) - - total_branches = branch_crud.get_branches_count(db_connection) - - return PaginationModel[branch_schema.BranchRead](data=branches, total=total_branches, - limit=limit, skip=skip) - - -@router.post("", - response_model=branch_schema.BranchRead, - summary="Create a branch", - status_code=status.HTTP_201_CREATED, - responses={ - 201: {"description": "Create a new branch"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def create_branch( - branch: 
branch_schema.BranchCreate, db_connection: Session = Depends(get_db_connection)): - """ - Create a branch with all the information - - - **db_connection**: Session of the database connection - - **branch_id**: branch id - - **branch_name**: branch name - - **latest_commit**: branch latest commit hash - - **repository_id**: repository id - """ - return branch_crud.create_branch_if_not_exists(db_connection=db_connection, branch=branch) - - -@router.get("/{branch_id}", - response_model=branch_schema.BranchRead, - summary="Fetch a branch by ID", - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Retrieve branch "}, - 404: {"model": Model404, "description": "Branch not found"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def read_branch(branch_id: int, db_connection: Session = Depends(get_db_connection)): - """ - Read a branch by ID - - - **db_connection**: Session of the database connection - - **branch_id**: ID of the branch for which details need to be fetched - """ - db_branch = branch_crud.get_branch(db_connection, branch_id=branch_id) - if db_branch is None: - raise HTTPException(status_code=404, detail="Branch not found") - return db_branch - - -@router.put("/{branch_id}", - response_model=branch_schema.BranchRead, - summary="Update an existing branch", - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Update branch "}, - 404: {"model": Model404, "description": "Branch not found"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def update_branch( - branch_id: int, - branch: branch_schema.BranchCreate, - db_connection: Session = Depends(get_db_connection)): - """ - Update an existing branch - - - **db_connection**: Session of the database connection - - **branch_id**: branch id to update - - **branch_name**: branch name to update - - **latest_commit**: branch latest commit hash to update - - **repository_id**: repository id to 
update - """ - db_branch = branch_crud.get_branch(db_connection, branch_id=branch_id) - if db_branch is None: - raise HTTPException(status_code=404, detail="Branch not found") - return branch_crud.update_branch( - db_connection=db_connection, - branch_id=branch_id, - branch=branch - ) - - -@router.delete("/{branch_id}", - summary="Delete a branch", - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Delete branch "}, - 404: {"model": Model404, "description": "Branch not found"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def delete_branch(branch_id: int, db_connection: Session = Depends(get_db_connection)): - """ - Delete a branch object - - - **db_connection**: Session of the database connection - - **branch_id**: ID of the branch to delete - - **return**: The output will contain a success or error message based on the success of the deletion - """ - db_branch = branch_crud.get_branch(db_connection, branch_id=branch_id) - if db_branch is None: - raise HTTPException(status_code=404, detail="Branch not found") - branch_crud.delete_branch(db_connection, branch_id=branch_id, delete_related=True) - return {"ok": True} - - -@router.get("/{branch_id}"f"{RWS_ROUTE_SCANS}", - summary="Get scans for branch", - response_model=PaginationModel[scan_schema.ScanRead], - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Retrieve all the scans related to a branch"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def get_scans_for_branch(branch_id: int, skip: int = Query(default=0, ge=0), - limit: int = Query(default=DEFAULT_RECORDS_PER_PAGE_LIMIT, ge=1), - db_connection: Session = Depends(get_db_connection)) \ - -> PaginationModel[scan_schema.ScanRead]: - """ - Retrieve all scan objects related to a branch paginated - - - **db_connection**: Session of the database connection - - **branch_id**: ID of the parent branch object for which scan objects to 
be retrieved - - **skip**: Integer amount of records to skip to support pagination - - **limit**: Integer amount of records to return, to support pagination - - **return**: [ScanRead] - The output will contain a PaginationModel containing the list of ScanRead type objects, - or an empty list if no scan was found - """ - scans = scan_crud.get_scans(db_connection, skip=skip, limit=limit, branch_id=branch_id) - - total_scans = scan_crud.get_scans_count(db_connection, branch_id=branch_id) - - return PaginationModel[scan_schema.ScanRead](data=scans, total=total_scans, limit=limit, skip=skip) - - -@router.get("/{branch_id}"f"{RWS_ROUTE_LAST_SCAN}", - response_model=scan_schema.ScanRead, - summary="Get latest scan for branch", - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Retrieve the latest scan related to a branch"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def get_last_scan_for_branch(branch_id: int, db_connection: Session = Depends(get_db_connection)) \ - -> scan_schema.ScanRead: - """ - Retrieve the latest scan object related to a branch - - - **db_connection**: Session of the database connection - - **branch_id**: ID of the parent branch object for which scan objects to be retrieved - - **return**: ScanRead - The output will contain a ScanRead type object, - or empty if no scan was found - """ - last_scan = scan_crud.get_latest_scan_for_branch(db_connection, branch_id=branch_id) - - return last_scan - - -@router.get("/{branch_id}"f"{RWS_ROUTE_FINDINGS_METADATA}", - response_model=FindingCountModel[branch_schema.BranchRead], - summary="Get findings metadata for branch", - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Retrieve findings metadata for branch "}, - 404: {"model": Model404, "description": "Branch not found"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def get_findings_metadata_for_branch(branch_id: int, - 
db_connection: Session = Depends(get_db_connection)) \ - -> FindingCountModel[branch_schema.BranchRead]: - """ - Retrieve findings metadata for a branch - - - **db_connection**: Session of the database connection - - **branch_id**: ID of the branch object for which findings metadata to be retrieved - - **return**: BranchedRead, findings count per status - The output will contain a BranchedRead type object along with findings count per status, - or empty if no scan was found - """ - branch = branch_crud.get_branch(db_connection, branch_id=branch_id) - if branch is None: - raise HTTPException(status_code=404, detail="Branch not found") - - findings_meta_data = branch_crud.get_findings_metadata_by_branch_id(db_connection, - branch_id=branch_id) - - return FindingCountModel[branch_schema.BranchRead]( - data=branch, - true_positive=findings_meta_data["true_positive"], - false_positive=findings_meta_data["false_positive"], - not_analyzed=findings_meta_data["not_analyzed"], - under_review=findings_meta_data["under_review"], - clarification_required=findings_meta_data["clarification_required"], - total_findings_count=findings_meta_data["total_findings_count"]) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/detailed_findings.py b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/detailed_findings.py index 1771514d..f8345a12 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/detailed_findings.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/detailed_findings.py @@ -67,8 +67,6 @@ def get_all_detailed_findings(skip: int = Query(default=0, ge=0), - repository_names of type [String] - - branch_name of type String - - scan_ids of type list Integer - start_date_time of type datetime with the following format: 1970-01-31T00:00:00 diff --git a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/findings.py 
b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/findings.py index 008119a6..259381a3 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/findings.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/findings.py @@ -92,7 +92,7 @@ def create_findings(findings: List[finding_schema.FindingCreate], db_connection: - **email**: Email of the author - **event_sent_on**: event sent timestamp - **rule_name**: rule name - - **branch_id**: branch id of the finding + - **repository_id**: repository id of the finding - **return**: int The output will contain the number of successful created findings """ diff --git a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/repositories.py b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/repositories.py index 19ee068d..54bd211e 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/repositories.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/repositories.py @@ -1,5 +1,4 @@ # Standard Library -import datetime from typing import List, Optional # Third Party @@ -11,27 +10,23 @@ ERROR_MESSAGE_500, ERROR_MESSAGE_503, REPOSITORIES_TAG, - RWS_ROUTE_BRANCHES, RWS_ROUTE_DISTINCT_PROJECTS, RWS_ROUTE_DISTINCT_REPOSITORIES, RWS_ROUTE_FINDINGS_METADATA, - RWS_ROUTE_REPOSITORIES + RWS_ROUTE_LAST_SCAN, + RWS_ROUTE_REPOSITORIES, + RWS_ROUTE_SCANS ) from resc_backend.db.connection import Session -from resc_backend.db.model import DBbranch -from resc_backend.resc_web_service.crud import branch as branch_crud -from resc_backend.resc_web_service.crud import finding as finding_crud from resc_backend.resc_web_service.crud import repository as repository_crud from resc_backend.resc_web_service.crud import scan as scan_crud from resc_backend.resc_web_service.dependencies import get_db_connection -from resc_backend.resc_web_service.filters import FindingsFilter from 
resc_backend.resc_web_service.helpers.resc_swagger_models import Model404 -from resc_backend.resc_web_service.schema import branch as branch_schema from resc_backend.resc_web_service.schema import repository as repository_schema from resc_backend.resc_web_service.schema import repository_enriched as repository_enriched_schema +from resc_backend.resc_web_service.schema import scan as scan_schema from resc_backend.resc_web_service.schema.finding_count_model import FindingCountModel from resc_backend.resc_web_service.schema.pagination_model import PaginationModel -from resc_backend.resc_web_service.schema.scan_type import ScanType from resc_backend.resc_web_service.schema.vcs_provider import VCSProviders router = APIRouter(prefix=f"{RWS_ROUTE_REPOSITORIES}", tags=[REPOSITORIES_TAG]) @@ -185,80 +180,6 @@ def delete_repository(repository_id: int, db_connection: Session = Depends(get_d return {"ok": True} -@router.get("/{repository_id}"f"{RWS_ROUTE_BRANCHES}", - response_model=PaginationModel[branch_schema.ViewableBranch], - summary="Get branches for a repository", - status_code=status.HTTP_200_OK, - responses={ - 200: {"description": "Retrieve all the branches of a repository, enriched with the recent scan " - "information"}, - 500: {"description": ERROR_MESSAGE_500}, - 503: {"description": ERROR_MESSAGE_503} - }) -def get_branches_for_repository(repository_id: int, skip: int = Query(default=0, ge=0), - limit: int = Query(default=DEFAULT_RECORDS_PER_PAGE_LIMIT, ge=1), - db_connection: Session = Depends(get_db_connection)) \ - -> PaginationModel[branch_schema.ViewableBranch]: - """ - Retrieve all branches enriched with most recent scan information for a repository - - - **db_connection**: Session of the database connection - - **repository_id**: ID of the parent repository object for which branch objects need to be retrieved - - **skip**: Integer amount of records to skip to support pagination - - **limit**: Integer amount of records to return, to support pagination - - 
**return**: [ViewableBranch] - The output will contain a PaginationModel containing the list of ViewableBranch type objects, - or an empty list if no branch was found for the given repository_id - """ - branches = branch_crud.get_branches_for_repository(db_connection, skip=skip, limit=limit, - repository_id=repository_id) - for branch in branches: - branch = enrich_branch_with_latest_scan_data(db_connection, branch) - - total_branches = branch_crud.get_branches_count_for_repository(db_connection, - repository_id=repository_id) - return PaginationModel[branch_schema.ViewableBranch](data=branches, total=total_branches, - limit=limit, skip=skip) - - -def enrich_branch_with_latest_scan_data(db_connection: Session, branch: DBbranch): - """ - Enriches a branch object retrieved from the database with most recent scan information - :param db_connection: - Session of the database connection - :param branch: - DBbranch object to enrich with scan information - :return: branch - DBbranch object enriched with latest scan information as type ViewableBranch - """ - - branch.last_scan_datetime = datetime.datetime.min - branch.last_scan_id = None - branch.last_scan_finding_count = 0 - branch.scan_finding_count = 0 - - latest_scan = scan_crud.get_latest_scan_for_branch(db_connection, branch_id=branch.id_) - if latest_scan is not None: - branch.last_scan_datetime = latest_scan.timestamp - branch.last_scan_id = latest_scan.id_ - branch.last_scan_finding_count = finding_crud.get_total_findings_count( - db_connection, FindingsFilter(scan_ids=[latest_scan.id_])) - - scan_ids_latest_to_base = [] - scans = scan_crud.get_scans(db_connection=db_connection, branch_id=branch.id_, limit=1000000) - scans.sort(key=lambda x: x.timestamp, reverse=True) - for scan in scans: - if scan.timestamp <= latest_scan.timestamp: - scan_ids_latest_to_base.append(scan.id_) - if scan.scan_type == ScanType.BASE: - break - - branch.scan_finding_count = finding_crud.get_total_findings_count( - db_connection, 
FindingsFilter(scan_ids=scan_ids_latest_to_base)) - - return branch - - @router.get(f"{RWS_ROUTE_DISTINCT_PROJECTS}/", response_model=List[str], summary="Get all unique project names", @@ -420,6 +341,8 @@ def get_all_repositories_with_findings_metadata( repository_name=repo.repository_name, repository_url=repo.repository_url, vcs_provider=repo.provider_type, + last_scan_id=repo.last_scan_id, + last_scan_timestamp=repo.last_scan_timestamp, true_positive=repo_findings_meta_data[repo.id_]["true_positive"], false_positive=repo_findings_meta_data[repo.id_]["false_positive"], not_analyzed=repo_findings_meta_data[repo.id_]["not_analyzed"], @@ -432,3 +355,59 @@ def get_all_repositories_with_findings_metadata( return PaginationModel[repository_enriched_schema.RepositoryEnrichedRead](data=repository_list, total=total_repositories, limit=limit, skip=skip) + + +@router.get("/{repository_id}"f"{RWS_ROUTE_LAST_SCAN}", + response_model=scan_schema.ScanRead, + summary="Get latest scan for repository", + status_code=status.HTTP_200_OK, + responses={ + 200: {"description": "Retrieve the latest scan related to a repository"}, + 500: {"description": ERROR_MESSAGE_500}, + 503: {"description": ERROR_MESSAGE_503} + }) +def get_last_scan_for_repository(repository_id: int, db_connection: Session = Depends(get_db_connection)) \ + -> scan_schema.ScanRead: + """ + Retrieve the latest scan object related to a repository + + - **db_connection**: Session of the database connection + - **repository_id**: ID of the parent repository object for which scan objects to be retrieved + - **return**: ScanRead + The output will contain a ScanRead type object, + or empty if no scan was found + """ + last_scan = scan_crud.get_latest_scan_for_repository(db_connection, repository_id=repository_id) + + return last_scan + + +@router.get("/{repository_id}"f"{RWS_ROUTE_SCANS}", + summary="Get scans for repository", + response_model=PaginationModel[scan_schema.ScanRead], + status_code=status.HTTP_200_OK, + 
responses={ + 200: {"description": "Retrieve all the scans related to a repository"}, + 500: {"description": ERROR_MESSAGE_500}, + 503: {"description": ERROR_MESSAGE_503} + }) +def get_scans_for_repository(repository_id: int, skip: int = Query(default=0, ge=0), + limit: int = Query(default=DEFAULT_RECORDS_PER_PAGE_LIMIT, ge=1), + db_connection: Session = Depends(get_db_connection)) \ + -> PaginationModel[scan_schema.ScanRead]: + """ + Retrieve all scan objects related to a repository paginated + + - **db_connection**: Session of the database connection + - **repository_id**: ID of the parent repository object for which scan objects to be retrieved + - **skip**: Integer amount of records to skip to support pagination + - **limit**: Integer amount of records to return, to support pagination + - **return**: [ScanRead] + The output will contain a PaginationModel containing the list of ScanRead type objects, + or an empty list if no scan was found + """ + scans = scan_crud.get_scans(db_connection, skip=skip, limit=limit, repository_id=repository_id) + + total_scans = scan_crud.get_scans_count(db_connection, repository_id=repository_id) + + return PaginationModel[scan_schema.ScanRead](data=scans, total=total_scans, limit=limit, skip=skip) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/scans.py b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/scans.py index ab4d1911..fe0159af 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/endpoints/scans.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/endpoints/scans.py @@ -82,11 +82,11 @@ def create_scan(scan: scan_schema.ScanCreate, db_connection: Session = Depends(g - **timestamp**: creation timestamp - **increment_number**: scan increment number - **rule_pack**: rule pack version - - **branch_id**: branch id + - **repository_id**: repository id """ # Determine the increment number if needed and not supplied if scan.scan_type == 
ScanType.INCREMENTAL and (not scan.increment_number or scan.increment_number <= 0): - last_scan = scan_crud.get_latest_scan_for_branch(db_connection, branch_id=scan.branch_id) + last_scan = scan_crud.get_latest_scan_for_repository(db_connection, repository_id=scan.repository_id) new_increment = last_scan.increment_number + 1 scan.increment_number = new_increment @@ -145,7 +145,7 @@ def update_scan( - **timestamp**: scan timestamp - **increment_number**: scan increment number - **rule_pack**: rule pack version - - **branch_id**: branch id + - **repository_id**: repository id """ db_scan = scan_crud.get_scan(db_connection, scan_id=scan_id) @@ -174,7 +174,7 @@ def delete_scan(scan_id: int, db_connection: Session = Depends(get_db_connection db_scan = scan_crud.get_scan(db_connection, scan_id=scan_id) if db_scan is None: raise HTTPException(status_code=404, detail="Scan not found") - scan_crud.delete_scan(db_connection, branch_id=db_scan.branch_id, scan_id=scan_id, delete_related=True) + scan_crud.delete_scan(db_connection, repository_id=db_scan.repository_id, scan_id=scan_id, delete_related=True) return {"ok": True} @@ -210,7 +210,7 @@ def create_scan_findings(scan_id: int, - **comment**: Comment - **event_sent_on**: event sent timestamp - **rule_name**: rule name - - **branch_id**: branch id of the finding + - **repository_id**: repository id of the finding - **return**: [FindingRead] The output will contain a PaginationModel containing the list of FindingRead type objects, or an empty list if no scan was found diff --git a/components/resc-backend/src/resc_backend/resc_web_service/filters.py b/components/resc-backend/src/resc_backend/resc_web_service/filters.py index c92fa684..85c3f55c 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/filters.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/filters.py @@ -20,7 +20,6 @@ class FindingsFilter: rule_tags: List[str] = None project_name: str = None repository_name: str = None - 
branch_name: str = None scan_ids: List[int] = None start_date_time: datetime = None end_date_time: datetime = None diff --git a/components/resc-backend/src/resc_backend/resc_web_service/schema/branch.py b/components/resc-backend/src/resc_backend/resc_web_service/schema/branch.py deleted file mode 100644 index 2ccdf3c5..00000000 --- a/components/resc-backend/src/resc_backend/resc_web_service/schema/branch.py +++ /dev/null @@ -1,42 +0,0 @@ -# pylint: disable=no-name-in-module -# Standard Library -import datetime -from typing import Optional - -# Third Party -from pydantic import BaseModel, conint, constr - - -class BranchBase(BaseModel): - branch_id: constr(min_length=1, max_length=200) - branch_name: constr(min_length=1, max_length=200) - latest_commit: constr(min_length=1, max_length=100) - - -class BranchCreate(BranchBase): - repository_id: conint(gt=0) - - @classmethod - def create_from_base_class(cls, base_object: BranchBase, repository_id: int): - return cls(**(dict(base_object)), repository_id=repository_id) - - -class Branch(BranchBase): - pass - - -class BranchRead(BranchCreate): - id_: conint(gt=0) - - class Config: - orm_mode = True - - -class ViewableBranch(BranchRead): - last_scan_datetime: Optional[datetime.datetime] - last_scan_id: Optional[conint(gt=0)] - last_scan_finding_count: Optional[conint(gt=-1)] - scan_finding_count: Optional[conint(gt=-1)] - - class Config: - orm_mode = True diff --git a/components/resc-backend/src/resc_backend/resc_web_service/schema/detailed_finding.py b/components/resc-backend/src/resc_backend/resc_web_service/schema/detailed_finding.py index 587f6e51..b2983c38 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/schema/detailed_finding.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/schema/detailed_finding.py @@ -32,7 +32,6 @@ class DetailedFindingBase(BaseModel): repository_url: HttpUrl timestamp: datetime.datetime vcs_provider: VCSProviders - branch_name: constr(min_length=1, 
max_length=200) last_scanned_commit: constr(min_length=1, max_length=100) scan_id: conint(gt=0) event_sent_on: Optional[datetime.datetime] @@ -65,20 +64,16 @@ def build_bitbucket_commit_url(repository_url: str, @staticmethod def build_ado_commit_url(repository_url: str, - branch_name: str, file_path: str, commit_id: str) -> str: - ado_commit_url = f"{repository_url}/commit/{commit_id}?" \ - f"refName=refs/heads/{branch_name}&path=/{file_path}" + ado_commit_url = f"{repository_url}/commit/{commit_id}?path=/{file_path}" return ado_commit_url @staticmethod def build_github_commit_url(repository_url: str, - branch_name: str, file_path: str, commit_id: str) -> str: - github_commit_url = f"{repository_url}/commit/{commit_id}?" \ - f"refName=refs/heads/{branch_name}&path=/{file_path}" + github_commit_url = f"{repository_url}/commit/{commit_id}?path=/{file_path}" return github_commit_url @root_validator @@ -95,13 +90,11 @@ def build_commit_url(cls, values) -> Dict: commit_id=values["commit_id"]) elif values["vcs_provider"] == VCSProviders.AZURE_DEVOPS: values["commit_url"] = cls.build_ado_commit_url(repository_url=values["repository_url"], - branch_name=values["branch_name"], file_path=values["file_path"], commit_id=values["commit_id"]) elif values["vcs_provider"] == VCSProviders.GITHUB_PUBLIC: values["commit_url"] = cls.build_github_commit_url(repository_url=values["repository_url"], - branch_name=values["branch_name"], file_path=values["file_path"], commit_id=values["commit_id"]) else: diff --git a/components/resc-backend/src/resc_backend/resc_web_service/schema/finding.py b/components/resc-backend/src/resc_backend/resc_web_service/schema/finding.py index f21dda28..c6a79060 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/schema/finding.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/schema/finding.py @@ -30,11 +30,11 @@ class FindingPatch(BaseModel): class FindingCreate(FindingBase): - branch_id: conint(gt=0) + repository_id: 
conint(gt=0) @classmethod - def create_from_base_class(cls, base_object: FindingBase, branch_id: int): - return cls(**(dict(base_object)), branch_id=branch_id) + def create_from_base_class(cls, base_object: FindingBase, repository_id: int): + return cls(**(dict(base_object)), repository_id=repository_id) class Finding(FindingBase): @@ -63,6 +63,6 @@ def create_from_db_entities(cls, db_finding: DBfinding, scan_ids: List[int]): email=db_finding.email, event_sent_on=db_finding.event_sent_on, rule_name=db_finding.rule_name, - branch_id=db_finding.branch_id, + repository_id=db_finding.repository_id, scan_ids=scan_ids ) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/schema/finding_count_model.py b/components/resc-backend/src/resc_backend/resc_web_service/schema/finding_count_model.py index bafbda60..43f61294 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/schema/finding_count_model.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/schema/finding_count_model.py @@ -12,7 +12,7 @@ class FindingCountModel(GenericModel, Generic[Model]): """ Generic encapsulation class for findings count end points to standardize output of the API - example creation, FindingCountModel[BranchRead](data=db_findings, true_positive=true_positive, + example creation, FindingCountModel[FindingRead](data=db_findings, true_positive=true_positive, false_positive=false_positive, not_analyzed=not_analyzed, under_review=under_review, clarification_required=clarification_required, total_findings_count=total_findings_count) :param Generic[Model]: diff --git a/components/resc-backend/src/resc_backend/resc_web_service/schema/repository.py b/components/resc-backend/src/resc_backend/resc_web_service/schema/repository.py index 9a919f01..9a258ca0 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/schema/repository.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/schema/repository.py @@ -1,10 +1,7 @@ # pylint: 
disable=no-name-in-module + # Third Party from pydantic import BaseModel, HttpUrl, conint, constr -from pydantic.types import List - -# First Party -from resc_backend.resc_web_service.schema.branch import Branch class RepositoryBase(BaseModel): @@ -16,7 +13,7 @@ class RepositoryBase(BaseModel): class Repository(RepositoryBase): - branches: List[Branch] + pass class RepositoryCreate(RepositoryBase): diff --git a/components/resc-backend/src/resc_backend/resc_web_service/schema/repository_enriched.py b/components/resc-backend/src/resc_backend/resc_web_service/schema/repository_enriched.py index 35286089..62f87196 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/schema/repository_enriched.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/schema/repository_enriched.py @@ -1,4 +1,7 @@ # pylint: disable=no-name-in-module +# Standard Library +import datetime + # Third Party from pydantic import BaseModel, HttpUrl, conint, constr @@ -12,6 +15,8 @@ class RepositoryEnrichedBase(BaseModel): repository_name: constr(min_length=1, max_length=100) repository_url: HttpUrl vcs_provider: VCSProviders + last_scan_id: conint(gt=0) = None + last_scan_timestamp: datetime.datetime = None true_positive: conint(gt=-1) false_positive: conint(gt=-1) not_analyzed: conint(gt=-1) diff --git a/components/resc-backend/src/resc_backend/resc_web_service/schema/scan.py b/components/resc-backend/src/resc_backend/resc_web_service/schema/scan.py index 14d0eede..d23761db 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service/schema/scan.py +++ b/components/resc-backend/src/resc_backend/resc_web_service/schema/scan.py @@ -18,11 +18,11 @@ class ScanBase(BaseModel): class ScanCreate(ScanBase): - branch_id: conint(gt=0) + repository_id: conint(gt=0) @classmethod - def create_from_base_class(cls, base_object: ScanBase, branch_id: int): - return cls(**(dict(base_object)), branch_id=branch_id) + def create_from_base_class(cls, base_object: ScanBase, 
repository_id: int): + return cls(**(dict(base_object)), repository_id=repository_id) class Scan(ScanBase): diff --git a/components/resc-backend/src/resc_backend/resc_web_service_interface/branches.py b/components/resc-backend/src/resc_backend/resc_web_service_interface/branches.py deleted file mode 100644 index 543e44f6..00000000 --- a/components/resc-backend/src/resc_backend/resc_web_service_interface/branches.py +++ /dev/null @@ -1,23 +0,0 @@ -# Standard Library -import logging - -# Third Party -import requests - -# First Party -from resc_backend.constants import RWS_ROUTE_BRANCHES, RWS_ROUTE_LAST_SCAN, RWS_VERSION_PREFIX -from resc_backend.resc_web_service.schema.branch import BranchCreate - -logger = logging.getLogger(__name__) - - -def create_branch(url: str, branch: BranchCreate): - api_url = f"{url}{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}" - response = requests.post(api_url, data=branch.json(), proxies={"http": "", "https": ""}) - return response - - -def get_last_scan_for_branch(url: str, branch_id: int): - api_url = f"{url}{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}{RWS_ROUTE_LAST_SCAN}" - response = requests.get(api_url, proxies={"http": "", "https": ""}) - return response diff --git a/components/resc-backend/src/resc_backend/resc_web_service_interface/repositories.py b/components/resc-backend/src/resc_backend/resc_web_service_interface/repositories.py index 3c3efeed..0bfb37e3 100644 --- a/components/resc-backend/src/resc_backend/resc_web_service_interface/repositories.py +++ b/components/resc-backend/src/resc_backend/resc_web_service_interface/repositories.py @@ -5,7 +5,7 @@ import requests # First Party -from resc_backend.constants import RWS_ROUTE_REPOSITORIES, RWS_VERSION_PREFIX +from resc_backend.constants import RWS_ROUTE_LAST_SCAN, RWS_ROUTE_REPOSITORIES, RWS_VERSION_PREFIX from resc_backend.resc_web_service.schema.repository import Repository logger = logging.getLogger(__name__) @@ -15,3 +15,9 @@ def create_repository(url: str, 
repository: Repository): api_url = f"{url}{RWS_VERSION_PREFIX}{RWS_ROUTE_REPOSITORIES}" response = requests.post(api_url, data=repository.json(), proxies={"http": "", "https": ""}) return response + + +def get_last_scan_for_repository(url: str, repository_id: int): + api_url = f"{url}{RWS_VERSION_PREFIX}{RWS_ROUTE_REPOSITORIES}/{repository_id}{RWS_ROUTE_LAST_SCAN}" + response = requests.get(api_url, proxies={"http": "", "https": ""}) + return response diff --git a/components/resc-backend/test_data/database_dummy_data.sql b/components/resc-backend/test_data/database_dummy_data.sql index 95a7e0da..b75f73f1 100644 --- a/components/resc-backend/test_data/database_dummy_data.sql +++ b/components/resc-backend/test_data/database_dummy_data.sql @@ -8,12 +8,6 @@ INSERT INTO repository ( project_key, repository_id, repository_name, repository ( 'ado-proj2', 'r1', 'repo1', 'https://fake-ado.com/p2/r1', 1), ( 'btbk-proj1', 'r1', 'repo1', 'https://fake-bitbucket.com/p1/r1', 2); -INSERT INTO branch ( repository_id, branch_id, branch_name, latest_commit) VALUES - ( 1, 'b1', 'branch1', 'qwerty123'), - ( 1, 'b2', 'branch2', 'qwerty123'), - ( 3, 'b1', 'branch1', 'qwerty123'), - ( 4, 'b1', 'branch1', 'qwerty123'); - INSERT INTO rule_allow_list (description) VALUES ('rule allow list number 1'); @@ -25,13 +19,13 @@ INSERT INTO rules (rule_pack, rule_name, description, regex) VALUES ('0.0.0', 'rule#1', 'rule number 1', '*.*'), ('0.0.0', 'rule#2', 'rule number 2', '*.*'); -INSERT INTO scan (branch_id, [timestamp], scan_type, last_scanned_commit, increment_number, rule_pack) VALUES +INSERT INTO scan (repository_id, [timestamp], scan_type, last_scanned_commit, increment_number, rule_pack) VALUES (1, '2021-01-01 00:00:00.000', 'BASE', 'qwerty1', 0, '0.0.0'), (2, '2020-01-01 00:00:00.000', 'BASE', 'qwerty1', 0, '0.0.0'), (3, '2022-02-24 17:00:00.000', 'BASE', 'qwerty1', 0, '0.0.0'), (3, '2022-03-24 17:00:00.000', 'BASE', 'qwerty1', 0, '0.0.0'); -INSERT INTO finding (branch_id, file_path, 
line_number, commit_id, commit_message, commit_timestamp, author, email, rule_name, column_start, column_end) VALUES +INSERT INTO finding (repository_id, file_path, line_number, commit_id, commit_message, commit_timestamp, author, email, rule_name, column_start, column_end) VALUES (1, '/path/to/file', 123, 'qwerty1', 'this is commit 1', '2021-01-01 00:00:00.000', 'developer', 'developer@abn.com', 'rule#1', 1, 100), (1, '/path/to/file', 123, 'qwerty1', 'this is commit 1', '2021-01-01 00:00:00.000', 'developer', 'developer@abn.com', 'rule#2', 0, 0), (2, '/path/to/file', 123, 'qwerty1', 'this is commit 1', '2021-01-01 00:00:00.000', 'developer', 'developer@abn.com', 'rule#1', 1, 50), diff --git a/components/resc-backend/tests/newman_tests/RESC_web_service.postman_collection.json b/components/resc-backend/tests/newman_tests/RESC_web_service.postman_collection.json index 80d9e1cf..5ef9f7b1 100644 --- a/components/resc-backend/tests/newman_tests/RESC_web_service.postman_collection.json +++ b/components/resc-backend/tests/newman_tests/RESC_web_service.postman_collection.json @@ -1,6 +1,6 @@ { "info": { - "_postman_id": "6db9b02e-2513-4d4e-a31d-c90f3a766832", + "_postman_id": "9337674c-4f7e-47f2-b9fd-2ee2166b6420", "name": "Repository Scanner (RESC)", "description": "RESC API helps you to perform several operations upon findings obtained from multiple source code repositories. 
🚀", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" @@ -5152,202 +5152,6 @@ } ] }, - { - "name": "Get branches for a repository", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"]).to.be.an(\"array\").that.is.empty;", - " pm.expect(responseJson[\"total\"]).to.eql(0);", - " pm.expect(responseJson[\"limit\"]).to.eql(100);", - " pm.expect(responseJson[\"skip\"]).to.eql(0);", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/branches?skip=0&limit=100", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "repositories", - ":repository_id", - "branches" - ], - "query": [ - { - "key": "skip", - "value": "0" - }, - { - "key": "limit", - "value": "100" - } - ], - "variable": [ - { - "key": "repository_id", - "value": "{{repository_id}}" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - 
"header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Get branches for a repository with non existent repository id", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"]).to.be.an(\"array\").that.is.empty;", - " pm.expect(responseJson[\"total\"]).to.eql(0);", - " pm.expect(responseJson[\"limit\"]).to.eql(100);", - " pm.expect(responseJson[\"skip\"]).to.eql(0);", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/branches?skip=0&limit=100", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "repositories", - ":repository_id", - "branches" - ], - "query": [ - { - "key": "skip", - "value": "0" - }, - { - "key": "limit", - "value": "100" - } - ], - "variable": [ - { - "key": "repository_id", - "value": "999" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - 
"supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, { "name": "Get all unique project names", "event": [ @@ -6566,1570 +6370,21 @@ "name": "Successful Response", "originalRequest": { "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Get all unique repository names with vcs provider and project name filter", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"array\");", - " pm.expect(responseJson.length).to.eql(1);", - " pm.expect(responseJson).to.eql([\"test-repository\"] );", - " pm.expect(responseJson).to.include(\"test-repository\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/repositories/distinct-repositories/?vcsprovider=AZURE_DEVOPS&projectname=test-project&onlyifhasfindings=false", - 
"host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "repositories", - "distinct-repositories", - "" - ], - "query": [ - { - "key": "vcsprovider", - "value": "AZURE_DEVOPS" - }, - { - "key": "projectname", - "value": "test-project" - }, - { - "key": "onlyifhasfindings", - "value": "false" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Get findings metadata for a repository", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"][\"id_\"]).to.eql(pm.environment.get(\"repository_id\"));", - " pm.expect(responseJson[\"data\"][\"project_key\"]).to.eql(\"test-project\");", - " pm.expect(responseJson[\"data\"][\"repository_id\"]).to.eql(\"test-repository-id1\");", - " pm.expect(responseJson[\"data\"][\"repository_name\"]).to.eql(\"test-repository\");", - " 
pm.expect(responseJson[\"data\"][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", - " pm.expect(responseJson[\"data\"][\"vcs_instance\"]).to.eql(pm.environment.get(\"vcs_instance_id\"));", - " pm.expect(responseJson[\"true_positive\"]).to.eql(0);", - " pm.expect(responseJson[\"false_positive\"]).to.eql(0);", - " pm.expect(responseJson[\"not_analyzed\"]).to.eql(0);", - " pm.expect(responseJson[\"under_review\"]).to.eql(0);", - " pm.expect(responseJson[\"clarification_required\"]).to.eql(0);", - " pm.expect(responseJson[\"total_findings_count\"]).to.eql(0);", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/findings-metadata", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "repositories", - ":repository_id", - "findings-metadata" - ], - "variable": [ - { - "key": "repository_id", - "value": "{{repository_id}}" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Get all repositories with findings metadata", - "event": [ - { - "listen": "test", - "script": { - "exec": 
[ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(pm.environment.get(\"repository_id\"));", - " pm.expect(responseJson[\"data\"][0][\"project_key\"]).to.eql(\"test-project\");", - " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(\"test-repository-id1\");", - " pm.expect(responseJson[\"data\"][0][\"repository_name\"]).to.eql(\"test-repository\");", - " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", - " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"true_positive\"]).to.eql(0);", - " pm.expect(responseJson[\"data\"][0][\"false_positive\"]).to.eql(0);", - " pm.expect(responseJson[\"data\"][0][\"not_analyzed\"]).to.eql(0);", - " pm.expect(responseJson[\"data\"][0][\"under_review\"]).to.eql(0);", - " pm.expect(responseJson[\"data\"][0][\"clarification_required\"]).to.eql(0);", - " pm.expect(responseJson[\"data\"][0][\"total_findings_count\"]).to.eql(0);", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/repositories/findings-metadata/?skip=0&limit=1000&onlyifhasfindings=false", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "repositories", - "findings-metadata", - "" - ], - "query": [ - { - "key": "skip", - "value": "0" - }, - { - "key": "limit", - "value": "1000" - }, 
- { - "key": "onlyifhasfindings", - "value": "false" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Delete a repository which doesn't exist", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 404\", function () {", - " pm.response.to.have.status(404);", - "});", - "", - "pm.test(\"Status is Not Found\"), function (){", - " pm.response.to.have.property(\"status\",\"Not Found\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"]).to.eql(\"Repository not found\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "DELETE", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "repositories", - ":repository_id" - ], - "variable": [ - { - "key": "repository_id", - "value": "999" - } - ] - }, - "description": " Retrieve all uniquely detected rules across all findings in the database\n:param db_connection:\n Session of the database 
connection\n:return: [str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/detected-rules", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "detected-rules" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"occaecat ut ea et Ut\",\n \"ut nostrud\"\n]" - } - ] - }, - { - "name": "Delete a repository with invalid id", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 422\", function () {", - " pm.response.to.have.status(422);", - "});", - "", - "pm.test(\"Status is Unprocessable Entity\"), function (){", - " pm.response.to.have.property(\"status\",\"Unprocessable Entity\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"][0][\"type\"]).to.eql(\"type_error.integer\");", - " pm.expect(responseJson[\"detail\"][0][\"msg\"]).to.eql(\"value is not a valid integer\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "DELETE", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "repositories", - ":repository_id" - ], - "variable": [ - { - "key": "repository_id", - "value": "one" - } - ] - }, - "description": " Retrieve all uniquely detected rules across all findings in the database\n:param db_connection:\n Session of the database connection\n:return: [str]\n The 
output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/detected-rules", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "detected-rules" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"occaecat ut ea et Ut\",\n \"ut nostrud\"\n]" - } - ] - } - ] - }, - { - "name": "branches", - "item": [ - { - "name": "Create a branch", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 201\", function () {", - " pm.response.to.have.status(201);", - "});", - "", - "pm.test(\"Status is Created\"), function (){", - " pm.response.to.have.property(\"status\",\" Created\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"id_\"]).to.be.greaterThan(0);", - " pm.environment.set(\"branch_id\", responseJson[\"id_\"]);", - " pm.expect(responseJson[\"branch_id\"]).to.eql(\"test-branch-id1\");", - " pm.expect(responseJson[\"branch_name\"]).to.eql(\"test-branch1\");", - " pm.expect(responseJson[\"latest_commit\"]).to.eql(\"testcommit12345\");", - " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "POST", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"branch_id\": \"test-branch-id1\",\n \"branch_name\": \"test-branch1\",\n 
\"latest_commit\": \"testcommit12345\",\n \"repository_id\": {{repository_id}}\n}" - }, - "url": { - "raw": "{{baseUrl}}/resc/v1/branches", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches" - ] - } - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Created", - "code": 201, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"project_key\": \"sed magna pa\",\n \"repository_id\": \"ut incididunt cupidatat Lorem ad\",\n \"repository_name\": \"aliqua\",\n \"repository_url\": \"pariatur consequat qui fugiat\",\n \"vcs_provider\": \"Exc\",\n \"id_\": 78011707\n}" - }, - { - "name": "Validation Error", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Unprocessable Entity (WebDAV) (RFC 4918)", - "code": 422, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"id 
deserunt\",\n \"elit voluptate qui cupidatat magna\"\n ],\n \"msg\": \"ad adipisicing\",\n \"type\": \"mollit\"\n },\n {\n \"loc\": [\n \"adipisicing et ut Ut\",\n \"cillum culpa\"\n ],\n \"msg\": \"amet occaecat deserunt ex\",\n \"type\": \"ad cupidatat in\"\n }\n ]\n}" - } - ] - }, - { - "name": "Create a branch with invalid body", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 422\", function () {", - " pm.response.to.have.status(422);", - "});", - "", - "pm.test(\"Status is Unprocessable Entity\"), function (){", - " pm.response.to.have.property(\"status\",\"Unprocessable Entity\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"][0][\"type\"]).to.eql(\"type_error.integer\");", - " pm.expect(responseJson[\"detail\"][0][\"msg\"]).to.eql(\"value is not a valid integer\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "POST", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"branch_id\": \"test-branch-id1\",\n \"branch_name\": \"test-branch1\",\n \"latest_commit\": \"testcommit12345\",\n \"repository_id\": \"invalid\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/resc/v1/branches", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches" - ] - } - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia 
cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Created", - "code": 201, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"project_key\": \"sed magna pa\",\n \"repository_id\": \"ut incididunt cupidatat Lorem ad\",\n \"repository_name\": \"aliqua\",\n \"repository_url\": \"pariatur consequat qui fugiat\",\n \"vcs_provider\": \"Exc\",\n \"id_\": 78011707\n}" - }, - { - "name": "Validation Error", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Unprocessable Entity (WebDAV) (RFC 4918)", - "code": 422, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"id deserunt\",\n \"elit voluptate qui cupidatat magna\"\n ],\n \"msg\": \"ad adipisicing\",\n \"type\": \"mollit\"\n },\n {\n \"loc\": [\n \"adipisicing et ut Ut\",\n \"cillum culpa\"\n ],\n \"msg\": \"amet occaecat deserunt ex\",\n \"type\": \"ad cupidatat in\"\n }\n ]\n}" - } - ] - }, - { - "name": "Fetch a branch by ID", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - 
"}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"branch_id\"]).to.eql(\"test-branch-id1\");", - " pm.expect(responseJson[\"latest_commit\"]).to.eql(\"testcommit12345\");", - " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", - " pm.expect(responseJson[\"id_\"]).to.eql(pm.environment.get(\"branch_id\"));", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches", - ":branch_id" - ], - "variable": [ - { - "key": "branch_id", - "value": "{{branch_id}}" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Fetch a branch by non existent ID", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 404\", function () {", - " pm.response.to.have.status(404);", - "});", - "", - "pm.test(\"Status is Not Found\"), function (){", - " 
pm.response.to.have.property(\"status\",\"Not Found\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"]).to.eql(\"Branch not found\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches", - ":branch_id" - ], - "variable": [ - { - "key": "branch_id", - "value": "999" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Fetch a branch by invalid ID", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 422\", function () {", - " pm.response.to.have.status(422);", - "});", - "", - "pm.test(\"Status is Unprocessable Entity\"), function (){", - " pm.response.to.have.property(\"status\",\"Unprocessable Entity\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " 
pm.expect(responseJson[\"detail\"][0][\"type\"]).to.eql(\"type_error.integer\");", - " pm.expect(responseJson[\"detail\"][0][\"msg\"]).to.eql(\"value is not a valid integer\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches", - ":branch_id" - ], - "variable": [ - { - "key": "branch_id", - "value": "invalid" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Get all branches", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"][0][\"branch_id\"]).to.eql(\"test-branch-id1\");", - " 
pm.expect(responseJson[\"data\"][0][\"latest_commit\"]).to.eql(\"testcommit12345\");", - " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", - " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(pm.environment.get(\"branch_id\"));", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/branches?skip=0&limit=100", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches" - ], - "query": [ - { - "key": "skip", - "value": "0" - }, - { - "key": "limit", - "value": "100" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Get all branches with invalid parameter", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 422\", function () {", - " pm.response.to.have.status(422);", - "});", - "", - "pm.test(\"Status is Unprocessable Entity\"), function (){", - " pm.response.to.have.property(\"status\",\"Unprocessable Entity\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " 
pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"][0][\"type\"]).to.eql(\"type_error.integer\");", - " pm.expect(responseJson[\"detail\"][0][\"msg\"]).to.eql(\"value is not a valid integer\");", - "", - " pm.expect(responseJson[\"detail\"][1][\"type\"]).to.eql(\"type_error.integer\");", - " pm.expect(responseJson[\"detail\"][1][\"msg\"]).to.eql(\"value is not a valid integer\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/branches?skip=invalid&limit=invalid", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches" - ], - "query": [ - { - "key": "skip", - "value": "invalid" - }, - { - "key": "limit", - "value": "invalid" - } - ] - }, - "description": " Retrieve all supported vcs providers\n:return: List[str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "supported-vcs-providers" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" - } - ] - }, - { - "name": "Update a branch", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = 
pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"id_\"]).to.be.greaterThan(0);", - " pm.environment.set(\"branch_id\", responseJson[\"id_\"]);", - " pm.expect(responseJson[\"branch_id\"]).to.eql(\"test-branch-id1\");", - " pm.expect(responseJson[\"branch_name\"]).to.eql(\"test-branch1\");", - " pm.expect(responseJson[\"latest_commit\"]).to.eql(\"testcommit12345\");", - " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "PUT", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"branch_id\": \"test-branch-id1\",\n \"branch_name\": \"test-branch1\",\n \"latest_commit\": \"testcommit12345\",\n \"repository_id\": {{repository_id}}\n}" - }, - "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches", - ":branch_id" - ], - "variable": [ - { - "key": "branch_id", - "value": "{{branch_id}}" - } - ] - } - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Created", - "code": 201, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - 
"value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"project_key\": \"sed magna pa\",\n \"repository_id\": \"ut incididunt cupidatat Lorem ad\",\n \"repository_name\": \"aliqua\",\n \"repository_url\": \"pariatur consequat qui fugiat\",\n \"vcs_provider\": \"Exc\",\n \"id_\": 78011707\n}" - }, - { - "name": "Validation Error", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Unprocessable Entity (WebDAV) (RFC 4918)", - "code": 422, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"id deserunt\",\n \"elit voluptate qui cupidatat magna\"\n ],\n \"msg\": \"ad adipisicing\",\n \"type\": \"mollit\"\n },\n {\n \"loc\": [\n \"adipisicing et ut Ut\",\n \"cillum culpa\"\n ],\n \"msg\": \"amet occaecat deserunt ex\",\n \"type\": \"ad cupidatat in\"\n }\n ]\n}" - } - ] - }, - { - "name": "Update a branch with non existent ID", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 404\", function () {", - " pm.response.to.have.status(404);", - "});", - "", - "pm.test(\"Status is Not Found\"), function (){", - " pm.response.to.have.property(\"status\",\" Not Found\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"]).to.eql(\"Branch not found\");", - "});", - "", - "pm.test(\"Response time is 
less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "PUT", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"branch_id\": \"test-branch-id1\",\n \"branch_name\": \"test-branch1\",\n \"latest_commit\": \"testcommit12345\",\n \"repository_id\": {{repository_id}}\n}" - }, - "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches", - ":branch_id" - ], - "variable": [ - { - "key": "branch_id", - "value": "999" - } - ] - } - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Created", - "code": 201, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"project_key\": \"sed magna pa\",\n \"repository_id\": \"ut incididunt cupidatat Lorem ad\",\n \"repository_name\": \"aliqua\",\n \"repository_url\": \"pariatur consequat qui fugiat\",\n \"vcs_provider\": \"Exc\",\n \"id_\": 78011707\n}" - }, - { - "name": "Validation Error", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n 
\"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Unprocessable Entity (WebDAV) (RFC 4918)", - "code": 422, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"id deserunt\",\n \"elit voluptate qui cupidatat magna\"\n ],\n \"msg\": \"ad adipisicing\",\n \"type\": \"mollit\"\n },\n {\n \"loc\": [\n \"adipisicing et ut Ut\",\n \"cillum culpa\"\n ],\n \"msg\": \"amet occaecat deserunt ex\",\n \"type\": \"ad cupidatat in\"\n }\n ]\n}" - } - ] - }, - { - "name": "Update a branch with invalid body", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 422\", function () {", - " pm.response.to.have.status(422);", - "});", - "", - "pm.test(\"Status is Unprocessable Entity\"), function (){", - " pm.response.to.have.property(\"status\",\"Unprocessable Entity\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"][0][\"type\"]).to.eql(\"type_error.integer\");", - " pm.expect(responseJson[\"detail\"][0][\"msg\"]).to.eql(\"value is not a valid integer\");", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "PUT", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"branch_id\": \"test-branch-id1\",\n \"branch_name\": \"test-branch1\",\n \"latest_commit\": \"testcommit12345\",\n \"repository_id\": \"str\"\n}" - }, - "url": { - "raw": 
"{{baseUrl}}/resc/v1/branches/:branch_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches", - ":branch_id" - ], - "variable": [ - { - "key": "branch_id", - "value": "{{branch_id}}" - } - ] - } - }, - "response": [ - { - "name": "Successful Response", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, - "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "repositories-info" - ] - } - }, - "status": "Created", - "code": 201, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "{\n \"project_key\": \"sed magna pa\",\n \"repository_id\": \"ut incididunt cupidatat Lorem ad\",\n \"repository_name\": \"aliqua\",\n \"repository_url\": \"pariatur consequat qui fugiat\",\n \"vcs_provider\": \"Exc\",\n \"id_\": 78011707\n}" - }, - { - "name": "Validation Error", - "originalRequest": { - "method": "POST", - "header": [], - "body": { - "mode": "raw", - "raw": "{\n \"project_key\": \"commodo do labore\",\n \"repository_id\": \"non qui e\",\n \"repository_name\": \"officia fugiat dolore in\",\n \"repository_url\": \"enim Lorem ea\",\n \"vcs_provider\": \"officia cupidatat dolor non\"\n}" - }, + "header": [], "url": { - "raw": "{{baseUrl}}/sts/v1/repositories-info", + "raw": "{{baseUrl}}/sts/v1/supported-vcs-providers", "host": [ "{{baseUrl}}" ], "path": [ "sts", "v1", - "repositories-info" + "supported-vcs-providers" ] } }, - "status": "Unprocessable Entity (WebDAV) (RFC 4918)", - "code": 422, + "status": "OK", + "code": 200, "_postman_previewlanguage": "json", "header": [ { @@ -8138,12 +6393,12 @@ } ], "cookie": 
[], - "body": "{\n \"detail\": [\n {\n \"loc\": [\n \"id deserunt\",\n \"elit voluptate qui cupidatat magna\"\n ],\n \"msg\": \"ad adipisicing\",\n \"type\": \"mollit\"\n },\n {\n \"loc\": [\n \"adipisicing et ut Ut\",\n \"cillum culpa\"\n ],\n \"msg\": \"amet occaecat deserunt ex\",\n \"type\": \"ad cupidatat in\"\n }\n ]\n}" + "body": "[\n \"in ea occaecat\",\n \"Excepteur dolor culpa occaecat eiusmod\"\n]" } ] }, { - "name": "Get scans for a branch", + "name": "Get all unique repository names with vcs provider and project name filter", "event": [ { "listen": "test", @@ -8159,12 +6414,10 @@ "", "const responseJson = pm.response.json();", "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"]).to.be.an(\"array\");", - " pm.expect(responseJson[\"data\"].length).to.eql(0);", - " pm.expect(responseJson[\"total\"]).to.eql(0);", - " pm.expect(responseJson[\"limit\"]).to.eql(100);", - " pm.expect(responseJson[\"skip\"]).to.eql(0);", + " pm.expect(responseJson).to.be.an(\"array\");", + " pm.expect(responseJson.length).to.eql(1);", + " pm.expect(responseJson).to.eql([\"test-repository\"] );", + " pm.expect(responseJson).to.include(\"test-repository\");", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -8179,21 +6432,29 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/scans", + "raw": "{{baseUrl}}/resc/v1/repositories/distinct-repositories/?vcsprovider=AZURE_DEVOPS&projectname=test-project&onlyifhasfindings=false", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", - "scans" + "repositories", + "distinct-repositories", + "" ], - "variable": [ + "query": [ + { + "key": "vcsprovider", + "value": "AZURE_DEVOPS" + }, + { + "key": "projectname", + "value": "test-project" + }, { - "key": "branch_id", - "value": "{{branch_id}}" + "key": "onlyifhasfindings", + "value": "false" } ] }, @@ -8232,7 
+6493,7 @@ ] }, { - "name": "Get scans for a branch with non existent ID", + "name": "Get findings metadata for a repository", "event": [ { "listen": "test", @@ -8243,16 +6504,24 @@ "});", "", "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\"OK\")", + " pm.response.to.have.property(\"status\",\" OK\")", "}", "", "const responseJson = pm.response.json();", "pm.test(\"Response body matches\", function() {", " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"]).to.be.an(\"array\").that.is.empty;;", - " pm.expect(responseJson[\"total\"]).to.eql(0);", - " pm.expect(responseJson[\"limit\"]).to.eql(100);", - " pm.expect(responseJson[\"skip\"]).to.eql(0);", + " pm.expect(responseJson[\"data\"][\"id_\"]).to.eql(pm.environment.get(\"repository_id\"));", + " pm.expect(responseJson[\"data\"][\"project_key\"]).to.eql(\"test-project\");", + " pm.expect(responseJson[\"data\"][\"repository_id\"]).to.eql(\"test-repository-id1\");", + " pm.expect(responseJson[\"data\"][\"repository_name\"]).to.eql(\"test-repository\");", + " pm.expect(responseJson[\"data\"][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", + " pm.expect(responseJson[\"data\"][\"vcs_instance\"]).to.eql(pm.environment.get(\"vcs_instance_id\"));", + " pm.expect(responseJson[\"true_positive\"]).to.eql(0);", + " pm.expect(responseJson[\"false_positive\"]).to.eql(0);", + " pm.expect(responseJson[\"not_analyzed\"]).to.eql(0);", + " pm.expect(responseJson[\"under_review\"]).to.eql(0);", + " pm.expect(responseJson[\"clarification_required\"]).to.eql(0);", + " pm.expect(responseJson[\"total_findings_count\"]).to.eql(0);", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -8267,21 +6536,21 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/scans", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/findings-metadata", "host": [ 
"{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", - "scans" + "repositories", + ":repository_id", + "findings-metadata" ], "variable": [ { - "key": "branch_id", - "value": "999" + "key": "repository_id", + "value": "{{repository_id}}" } ] }, @@ -8320,25 +6589,37 @@ ] }, { - "name": "Get scans for a branch with invalid ID", + "name": "Get all repositories with findings metadata", "event": [ { "listen": "test", "script": { "exec": [ - "pm.test(\"Status code is 422\", function () {", - " pm.response.to.have.status(422);", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", "});", "", - "pm.test(\"Status is Unprocessable Entity\"), function (){", - " pm.response.to.have.property(\"status\",\"Unprocessable Entity\")", + "pm.test(\"Status is OK\"), function (){", + " pm.response.to.have.property(\"status\",\" OK\")", "}", "", "const responseJson = pm.response.json();", "pm.test(\"Response body matches\", function() {", " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"][0][\"type\"]).to.eql(\"type_error.integer\");", - " pm.expect(responseJson[\"detail\"][0][\"msg\"]).to.eql(\"value is not a valid integer\");", + " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(pm.environment.get(\"repository_id\"));", + " pm.expect(responseJson[\"data\"][0][\"project_key\"]).to.eql(\"test-project\");", + " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(\"test-repository-id1\");", + " pm.expect(responseJson[\"data\"][0][\"repository_name\"]).to.eql(\"test-repository\");", + " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", + " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", + " pm.expect(responseJson[\"data\"][0][\"last_scan_id\"]).to.eql(null);", + " pm.expect(responseJson[\"data\"][0][\"last_scan_timestamp\"]).to.eql(null);", + " 
pm.expect(responseJson[\"data\"][0][\"true_positive\"]).to.eql(0);", + " pm.expect(responseJson[\"data\"][0][\"false_positive\"]).to.eql(0);", + " pm.expect(responseJson[\"data\"][0][\"not_analyzed\"]).to.eql(0);", + " pm.expect(responseJson[\"data\"][0][\"under_review\"]).to.eql(0);", + " pm.expect(responseJson[\"data\"][0][\"clarification_required\"]).to.eql(0);", + " pm.expect(responseJson[\"data\"][0][\"total_findings_count\"]).to.eql(0);", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -8353,21 +6634,29 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/scans", + "raw": "{{baseUrl}}/resc/v1/repositories/findings-metadata/?skip=0&limit=1000&onlyifhasfindings=false", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", - "scans" + "repositories", + "findings-metadata", + "" ], - "variable": [ + "query": [ { - "key": "branch_id", - "value": "invalid" + "key": "skip", + "value": "0" + }, + { + "key": "limit", + "value": "1000" + }, + { + "key": "onlyifhasfindings", + "value": "false" } ] }, @@ -8406,7 +6695,7 @@ ] }, { - "name": "Get latest scan for a branch", + "name": "Get latest scan for a repository", "event": [ { "listen": "test", @@ -8437,21 +6726,21 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/last-scan", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/last-scan", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", + "repositories", + ":repository_id", "last-scan" ], "variable": [ { - "key": "branch_id", - "value": "{{branch_id}}" + "key": "repository_id", + "value": "{{repository_id}}" } ] }, @@ -8490,7 +6779,7 @@ ] }, { - "name": "Get latest scan for a branch with non existent ID", + "name": "Get latest scan for a repository with non existent ID", "event": [ { "listen": "test", @@ -8521,20 +6810,20 @@ "method": "GET", "header": [], "url": { - "raw": 
"{{baseUrl}}/resc/v1/branches/:branch_id/last-scan", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/last-scan", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", + "repositories", + ":repository_id", "last-scan" ], "variable": [ { - "key": "branch_id", + "key": "repository_id", "value": "999" } ] @@ -8574,7 +6863,7 @@ ] }, { - "name": "Get latest scan for a branch with invalid ID", + "name": "Get latest scan for a repository with invalid ID", "event": [ { "listen": "test", @@ -8607,20 +6896,20 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/last-scan", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/last-scan", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", + "repositories", + ":repository_id", "last-scan" ], "variable": [ { - "key": "branch_id", + "key": "repository_id", "value": "invalid" } ] @@ -8660,7 +6949,7 @@ ] }, { - "name": "Get findings metadata for a branch", + "name": "Get scans for a repository", "event": [ { "listen": "test", @@ -8677,17 +6966,11 @@ "const responseJson = pm.response.json();", "pm.test(\"Response body matches\", function() {", " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"data\"][\"id_\"]).to.eql(pm.environment.get(\"branch_id\"));", - " pm.expect(responseJson[\"data\"][\"branch_id\"]).to.eql(\"test-branch-id1\");", - " pm.expect(responseJson[\"data\"][\"branch_name\"]).to.eql(\"test-branch1\");", - " pm.expect(responseJson[\"data\"][\"latest_commit\"]).to.eql(\"testcommit12345\");", - " pm.expect(responseJson[\"data\"][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", - " pm.expect(responseJson[\"true_positive\"]).to.eql(0);", - " pm.expect(responseJson[\"false_positive\"]).to.eql(0);", - " pm.expect(responseJson[\"not_analyzed\"]).to.eql(0);", - " pm.expect(responseJson[\"under_review\"]).to.eql(0);", - " 
pm.expect(responseJson[\"clarification_required\"]).to.eql(0);", - " pm.expect(responseJson[\"total_findings_count\"]).to.eql(0);", + " pm.expect(responseJson[\"data\"]).to.be.an(\"array\");", + " pm.expect(responseJson[\"data\"].length).to.eql(0);", + " pm.expect(responseJson[\"total\"]).to.eql(0);", + " pm.expect(responseJson[\"limit\"]).to.eql(100);", + " pm.expect(responseJson[\"skip\"]).to.eql(0);", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -8702,21 +6985,21 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/findings-metadata", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/scans", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", - "findings-metadata" + "repositories", + ":repository_id", + "scans" ], "variable": [ { - "key": "branch_id", - "value": "{{branch_id}}" + "key": "repository_id", + "value": "{{repository_id}}" } ] }, @@ -8755,24 +7038,27 @@ ] }, { - "name": "Get findings metadata for a branch with non existent ID", + "name": "Get scans for a repository with non existent ID", "event": [ { "listen": "test", "script": { "exec": [ - "pm.test(\"Status code is 404\", function () {", - " pm.response.to.have.status(404);", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", "});", "", - "pm.test(\"Status is Not Found\"), function (){", - " pm.response.to.have.property(\"status\",\" Not Found\")", + "pm.test(\"Status is OK\"), function (){", + " pm.response.to.have.property(\"status\",\"OK\")", "}", "", "const responseJson = pm.response.json();", "pm.test(\"Response body matches\", function() {", " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"]).to.eql(\"Branch not found\");", + " pm.expect(responseJson[\"data\"]).to.be.an(\"array\").that.is.empty;;", + " pm.expect(responseJson[\"total\"]).to.eql(0);", + " pm.expect(responseJson[\"limit\"]).to.eql(100);", + " 
pm.expect(responseJson[\"skip\"]).to.eql(0);", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -8787,20 +7073,20 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/findings-metadata", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/scans", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", - "findings-metadata" + "repositories", + ":repository_id", + "scans" ], "variable": [ { - "key": "branch_id", + "key": "repository_id", "value": "999" } ] @@ -8840,7 +7126,7 @@ ] }, { - "name": "Get findings metadata for a branch with invalid ID", + "name": "Get scans for a repository with invalid ID", "event": [ { "listen": "test", @@ -8873,20 +7159,20 @@ "method": "GET", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id/findings-metadata", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id/scans", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id", - "findings-metadata" + "repositories", + ":repository_id", + "scans" ], "variable": [ { - "key": "branch_id", + "key": "repository_id", "value": "invalid" } ] @@ -8926,7 +7212,7 @@ ] }, { - "name": "Delete a branch which doesn't exist", + "name": "Delete a repository which doesn't exist", "event": [ { "listen": "test", @@ -8943,7 +7229,7 @@ "const responseJson = pm.response.json();", "pm.test(\"Response body matches\", function() {", " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"detail\"]).to.eql(\"Branch not found\");", + " pm.expect(responseJson[\"detail\"]).to.eql(\"Repository not found\");", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -8958,19 +7244,19 @@ "method": "DELETE", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id" + 
"repositories", + ":repository_id" ], "variable": [ { - "key": "branch_id", + "key": "repository_id", "value": "999" } ] @@ -9010,7 +7296,7 @@ ] }, { - "name": "Delete a branch with invalid id", + "name": "Delete a repository with invalid id", "event": [ { "listen": "test", @@ -9043,19 +7329,19 @@ "method": "DELETE", "header": [], "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", + "raw": "{{baseUrl}}/resc/v1/repositories/:repository_id", "host": [ "{{baseUrl}}" ], "path": [ "resc", "v1", - "branches", - ":branch_id" + "repositories", + ":repository_id" ], "variable": [ { - "key": "branch_id", + "key": "repository_id", "value": "one" } ] @@ -9124,7 +7410,7 @@ " pm.expect(responseJson[\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"increment_number\"]).to.eql(0);", " pm.expect(responseJson[\"rule_pack\"]).to.eql(\"1.0.0\");", - " pm.expect(responseJson[\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -9155,7 +7441,7 @@ ], "body": { "mode": "raw", - "raw": "{\n \"scan_type\": \"BASE\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"{{currentdate}}\",\n \"increment_number\": 0,\n \"rule_pack\": \"1.0.0\",\n \"branch_id\": {{branch_id}}\n}" + "raw": "{\n \"scan_type\": \"BASE\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"{{currentdate}}\",\n \"increment_number\": 0,\n \"rule_pack\": \"1.0.0\",\n \"repository_id\": {{repository_id}}\n}" }, "url": { "raw": "{{baseUrl}}/resc/v1/scans", @@ -9284,7 +7570,7 @@ ], "body": { "mode": "raw", - "raw": "{\n \"scan_type\": \"TEST\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"2023-01-30T22:51:31.486Z\",\n \"increment_number\": \"one\",\n \"rule_pack\": 1,\n \"branch_id\": \"str\"\n}" + "raw": "{\n 
\"scan_type\": \"TEST\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"2023-01-30T22:51:31.486Z\",\n \"increment_number\": \"one\",\n \"rule_pack\": 1,\n \"repository_id\": \"str\"\n}" }, "url": { "raw": "{{baseUrl}}/resc/v1/scans", @@ -9390,7 +7676,7 @@ " pm.expect(responseJson[\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"increment_number\"]).to.eql(0);", " pm.expect(responseJson[\"rule_pack\"]).to.eql(\"1.0.0\");", - " pm.expect(responseJson[\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"id_\"]).to.eql(pm.environment.get(\"scan_id\"));", "});", "", @@ -9650,7 +7936,7 @@ " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"increment_number\"]).to.eql(0);", " pm.expect(responseJson[\"data\"][0][\"rule_pack\"]).to.eql(\"1.0.0\");", - " pm.expect(responseJson[\"data\"][0][\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(pm.environment.get(\"scan_id\"));", "});", "", @@ -9845,7 +8131,7 @@ " pm.expect(responseJson[\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"increment_number\"]).to.eql(0);", " pm.expect(responseJson[\"rule_pack\"]).to.eql(\"1.0.0\");", - " pm.expect(responseJson[\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", "});", "", "pm.test(\"Response time is less than 300ms\", function () {", @@ -9866,7 +8152,7 @@ ], "body": { "mode": "raw", - "raw": 
"{\n \"scan_type\": \"BASE\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"{{currentdate}}\",\n \"increment_number\": 0,\n \"rule_pack\": \"1.0.0\",\n \"branch_id\": {{branch_id}}\n}" + "raw": "{\n \"scan_type\": \"BASE\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"{{currentdate}}\",\n \"increment_number\": 0,\n \"rule_pack\": \"1.0.0\",\n \"repository_id\": {{repository_id}}\n}" }, "url": { "raw": "{{baseUrl}}/resc/v1/scans/:scan_id", @@ -9995,7 +8281,7 @@ ], "body": { "mode": "raw", - "raw": "{\n \"scan_type\": \"BASE\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"2023-01-30T22:51:31.486Z\",\n \"increment_number\": 0,\n \"rule_pack\": \"1.0.0\",\n \"branch_id\": {{branch_id}}\n}" + "raw": "{\n \"scan_type\": \"BASE\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"2023-01-30T22:51:31.486Z\",\n \"increment_number\": 0,\n \"rule_pack\": \"1.0.0\",\n \"repository_id\": {{repository_id}}\n}" }, "url": { "raw": "{{baseUrl}}/resc/v1/scans/:scan_id", @@ -10131,7 +8417,7 @@ ], "body": { "mode": "raw", - "raw": "{\n \"scan_type\": \"TEST\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"2023-01-30T22:51:31.486Z\",\n \"increment_number\": \"one\",\n \"rule_pack\": 1,\n \"branch_id\": \"str\"\n}" + "raw": "{\n \"scan_type\": \"TEST\",\n \"last_scanned_commit\": \"testcommit12345\",\n \"timestamp\": \"2023-01-30T22:51:31.486Z\",\n \"increment_number\": \"one\",\n \"rule_pack\": 1,\n \"repository_id\": \"str\"\n}" }, "url": { "raw": "{{baseUrl}}/resc/v1/scans/:scan_id", @@ -10956,7 +9242,7 @@ ], "body": { "mode": "raw", - "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"status\": 
\"NOT_ANALYZED\",\n \"comment\": \"\",\n \"event_sent_on\": null,\n \"rule_name\": \"Rule-1\",\n \"branch_id\": {{branch_id}}\n }\n\n]" + "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"status\": \"NOT_ANALYZED\",\n \"comment\": \"\",\n \"event_sent_on\": null,\n \"rule_name\": \"Rule-1\",\n \"repository_id\": {{repository_id}}\n }\n\n]" }, "url": { "raw": "{{baseUrl}}/resc/v1/scans/:scan_id/findings", @@ -11088,7 +9374,7 @@ ], "body": { "mode": "raw", - "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"event_sent_on\": null,\n \"rule_name\": \"Rule-1\",\n \"branch_id\": \"str\"\n }\n\n]" + "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"event_sent_on\": null,\n \"rule_name\": \"Rule-1\",\n \"repository_id\": \"str\"\n }\n\n]" }, "url": { "raw": "{{baseUrl}}/resc/v1/scans/:scan_id/findings", @@ -11209,7 +9495,7 @@ " pm.expect(responseJson[\"data\"][0][\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", " pm.expect(responseJson[\"data\"][0][\"rule_name\"]).to.eql(\"Rule-1\");", - " 
pm.expect(responseJson[\"data\"][0][\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"scan_ids\"]).to.eql(null);", "});", @@ -11507,7 +9793,7 @@ " pm.expect(responseJson[\"data\"][0][\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", " pm.expect(responseJson[\"data\"][0][\"rule_name\"]).to.eql(\"Rule-1\");", - " pm.expect(responseJson[\"data\"][0][\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"scan_ids\"]).to.eql(null);", "});", @@ -11818,7 +10104,7 @@ " pm.expect(responseJson[\"data\"][0][\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", " pm.expect(responseJson[\"data\"][0][\"rule_name\"]).to.eql(\"Rule-1\");", - " pm.expect(responseJson[\"data\"][0][\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"scan_ids\"]).to.eql(null);", "});", @@ -12027,7 +10313,6 @@ " pm.expect(responseJson[\"data\"][0][\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", " pm.expect(responseJson[\"data\"][0][\"rule_name\"]).to.eql(\"Rule-1\");", - " pm.expect(responseJson[\"data\"][0][\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", " 
pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"scan_ids\"]).to.eql(null);", "});", @@ -12682,7 +10967,7 @@ ], "body": { "mode": "raw", - "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"event_sent_on\": null,\n \"rule_name\": \"GitHub Personal Access Token\",\n \"branch_id\": {{branch_id}}\n }\n]" + "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"event_sent_on\": null,\n \"rule_name\": \"GitHub Personal Access Token\",\n \"repository_id\": {{repository_id}}\n }\n]" }, "url": { "raw": "{{baseUrl}}/resc/v1/findings", @@ -12805,7 +11090,7 @@ ], "body": { "mode": "raw", - "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"event_sent_on\": null,\n \"rule_name\": \"GitHub Personal Access Token\",\n \"branch_id\": \"str\"\n }\n]" + "raw": "[\n {\n \"file_path\": \"dummy_file.txt\",\n \"line_number\": 10,\n \"column_start\": 1,\n \"column_end\": 8,\n \"commit_id\": \"testcommit12345\",\n \"commit_message\": \"test commit message\",\n \"commit_timestamp\": \"2023-01-30T23:18:24.546Z\",\n \"author\": \"test-author\",\n \"email\": \"test-author@mail.com\",\n \"event_sent_on\": null,\n 
\"rule_name\": \"GitHub Personal Access Token\",\n \"repository_id\": \"str\"\n }\n]" }, "url": { "raw": "{{baseUrl}}/resc/v1/findings", @@ -12918,7 +11203,7 @@ " pm.expect(responseJson[\"data\"][1][\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"data\"][1][\"event_sent_on\"]).to.eql(null);", " pm.expect(responseJson[\"data\"][1][\"rule_name\"]).to.eql(\"GitHub Personal Access Token\");", - " pm.expect(responseJson[\"data\"][1][\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"data\"][1][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"data\"][1][\"scan_ids\"]).to.eql(null);", " pm.expect(responseJson[\"data\"][1][\"id_\"]).to.be.greaterThan(0);", " pm.environment.set(\"finding_id\", responseJson[\"data\"][1][\"id_\"]);", @@ -13219,7 +11504,7 @@ " pm.expect(responseJson[\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"event_sent_on\"]).to.eql(null);", " pm.expect(responseJson[\"rule_name\"]).to.eql(\"GitHub Personal Access Token\");", - " pm.expect(responseJson[\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"id_\"]).to.eql(pm.environment.get(\"finding_id\"));", " pm.expect(responseJson[\"scan_ids\"]).to.be.an(\"array\");", "});", @@ -13485,7 +11770,7 @@ " pm.expect(responseJson[\"data\"][0][\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", " pm.expect(responseJson[\"data\"][0][\"rule_name\"]).to.eql(\"GitHub Personal Access Token\");", - " pm.expect(responseJson[\"data\"][0][\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"data\"][0][\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " 
pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(pm.environment.get(\"finding_id\"));", " pm.expect(responseJson[\"data\"][0][\"scan_ids\"]).to.eql(null);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -15117,7 +13402,7 @@ " pm.expect(responseJson[\"email\"]).to.eql(\"test-author@mail.com\");", " pm.expect(responseJson[\"event_sent_on\"]).to.include(\"2023-01-31T13\");", " pm.expect(responseJson[\"rule_name\"]).to.eql(\"GitHub Personal Access Token\");", - " pm.expect(responseJson[\"branch_id\"]).to.eql(pm.environment.get(\"branch_id\"));", + " pm.expect(responseJson[\"repository_id\"]).to.eql(pm.environment.get(\"repository_id\"));", " pm.expect(responseJson[\"id_\"]).to.eql(pm.environment.get(\"finding_id\"));", " pm.expect(responseJson[\"scan_ids\"]).to.be.an(\"array\");", " pm.expect(responseJson[\"scan_ids\"].length).to.eql(0);", @@ -15954,11 +14239,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -16078,11 +14361,9 @@ " 
pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -16197,11 +14478,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -16316,11 
+14595,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -16435,11 +14712,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ 
-16554,11 +14829,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -16673,11 +14946,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " 
pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -16792,11 +15063,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -16911,11 +15180,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " 
pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -17042,11 +15309,9 @@ " pm.expect(responseJson[\"data\"][0][\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"data\"][0][\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"data\"][0][\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"data\"][0][\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"data\"][0][\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"data\"][0][\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"data\"][0][\"id_\"]).to.eql(1);", " pm.expect(responseJson[\"total\"]).to.eql(1);", @@ -17172,12 +15437,10 @@ " pm.expect(responseJson[\"repository_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository\");", " pm.expect(responseJson[\"timestamp\"].substring(0, 10)).to.eql(pm.environment.get(\"currentdate\").substring(0, 10));", " pm.expect(responseJson[\"vcs_provider\"]).to.eql(\"AZURE_DEVOPS\");", - " pm.expect(responseJson[\"branch_name\"]).to.eql(\"test-branch1\");", " pm.expect(responseJson[\"last_scanned_commit\"]).to.eql(\"testcommit12345\");", " pm.expect(responseJson[\"scan_id\"]).to.eql(pm.environment.get(\"scan_id\"));", " pm.expect(responseJson[\"event_sent_on\"]).to.eql(null);", - " pm.expect(responseJson[\"branch_name\"]).to.eql(\"test-branch1\");", - " 
pm.expect(responseJson[\"commit_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository/commit/testcommit12345?refName=refs/heads/test-branch1&path=/dummy_file.txt\");", + " pm.expect(responseJson[\"commit_url\"]).to.eql(\"https://dev.azure.com/test-org/test-project/_git/test-repository/commit/testcommit12345?path=/dummy_file.txt\");", " pm.expect(responseJson[\"id_\"]).to.be.greaterThan(0);", " pm.expect(responseJson[\"id_\"]).to.eql(1);", "});", @@ -18277,95 +16540,6 @@ } ] }, - { - "name": "Delete a branch", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});", - "", - "pm.test(\"Status is OK\"), function (){", - " pm.response.to.have.property(\"status\",\" OK\")", - "}", - "", - "const responseJson = pm.response.json();", - "pm.test(\"Response body matches\", function() {", - " pm.expect(responseJson).to.be.an(\"object\");", - " pm.expect(responseJson[\"ok\"]).to.eql(true);", - "});", - "", - "pm.test(\"Branch id doesn't exist\", function() {", - " pm.environment.unset(\"branch_id\");", - " pm.expect(pm.environment.get(\"branch_id\")).to.eql(undefined);", - "});", - "", - "pm.test(\"Response time is less than 300ms\", function () {", - " pm.expect(pm.response.responseTime).to.be.below(300);", - "});" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "DELETE", - "header": [], - "url": { - "raw": "{{baseUrl}}/resc/v1/branches/:branch_id", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "resc", - "v1", - "branches", - ":branch_id" - ], - "variable": [ - { - "key": "branch_id", - "value": "{{branch_id}}" - } - ] - }, - "description": " Retrieve all uniquely detected rules across all findings in the database\n:param db_connection:\n Session of the database connection\n:return: [str]\n The output will contain a list of strings of unique rules in the findings table" - }, - "response": [ - { - "name": "Successful 
Response", - "originalRequest": { - "method": "GET", - "header": [], - "url": { - "raw": "{{baseUrl}}/sts/v1/detected-rules", - "host": [ - "{{baseUrl}}" - ], - "path": [ - "sts", - "v1", - "detected-rules" - ] - } - }, - "status": "OK", - "code": 200, - "_postman_previewlanguage": "json", - "header": [ - { - "key": "Content-Type", - "value": "application/json" - } - ], - "cookie": [], - "body": "[\n \"occaecat ut ea et Ut\",\n \"ut nostrud\"\n]" - } - ] - }, { "name": "Delete a repository", "event": [ diff --git a/components/resc-backend/tests/resc_backend/db/model/test_db_branch.py b/components/resc-backend/tests/resc_backend/db/model/test_db_branch.py deleted file mode 100644 index 451ef86f..00000000 --- a/components/resc-backend/tests/resc_backend/db/model/test_db_branch.py +++ /dev/null @@ -1,50 +0,0 @@ -# Standard Library -import sys -import unittest - -# Third Party -from sqlalchemy import create_engine -from sqlalchemy.orm import Session - -# First Party -from resc_backend.db.model import Base, DBbranch, DBrepository, DBVcsInstance - -sys.path.insert(0, "src") - - -class TestBranch(unittest.TestCase): - def setUp(self): - self.engine = create_engine('sqlite:///:memory:') - Base.metadata.create_all(self.engine) - self.session = Session(bind=self.engine) - self.vcs_instance = DBVcsInstance(name="name", - provider_type="provider_type", - scheme="scheme", - hostname="hostname", - port=123, - organization="organization", - scope="scope", - exceptions="exceptions") - self.session.add(self.vcs_instance) - - self.repository = DBrepository(project_key='TEST', - repository_id=1, - repository_name="test_temp", - repository_url="fake.url.com", - vcs_instance=1) - self.session.add(self.repository) - - self.branch = DBbranch(repository_id=1, - branch_name="test_temp", - branch_id='master', - latest_commit="FAKE_HASH") - self.session.add(self.branch) - self.session.commit() - - def tearDown(self): - Base.metadata.drop_all(self.engine) - - def test_query_all_branch(self): 
- expected = [self.branch] - result = self.session.query(DBbranch).all() - self.assertEqual(result, expected) diff --git a/components/resc-backend/tests/resc_backend/db/model/test_db_finding.py b/components/resc-backend/tests/resc_backend/db/model/test_db_finding.py index c7ce59fc..80e1b197 100644 --- a/components/resc-backend/tests/resc_backend/db/model/test_db_finding.py +++ b/components/resc-backend/tests/resc_backend/db/model/test_db_finding.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Session # First Party -from resc_backend.db.model import Base, DBbranch, DBfinding, DBrepository, DBrule, DBscan, DBscanFinding, DBVcsInstance +from resc_backend.db.model import Base, DBfinding, DBrepository, DBrule, DBscan, DBscanFinding, DBVcsInstance from resc_backend.db.model.rule_pack import DBrulePack from resc_backend.resc_web_service.schema.finding import FindingCreate @@ -37,17 +37,11 @@ def setUp(self): vcs_instance=1) self.session.add(self.repository) - self.branch = DBbranch(repository_id=1, - branch_name="test_temp", - branch_id='master', - latest_commit="FAKE_HASH") - self.session.add(self.branch) - self.rule_pack = DBrulePack(version="1.2") self.rule = DBrule(rule_pack="1.2", rule_name="fake rule", description="fake1, fake2, fake3") - self.scan = DBscan(branch_id=1, scan_type="BASE", + self.scan = DBscan(repository_id=1, scan_type="BASE", last_scanned_commit="FAKE_HASH", timestamp=datetime.utcnow(), rule_pack="1.2", increment_number=1) @@ -64,7 +58,7 @@ def setUp(self): email="fake.author@fake-domain.com", rule_name="rule_1", event_sent_on=datetime.utcnow(), - branch_id=1) + repository_id=1) self.scan_finding = DBscanFinding(finding_id=1, scan_id=1) self.session.add(self.finding) @@ -93,7 +87,7 @@ def test_create_from_finding(self): rule_name=self.finding.rule_name, event_sent_on=self.finding.event_sent_on, scan_ids=[self.scan_finding.scan_id], - branch_id=self.finding.branch_id) + repository_id=self.finding.repository_id) result = 
DBfinding.create_from_finding(finding) self.assertEqual(result.file_path, expected.file_path) self.assertEqual(result.line_number, expected.line_number) diff --git a/components/resc-backend/tests/resc_backend/db/model/test_db_scan.py b/components/resc-backend/tests/resc_backend/db/model/test_db_scan.py index 85cb10f5..5f912cd9 100644 --- a/components/resc-backend/tests/resc_backend/db/model/test_db_scan.py +++ b/components/resc-backend/tests/resc_backend/db/model/test_db_scan.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Session # First Party -from resc_backend.db.model import Base, DBbranch, DBrepository, DBscan, DBVcsInstance +from resc_backend.db.model import Base, DBrepository, DBscan, DBVcsInstance from resc_backend.db.model.rule_pack import DBrulePack sys.path.insert(0, "src") @@ -36,15 +36,9 @@ def setUp(self): vcs_instance=1) self.session.add(self.repository) - self.branch = DBbranch(repository_id=1, - branch_name="test_temp", - branch_id='master', - latest_commit="FAKE_HASH") - self.session.add(self.branch) - self.rule_pack = DBrulePack(version="1.2") - self.scan = DBscan(branch_id=1, scan_type="BASE", + self.scan = DBscan(repository_id=1, scan_type="BASE", last_scanned_commit="FAKE_HASH", timestamp=datetime.utcnow(), rule_pack="1.2", increment_number=1) diff --git a/components/resc-backend/tests/resc_backend/helpers/test_git_operation.py b/components/resc-backend/tests/resc_backend/helpers/test_git_operation.py index 461e9044..8c49bf96 100644 --- a/components/resc-backend/tests/resc_backend/helpers/test_git_operation.py +++ b/components/resc-backend/tests/resc_backend/helpers/test_git_operation.py @@ -10,14 +10,12 @@ def test_clone_repository(clone_from): username = "username" personal_access_token = "personal_access_token" repository_url = "https://fake-host.com" - branch_name = "branch_name" repo_clone_path = "repo_clone_path" - clone_repository(repository_url=repository_url, - branch_name=branch_name, repo_clone_path=repo_clone_path, username=username, 
+ clone_repository(repository_url=repository_url, repo_clone_path=repo_clone_path, username=username, personal_access_token=personal_access_token) clone_from.assert_called_once() url = repository_url.replace("https://", "") expected_repo_clone_url = f"https://{username}:{personal_access_token}@{url}" - clone_from.assert_called_once_with(expected_repo_clone_url, repo_clone_path, branch=branch_name) + clone_from.assert_called_once_with(expected_repo_clone_url, repo_clone_path) diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_branches.py b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_branches.py deleted file mode 100644 index 9eb70aec..00000000 --- a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_branches.py +++ /dev/null @@ -1,284 +0,0 @@ -# Standard Library -import json -import unittest -from datetime import datetime -from unittest.mock import ANY, patch - -# Third Party -from fastapi.testclient import TestClient - -# First Party -from resc_backend.constants import RWS_ROUTE_BRANCHES, RWS_ROUTE_LAST_SCAN, RWS_ROUTE_SCANS, RWS_VERSION_PREFIX -from resc_backend.db.model import DBbranch, DBscan -from resc_backend.resc_web_service.api import app -from resc_backend.resc_web_service.dependencies import requires_auth, requires_no_auth -from resc_backend.resc_web_service.schema.branch import BranchCreate - - -class TestBranches(unittest.TestCase): - def setUp(self): - self.client = TestClient(app) - app.dependency_overrides[requires_auth] = requires_no_auth - self.db_branches = [] - for i in range(1, 6): - self.db_branches.append( - DBbranch(repository_id=i, branch_id=f"branch_id_{i}", branch_name=f"branch_name_{i}", - latest_commit=f"latest_commit_{i}")) - self.db_branches[i - 1].id_ = i - - self.db_scans = [] - for i in range(1, 6): - self.db_scans.append(DBscan(branch_id=i, scan_type="BASE", - last_scanned_commit="FAKE_HASH", timestamp=datetime.utcnow(), - increment_number=0, 
rule_pack="1.3")) - self.db_scans[i - 1].id_ = i - - @staticmethod - def create_json_body(branch): - return json.loads(TestBranches.cast_db_branch_to_branch_create(branch).json()) - - @staticmethod - def cast_db_branch_to_branch_create(branch): - return BranchCreate(branch_id=branch.branch_id, - branch_name=branch.branch_name, - latest_commit=branch.latest_commit, - repository_id=branch.repository_id) - - @staticmethod - def assert_branch(data, branch): - assert data["branch_id"] == branch.branch_id - assert data["branch_name"] == branch.branch_name - assert data["latest_commit"] == branch.latest_commit - assert data["repository_id"] == branch.repository_id - assert data["id_"] == branch.id_ - - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_get_branches_non_existing(self, get_branch): - branch_id = 999 - get_branch.return_value = None - response = self.client.get( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}") - assert response.status_code == 404, response.text - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_get_branches(self, get_branch): - branch_id = 1 - get_branch.return_value = self.db_branches[branch_id] - response = self.client.get( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}") - assert response.status_code == 200, response.text - self.assert_branch(response.json(), self.db_branches[branch_id]) - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - - @patch("resc_backend.resc_web_service.crud.branch.create_branch_if_not_exists") - def test_post_branches(self, create_branch_if_not_exists): - branch_id = 1 - create_branch_if_not_exists.return_value = self.db_branches[branch_id] - response = self.client.post(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}", - json=self.create_json_body(self.db_branches[branch_id])) - assert response.status_code == 201, response.text - self.assert_branch(response.json(), 
self.db_branches[branch_id]) - create_branch_if_not_exists.assert_called_once() - - @patch("resc_backend.resc_web_service.crud.branch.create_branch_if_not_exists") - @patch("resc_backend.resc_web_service.crud.branch.create_branch") - def test_post_branches_no_body(self, create_branch, create_branch_if_not_exists): - response = self.client.post(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}") - assert response.status_code == 422, response.text - data = response.json() - assert data["detail"][0]["loc"] == ["body"] - assert data["detail"][0]["msg"] == "field required" - create_branch.assert_not_called() - create_branch_if_not_exists.assert_not_called() - - @patch("resc_backend.resc_web_service.crud.branch.create_branch_if_not_exists") - @patch("resc_backend.resc_web_service.crud.branch.create_branch") - def test_post_branches_empty_body(self, create_branch, create_branch_if_not_exists): - response = self.client.post(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}", json={}) - assert response.status_code == 422, response.text - data = response.json() - assert data["detail"][0]["loc"] == ['body', 'branch_id'] - assert data["detail"][0]["msg"] == "field required" - assert data["detail"][1]["loc"] == ['body', 'branch_name'] - assert data["detail"][1]["msg"] == "field required" - assert data["detail"][2]["loc"] == ['body', 'latest_commit'] - assert data["detail"][2]["msg"] == "field required" - assert data["detail"][3]["loc"] == ['body', 'repository_id'] - assert data["detail"][3]["msg"] == "field required" - create_branch.assert_not_called() - create_branch_if_not_exists.assert_not_called() - - @patch("resc_backend.resc_web_service.crud.branch.update_branch") - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_put_branches(self, get_branch, update_branch): - branch_id = 1 - get_branch.return_value = self.db_branches[0] - get_branch.return_value.id_ = branch_id - update_branch.return_value = self.db_branches[branch_id] - response = self.client.put( - 
f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}", - json=self.create_json_body(self.db_branches[branch_id])) - assert response.status_code == 200, response.text - self.assert_branch(response.json(), self.db_branches[branch_id]) - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - update_branch.assert_called_once() - - @patch("resc_backend.resc_web_service.crud.branch.update_branch") - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_put_branches_empty_body(self, get_branch, update_branch): - response = self.client.put(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/9999999999", - json={}) - assert response.status_code == 422, response.text - data = response.json() - assert data["detail"][0]["loc"] == ['body', 'branch_id'] - assert data["detail"][0]["msg"] == "field required" - assert data["detail"][1]["loc"] == ['body', 'branch_name'] - assert data["detail"][1]["msg"] == "field required" - assert data["detail"][2]["loc"] == ['body', 'latest_commit'] - assert data["detail"][2]["msg"] == "field required" - assert data["detail"][3]["loc"] == ['body', 'repository_id'] - assert data["detail"][3]["msg"] == "field required" - get_branch.assert_not_called() - update_branch.assert_not_called() - - @patch("resc_backend.resc_web_service.crud.branch.update_branch") - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_put_branches_non_existing(self, get_branch, update_branch): - branch_id = 999 - get_branch.return_value = None - response = self.client.put( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}", - json=self.create_json_body(self.db_branches[0])) - assert response.status_code == 404, response.text - data = response.json() - assert data["detail"] == "Branch not found" - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - update_branch.assert_not_called() - - @patch("resc_backend.resc_web_service.crud.branch.delete_branch") - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - 
def test_delete_branches(self, get_branch, delete_branch): - branch_id = 1 - get_branch.return_value = self.db_branches[branch_id] - response = self.client.delete( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}") - assert response.status_code == 200, response.text - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - delete_branch.assert_called_once_with(ANY, branch_id=branch_id, delete_related=True) - - @patch("resc_backend.resc_web_service.crud.branch.delete_branch") - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_delete_branches_non_existing(self, get_branch, delete_branch): - branch_id = 999 - get_branch.return_value = None - response = self.client.delete( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}") - assert response.status_code == 404, response.text - data = response.json() - assert data["detail"] == "Branch not found" - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - delete_branch.assert_not_called() - - @patch("resc_backend.resc_web_service.crud.branch.get_branches_count") - @patch("resc_backend.resc_web_service.crud.branch.get_branches") - def test_get_multiple_branches(self, get_branches, get_branches_count): - get_branches.return_value = self.db_branches[:2] - get_branches_count.return_value = len(self.db_branches[:2]) - response = self.client.get(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}", - params={"skip": 0, "limit": 5}) - assert response.status_code == 200, response.text - data = response.json() - assert len(data["data"]) == 2 - self.assert_branch(data["data"][0], self.db_branches[0]) - self.assert_branch(data["data"][1], self.db_branches[1]) - assert data["total"] == 2 - assert data["limit"] == 5 - assert data["skip"] == 0 - get_branches.assert_called_once_with(ANY, skip=0, limit=5) - - @patch("resc_backend.resc_web_service.crud.branch.get_branches") - def test_get_multiple_branches_with_negative_skip(self, get_branches): - response = 
self.client.get(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}", - params={"skip": -1, "limit": 5}) - assert response.status_code == 422, response.text - data = response.json() - assert data["detail"][0]["loc"] == ["query", "skip"] - assert data["detail"][0]["msg"] == "ensure this value is greater than or equal to 0" - get_branches.assert_not_called() - - @patch("resc_backend.resc_web_service.crud.branch.get_branches") - def test_get_multiple_branches_with_negative_limit(self, get_branches): - response = self.client.get(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}", - params={"skip": 0, "limit": -1}) - assert response.status_code == 422, response.text - data = response.json() - assert data["detail"][0]["loc"] == ["query", "limit"] - assert data["detail"][0]["msg"] == "ensure this value is greater than or equal to 1" - get_branches.assert_not_called() - - @patch("resc_backend.resc_web_service.crud.scan.get_scans_count") - @patch("resc_backend.resc_web_service.crud.scan.get_scans") - def test_get_branches_scans(self, get_scans, get_scans_count): - get_scans.return_value = self.db_scans[:2] - get_scans_count.return_value = len(self.db_scans[:2]) - response = self.client.get( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/1{RWS_ROUTE_SCANS}/") - assert response.status_code == 200, response.text - data = response.json() - assert data['total'] == len(self.db_scans[:2]) - assert data['limit'] == 100 - assert data['skip'] == 0 - assert len(data["data"]) == len(self.db_scans[:2]) - assert data["data"][0]["id_"] == self.db_scans[0].id_ - assert data["data"][1]["id_"] == self.db_scans[1].id_ - - @patch("resc_backend.resc_web_service.crud.scan.get_latest_scan_for_branch") - def test_get_last_scan_for_branch(self, get_latest_scan_for_branch): - get_latest_scan_for_branch.return_value = self.db_scans[0] - response = self.client.get( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/1{RWS_ROUTE_LAST_SCAN}/") - assert response.status_code == 200, response.text - data = response.json() - assert 
data['scan_type'] == self.db_scans[0].scan_type - assert data['last_scanned_commit'] == self.db_scans[0].last_scanned_commit - assert data["branch_id"] == self.db_scans[0].branch_id - assert data["id_"] == self.db_scans[0].id_ - - @patch("resc_backend.resc_web_service.crud.branch.get_findings_metadata_by_branch_id") - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_get_findings_metadata_for_branch(self, get_branch, get_findings_metadata_by_branch_id): - branch_id = 1 - get_branch.return_value = self.db_branches[branch_id] - response = self.client.get( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}") - assert response.status_code == 200, response.text - self.assert_branch(response.json(), self.db_branches[branch_id]) - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - - get_findings_metadata_by_branch_id.return_value = {"true_positive": 1, "false_positive": 2, - "not_analyzed": 3, - "under_review": 4, "clarification_required": 5, - "total_findings_count": 15} - response = get_findings_metadata_by_branch_id.return_value - assert response["true_positive"] == 1 - assert response["false_positive"] == 2 - assert response["not_analyzed"] == 3 - assert response["under_review"] == 4 - assert response["clarification_required"] == 5 - assert response["total_findings_count"] == 15 - - @patch("resc_backend.resc_web_service.crud.branch.get_findings_metadata_by_branch_id") - @patch("resc_backend.resc_web_service.crud.branch.get_branch") - def test_get_findings_metadata_for_branch_non_existing(self, get_branch, - get_findings_metadata_by_branch_id): - branch_id = 999 - get_branch.return_value = None - response = self.client.put( - f"{RWS_VERSION_PREFIX}{RWS_ROUTE_BRANCHES}/{branch_id}", - json=self.create_json_body(self.db_branches[0])) - assert response.status_code == 404, response.text - data = response.json() - assert data["detail"] == "Branch not found" - get_branch.assert_called_once_with(ANY, branch_id=branch_id) - 
get_findings_metadata_by_branch_id.assert_not_called() diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_common.py b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_common.py index 1567cea5..5851058d 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_common.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_common.py @@ -14,11 +14,13 @@ RWS_VERSION_PREFIX ) from resc_backend.resc_web_service.api import app +from resc_backend.resc_web_service.dependencies import requires_auth, requires_no_auth class TestFindings(unittest.TestCase): def setUp(self): self.client = TestClient(app) + app.dependency_overrides[requires_auth] = requires_no_auth def test_get_supported_vcs_providers(self): response = self.client.get(f"{RWS_VERSION_PREFIX}" diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_finding.py b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_finding.py index c37b960d..6c2ad77e 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_finding.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_finding.py @@ -42,7 +42,6 @@ def setUp(self): repository_url=f"http://fake.repo.com/_{i}", timestamp=datetime(year=1970, month=1, day=i), vcs_provider="AZURE_DEVOPS", - branch_name=f"_{i}", last_scanned_commit=f"_{i}", commit_url=f"_{i}", scan_id=i), @@ -68,7 +67,6 @@ def assert_detailed_finding(data, detailed_finding: DetailedFindingRead): assert data["repository_url"] == detailed_finding.repository_url assert datetime.strptime(data["timestamp"], "%Y-%m-%dT%H:%M:%S") == detailed_finding.timestamp assert data["vcs_provider"] == detailed_finding.vcs_provider - assert data["branch_name"] == detailed_finding.branch_name assert data["last_scanned_commit"] == 
detailed_finding.last_scanned_commit assert data["commit_url"] == detailed_finding.commit_url @@ -229,7 +227,6 @@ def test_get_detailed_findings_by_all_filters(self, get_detailed_findings, get_t "finding_statuses": finding_statuses, "start_date_time": "1970-11-11T00:00:00", "end_date_time": "1970-11-11T00:00:01", - "branch_name": "branch_name", "project_name": "project_name", "scan_ids": scan_ids, "repository_name": "repository_name", diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_findings.py b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_findings.py index 1327080d..7a383679 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_findings.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_detailed_findings.py @@ -42,7 +42,6 @@ def setUp(self): repository_url=f"http://fake.repo.com/_{i}", timestamp=datetime(year=1970, month=1, day=i), vcs_provider="AZURE_DEVOPS", - branch_name=f"_{i}", last_scanned_commit=f"_{i}", event_sent_on=datetime(year=1970, month=1, day=i), scan_id=i) @@ -68,7 +67,6 @@ def assert_detailed_finding(data, detailed_finding: DetailedFindingRead): assert data["repository_url"] == detailed_finding.repository_url assert datetime.strptime(data["timestamp"], "%Y-%m-%dT%H:%M:%S") == detailed_finding.timestamp assert data["vcs_provider"] == detailed_finding.vcs_provider - assert data["branch_name"] == detailed_finding.branch_name assert data["last_scanned_commit"] == detailed_finding.last_scanned_commit assert datetime.strptime(data["event_sent_on"], "%Y-%m-%dT%H:%M:%S") == detailed_finding.event_sent_on @@ -230,7 +228,6 @@ def test_get_detailed_findings_by_all_filters(self, get_detailed_findings, get_t "finding_statuses": finding_statuses, "start_date_time": "1970-11-11T00:00:00", "end_date_time": "1970-11-11T00:00:01", - "branch_name": "branch_name", "project_name": "project_name", 
"scan_ids": scan_ids, "repository_name": "repository_name", diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_findings.py b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_findings.py index b0b2de01..b79201c3 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_findings.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_findings.py @@ -46,7 +46,7 @@ def setUp(self): email=f"email_{i}", rule_name=f"rule_{i}", event_sent_on=datetime.utcnow(), - branch_id=1) + repository_id=1) self.db_findings.append(finding) self.db_findings[i - 1].id_ = i self.db_scan_findings.append(DBscanFinding( @@ -70,7 +70,7 @@ def setUp(self): for i in range(1, 6): self.enriched_findings.append(FindingRead(id_=i, scan_ids=[i], - branch_id=i, + repository_id=i, file_path=f"file_path_{i}", line_number=i, column_start=i, @@ -96,7 +96,7 @@ def assert_db_finding(data, finding: DBfinding, scan_findings: List[DBscanFindin assert data["email"] == finding.email assert data["rule_name"] == finding.rule_name assert data["scan_ids"] == [x.scan_id for x in scan_findings] - assert data["branch_id"] == finding.branch_id + assert data["repository_id"] == finding.repository_id assert data["id_"] == finding.id_ assert finding.id_ == scan_findings[0].finding_id assert datetime.strptime(data["event_sent_on"], "%Y-%m-%dT%H:%M:%S.%f") == finding.event_sent_on @@ -114,14 +114,14 @@ def assert_finding(data, finding: Finding): assert data["email"] == finding.email assert data["rule_name"] == finding.rule_name assert data["scan_ids"] == finding.scan_ids - assert data["branch_id"] == finding.branch_id + assert data["repository_id"] == finding.repository_id assert data["id_"] == finding.id_ assert datetime.strptime(data["event_sent_on"], "%Y-%m-%dT%H:%M:%S.%f") == finding.event_sent_on @staticmethod def cast_db_finding_to_finding_create(finding: DBfinding, scan_findings: 
List[DBscanFinding]): return FindingCreate(scan_ids=[x.scan_id for x in scan_findings], - branch_id=finding.branch_id, + repository_id=finding.repository_id, file_path=finding.file_path, line_number=finding.line_number, column_start=finding.column_start, diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_repositories.py b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_repositories.py index 16d607f1..d4ecdc3d 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_repositories.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_repositories.py @@ -9,13 +9,13 @@ # First Party from resc_backend.constants import ( - RWS_ROUTE_BRANCHES, RWS_ROUTE_DISTINCT_PROJECTS, RWS_ROUTE_DISTINCT_REPOSITORIES, RWS_ROUTE_REPOSITORIES, + RWS_ROUTE_SCANS, RWS_VERSION_PREFIX ) -from resc_backend.db.model import DBbranch, DBfinding, DBrepository, DBscan, DBVcsInstance +from resc_backend.db.model import DBfinding, DBrepository, DBscan, DBVcsInstance from resc_backend.resc_web_service.api import app from resc_backend.resc_web_service.dependencies import requires_auth, requires_no_auth from resc_backend.resc_web_service.schema.repository import RepositoryCreate @@ -47,16 +47,9 @@ def setUp(self): vcs_instance=i)) self.db_repositories[i - 1].id_ = i - self.db_branches = [] - for i in range(1, 6): - self.db_branches.append( - DBbranch(repository_id=i, branch_id=f"branch_id_{i}", branch_name=f"branch_name_{i}", - latest_commit=f"latest_commit_{i}")) - self.db_branches[i - 1].id_ = i - self.db_scans = [] for i in range(1, 6): - self.db_scans.append(DBscan(branch_id=i, scan_type="BASE", + self.db_scans.append(DBscan(repository_id=i, scan_type="BASE", last_scanned_commit="FAKE_HASH", timestamp=datetime.utcnow(), rule_pack="1.2", increment_number=1)) self.db_scans[i - 1].id_ = i @@ -74,7 +67,7 @@ def setUp(self): email=f"email_{i}", rule_name=f"rule_{i}", 
event_sent_on=datetime.utcnow(), - branch_id=1)) + repository_id=1)) self.db_findings[i - 1].id_ = i @staticmethod @@ -276,54 +269,6 @@ def test_get_multiple_repositories_with_negative_limit(self, get_repositories): assert data["detail"][0]["msg"] == "ensure this value is greater than or equal to 1" get_repositories.assert_not_called() - @patch("resc_backend.resc_web_service.crud.finding.get_total_findings_count") - @patch("resc_backend.resc_web_service.crud.scan.get_latest_scan_for_branch") - @patch("resc_backend.resc_web_service.crud.branch.get_branches_for_repository") - @patch("resc_backend.resc_web_service.crud.branch.get_branches_count_for_repository") - @patch("resc_backend.resc_web_service.crud.scan.get_scans") - def test_get_repository_branches(self, get_scans, get_branches_count_for_repository, - get_branches_for_repository, get_latest_scan_for_branch, - get_total_findings_count): - get_branches_for_repository.return_value = self.db_branches - get_branches_count_for_repository.return_value = len(self.db_branches) - get_latest_scan_for_branch.return_value = self.db_scans[1] - get_scans.return_value = self.db_scans - get_total_findings_count.return_value = len(self.db_findings) - response = self.client.get(f"{RWS_VERSION_PREFIX}" - f"{RWS_ROUTE_REPOSITORIES}/1{RWS_ROUTE_BRANCHES}") - assert response.status_code == 200, response.text - data = response.json() - assert data["data"][0]["id_"] == self.db_branches[0].id_ - assert datetime.strptime(data["data"][0]["last_scan_datetime"], "%Y-%m-%dT%H:%M:%S.%f") == \ - self.db_scans[3].timestamp - assert data["total"] == 5 - assert data["limit"] == 100 - assert data["skip"] == 0 - - @patch("resc_backend.resc_web_service.crud.branch.get_branches_for_repository") - @patch("resc_backend.resc_web_service.crud.branch.get_branches_count_for_repository") - def test_get_repository_branches_non_existing(self, get_branches_count_for_repository, - get_branches_for_repository): - get_branches_for_repository.return_value = [] - 
get_branches_count_for_repository.return_value = 0 - response = self.client.get(f"{RWS_VERSION_PREFIX}" - f"{RWS_ROUTE_REPOSITORIES}/9999{RWS_ROUTE_BRANCHES}") - assert response.status_code == 200, response.text - data = response.json() - assert data["data"] == [] - assert data["total"] == 0 - assert data["limit"] == 100 - assert data["skip"] == 0 - - def test_get_repository_branches_invalid_id(self): - response = self.client.get(f"{RWS_VERSION_PREFIX}" - f"{RWS_ROUTE_REPOSITORIES}/invalid{RWS_ROUTE_BRANCHES}") - assert response.status_code == 422, response.text - data = response.json() - assert data["detail"][0]["loc"] == ['path', 'repository_id'] - assert data["detail"][0]["msg"] == "value is not a valid integer" - assert data["detail"][0]["type"] == "type_error.integer" - @patch("resc_backend.resc_web_service.crud.repository.get_distinct_projects") def test_get_distinct_projects_when_single_vcs_instance_selected(self, get_distinct_projects): get_distinct_projects.return_value = self.db_repositories @@ -506,6 +451,8 @@ def test_get_all_repositories_with_findings_metadata(self, get_repositories, get "repository_name": "dummy_repo", "repository_url": "https://fake-ado.com", "vcs_instance": "AZURE_DEVOPS", + "last_scan_id": 1, + "last_scan_timestamp": "2023-05-23T15:52:22.270000", "id_": 1 }, "true_positive": 1, @@ -523,6 +470,8 @@ def test_get_all_repositories_with_findings_metadata(self, get_repositories, get assert response["data"]["repository_name"] == "dummy_repo" assert response["data"]["repository_url"] == "https://fake-ado.com" assert response["data"]["vcs_instance"] == "AZURE_DEVOPS" + assert response["data"]["last_scan_id"] == 1 + assert response["data"]["last_scan_timestamp"] == "2023-05-23T15:52:22.270000" assert response["data"]["id_"] == 1 assert response["true_positive"] == 1 assert response["false_positive"] == 2 @@ -570,3 +519,19 @@ def test_get_findings_metadata_for_repository_non_existing(self, assert response.status_code == 404, response.text 
get_repository.assert_called_once_with(ANY, repository_id=repository_id) get_findings_metadata_by_repository_id.assert_not_called() + + @patch("resc_backend.resc_web_service.crud.scan.get_scans_count") + @patch("resc_backend.resc_web_service.crud.scan.get_scans") + def test_get_scans_for_repository(self, get_scans, get_scans_count): + get_scans.return_value = self.db_scans[:2] + get_scans_count.return_value = len(self.db_scans[:2]) + response = self.client.get( + f"{RWS_VERSION_PREFIX}{RWS_ROUTE_REPOSITORIES}/1{RWS_ROUTE_SCANS}/") + assert response.status_code == 200, response.text + data = response.json() + assert data['total'] == len(self.db_scans[:2]) + assert data['limit'] == 100 + assert data['skip'] == 0 + assert len(data["data"]) == len(self.db_scans[:2]) + assert data["data"][0]["id_"] == self.db_scans[0].id_ + assert data["data"][1]["id_"] == self.db_scans[1].id_ diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_scans.py b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_scans.py index 1558dbf4..bb4e0b5a 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_scans.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/endpoints/test_scans.py @@ -22,7 +22,7 @@ def setUp(self): app.dependency_overrides[requires_auth] = requires_no_auth self.db_scans = [] for i in range(1, 6): - self.db_scans.append(DBscan(branch_id=i, scan_type="BASE", + self.db_scans.append(DBscan(repository_id=i, scan_type="BASE", last_scanned_commit="FAKE_HASH", timestamp=datetime.utcnow(), increment_number=0, rule_pack=f"rule_pack_{i}")) self.db_scans[i - 1].id_ = i @@ -45,7 +45,7 @@ def setUp(self): email=f"email_{i}", rule_name=f"rule_{i}", event_sent_on=datetime.utcnow(), - branch_id=1)) + repository_id=1)) self.db_findings[i - 1].id_ = i self.enriched_findings = [] @@ -61,7 +61,7 @@ def setUp(self): commit_timestamp=datetime.utcnow(), author=f"author_{i}", 
email=f"email_{i}", - branch_id=i, + repository_id=i, rule_name=f"rule_{i}", event_sent_on=datetime.utcnow())) @@ -70,7 +70,7 @@ def create_json_body(scan): return {"timestamp": datetime.strftime(scan.timestamp, "%Y-%m-%dT%H:%M:%S.%f"), "scan_type": scan.scan_type, "last_scanned_commit": scan.last_scanned_commit, - "branch_id": scan.branch_id, + "repository_id": scan.repository_id, "increment_number": scan.increment_number, "rule_pack": scan.rule_pack } @@ -78,13 +78,13 @@ def create_json_body(scan): @staticmethod def cast_db_scan_to_scan_create(scan): return ScanCreate(scan_type=scan.scan_type, last_scanned_commit=scan.last_scanned_commit, - timestamp=scan.timestamp, branch_id=scan.branch_id, + timestamp=scan.timestamp, repository_id=scan.repository_id, increment_number=scan.increment_number, rule_pack=scan.rule_pack) @staticmethod def assert_scan(data, scan): assert data["id_"] == scan.id_ - assert data["branch_id"] == scan.branch_id + assert data["repository_id"] == scan.repository_id assert datetime.strptime(data["timestamp"], "%Y-%m-%dT%H:%M:%S.%f") == scan.timestamp @patch("resc_backend.resc_web_service.crud.scan.get_scan") @@ -114,7 +114,8 @@ def test_delete_scan(self, delete_scan, get_scan): response = self.client.delete(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_SCANS}/{db_scan.id_}") assert response.status_code == 200, response.text get_scan.assert_called_once_with(ANY, scan_id=db_scan.id_) - delete_scan.assert_called_once_with(ANY, branch_id=db_scan.branch_id, scan_id=db_scan.id_, delete_related=True) + delete_scan.assert_called_once_with(ANY, repository_id=db_scan.repository_id, scan_id=db_scan.id_, + delete_related=True) @patch("resc_backend.resc_web_service.crud.scan.get_scan") @patch("resc_backend.resc_web_service.crud.scan.delete_scan") @@ -140,13 +141,13 @@ def test_post_scan(self, create_scan): create_scan.assert_called_once_with(db_connection=ANY, scan=self.cast_db_scan_to_scan_create(db_scan)) @patch("resc_backend.resc_web_service.crud.scan.create_scan") 
- @patch("resc_backend.resc_web_service.crud.scan.get_latest_scan_for_branch") - def test_post_increment_scan(self, get_latest_scan_for_branch, create_scan): + @patch("resc_backend.resc_web_service.crud.scan.get_latest_scan_for_repository") + def test_post_increment_scan(self, get_latest_scan_for_repository, create_scan): db_scan = self.db_scans[0] db_scan.scan_type = ScanType.INCREMENTAL print("db_scan", db_scan) create_scan.return_value = db_scan - get_latest_scan_for_branch.return_value = self.db_scans[1] + get_latest_scan_for_repository.return_value = self.db_scans[1] response = self.client.post(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_SCANS}", json=self.create_json_body(db_scan)) assert response.status_code == 201, response.text @@ -175,14 +176,14 @@ def test_post_scans_empty_body(self, create_scan): assert data["detail"][1]["msg"] == "field required" assert data["detail"][2]["loc"] == ['body', 'rule_pack'] assert data["detail"][2]["msg"] == "field required" - assert data["detail"][3]["loc"] == ['body', 'branch_id'] + assert data["detail"][3]["loc"] == ['body', 'repository_id'] assert data["detail"][3]["msg"] == "field required" create_scan.assert_not_called() @patch("resc_backend.resc_web_service.crud.scan.create_scan") def test_post_scans_invalid_timestamp(self, create_scan): response = self.client.post(f"{RWS_VERSION_PREFIX}{RWS_ROUTE_SCANS}", - json={"branch_id": 1, "scan_type": "BASE", + json={"repository_id": 1, "scan_type": "BASE", "last_scanned_commit": "dummy_commit", "timestamp": "invalid_time"}) assert response.status_code == 422, response.text data = response.json() @@ -215,7 +216,7 @@ def test_put_scans_non_existing(self, update_scan, get_scan): "last_scanned_commit": "dummy_commit", "timestamp": "2021-09-12T17:38:28.501000", "vcs_provider": "dummy_vcs_provider", - "branch_id": 999, + "repository_id": 999, "rule_pack": "1.5" }, ) @@ -237,7 +238,7 @@ def test_put_scans_empty_body(self, update_scan, get_scan): assert data["detail"][1]["msg"] == "field 
required" assert data["detail"][2]["loc"] == ['body', 'rule_pack'] assert data["detail"][2]["msg"] == "field required" - assert data["detail"][3]["loc"] == ['body', 'branch_id'] + assert data["detail"][3]["loc"] == ['body', 'repository_id'] assert data["detail"][3]["msg"] == "field required" update_scan.assert_not_called() get_scan.assert_not_called() diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_pagination_model.py b/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_pagination_model.py index c1c1c009..4d3103b3 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_pagination_model.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_pagination_model.py @@ -67,7 +67,7 @@ def test_pagination_model_findings(): email=f"email_{i}", rule_name=f"rule_{i}", event_sent_on=datetime.utcnow(), - branch_id=1) + repository_id=1) finding.id_ = i findings.append(FindingRead.create_from_db_entities(finding, scan_ids=[])) diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_schema_detailed_finding.py b/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_schema_detailed_finding.py index 236ec310..f2a6675a 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_schema_detailed_finding.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/schema/test_schema_detailed_finding.py @@ -32,7 +32,6 @@ def setUp(self): repository_url=f"http://fake.repo.com/{i}", timestamp=datetime.utcnow(), vcs_provider="AZURE_DEVOPS", - branch_name=f"branch_name_{i}", last_scanned_commit=f"last_scanned_commit_{i}", commit_url=f"commit_url_{i}", scan_id=i), @@ -59,7 +58,6 @@ def setUp(self): repository_url=f"https://dummy-bitbucket-instance.com/projects/project_key_{index}", timestamp=datetime.utcnow(), vcs_provider="BITBUCKET", - branch_name=f"branch_name_{index}", 
last_scanned_commit=f"last_scanned_commit_{index}", commit_url=f"commit_url_{index}", scan_id=index), @@ -70,8 +68,7 @@ def test_get_commit_url_by_vcs_provider_ado(self): for index, finding in enumerate(detailed_findings): assert detailed_findings[ index].commit_url == f"http://fake.repo.com/{index + 1}/commit/commit_id_{index + 1}" \ - f"?refName=refs/heads/branch_name_{index + 1}" \ - f"&path=/file_path_{index + 1}" + f"?path=/file_path_{index + 1}" def test_get_commit_url_by_vcs_provider_bitbucket(self): detailed_findings = self.detailed_findings_bitbucket diff --git a/components/resc-backend/tests/resc_backend/resc_web_service/test_dependencies.py b/components/resc-backend/tests/resc_backend/resc_web_service/test_dependencies.py index c03dd4e1..92f793d1 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service/test_dependencies.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service/test_dependencies.py @@ -43,7 +43,7 @@ def test_check_db_initialized_false(has_table, error_logger): check_db_initialized() error_logger.assert_called_once_with("Database is NOT connected or initialized | Unable to determine existence of " - "required table(s) branch, finding, repository, rules, scan, " + "required table(s) finding, repository, rules, scan, " "scan_finding | Retrying...") diff --git a/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_branches.py b/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_branches.py deleted file mode 100644 index e7121e10..00000000 --- a/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_branches.py +++ /dev/null @@ -1,22 +0,0 @@ -# Standard Library -from unittest.mock import patch - -# First Party -from resc_backend.resc_web_service.schema.branch import BranchCreate -from resc_backend.resc_web_service_interface.branches import create_branch - - -@patch("requests.post") -def test_create_branch(post): - expected_url = 
'https://fake-host.com/resc/v1/branches' - url = 'https://fake-host.com' - - branch = BranchCreate(branch_id=1, - branch_name="branch_name", - latest_commit="latest_commit", - repository_id=2) - expected_json = branch.json() - - _ = create_branch(url, branch) - post.assert_called_once() - post.assert_called_with(expected_url, data=expected_json, proxies={'http': '', 'https': ''}) diff --git a/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_findings.py b/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_findings.py index 1e9b747f..5447cd40 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_findings.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_findings.py @@ -23,7 +23,7 @@ status=FindingStatus.NOT_ANALYZED, comment=f"comment_{i}", rule_name=f"rule_{i}", - branch_id=1) + repository_id=1) ) diff --git a/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_scans.py b/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_scans.py index 6233271e..86fbe6ac 100644 --- a/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_scans.py +++ b/components/resc-backend/tests/resc_backend/resc_web_service_interface/test_client_scans.py @@ -12,7 +12,7 @@ def test_create_scan(post): expected_url = 'https://fake-host.com/resc/v1/scans' url = 'https://fake-host.com' - scan = ScanCreate(scan_type="BASE", last_scanned_commit="FAKE_HASH", timestamp=datetime.utcnow(), branch_id=1, + scan = ScanCreate(scan_type="BASE", last_scanned_commit="FAKE_HASH", timestamp=datetime.utcnow(), repository_id=1, rule_pack="1.2") expected_json = scan.json() diff --git a/components/resc-frontend/src/components/Filters/FindingStatusFilter.vue b/components/resc-frontend/src/components/Filters/FindingStatusFilter.vue index 251f45b5..109e936c 100644 --- 
a/components/resc-frontend/src/components/Filters/FindingStatusFilter.vue +++ b/components/resc-frontend/src/components/Filters/FindingStatusFilter.vue @@ -25,6 +25,7 @@ diff --git a/components/resc-frontend/src/components/ScanFindings/AuditModal.vue b/components/resc-frontend/src/components/ScanFindings/AuditModal.vue index 8a4fc81e..465845c9 100644 --- a/components/resc-frontend/src/components/ScanFindings/AuditModal.vue +++ b/components/resc-frontend/src/components/ScanFindings/AuditModal.vue @@ -68,6 +68,7 @@