Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: add F flag to remove unused imports #52649

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ repos:
- id: ruff
args:
- --fix
- --select=I
- --select=I,F

# Run the formatter.
- id: ruff-format
Expand Down
4 changes: 2 additions & 2 deletions airbyte-ci/connectors/base_images/base_images/java/bases.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#
from __future__ import annotations

from typing import Callable, Final
from typing import Final

import dagger

Expand Down Expand Up @@ -48,7 +48,7 @@ def get_container(self, platform: dagger.Platform) -> dagger.Container:
            # tar is required to untar java connector binary distributions.
# openssl is required because we need to ssh and scp sometimes.
# findutils is required for xargs, which is shipped as part of findutils.
f"yum install -y shadow-utils tar openssl findutils",
"yum install -y shadow-utils tar openssl findutils",
# Update first, but in the same .with_exec step as the package installation.
# Otherwise, we risk caching stale package URLs.
"yum update -y --security",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import json
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Mapping, Optional, Tuple, Type
from typing import Dict, List, Optional, Tuple, Type

import dagger
import semver
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def get_ci_on_master_report(connector: Connector) -> Dict | None:
json_report = get_ci_json_report(json_report_url)
if json_report["connector_version"] == connector.version:
return json_report
except Exception as e:
except Exception:
continue
return None

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,6 @@
from pathlib import Path
from typing import TYPE_CHECKING

import requests

if TYPE_CHECKING:
from typing import Callable, List, Set, Tuple

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
from pathlib import Path
from typing import Any

from connector_ops.utils import Connector # type: ignore
from jinja2 import Environment, FileSystemLoader
from markdown_it import MarkdownIt
from markdown_it.tree import SyntaxTreeNode
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import re
from pathlib import Path
from typing import Any, Dict, List
from typing import Dict, List

from connector_ops.utils import Connector # type: ignore
from jinja2 import Environment, FileSystemLoader
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.

import os
from datetime import datetime, timedelta

import toml
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -481,4 +481,4 @@ def test_pass_when_documentation_file_has_changelog_entry(self, mocker, tmp_path

# Assert
assert result.status == CheckStatus.PASSED
assert f"Changelog entry found for version 1.0.0" in result.message
assert "Changelog entry found for version 1.0.0" in result.message
Original file line number Diff line number Diff line change
@@ -1,10 +1,6 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
from __future__ import annotations

import os

import pytest

from connectors_qa import consts
from connectors_qa.checks import metadata
from connectors_qa.models import CheckStatus
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def test_pass_when_https_url(self, mocker, tmp_path):
# Arrange
connector = mocker.MagicMock(code_directory=tmp_path)
file_with_http_url = tmp_path / "file.py"
file_with_http_url.write_text(f"https://example.com")
file_with_http_url.write_text("https://example.com")

# Act
result = security.CheckConnectorUsesHTTPSOnly()._run(connector)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
import rich
from connection_retriever import ConnectionObject, retrieve_objects # type: ignore
from connection_retriever.retrieval import TestingCandidate, retrieve_testing_candidates
from pydantic import ValidationError

from live_tests.commons import hacks
from live_tests.commons.models import ConnectionSubset
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

import dagger
import pytest
from airbyte_protocol.models import AirbyteCatalog, AirbyteStateMessage, ConfiguredAirbyteCatalog, ConnectorSpecification # type: ignore
from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore
from connection_retriever.audit_logging import get_user_email # type: ignore
from connection_retriever.retrieval import ConnectionNotFoundError, get_current_docker_image_tag # type: ignore
from rich.prompt import Confirm, Prompt
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from collections.abc import Callable

import pytest
from airbyte_protocol.models import Status, Type # type: ignore

from live_tests.commons.models import ExecutionResult
from live_tests.consts import MAX_LINES_IN_REPORT
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

import pytest
from _pytest.fixtures import SubRequest
from airbyte_protocol.models import AirbyteCatalog, AirbyteStream, Type # type: ignore
from airbyte_protocol.models import AirbyteStream # type: ignore

from live_tests.commons.models import ExecutionResult
from live_tests.utils import fail_test_on_failing_execution_results, get_and_write_diff, get_catalog
Expand Down
3 changes: 0 additions & 3 deletions airbyte-ci/connectors/live-tests/src/live_tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,9 @@
from pathlib import Path
from typing import TYPE_CHECKING, Optional, Union

import docker # type: ignore
import pytest
from airbyte_protocol.models import AirbyteCatalog, AirbyteMessage, ConnectorSpecification, Status, Type # type: ignore
from deepdiff import DeepDiff # type: ignore
from mitmproxy import http, io # type: ignore
from mitmproxy.addons.savehar import SaveHar # type: ignore

from live_tests import stash_keys
from live_tests.commons.models import ExecutionResult
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#
from __future__ import annotations

from typing import TYPE_CHECKING, Callable, List, Union
from typing import Callable, List, Union

import dpath.util
import jsonschema
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from collections import defaultdict
from functools import reduce
from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Optional, Tuple
from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Tuple

import pytest
from airbyte_protocol.models import (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from airbyte_protocol.models import ConnectorSpecification

from live_tests.commons.json_schema_helper import JsonSchemaHelper, get_expected_schema_structure, get_paths_in_connector_config
from live_tests.commons.models import ExecutionResult, SecretDict
from live_tests.commons.models import ExecutionResult
from live_tests.utils import fail_test_on_failing_execution_results, find_all_values_for_key_in_schema, get_spec, get_test_logger

pytestmark = [
Expand Down Expand Up @@ -491,7 +491,7 @@ async def test_oauth_is_default_method(target_spec: ConnectorSpecification):
credentials = advanced_auth.predicate_key[0]
try:
one_of_default_method = dpath.util.get(spec_schema, f"/**/{credentials}/oneOf/0")
except KeyError as e: # Key Error when oneOf is not in credentials object
except KeyError: # Key Error when oneOf is not in credentials object
pytest.skip("Credentials object does not have oneOf option.")

path_in_credentials = "/".join(advanced_auth.predicate_key[1:])
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,8 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.

import pytest
import requests
import semver
import yaml

from metadata_service.docker_hub import get_latest_version_on_dockerhub
from metadata_service.models.generated.ConnectorMetadataDefinitionV0 import ConnectorMetadataDefinitionV0
from metadata_service.validators import metadata_validator

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

import json
import os
import re
from datetime import datetime
from typing import List, Type, TypeVar

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,12 @@
import os
from typing import List

import numpy as np
import pandas as pd
import yaml
from dagster import OpExecutionContext, Output, asset
from metadata_service.constants import ICON_FILE_NAME, METADATA_FILE_NAME
from metadata_service.models.generated.ConnectorMetadataDefinitionV0 import ConnectorMetadataDefinitionV0
from orchestrator.config import get_public_url_for_gcs_file
from orchestrator.logging import sentry
from orchestrator.models.metadata import LatestMetadataEntry, MetadataDefinition, PartialMetadataDefinition
from orchestrator.models.metadata import LatestMetadataEntry, MetadataDefinition
from orchestrator.utils.object_helpers import are_values_equal, merge_values


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@

import orchestrator.hacks as HACKS
import pandas as pd
import semver
import sentry_sdk
from dagster import AutoMaterializePolicy, DynamicPartitionsDefinition, MetadataValue, OpExecutionContext, Output, asset
from dagster_gcp.gcs.file_manager import GCSFileHandle, GCSFileManager
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import os

import pandas as pd
from dagster import AutoMaterializePolicy, FreshnessPolicy, OpExecutionContext, Output, asset
from dagster import AutoMaterializePolicy, FreshnessPolicy, OpExecutionContext, asset
from orchestrator.utils.dagster_helpers import OutputDataFrame, output_dataframe


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
NIGHTLY_INDIVIDUAL_TEST_REPORT_FILE_NAME = "output.json"
NIGHTLY_GHA_WORKFLOW_ID = "connector_nightly_builds_dagger.yml"
CI_TEST_REPORT_PREFIX = "airbyte-ci/connectors/test"
CI_MASTER_TEST_OUTPUT_REGEX = f".*master.*output.json$"
CI_MASTER_TEST_OUTPUT_REGEX = ".*master.*output.json$"
ANALYTICS_BUCKET = "ab-analytics-connector-metrics"
ANALYTICS_FOLDER = "data/connector_quality_metrics"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#

from dagster import AssetSelection, SkipReason, define_asset_job, job, op
from orchestrator.assets import metadata, registry, registry_entry, specs_secrets_mask
from orchestrator.assets import registry_entry
from orchestrator.config import HIGH_QUEUE_PRIORITY, MAX_METADATA_PARTITION_RUN_REQUEST
from orchestrator.logging.publish_connector_lifecycle import PublishConnectorLifecycle, PublishConnectorLifecycleStage, StageStatus

Expand Down Expand Up @@ -103,7 +103,7 @@ def add_new_metadata_partitions_op(context):
context.log.info(f"New etags found: {new_etags_found}")

if not new_etags_found:
return SkipReason(f"No new metadata files to process in GCS bucket")
return SkipReason("No new metadata files to process in GCS bucket")

# if there are more than the MAX_METADATA_PARTITION_RUN_REQUEST, we need to split them into multiple runs
etags_to_process = new_etags_found
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def _get_context_from_args_kwargs(args, kwargs):

# otherwise raise an error
raise Exception(
f"No context provided to Sentry Transaction. When using @instrument, ensure that the asset/op has a context as the first argument."
"No context provided to Sentry Transaction. When using @instrument, ensure that the asset/op has a context as the first argument."
)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,7 @@

import os

from dagster import OpExecutionContext, op
from dagster_slack import SlackResource
from slack_sdk import WebhookClient
from dagster import OpExecutionContext


def chunk_messages(report):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from typing import List, Optional, Union

import dagster._check as check
from dagster import BoolSource, Field, InitResourceContext, Noneable, StringSource, resource
from dagster import Field, InitResourceContext, Noneable, StringSource, resource
from dagster._core.storage.file_manager import check_file_like_obj
from dagster_gcp.gcs.file_manager import GCSFileHandle, GCSFileManager
from google.cloud import storage
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,7 @@
from typing import List

from dagster import InitResourceContext, StringSource, resource
from dateutil.parser import parse
from github import ContentFile, Github, GitTreeElement, Repository
from github import ContentFile, Github, Repository
from metadata_service.constants import METADATA_FILE_NAME
from orchestrator.config import CONNECTORS_PATH

Expand Down Expand Up @@ -64,7 +63,7 @@ def get_latest_commit_for_file(github_connector_repo: Repository, path: str) ->
config_schema={"connectors_path": StringSource},
)
def github_connectors_metadata_files(resource_context: InitResourceContext) -> List[dict]:
resource_context.log.info(f"retrieving github metadata files")
resource_context.log.info("retrieving github metadata files")

github_connector_repo = resource_context.resources.github_connector_repo
repo_file_tree = github_connector_repo.get_git_tree("master", recursive=True).tree
Expand All @@ -78,7 +77,7 @@ def github_connectors_metadata_files(resource_context: InitResourceContext) -> L
if _valid_metadata_file_path(github_file.path)
]

resource_context.log.info(f"finished retrieving github metadata files")
resource_context.log.info("finished retrieving github metadata files")
return metadata_file_paths


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#

import hashlib
from typing import List, Optional
from typing import List

import pandas as pd
from dagster import MetadataValue, Output
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,30 +8,24 @@

from typing import TYPE_CHECKING

import anyio
from dagger import File, QueryError

from pipelines.airbyte_ci.connectors.build_image.steps.common import LoadContainerToLocalDockerHost
from pipelines.airbyte_ci.connectors.build_image.steps.java_connectors import (
BuildConnectorDistributionTar,
BuildConnectorImages,
dist_tar_directory_path,
)
from pipelines.airbyte_ci.connectors.build_image.steps.normalization import BuildOrPullNormalization
from pipelines.airbyte_ci.connectors.consts import CONNECTOR_TEST_STEP_ID
from pipelines.airbyte_ci.connectors.test.context import ConnectorTestContext
from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests
from pipelines.airbyte_ci.steps.gradle import GradleTask
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.dagger.actions.system import docker
from pipelines.helpers.execution.run_steps import StepToRun
from pipelines.helpers.utils import export_container_to_tarball
from pipelines.models.steps import STEP_PARAMS, StepResult, StepStatus
from pipelines.models.steps import STEP_PARAMS

if TYPE_CHECKING:
from typing import Callable, Dict, List, Optional
from typing import List

from pipelines.helpers.execution.run_steps import RESULTS_DICT, STEP_TREE
from pipelines.helpers.execution.run_steps import STEP_TREE


class IntegrationTests(GradleTask):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from pathlib import Path
from typing import Any, ClassVar, Dict, Generator, List, Optional, Tuple, cast

from dagger import Container, ExecError
from dagger import ExecError

from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.helpers.utils import dagger_directory_as_zip_file
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

from pathlib import Path

import asyncclick as click
import pytest

from pipelines.airbyte_ci.connectors.build_image.steps import build_customization, python_connectors
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,10 @@
#
import datetime

import asyncclick as click
import pytest
import requests

from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.dagger.actions.python import common

pytestmark = [
pytest.mark.anyio,
Expand Down
Loading
Loading