Skip to content

Commit

Permalink
fix: Allow protobuf 5.x (#972)
Browse files Browse the repository at this point in the history
* chore: Update gapic-generator-python to v1.18.0

PiperOrigin-RevId: 638650618

Source-Link: googleapis/googleapis@6330f03

Source-Link: googleapis/googleapis-gen@44fa4f1
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9

* feat(spanner): Add support for Cloud Spanner Scheduled Backups

PiperOrigin-RevId: 649277844

Source-Link: googleapis/googleapis@fd7efa2

Source-Link: googleapis/googleapis-gen@50be251
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9

* feat: publish the Cloud Bigtable ExecuteQuery API

The ExecuteQuery API will allow users to query Bigtable using SQL.

PiperOrigin-RevId: 650660213

Source-Link: googleapis/googleapis@f681f79

Source-Link: googleapis/googleapis-gen@3180845
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzE4MDg0NTQ4NzEzNjc5NDk1MmI4ZjM2NWZlNmM2ODY4OTk5ZDljMCJ9

* feat: publish ProtoRows Message

This is needed to parse ExecuteQuery responses.

PiperOrigin-RevId: 651386373

Source-Link: googleapis/googleapis@a5be6fa

Source-Link: googleapis/googleapis-gen@d467ce8
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDQ2N2NlODkzYTA0YzQxZTUwNDk4MzM0NmMyMTVkNDFmZDI2MzY1MCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* update setup.py to match googleapis/gapic-generator-python/blob/main/gapic/templates/setup.py.j2

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* update constraints

---------

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: Anthonios Partheniou <[email protected]>
  • Loading branch information
3 people committed Jul 18, 2024
1 parent 481c8d6 commit 7ac8e14
Show file tree
Hide file tree
Showing 27 changed files with 2,935 additions and 1,072 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore


try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,8 @@ def __init__(

# Save the scopes.
self._scopes = scopes
if not hasattr(self, "_ignore_credentials"):
self._ignore_credentials: bool = False

# If no credentials are provided, then determine the appropriate
# defaults.
Expand All @@ -108,7 +110,7 @@ def __init__(
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,8 @@ def __init__(

if isinstance(channel, grpc.Channel):
# Ignore credentials if a channel was passed.
credentials = False
credentials = None
self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,8 @@ def __init__(

if isinstance(channel, aio.Channel):
# Ignore credentials if a channel was passed.
credentials = False
credentials = None
self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore


try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,8 @@ def __init__(

# Save the scopes.
self._scopes = scopes
if not hasattr(self, "_ignore_credentials"):
self._ignore_credentials: bool = False

# If no credentials are provided, then determine the appropriate
# defaults.
Expand All @@ -108,7 +110,7 @@ def __init__(
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,8 @@ def __init__(

if isinstance(channel, grpc.Channel):
# Ignore credentials if a channel was passed.
credentials = False
credentials = None
self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,8 @@ def __init__(

if isinstance(channel, aio.Channel):
# Ignore credentials if a channel was passed.
credentials = False
credentials = None
self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
Expand Down
22 changes: 22 additions & 0 deletions google/cloud/bigtable_v2/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@

from .types.bigtable import CheckAndMutateRowRequest
from .types.bigtable import CheckAndMutateRowResponse
from .types.bigtable import ExecuteQueryRequest
from .types.bigtable import ExecuteQueryResponse
from .types.bigtable import GenerateInitialChangeStreamPartitionsRequest
from .types.bigtable import GenerateInitialChangeStreamPartitionsResponse
from .types.bigtable import MutateRowRequest
Expand All @@ -40,12 +42,20 @@
from .types.bigtable import ReadRowsResponse
from .types.bigtable import SampleRowKeysRequest
from .types.bigtable import SampleRowKeysResponse
from .types.data import ArrayValue
from .types.data import Cell
from .types.data import Column
from .types.data import ColumnMetadata
from .types.data import ColumnRange
from .types.data import Family
from .types.data import Mutation
from .types.data import PartialResultSet
from .types.data import ProtoFormat
from .types.data import ProtoRows
from .types.data import ProtoRowsBatch
from .types.data import ProtoSchema
from .types.data import ReadModifyWriteRule
from .types.data import ResultSetMetadata
from .types.data import Row
from .types.data import RowFilter
from .types.data import RowRange
Expand All @@ -62,15 +72,20 @@
from .types.request_stats import RequestLatencyStats
from .types.request_stats import RequestStats
from .types.response_params import ResponseParams
from .types.types import Type

__all__ = (
"BigtableAsyncClient",
"ArrayValue",
"BigtableClient",
"Cell",
"CheckAndMutateRowRequest",
"CheckAndMutateRowResponse",
"Column",
"ColumnMetadata",
"ColumnRange",
"ExecuteQueryRequest",
"ExecuteQueryResponse",
"Family",
"FeatureFlags",
"FullReadStatsView",
Expand All @@ -81,8 +96,13 @@
"MutateRowsRequest",
"MutateRowsResponse",
"Mutation",
"PartialResultSet",
"PingAndWarmRequest",
"PingAndWarmResponse",
"ProtoFormat",
"ProtoRows",
"ProtoRowsBatch",
"ProtoSchema",
"RateLimitInfo",
"ReadChangeStreamRequest",
"ReadChangeStreamResponse",
Expand All @@ -95,6 +115,7 @@
"RequestLatencyStats",
"RequestStats",
"ResponseParams",
"ResultSetMetadata",
"Row",
"RowFilter",
"RowRange",
Expand All @@ -105,6 +126,7 @@
"StreamContinuationTokens",
"StreamPartition",
"TimestampRange",
"Type",
"Value",
"ValueRange",
)
15 changes: 15 additions & 0 deletions google/cloud/bigtable_v2/gapic_metadata.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,11 @@
"check_and_mutate_row"
]
},
"ExecuteQuery": {
"methods": [
"execute_query"
]
},
"GenerateInitialChangeStreamPartitions": {
"methods": [
"generate_initial_change_stream_partitions"
Expand Down Expand Up @@ -65,6 +70,11 @@
"check_and_mutate_row"
]
},
"ExecuteQuery": {
"methods": [
"execute_query"
]
},
"GenerateInitialChangeStreamPartitions": {
"methods": [
"generate_initial_change_stream_partitions"
Expand Down Expand Up @@ -115,6 +125,11 @@
"check_and_mutate_row"
]
},
"ExecuteQuery": {
"methods": [
"execute_query"
]
},
"GenerateInitialChangeStreamPartitions": {
"methods": [
"generate_initial_change_stream_partitions"
Expand Down
104 changes: 104 additions & 0 deletions google/cloud/bigtable_v2/services/bigtable/async_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore


try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
Expand Down Expand Up @@ -1293,6 +1294,109 @@ def read_change_stream(
# Done; return the response.
return response

def execute_query(
    self,
    request: Optional[Union[bigtable.ExecuteQueryRequest, dict]] = None,
    *,
    instance_name: Optional[str] = None,
    query: Optional[str] = None,
    app_profile_id: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, str]] = (),
) -> Awaitable[AsyncIterable[bigtable.ExecuteQueryResponse]]:
    r"""Executes a BTQL query against a particular Cloud
    Bigtable instance.

    Args:
        request (Optional[Union[google.cloud.bigtable_v2.types.ExecuteQueryRequest, dict]]):
            The request object. Request message for
            Bigtable.ExecuteQuery. Mutually exclusive with the
            flattened field arguments below.
        instance_name (:class:`str`):
            Required. The unique name of the instance against which
            the query should be executed. Values are of the form
            ``projects/<project>/instances/<instance>``
            This corresponds to the ``instance_name`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        query (:class:`str`):
            Required. The query string.
            This corresponds to the ``query`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        app_profile_id (:class:`str`):
            Optional. This value specifies routing for replication.
            If not specified, the ``default`` application profile
            will be used.
            This corresponds to the ``app_profile_id`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        AsyncIterable[google.cloud.bigtable_v2.types.ExecuteQueryResponse]:
            Response message for
            Bigtable.ExecuteQuery

    Raises:
        ValueError: If ``request`` is given together with any of the
            flattened field arguments.
    """
    # A caller must choose one of the two calling conventions: either a
    # fully-formed request object, or the flattened field arguments —
    # never both. (Truthiness check mirrors the generated GAPIC surface.)
    if request is not None and (instance_name or query or app_profile_id):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce a dict (or None) into a proto-plus request object. A request
    # object passed by the caller is used as-is — there are no flattened
    # fields to merge, so no copy is needed.
    if not isinstance(request, bigtable.ExecuteQueryRequest):
        request = bigtable.ExecuteQueryRequest(request)

    # Apply any flattened field arguments onto the request.
    for field, value in (
        ("instance_name", instance_name),
        ("query", query),
        ("app_profile_id", app_profile_id),
    ):
        if value is not None:
            setattr(request, field, value)

    # Look up the pre-wrapped RPC, which carries the configured retry and
    # timeout defaults and friendly error handling.
    transport = self._client._transport
    stream_rpc = transport._wrapped_methods[transport.execute_query]

    # The instance name must also travel in the routing metadata header.
    routing_metadata = gapic_v1.routing_header.to_grpc_metadata(
        (("instance_name", request.instance_name),)
    )
    metadata = tuple(metadata) + (routing_metadata,)

    # Validate the universe domain before sending anything.
    self._client._validate_universe_domain()

    # Issue the streaming call and hand the awaitable stream back.
    return stream_rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

async def __aenter__(self) -> "BigtableAsyncClient":
return self

Expand Down
Loading

0 comments on commit 7ac8e14

Please sign in to comment.