Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
DEFAULT_RETRY_TIMEOUT_SECS = 30


# Experimental API - Please note that this API is currently experimental and can change in the future.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Normally, this should be part of the docstring so it shows up in IDEs. Now a customer must read the source code to see that this is an experimental API. So it should be something like this:

class Batch(_BatchBase):
    """Accumulate mutations for transmission during :meth:`commit`.
    .. note::
        This API is experimental and subject to breaking changes.
    """

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

So the problem with adding this directly to the docstring is that it will get copied to the sync files as well. CrossSync ignores "#" comments, so it easily works this way.

Another way I can think of is adding a check at the end of the async classes to determine whether this is an async file using "CrossSync.is_async", and then appending the experimental warning to the docstring. Something like:

if CrossSync.is_async:
    if Client.__doc__ is not None:
        Client.__doc__ += "\n\nExperimental API - Please note that this API is currently experimental and can change in the future.\n"

class _BatchBase(_SessionWrapper):
"""Accumulate mutations for transmission during :meth:`commit`.

Expand Down
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@ def _initialize_metrics(project, credentials):
)


# Experimental API - Please note that this API is currently experimental and can change in the future.
class Client(ClientWithProject):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is the most important class to mark clearly, as this is the main entry point for the async API. Can we add a bit more context to the docstring of this class? So something like this:

.. note::
The Async API is currently experimental and subject to breaking changes. This comment will be removed once the API has stabilized.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sure, will add.

"""Client for interacting with Cloud Spanner API.

Expand Down
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,7 @@
DEFAULT_RETRY_BACKOFF = AsyncRetry(initial=0.02, maximum=32, multiplier=1.3)


# Experimental API - Please note that this API is currently experimental and can change in the future.
class Database(object):
"""Representation of a Cloud Spanner Database.

Expand Down
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ def _type_string_to_type_pb(type_string):


@CrossSync.convert_class(add_mapping_for_name="Instance")
# Experimental API - Please note that this API is currently experimental and can change in the future.
class Instance(object):
"""Representation of a Cloud Spanner Instance.

Expand Down
5 changes: 5 additions & 0 deletions google/cloud/spanner_v1/_async/pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ async def __aexit__(self, exc_type, exc_value, traceback):


@CrossSync.convert_class
# Experimental API - Please note that this API is currently experimental and can change in the future.
class AbstractSessionPool(object):
"""Specifies required API for concrete session pool implementations.

Expand Down Expand Up @@ -209,6 +210,7 @@ def session(self, **kwargs):


@CrossSync.convert_class
# Experimental API - Please note that this API is currently experimental and can change in the future.
class FixedSizePool(AbstractSessionPool):
"""Concrete session pool implementation:

Expand Down Expand Up @@ -475,6 +477,7 @@ async def clear(self):


@CrossSync.convert_class
# Experimental API - Please note that this API is currently experimental and can change in the future.
class BurstyPool(AbstractSessionPool):
"""Concrete session pool implementation:

Expand Down Expand Up @@ -586,6 +589,7 @@ async def clear(self):


@CrossSync.convert_class
# Experimental API - Please note that this API is currently experimental and can change in the future.
class PingingPool(FixedSizePool):
"""Concrete session pool implementation:

Expand Down Expand Up @@ -835,6 +839,7 @@ async def ping(self):


@CrossSync.convert_class
# Experimental API - Please note that this API is currently experimental and can change in the future.
class TransactionPingingPool(PingingPool):
"""Concrete session pool implementation:

Expand Down
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@


@total_ordering
# Experimental API - Please note that this API is currently experimental and can change in the future.
class Session(object):
"""Representation of a Cloud Spanner Session.

Expand Down
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/snapshot.py
Original file line number Diff line number Diff line change
Expand Up @@ -776,6 +776,7 @@ def _update_for_precommit_token_pb_unsafe(
self._precommit_token = precommit_token_pb


# Experimental API - Please note that this API is currently experimental and can change in the future.
class Snapshot(_SnapshotBase):
"""Allow a set of reads / SQL statements with shared staleness."""

Expand Down
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/streamed.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
from google.cloud.spanner_v1.types.type import TypeCode


# Experimental API - Please note that this API is currently experimental and can change in the future.
class StreamedResultSet(object):
"""Process a sequence of partial result sets into a single set of row data.

Expand Down
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/_async/transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@
from google.cloud.spanner_v1.types.transaction import TransactionOptions


# Experimental API - Please note that this API is currently experimental and can change in the future.
class Transaction(_SnapshotBase, _BatchBase):
"""Implement read-write transaction semantics for a session.

Expand Down
2 changes: 1 addition & 1 deletion google/cloud/spanner_v1/batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ def wrapped_method():
max_commit_delay=max_commit_delay,
request_options=request_options,
)
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we move these other changes to a separate PR?

getattr(database, "_next_nth_request", 0), 1, metadata, span
)
commit_method = functools.partial(
Expand Down
12 changes: 7 additions & 5 deletions google/cloud/spanner_v1/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,9 +210,11 @@ def __init__(
def _resource_info(self):
"""Resource information for metrics labels."""
return {
"project": self._instance._client.project
if self._instance and self._instance._client
else None,
"project": (
self._instance._client.project
if self._instance and self._instance._client
else None
),
"instance": self._instance.instance_id if self._instance else None,
"database": self.database_id,
}
Expand Down Expand Up @@ -530,7 +532,7 @@ def with_error_augmentation(
tuple: (metadata_list, context_manager)"""
if span is None:
span = get_current_span()
metadata, request_id = _metadata_with_request_id_and_req_id(
(metadata, request_id) = _metadata_with_request_id_and_req_id(
self._nth_client_id,
self._channel_id,
nth_request,
Expand Down Expand Up @@ -807,7 +809,7 @@ def execute_pdml():
session = self._sessions_manager.get_session(transaction_type)
try:
add_span_event(span, "Starting BeginTransaction")
call_metadata, error_augmenter = self.with_error_augmentation(
(call_metadata, error_augmenter) = self.with_error_augmentation(
self._next_nth_request, 1, metadata, span
)
with error_augmenter:
Expand Down
12 changes: 7 additions & 5 deletions google/cloud/spanner_v1/pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,7 +304,7 @@ def _fill_pool(self):
f"Creating {request.session_count} sessions",
span_event_attributes,
)
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
database._next_nth_request, 1, metadata, span
)
with error_augmenter:
Expand Down Expand Up @@ -612,7 +612,7 @@ def bind(self, database):
) as span, MetricsCapture(self._resource_info):
returned_session_count = 0
while returned_session_count < self.size:
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
database._next_nth_request, 1, metadata, span
)
with error_augmenter:
Expand Down Expand Up @@ -654,7 +654,7 @@ def get(self, timeout=None):
ping_after = None
session = None
try:
ping_after, session = CrossSync._Sync_Impl.queue_get(
(ping_after, session) = CrossSync._Sync_Impl.queue_get(
self._sessions, block=True, timeout=timeout
)
except CrossSync._Sync_Impl.QueueEmpty as e:
Expand Down Expand Up @@ -698,7 +698,9 @@ def clear(self):
"""Delete all sessions in the pool."""
while True:
try:
_, session = CrossSync._Sync_Impl.queue_get(self._sessions, block=False)
(_, session) = CrossSync._Sync_Impl.queue_get(
self._sessions, block=False
)
except CrossSync._Sync_Impl.QueueEmpty:
break
else:
Expand All @@ -711,7 +713,7 @@ def ping(self):
or during the "idle" phase of an event loop."""
while True:
try:
ping_after, session = CrossSync._Sync_Impl.queue_get(
(ping_after, session) = CrossSync._Sync_Impl.queue_get(
self._sessions, block=False
)
except CrossSync._Sync_Impl.QueueEmpty:
Expand Down
8 changes: 4 additions & 4 deletions google/cloud/spanner_v1/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def create(self):
observability_options=observability_options,
metadata=metadata,
) as span, MetricsCapture(self._resource_info):
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, 1, metadata, span
)
with error_augmenter:
Expand Down Expand Up @@ -232,7 +232,7 @@ def exists(self):
observability_options=observability_options,
metadata=metadata,
) as span, MetricsCapture(self._resource_info):
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, 1, metadata, span
)
with error_augmenter:
Expand Down Expand Up @@ -283,7 +283,7 @@ def delete(self):
observability_options=observability_options,
metadata=metadata,
) as span, MetricsCapture(self._resource_info):
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, 1, metadata, span
)
with error_augmenter:
Expand All @@ -300,7 +300,7 @@ def ping(self):
metadata = _metadata_with_prefix(database.name)
nth_request = database._next_nth_request
with trace_call("CloudSpanner.Session.ping", self) as span:
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, 1, metadata, span
)
with error_augmenter:
Expand Down
6 changes: 3 additions & 3 deletions google/cloud/spanner_v1/snapshot.py
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,7 @@ def execute_sql(
raise ValueError("Transaction has not begun.")
if params is not None:
params_pb = Struct(
fields={key: _make_value_pb(value) for key, value in params.items()}
fields={key: _make_value_pb(value) for (key, value) in params.items()}
)
else:
params_pb = {}
Expand Down Expand Up @@ -513,7 +513,7 @@ def partition_query(
raise ValueError("Cannot partition a single-use transaction.")
if params is not None:
params_pb = Struct(
fields={key: _make_value_pb(value) for key, value in params.items()}
fields={key: _make_value_pb(value) for (key, value) in params.items()}
)
else:
params_pb = Struct()
Expand Down Expand Up @@ -614,7 +614,7 @@ def wrapped_method():
begin_transaction_request = BeginTransactionRequest(
**begin_request_kwargs
)
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, attempt.increment(), metadata, span
)
begin_transaction_method = functools.partial(
Expand Down
8 changes: 4 additions & 4 deletions google/cloud/spanner_v1/streamed.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def _consume_next(self):

def __iter__(self):
while True:
iter_rows, self._rows[:] = (self._rows[:], ())
(iter_rows, self._rows[:]) = (self._rows[:], ())
while iter_rows:
yield iter_rows.pop(0)
if self._done:
Expand Down Expand Up @@ -230,7 +230,7 @@ def to_dict_list(self):
rows.append(
{
column: value
for column, value in zip(
for (column, value) in zip(
[column.name for column in self._metadata.row_type.fields], row
)
}
Expand Down Expand Up @@ -291,7 +291,7 @@ def _merge_array(lhs, rhs, type_):
if element_type.code in _UNMERGEABLE_TYPES:
lhs.list_value.values.extend(rhs.list_value.values)
return lhs
lhs, rhs = (list(lhs.list_value.values), list(rhs.list_value.values))
(lhs, rhs) = (list(lhs.list_value.values), list(rhs.list_value.values))
if not len(lhs) or not len(rhs):
return Value(list_value=ListValue(values=lhs + rhs))
first = rhs.pop(0)
Expand All @@ -316,7 +316,7 @@ def _merge_array(lhs, rhs, type_):
def _merge_struct(lhs, rhs, type_):
"""Helper for '_merge_by_type'."""
fields = type_.struct_type.fields
lhs, rhs = (list(lhs.list_value.values), list(rhs.list_value.values))
(lhs, rhs) = (list(lhs.list_value.values), list(rhs.list_value.values))
if not len(lhs) or not len(rhs):
return Value(list_value=ListValue(values=lhs + rhs))
candidate_type = fields[len(lhs) - 1].type_
Expand Down
18 changes: 9 additions & 9 deletions google/cloud/spanner_v1/transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ def rollback(self) -> None:

def wrapped_method(*args, **kwargs):
attempt.increment()
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, attempt.value, metadata, span
)
rollback_method = functools.partial(
Expand Down Expand Up @@ -269,7 +269,7 @@ def wrapped_method(*args, **kwargs):
is_multiplexed = getattr(self._session, "is_multiplexed", False)
if is_multiplexed and self._precommit_token is not None:
commit_request_args["precommit_token"] = self._precommit_token
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, attempt.value, metadata, span
)
commit_method = functools.partial(
Expand Down Expand Up @@ -300,7 +300,7 @@ def before_next_retry(nth_retry, delay_in_seconds):
if commit_response_pb._pb.HasField("precommit_token"):
add_span_event(span, commit_retry_event_name)
nth_request = database._next_nth_request
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, 1, metadata, span
)
with error_augmenter:
Expand Down Expand Up @@ -338,7 +338,7 @@ def _make_params_pb(params, param_types):
If ``params`` is None but ``param_types`` is not None."""
if params:
return Struct(
fields={key: _make_value_pb(value) for key, value in params.items()}
fields={key: _make_value_pb(value) for (key, value) in params.items()}
)
return {}

Expand Down Expand Up @@ -417,7 +417,7 @@ def execute_update(
metadata.append(
_metadata_with_leader_aware_routing(database._route_to_leader_enabled)
)
seqno, self._execute_sql_request_count = (
(seqno, self._execute_sql_request_count) = (
self._execute_sql_request_count,
self._execute_sql_request_count + 1,
)
Expand Down Expand Up @@ -454,7 +454,7 @@ def execute_update(

def wrapped_method(*args, **kwargs):
attempt.increment()
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, attempt.value, metadata
)
execute_sql_method = functools.partial(
Expand Down Expand Up @@ -544,7 +544,7 @@ def batch_update(
if isinstance(statement, str):
parsed.append(ExecuteBatchDmlRequest.Statement(sql=statement))
else:
dml, params, param_types = statement
(dml, params, param_types) = statement
params_pb = self._make_params_pb(params, param_types)
parsed.append(
ExecuteBatchDmlRequest.Statement(
Expand All @@ -556,7 +556,7 @@ def batch_update(
metadata.append(
_metadata_with_leader_aware_routing(database._route_to_leader_enabled)
)
seqno, self._execute_sql_request_count = (
(seqno, self._execute_sql_request_count) = (
self._execute_sql_request_count,
self._execute_sql_request_count + 1,
)
Expand Down Expand Up @@ -590,7 +590,7 @@ def batch_update(

def wrapped_method(*args, **kwargs):
attempt.increment()
call_metadata, error_augmenter = database.with_error_augmentation(
(call_metadata, error_augmenter) = database.with_error_augmentation(
nth_request, attempt.value, metadata
)
execute_batch_dml_method = functools.partial(
Expand Down
Loading