Skip to content

Commit d4b6bd1

Browse files
authored
Merge branch 'main' into defaultTracing
2 parents 2bf9005 + 7266686 commit d4b6bd1

23 files changed

+514
-145
lines changed

.release-please-manifest.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
{
2-
".": "3.57.0"
2+
".": "3.58.0"
33
}

CHANGELOG.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,18 @@
44

55
[1]: https://pypi.org/project/google-cloud-spanner/#history
66

7+
## [3.58.0](https://github.com/googleapis/python-spanner/compare/v3.57.0...v3.58.0) (2025-09-10)
8+
9+
10+
### Features
11+
12+
* **spanner:** Support setting read lock mode ([#1404](https://github.com/googleapis/python-spanner/issues/1404)) ([ee24c6e](https://github.com/googleapis/python-spanner/commit/ee24c6ee2643bc74d52e9f0a924b80a830fa2697))
13+
14+
15+
### Dependencies
16+
17+
* Remove Python 3.7 and 3.8 as supported runtimes ([#1395](https://github.com/googleapis/python-spanner/issues/1395)) ([fc93792](https://github.com/googleapis/python-spanner/commit/fc9379232224f56d29d2e36559a756c05a5478ff))
18+
719
## [3.57.0](https://github.com/googleapis/python-spanner/compare/v3.56.0...v3.57.0) (2025-08-14)
820

921

google/cloud/spanner_admin_database_v1/gapic_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,4 @@
1313
# See the License for the specific language governing permissions and
1414
# limitations under the License.
1515
#
16-
__version__ = "3.57.0" # {x-release-please-version}
16+
__version__ = "3.58.0" # {x-release-please-version}

google/cloud/spanner_admin_instance_v1/gapic_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,4 @@
1313
# See the License for the specific language governing permissions and
1414
# limitations under the License.
1515
#
16-
__version__ = "3.57.0" # {x-release-please-version}
16+
__version__ = "3.58.0" # {x-release-please-version}

google/cloud/spanner_v1/client.py

Lines changed: 20 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
"""
2626
import grpc
2727
import os
28+
import logging
2829
import warnings
2930

3031
from google.api_core.gapic_v1 import client_info
@@ -97,6 +98,9 @@ def _get_spanner_optimizer_statistics_package():
9798
return os.getenv(OPTIMIZER_STATISITCS_PACKAGE_ENV_VAR, "")
9899

99100

101+
log = logging.getLogger(__name__)
102+
103+
100104
def _get_spanner_enable_builtin_metrics():
101105
return os.getenv(ENABLE_SPANNER_METRICS_ENV_VAR) == "true"
102106

@@ -240,19 +244,24 @@ def __init__(
240244
and HAS_GOOGLE_CLOUD_MONITORING_INSTALLED
241245
):
242246
meter_provider = metrics.NoOpMeterProvider()
243-
if not _get_spanner_emulator_host():
244-
meter_provider = MeterProvider(
245-
metric_readers=[
246-
PeriodicExportingMetricReader(
247-
CloudMonitoringMetricsExporter(
248-
project_id=project, credentials=credentials
247+
try:
248+
if not _get_spanner_emulator_host():
249+
meter_provider = MeterProvider(
250+
metric_readers=[
251+
PeriodicExportingMetricReader(
252+
CloudMonitoringMetricsExporter(
253+
project_id=project, credentials=credentials
254+
),
255+
export_interval_millis=METRIC_EXPORT_INTERVAL_MS,
249256
),
250-
export_interval_millis=METRIC_EXPORT_INTERVAL_MS,
251-
)
252-
]
257+
]
258+
)
259+
metrics.set_meter_provider(meter_provider)
260+
SpannerMetricsTracerFactory()
261+
except Exception as e:
262+
log.warning(
263+
"Failed to initialize Spanner built-in metrics. Error: %s", e
253264
)
254-
metrics.set_meter_provider(meter_provider)
255-
SpannerMetricsTracerFactory()
256265
else:
257266
SpannerMetricsTracerFactory(enabled=False)
258267

google/cloud/spanner_v1/database.py

Lines changed: 28 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1532,6 +1532,14 @@ def to_dict(self):
15321532
"transaction_id": snapshot._transaction_id,
15331533
}
15341534

1535+
def __enter__(self):
1536+
"""Begin ``with`` block."""
1537+
return self
1538+
1539+
def __exit__(self, exc_type, exc_val, exc_tb):
1540+
"""End ``with`` block."""
1541+
self.close()
1542+
15351543
@property
15361544
def observability_options(self):
15371545
return getattr(self._database, "observability_options", {})
@@ -1703,6 +1711,7 @@ def process_read_batch(
17031711
*,
17041712
retry=gapic_v1.method.DEFAULT,
17051713
timeout=gapic_v1.method.DEFAULT,
1714+
lazy_decode=False,
17061715
):
17071716
"""Process a single, partitioned read.
17081717
@@ -1717,6 +1726,14 @@ def process_read_batch(
17171726
:type timeout: float
17181727
:param timeout: (Optional) The timeout for this request.
17191728
1729+
:type lazy_decode: bool
1730+
:param lazy_decode:
1731+
(Optional) If this argument is set to ``true``, the iterator
1732+
returns the underlying protobuf values instead of decoded Python
1733+
objects. This reduces the time that is needed to iterate through
1734+
large result sets. The application is responsible for decoding
1735+
the data that is needed.
1736+
17201737
17211738
:rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
17221739
:returns: a result set instance which can be used to consume rows.
@@ -1844,6 +1861,7 @@ def process_query_batch(
18441861
self,
18451862
batch,
18461863
*,
1864+
lazy_decode: bool = False,
18471865
retry=gapic_v1.method.DEFAULT,
18481866
timeout=gapic_v1.method.DEFAULT,
18491867
):
@@ -1854,6 +1872,13 @@ def process_query_batch(
18541872
one of the mappings returned from an earlier call to
18551873
:meth:`generate_query_batches`.
18561874
1875+
:type lazy_decode: bool
1876+
:param lazy_decode:
1877+
(Optional) If this argument is set to ``true``, the iterator
1878+
returns the underlying protobuf values instead of decoded Python
1879+
objects. This reduces the time that is needed to iterate through
1880+
large result sets.
1881+
18571882
:type retry: :class:`~google.api_core.retry.Retry`
18581883
:param retry: (Optional) The retry settings for this request.
18591884
@@ -1870,6 +1895,7 @@ def process_query_batch(
18701895
return self._get_snapshot().execute_sql(
18711896
partition=batch["partition"],
18721897
**batch["query"],
1898+
lazy_decode=lazy_decode,
18731899
retry=retry,
18741900
timeout=timeout,
18751901
)
@@ -1883,6 +1909,7 @@ def run_partitioned_query(
18831909
max_partitions=None,
18841910
query_options=None,
18851911
data_boost_enabled=False,
1912+
lazy_decode=False,
18861913
):
18871914
"""Start a partitioned query operation to get list of partitions and
18881915
then executes each partition on a separate thread
@@ -1943,7 +1970,7 @@ def run_partitioned_query(
19431970
data_boost_enabled,
19441971
)
19451972
)
1946-
return MergedResultSet(self, partitions, 0)
1973+
return MergedResultSet(self, partitions, 0, lazy_decode=lazy_decode)
19471974

19481975
def process(self, batch):
19491976
"""Process a single, partitioned query or read.

google/cloud/spanner_v1/gapic_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,4 @@
1313
# See the License for the specific language governing permissions and
1414
# limitations under the License.
1515
#
16-
__version__ = "3.57.0" # {x-release-please-version}
16+
__version__ = "3.58.0" # {x-release-please-version}

google/cloud/spanner_v1/merged_result_set.py

Lines changed: 37 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -33,10 +33,13 @@ class PartitionExecutor:
3333
rows in the queue
3434
"""
3535

36-
def __init__(self, batch_snapshot, partition_id, merged_result_set):
36+
def __init__(
37+
self, batch_snapshot, partition_id, merged_result_set, lazy_decode=False
38+
):
3739
self._batch_snapshot: BatchSnapshot = batch_snapshot
3840
self._partition_id = partition_id
3941
self._merged_result_set: MergedResultSet = merged_result_set
42+
self._lazy_decode = lazy_decode
4043
self._queue: Queue[PartitionExecutorResult] = merged_result_set._queue
4144

4245
def run(self):
@@ -52,7 +55,9 @@ def run(self):
5255
def __run(self):
5356
results = None
5457
try:
55-
results = self._batch_snapshot.process_query_batch(self._partition_id)
58+
results = self._batch_snapshot.process_query_batch(
59+
self._partition_id, lazy_decode=self._lazy_decode
60+
)
5661
for row in results:
5762
if self._merged_result_set._metadata is None:
5863
self._set_metadata(results)
@@ -75,6 +80,7 @@ def _set_metadata(self, results, is_exception=False):
7580
try:
7681
if not is_exception:
7782
self._merged_result_set._metadata = results.metadata
83+
self._merged_result_set._result_set = results
7884
finally:
7985
self._merged_result_set.metadata_lock.release()
8086
self._merged_result_set.metadata_event.set()
@@ -94,7 +100,10 @@ class MergedResultSet:
94100
records in the MergedResultSet is not guaranteed.
95101
"""
96102

97-
def __init__(self, batch_snapshot, partition_ids, max_parallelism):
103+
def __init__(
104+
self, batch_snapshot, partition_ids, max_parallelism, lazy_decode=False
105+
):
106+
self._result_set = None
98107
self._exception = None
99108
self._metadata = None
100109
self.metadata_event = Event()
@@ -110,7 +119,7 @@ def __init__(self, batch_snapshot, partition_ids, max_parallelism):
110119
partition_executors = []
111120
for partition_id in partition_ids:
112121
partition_executors.append(
113-
PartitionExecutor(batch_snapshot, partition_id, self)
122+
PartitionExecutor(batch_snapshot, partition_id, self, lazy_decode)
114123
)
115124
executor = ThreadPoolExecutor(max_workers=parallelism)
116125
for partition_executor in partition_executors:
@@ -144,3 +153,27 @@ def metadata(self):
144153
def stats(self):
145154
# TODO: Implement
146155
return None
156+
157+
def decode_row(self, row: []) -> []:
158+
"""Decodes a row from protobuf values to Python objects. This function
159+
should only be called for result sets that use ``lazy_decode=True``.
160+
The array that is returned by this function is the same as the array
161+
that would have been returned by the rows iterator if ``lazy_decode=False``.
162+
163+
:returns: an array containing the decoded values of all the columns in the given row
164+
"""
165+
if self._result_set is None:
166+
raise ValueError("iterator not started")
167+
return self._result_set.decode_row(row)
168+
169+
def decode_column(self, row: [], column_index: int):
170+
"""Decodes a column from a protobuf value to a Python object. This function
171+
should only be called for result sets that use ``lazy_decode=True``.
172+
The object that is returned by this function is the same as the object
173+
that would have been returned by the rows iterator if ``lazy_decode=False``.
174+
175+
:returns: the decoded column value
176+
"""
177+
if self._result_set is None:
178+
raise ValueError("iterator not started")
179+
return self._result_set.decode_column(row, column_index)

google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py

Lines changed: 18 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717

1818
from .metrics_tracer_factory import MetricsTracerFactory
1919
import os
20+
import logging
2021
from .constants import (
2122
SPANNER_SERVICE_NAME,
2223
GOOGLE_CLOUD_REGION_KEY,
@@ -33,9 +34,6 @@
3334

3435
import mmh3
3536

36-
# Override Resource detector logging to not warn when GCP resources are not detected
37-
import logging
38-
3937
logging.getLogger("opentelemetry.resourcedetector.gcp_resource_detector").setLevel(
4038
logging.ERROR
4139
)
@@ -48,6 +46,8 @@
4846
from google.cloud.spanner_v1 import __version__
4947
from uuid import uuid4
5048

49+
log = logging.getLogger(__name__)
50+
5151

5252
class SpannerMetricsTracerFactory(MetricsTracerFactory):
5353
"""A factory for creating SpannerMetricsTracer instances."""
@@ -158,15 +158,23 @@ def _generate_client_hash(client_uid: str) -> str:
158158
def _get_location() -> str:
159159
"""Get the location of the resource.
160160
161+
In case of any error during detection, this method will log a warning
162+
and default to the "global" location.
163+
161164
Returns:
162165
str: The location of the resource. If OpenTelemetry is not installed, returns a global region.
163166
"""
164167
if not HAS_OPENTELEMETRY_INSTALLED:
165168
return GOOGLE_CLOUD_REGION_GLOBAL
166-
detector = gcp_resource_detector.GoogleCloudResourceDetector()
167-
resources = detector.detect()
168-
169-
if GOOGLE_CLOUD_REGION_KEY not in resources.attributes:
170-
return GOOGLE_CLOUD_REGION_GLOBAL
171-
else:
172-
return resources[GOOGLE_CLOUD_REGION_KEY]
169+
try:
170+
detector = gcp_resource_detector.GoogleCloudResourceDetector()
171+
resources = detector.detect()
172+
173+
if GOOGLE_CLOUD_REGION_KEY in resources.attributes:
174+
return resources.attributes[GOOGLE_CLOUD_REGION_KEY]
175+
except Exception as e:
176+
log.warning(
177+
"Failed to detect GCP resource location for Spanner metrics, defaulting to 'global'. Error: %s",
178+
e,
179+
)
180+
return GOOGLE_CLOUD_REGION_GLOBAL

samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
],
99
"language": "PYTHON",
1010
"name": "google-cloud-spanner-admin-database",
11-
"version": "0.1.0"
11+
"version": "3.58.0"
1212
},
1313
"snippets": [
1414
{

0 commit comments

Comments
 (0)