From b17f8f1e856d655bce49921fc84f856192c64f8a Mon Sep 17 00:00:00 2001 From: Morgan Chen Date: Mon, 3 Nov 2025 14:51:14 -0800 Subject: [PATCH 1/4] add initial pipelines snippet file --- snippets/firestore/firestore_pipelines.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 snippets/firestore/firestore_pipelines.py diff --git a/snippets/firestore/firestore_pipelines.py b/snippets/firestore/firestore_pipelines.py new file mode 100644 index 000000000..bedec4f75 --- /dev/null +++ b/snippets/firestore/firestore_pipelines.py @@ -0,0 +1,22 @@ +# Copyright 2025 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import firebase_admin +from firebase_admin import firestore + +default_app = firebase_admin.initialize_app() +client = firestore.client(default_app, "your-new-enterprise-database") + +# pylint: disable=invalid-name +def init_firestore_client(): From 71c481fcbf1b46af3771bc499e80090d41c1c02e Mon Sep 17 00:00:00 2001 From: Morgan Chen Date: Tue, 4 Nov 2025 15:51:56 -0800 Subject: [PATCH 2/4] add pipeline snippets for python --- snippets/firestore/firestore_pipelines.py | 1269 ++++++++++++++++++++- 1 file changed, 1268 insertions(+), 1 deletion(-) diff --git a/snippets/firestore/firestore_pipelines.py b/snippets/firestore/firestore_pipelines.py index bedec4f75..478236ba8 100644 --- a/snippets/firestore/firestore_pipelines.py +++ b/snippets/firestore/firestore_pipelines.py @@ -12,6 +12,29 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from google.cloud.firestore import Query +from google.cloud.firestore_v1.pipeline import Pipeline +from google.cloud.firestore_v1.pipeline_source import PipelineSource +from google.cloud.firestore_v1.pipeline_expressions import ( + AggregateFunction, + Constant, + Expression, + Field, + Count, +) +from google.cloud.firestore_v1.pipeline_expressions import ( + And, Conditional, Or, Not, Xor +) +from google.cloud.firestore_v1.pipeline_stages import ( + Aggregate, + FindNearestOptions, + SampleOptions, + UnnestOptions, +) +from google.cloud.firestore_v1.base_vector_query import DistanceMeasure +from google.cloud.firestore_v1.vector import Vector +from google.cloud.firestore_v1.client import Client + import firebase_admin from firebase_admin import firestore @@ -19,4 +42,1248 @@ client = firestore.client(default_app, "your-new-enterprise-database") # pylint: disable=invalid-name -def init_firestore_client(): +def pipeline_concepts(): + # [START pipeline_concepts] + pipeline = client.pipeline() \ + .collection("cities") \ + .where(Field.of("population").greater_than(100000)) \ + .sort(Field.of("name").ascending()) \ + .limit(10) + # [END pipeline_concepts] + print(pipeline) + +def basic_read(): + # [START basic_read] + pipeline = client.pipeline().collection("users") + for result in pipeline.execute(): + print(result.id + " => " + result.data) + # [END basic_read] + +def pipeline_initialization(): + # [START pipeline_initialization] + firestore_client = firestore.client(default_app, "your-new-enterprise-database") + pipeline = firestore_client.pipeline() + # [END pipeline_initialization] + print(pipeline) + +def field_vs_constants(): + # [START field_or_constant] + pipeline = client.pipeline() \ + .collection("cities") \ + .where(Field.of("name").equal(Constant("Toronto"))) + # [END field_or_constant] + print(pipeline) + +def input_stages(): + # [START input_stages] + # Return all restaurants in San Francisco + results = client.pipeline().collection("cities/sf/restaurants").execute() + + # Return all restaurants + results = client.pipeline().collection_group("restaurants").execute() + + # Return all documents across all collections in the database (the entire database) + results = client.pipeline().database().execute() + + # Batch read of 3 documents + results = client.pipeline().documents( + client.collection("cities").document("SF"), + client.collection("cities").document("DC"), + client.collection("cities").document("NY") + ).execute() + # [END input_stages] + for result in results: + print(result) + +def where_pipeline(): + # [START pipeline_where] + results = client.pipeline().collection("books") \ + .where(Field.of("rating").equal(5)) \ + .where(Field.of("published").less_than(1900)) \ + .execute() + + results = client.pipeline().collection("books") \ + .where(And( + Field.of("rating").equal(5), + Field.of("published").less_than(1900) + )) \ + .execute() + # [END pipeline_where] + for result in results: + print(result) + +def aggregate_groups(): + # [START aggregate_groups] + results = client.pipeline() \ + .collection("books") \ + .aggregate( + Field.of("rating").average().as_("avg_rating"), + groups=[Field.of("genre")] + ) \ + .execute() + # [END aggregate_groups] + for result in results: + print(result) + +def aggregate_distinct(): + # [START aggregate_distinct] + results = client.pipeline() \ + .collection("books") \ + .distinct( + Field.of("author").to_upper().as_("author"), + "genre" + ) \ + .execute() + # [END aggregate_distinct] + for result in results: + print(result) + +def 
sort(): + # [START sort] + results = client.pipeline() \ + .collection("books") \ + .sort( + Field.of("release_date").descending(), + Field.of("author").ascending() + ) \ + .execute() + # [END sort] + for result in results: + print(result) + +def sort_comparison(): + # [START sort_comparison] + query = client.collection("cities") \ + .order_by("state") \ + .order_by("population", direction=Query.DESCENDING) + + pipeline = client.pipeline() \ + .collection("books") \ + .sort( + Field.of("release_date").descending(), + Field.of("author").ascending() + ) + # [END sort_comparison] + print(query) + print(pipeline) + +def functions_example(): + # [START functions_example] + # Type 1: Scalar (for use in non-aggregation stages) + # Example: Return the min store price for each book. + results = client.pipeline().collection("books") \ + .select( + Field.of("current").logical_minimum(Field.of("updated")).as_("price_min") + ) \ + .execute() + + # Type 2: Aggregation (for use in aggregate stages) + # Example: Return the min price of all books. + results = client.pipeline().collection("books") \ + .aggregate(Field.of("price").minimum().as_("min_price")) \ + .execute() + # [END functions_example] + for result in results: + print(result) + +def creating_indexes(): + # [START query_example] + results = client.pipeline() \ + .collection("books") \ + .where(Field.of("published").less_than(1900)) \ + .where(Field.of("genre").equal("Science Fiction")) \ + .where(Field.of("rating").greater_than(4.3)) \ + .sort(Field.of("published").descending()) \ + .execute() + # [END query_example] + for result in results: + print(result) + +def sparse_indexes(): + # [START sparse_index_example] + results = client.pipeline() \ + .collection("books") \ + .where(Field.of("category").like("%fantasy%")) \ + .execute() + # [END sparse_index_example] + for result in results: + print(result) + +def sparse_indexes2(): + # [START sparse_index_example_2] + results = client.pipeline() \ + .collection("books") \ + .sort(Field.of("release_date").ascending()) \ + .execute() + # [END sparse_index_example_2] + for result in results: + print(result) + +def covered_query(): + # [START covered_query] + results = client.pipeline() \ + .collection("books") \ + .where(Field.of("category").like("%fantasy%")) \ + .where(Field.of("title").exists()) \ + .where(Field.of("author").exists()) \ + .select("title", "author") \ + .execute() + # [END covered_query] + for result in results: + print(result) + +def pagination(): + # [START pagination_not_supported_preview] + # Existing pagination via `start_at()` + query = client.collection("cities").order_by("population").start_at({ + "population": 1000000 + }) + + # Private preview workaround using pipelines + pipeline = client.pipeline() \ + .collection("cities") \ + .where(Field.of("population").greater_than_or_equal(1000000)) \ + .sort(Field.of("population").descending()) + # [END pagination_not_supported_preview] + print(query) + print(pipeline) + +def collection_stage(): + # [START collection_example] + results = client.pipeline() \ + .collection("users/bob/games") \ + .sort(Field.of("name").ascending()) \ + .execute() + # [END collection_example] + for result in results: + print(result) + +def collection_group_stage(): + # [START collection_group_example] + results = client.pipeline() \ + .collection_group("games") \ + .sort(Field.of("name").ascending()) \ + .execute() + # [END collection_group_example] + for result in results: + print(result) + +def database_stage(): + # [START database_example] + # 
Count all documents in the database + results = client.pipeline() \ + .database() \ + .aggregate(Count().as_("total")) \ + .execute() + # [END database_example] + for result in results: + print(result) + +def documents_stage(): + # [START documents_example] + results = client.pipeline().documents( + client.collection("cities").document("SF"), + client.collection("cities").document("DC"), + client.collection("cities").document("NY") + ).execute() + # [END documents_example] + for result in results: + print(result) + +def replace_with_stage(): + # [START initial_data] + client.collection("cities").document("SF").set({ + "name": "San Francisco", + "population": 800000, + "location": { + "country": "USA", + "state": "California" + } + }) + client.collection("cities").document("TO").set({ + "name": "Toronto", + "population": 3000000, + "province": "ON", + "location": { + "country": "Canada", + "province": "Ontario" + } + }) + client.collection("cities").document("NY").set({ + "name": "New York", + "location": { + "country": "USA", + "state": "New York" + } + }) + client.collection("cities").document("AT").set({ + "name": "Atlantis", + }) + # [END initial_data] + + # [START full_replace] + names = client.pipeline() \ + .collection("cities") \ + .replace_with(Field.of("location")) \ + .execute() + # [END full_replace] + + # [START map_merge_overwrite] + # unsupported in client SDKs for now + # [END map_merge_overwrite] + for name in names: + print(name) + +def sample_stage(): + # [START sample_example] + # Get a sample of 100 documents in a database + results = client.pipeline() \ + .database() \ + .sample(100) \ + .execute() + + # Randomly shuffle a list of 3 documents + results = client.pipeline() \ + .documents( + client.collection("cities").document("SF"), + client.collection("cities").document("NY"), + client.collection("cities").document("DC") + ) \ + .sample(3) \ + .execute() + # [END sample_example] + for result in results: + print(result) + +def sample_percent(): + # [START sample_percent] + # Get a sample of on average 50% of the documents in the database + results = client.pipeline() \ + .database() \ + .sample(SampleOptions.percentage(0.5)) \ + .execute() + # [END sample_percent] + for result in results: + print(result) + +def union_stage(): + # [START union_stage] + results = client.pipeline() \ + .collection("cities/SF/restaurants") \ + .where(Field.of("type").equal("Chinese")) \ + .union(client.pipeline() \ + .collection("cities/NY/restaurants") \ + .where(Field.of("type").equal("Italian"))) \ + .where(Field.of("rating").greater_than_or_equal(4.5)) \ + .sort(Field.of("__name__").descending()) \ + .execute() + # [END union_stage] + for result in results: + print(result) + +def union_stage_stable(): + # [START union_stage_stable] + results = client.pipeline() \ + .collection("cities/SF/restaurants") \ + .where(Field.of("type").equal("Chinese")) \ + .union(client.pipeline() \ + .collection("cities/NY/restaurants") \ + .where(Field.of("type").equal("Italian"))) \ + .where(Field.of("rating").greater_than_or_equal(4.5)) \ + .sort(Field.of("__name__").descending()) \ + .execute() + # [END union_stage_stable] + for result in results: + print(result) + +def unnest_stage(): + # [START unnest_stage] + results = client.pipeline() \ + .database() \ + .unnest(Field.of("arrayField").as_("unnestedArrayField"), \ + options=UnnestOptions(index_field="index")) \ + .execute() + # [END unnest_stage] + for result in results: + print(result) + +def unnest_stage_empty_or_non_array(): + # [START 
unnest_edge_cases] + # Input + # { "identifier" : 1, "neighbors": [ "Alice", "Cathy" ] } + # { "identifier" : 2, "neighbors": [] } + # { "identifier" : 3, "neighbors": "Bob" } + + results = client.pipeline() \ + .database() \ + .unnest(Field.of("neighbors").as_("unnestedNeighbors"), \ + options=UnnestOptions(index_field="index")) \ + .execute() + + # Output + # { "identifier": 1, "neighbors": [ "Alice", "Cathy" ], + # "unnestedNeighbors": "Alice", "index": 0 } + # { "identifier": 1, "neighbors": [ "Alice", "Cathy" ], + # "unnestedNeighbors": "Cathy", "index": 1 } + # { "identifier": 3, "neighbors": "Bob", "index": null} + # [END unnest_edge_cases] + for result in results: + print(result) + +def count_function(): + # [START count_function] + # Total number of books in the collection + count_all = client.pipeline() \ + .collection("books") \ + .aggregate(Count().as_("count")) \ + .execute() + + # Number of books with nonnull `ratings` field + count_field = client.pipeline() \ + .collection("books") \ + .aggregate(Count("ratings").as_("count")) \ + .execute() + # [END count_function] + for result in count_all: + print(result) + for result in count_field: + print(result) + +def count_if_function(): + # [START count_if] + result = client.pipeline() \ + .collection("books") \ + .aggregate( + Field.of("rating").greater_than(4).count_if().as_("filteredCount") + ) \ + .execute() + # [END count_if] + for res in result: + print(res) + +def count_distinct_function(): + # [START count_distinct] + result = client.pipeline() \ + .collection("books") \ + .aggregate(Field.of("author").count_distinct().as_("unique_authors")) \ + .execute() + # [END count_distinct] + for res in result: + print(res) + +def sum_function(): + # [START sum_function] + result = client.pipeline() \ + .collection("cities") \ + .aggregate(Field.of("population").sum().as_("totalPopulation")) \ + .execute() + # [END sum_function] + for res in result: + print(res) + +def avg_function(): + # [START avg_function] + result = client.pipeline() \ + .collection("cities") \ + .aggregate(Field.of("population").average().as_("averagePopulation")) \ + .execute() + # [END avg_function] + for res in result: + print(res) + +def min_function(): + # [START min_function] + result = client.pipeline() \ + .collection("books") \ + .aggregate(Field.of("price").minimum().as_("minimumPrice")) \ + .execute() + # [END min_function] + for res in result: + print(res) + +def max_function(): + # [START max_function] + result = client.pipeline() \ + .collection("books") \ + .aggregate(Field.of("price").maximum().as_("maximumPrice")) \ + .execute() + # [END max_function] + for res in result: + print(res) + +def add_function(): + # [START add_function] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("soldBooks").add(Field.of("unsoldBooks")).as_("totalBooks")) \ + .execute() + # [END add_function] + for res in result: + print(res) + +def subtract_function(): + # [START subtract_function] + store_credit = 7 + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("price").subtract(store_credit).as_("totalCost")) \ + .execute() + # [END subtract_function] + for res in result: + print(res) + +def multiply_function(): + # [START multiply_function] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("price").multiply(Field.of("soldBooks")).as_("revenue")) \ + .execute() + # [END multiply_function] + for res in result: + print(res) + +def divide_function(): + # [START divide_function] + result = 
client.pipeline() \ + .collection("books") \ + .select(Field.of("ratings").divide(Field.of("soldBooks")).as_("reviewRate")) \ + .execute() + # [END divide_function] + for res in result: + print(res) + +def mod_function(): + # [START mod_function] + display_capacity = 1000 + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("unsoldBooks").mod(display_capacity).as_("warehousedBooks")) \ + .execute() + # [END mod_function] + for res in result: + print(res) + +def ceil_function(): + # [START ceil_function] + books_per_shelf = 100 + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("unsoldBooks").divide(books_per_shelf).ceil().as_("requiredShelves") + ) \ + .execute() + # [END ceil_function] + for res in result: + print(res) + +def floor_function(): + # [START floor_function] + result = client.pipeline() \ + .collection("books") \ + .add_fields( + Field.of("wordCount").divide(Field.of("pages")).floor().as_("wordsPerPage") + ) \ + .execute() + # [END floor_function] + for res in result: + print(res) + +def round_function(): + # [START round_function] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("soldBooks").multiply(Field.of("price")).round().as_("partialRevenue")) \ + .aggregate(Field.of("partialRevenue").sum().as_("totalRevenue")) \ + .execute() + # [END round_function] + for res in result: + print(res) + +def pow_function(): + # [START pow_function] + googleplexLat = 37.4221 + googleplexLng = -122.0853 + result = client.pipeline() \ + .collection("cities") \ + .add_fields( + Field.of("lat").subtract(googleplexLat) + .multiply(111) # km per degree + .pow(2) + .as_("latitudeDifference"), + Field.of("lng").subtract(googleplexLng) + .multiply(111) # km per degree + .pow(2) + .as_("longitudeDifference") + ) \ + .select( + Field.of("latitudeDifference").add(Field.of("longitudeDifference")).sqrt() + # Inaccurate for large distances or close to poles + .as_("approximateDistanceToGoogle") + ) \ + .execute() + # [END pow_function] + for res in result: + print(res) + +def sqrt_function(): + # [START sqrt_function] + googleplexLat = 37.4221 + googleplexLng = -122.0853 + result = client.pipeline() \ + .collection("cities") \ + .add_fields( + Field.of("lat").subtract(googleplexLat) + .multiply(111) # km per degree + .pow(2) + .as_("latitudeDifference"), + Field.of("lng").subtract(googleplexLng) + .multiply(111) # km per degree + .pow(2) + .as_("longitudeDifference") + ) \ + .select( + Field.of("latitudeDifference").add(Field.of("longitudeDifference")).sqrt() + # Inaccurate for large distances or close to poles + .as_("approximateDistanceToGoogle") + ) \ + .execute() + # [END sqrt_function] + for res in result: + print(res) + +def exp_function(): + # [START exp_function] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("rating").exp().as_("expRating")) \ + .execute() + # [END exp_function] + for res in result: + print(res) + +def ln_function(): + # [START ln_function] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("rating").ln().as_("lnRating")) \ + .execute() + # [END ln_function] + for res in result: + print(res) + +def log_function(): + # [START log_function] + # Not supported on Python + # [END log_function] + pass + +def array_concat(): + # [START array_concat] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("genre").array_concat(Field.of("subGenre")).as_("allGenres")) \ + .execute() + # [END array_concat] + for res in result: + print(res) + 
+def array_contains(): + # [START array_contains] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("genre").array_contains("mystery").as_("isMystery")) \ + .execute() + # [END array_contains] + for res in result: + print(res) + +def array_contains_all(): + # [START array_contains_all] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("genre") + .array_contains_all(["fantasy", "adventure"]) + .as_("isFantasyAdventure") + ) \ + .execute() + # [END array_contains_all] + for res in result: + print(res) + +def array_contains_any(): + # [START array_contains_any] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("genre") + .array_contains_any(["fantasy", "nonfiction"]) + .as_("isMysteryOrFantasy") + ) \ + .execute() + # [END array_contains_any] + for res in result: + print(res) + +def array_length(): + # [START array_length] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("genre").array_length().as_("genreCount")) \ + .execute() + # [END array_length] + for res in result: + print(res) + +def array_reverse(): + # [START array_reverse] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("genre").array_reverse().as_("reversedGenres")) \ + .execute() + # [END array_reverse] + for res in result: + print(res) + +def equal_function(): + # [START equal_function] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("rating").equal(5).as_("hasPerfectRating")) \ + .execute() + # [END equal_function] + for res in result: + print(res) + +def greater_than_function(): + # [START greater_than] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("rating").greater_than(4).as_("hasHighRating")) \ + .execute() + # [END greater_than] + for res in result: + print(res) + +def greater_than_or_equal_to_function(): + # [START greater_or_equal] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("published").greater_than_or_equal(1900).as_("publishedIn20thCentury")) \ + .execute() + # [END greater_or_equal] + for res in result: + print(res) + +def less_than_function(): + # [START less_than] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("published").less_than(1923).as_("isPublicDomainProbably")) \ + .execute() + # [END less_than] + for res in result: + print(res) + +def less_than_or_equal_to_function(): + # [START less_or_equal] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("rating").less_than_or_equal(2).as_("hasBadRating")) \ + .execute() + # [END less_or_equal] + for res in result: + print(res) + +def not_equal_function(): + # [START not_equal] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("title").not_equal("1984").as_("not1984")) \ + .execute() + # [END not_equal] + for res in result: + print(res) + +def exists_function(): + # [START exists_function] + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("rating").exists().as_("hasRating")) \ + .execute() + # [END exists_function] + for res in result: + print(res) + +def and_function(): + # [START and_function] + result = client.pipeline() \ + .collection("books") \ + .select( + And( + Field.of("rating").greater_than(4), + Field.of("price").less_than(10) + ).as_("under10Recommendation") + ) \ + .execute() + # [END and_function] + for res in result: + print(res) + +def or_function(): + # [START or_function] + result = client.pipeline() \ + .collection("books") \ + 
.select( + Or( + Field.of("genre").equal("Fantasy"), + Field.of("tags").array_contains("adventure") + ).as_("matchesSearchFilters") + ) \ + .execute() + # [END or_function] + for res in result: + print(res) + +def xor_function(): + # [START xor_function] + result = client.pipeline() \ + .collection("books") \ + .select( + Xor([ + Field.of("tags").array_contains("magic"), + Field.of("tags").array_contains("nonfiction") + ]).as_("matchesSearchFilters") + ) \ + .execute() + # [END xor_function] + for res in result: + print(res) + +def not_function(): + # [START not_function] + result = client.pipeline() \ + .collection("books") \ + .select( + Not( + Field.of("tags").array_contains("nonfiction") + ).as_("isFiction") + ) \ + .execute() + # [END not_function] + for res in result: + print(res) + +def cond_function(): + # [START cond_function] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("tags").array_concat( + Conditional( + Field.of("pages").greater_than(100), + Constant("longRead"), + Constant("shortRead") + ) + ).as_("extendedTags") + ) \ + .execute() + # [END cond_function] + for res in result: + print(res) + +def equal_any_function(): + # [START eq_any] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("genre").equal_any(["Science Fiction", "Psychological Thriller"]) + .as_("matchesGenreFilters") + ) \ + .execute() + # [END eq_any] + for res in result: + print(res) + +def not_equal_any_function(): + # [START not_eq_any] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("author").not_equal_any(["George Orwell", "F. Scott Fitzgerald"]) + .as_("byExcludedAuthors") + ) \ + .execute() + # [END not_eq_any] + for res in result: + print(res) + +def max_logical_function(): + # [START max_logical_function] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("rating").logical_maximum(1).as_("flooredRating") + ) \ + .execute() + # [END max_logical_function] + for res in result: + print(res) + +def min_logical_function(): + # [START min_logical_function] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("rating").logical_minimum(5).as_("cappedRating") + ) \ + .execute() + # [END min_logical_function] + for res in result: + print(res) + +def map_get_function(): + # [START map_get] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("awards").map_get("pulitzer").as_("hasPulitzerAward") + ) \ + .execute() + # [END map_get] + for res in result: + print(res) + +def byte_length_function(): + # [START byte_length] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("title").byte_length().as_("titleByteLength") + ) \ + .execute() + # [END byte_length] + for res in result: + print(res) + +def char_length_function(): + # [START char_length] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("title").char_length().as_("titleCharLength") + ) \ + .execute() + # [END char_length] + for res in result: + print(res) + +def starts_with_function(): + # [START starts_with] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("title").starts_with("The") + .as_("needsSpecialAlphabeticalSort") + ) \ + .execute() + # [END starts_with] + for res in result: + print(res) + +def ends_with_function(): + # [START ends_with] + result = client.pipeline() \ + .collection("inventory/devices/laptops") \ + .select( + Field.of("name").ends_with("16 inch") + .as_("16InLaptops") + ) \ + .execute() + 
# [END ends_with] + for res in result: + print(res) + +def like_function(): + # [START like] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("genre").like("%Fiction") + .as_("anyFiction") + ) \ + .execute() + # [END like] + for res in result: + print(res) + +def regex_contains_function(): + # [START regex_contains] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("title").regex_contains("Firestore (Enterprise|Standard)") + .as_("isFirestoreRelated") + ) \ + .execute() + # [END regex_contains] + for res in result: + print(res) + +def regex_match_function(): + # [START regex_match] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("title").regex_match("Firestore (Enterprise|Standard)") + .as_("isFirestoreExactly") + ) \ + .execute() + # [END regex_match] + for res in result: + print(res) + +def str_concat_function(): + # [START str_concat] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("title").concat(" by ", Field.of("author")) + .as_("fullyQualifiedTitle") + ) \ + .execute() + # [END str_concat] + for res in result: + print(res) + +def str_contains_function(): + # [START string_contains] + result = client.pipeline() \ + .collection("articles") \ + .select( + Field.of("body").string_contains("Firestore") + .as_("isFirestoreRelated") + ) \ + .execute() + # [END string_contains] + for res in result: + print(res) + +def to_upper_function(): + # [START to_upper] + result = client.pipeline() \ + .collection("authors") \ + .select( + Field.of("name").to_upper() + .as_("uppercaseName") + ) \ + .execute() + # [END to_upper] + for res in result: + print(res) + +def to_lower_function(): + # [START to_lower] + result = client.pipeline() \ + .collection("authors") \ + .select( + Field.of("genre").to_lower().equal("fantasy") + .as_("isFantasy") + ) \ + .execute() + # [END to_lower] + for res in result: + print(res) + +def substr_function(): + # [START substr_function] + result = client.pipeline() \ + .collection("books") \ + .where(Field.of("title").starts_with("The ")) \ + .select( + Field.of("title").substring(4) + .as_("titleWithoutLeadingThe") + ) \ + .execute() + # [END substr_function] + for res in result: + print(res) + +def str_reverse_function(): + # [START str_reverse] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("name").string_reverse().as_("reversedName") + ) \ + .execute() + # [END str_reverse] + for res in result: + print(res) + +def str_trim_function(): + # [START trim_function] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("name").trim().as_("whitespaceTrimmedName") + ) \ + .execute() + # [END trim_function] + for res in result: + print(res) + +def str_replace_function(): + # not yet supported until GA + pass + +def str_split_function(): + # not yet supported until GA + pass + +def unix_micros_to_timestamp_function(): + # [START unix_micros_timestamp] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("createdAtMicros").unix_micros_to_timestamp().as_("createdAtString") + ) \ + .execute() + # [END unix_micros_timestamp] + for res in result: + print(res) + +def unix_millis_to_timestamp_function(): + # [START unix_millis_timestamp] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("createdAtMillis").unix_millis_to_timestamp().as_("createdAtString") + ) \ + .execute() + # [END unix_millis_timestamp] + for res in result: + print(res) + +def 
unix_seconds_to_timestamp_function(): + # [START unix_seconds_timestamp] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("createdAtSeconds").unix_seconds_to_timestamp().as_("createdAtString") + ) \ + .execute() + # [END unix_seconds_timestamp] + for res in result: + print(res) + +def timestamp_add_function(): + # [START timestamp_add] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("createdAt").timestamp_add("day", 3653).as_("expiresAt") + ) \ + .execute() + # [END timestamp_add] + for res in result: + print(res) + +def timestamp_sub_function(): + # [START timestamp_sub] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("expiresAt").timestamp_subtract("day", 14).as_("sendWarningTimestamp") + ) \ + .execute() + # [END timestamp_sub] + for res in result: + print(res) + +def timestamp_to_unix_micros_function(): + # [START timestamp_unix_micros] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("dateString").timestamp_to_unix_micros().as_("unixMicros") + ) \ + .execute() + # [END timestamp_unix_micros] + for res in result: + print(res) + +def timestamp_to_unix_millis_function(): + # [START timestamp_unix_millis] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("dateString").timestamp_to_unix_millis().as_("unixMillis") + ) \ + .execute() + # [END timestamp_unix_millis] + for res in result: + print(res) + +def timestamp_to_unix_seconds_function(): + # [START timestamp_unix_seconds] + result = client.pipeline() \ + .collection("documents") \ + .select( + Field.of("dateString").timestamp_to_unix_seconds().as_("unixSeconds") + ) \ + .execute() + # [END timestamp_unix_seconds] + for res in result: + print(res) + +def cosine_distance_function(): + # [START cosine_distance] + sample_vector = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("embedding").cosine_distance(sample_vector).as_("cosineDistance") + ) \ + .execute() + # [END cosine_distance] + for res in result: + print(res) + +def dot_product_function(): + # [START dot_product] + sample_vector = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("embedding").dot_product(sample_vector).as_("dotProduct") + ) \ + .execute() + # [END dot_product] + for res in result: + print(res) + +def euclidean_distance_function(): + # [START euclidean_distance] + sample_vector = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("embedding").euclidean_distance(sample_vector).as_("euclideanDistance") + ) \ + .execute() + # [END euclidean_distance] + for res in result: + print(res) + +def vector_length_function(): + # [START vector_length] + result = client.pipeline() \ + .collection("books") \ + .select( + Field.of("embedding").vector_length().as_("vectorLength") + ) \ + .execute() + # [END vector_length] + for res in result: + print(res) From cf8b63e485b826bc281a3832c63bf1fafc9b8f46 Mon Sep 17 00:00:00 2001 From: Morgan Chen Date: Thu, 6 Nov 2025 09:57:52 -0800 Subject: [PATCH 3/4] address feedback except formatting --- snippets/firestore/firestore_pipelines.py | 361 +++++++++++++++------- 1 file changed, 251 insertions(+), 110 deletions(-) diff --git a/snippets/firestore/firestore_pipelines.py b/snippets/firestore/firestore_pipelines.py index 478236ba8..ac94cf9e5 100644 --- a/snippets/firestore/firestore_pipelines.py +++ 
b/snippets/firestore/firestore_pipelines.py @@ -46,7 +46,7 @@ def pipeline_concepts(): # [START pipeline_concepts] pipeline = client.pipeline() \ .collection("cities") \ - .where(Field.of("population").greater_than(100000)) \ + .where(Field.of("population").greater_than(100_000)) \ .sort(Field.of("name").ascending()) \ .limit(10) # [END pipeline_concepts] @@ -56,7 +56,11 @@ def basic_read(): # [START basic_read] pipeline = client.pipeline().collection("users") for result in pipeline.execute(): - print(result.id + " => " + result.data) + print(f"{result.id} => {result.data()}") + # or, asynchronously + result_stream = pipeline.stream() + async for result in result_stream: + print(f"{result.id} => {result.data()}") # [END basic_read] def pipeline_initialization(): @@ -70,7 +74,7 @@ def field_vs_constants(): # [START field_or_constant] pipeline = client.pipeline() \ .collection("cities") \ - .where(Field.of("name").equal(Constant("Toronto"))) + .where(Field.of("name").equal(Constant.of("Toronto"))) # [END field_or_constant] print(pipeline) @@ -97,6 +101,8 @@ def input_stages(): def where_pipeline(): # [START pipeline_where] + from google.cloud.firestore_v1.pipeline_expressions import (And, Field) + results = client.pipeline().collection("books") \ .where(Field.of("rating").equal(5)) \ .where(Field.of("published").less_than(1900)) \ @@ -106,53 +112,57 @@ def where_pipeline(): .where(And( Field.of("rating").equal(5), Field.of("published").less_than(1900) - )) \ - .execute() + )).execute() # [END pipeline_where] for result in results: print(result) def aggregate_groups(): # [START aggregate_groups] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("books") \ .aggregate( Field.of("rating").average().as_("avg_rating"), groups=[Field.of("genre")] - ) \ - .execute() + ).execute() # [END aggregate_groups] for result in results: print(result) def aggregate_distinct(): # [START aggregate_distinct] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("books") \ .distinct( Field.of("author").to_upper().as_("author"), "genre" - ) \ - .execute() + ).execute() # [END aggregate_distinct] for result in results: print(result) def sort(): # [START sort] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("books") \ .sort( Field.of("release_date").descending(), Field.of("author").ascending() - ) \ - .execute() + ).execute() # [END sort] for result in results: print(result) def sort_comparison(): # [START sort_comparison] + from google.cloud.firestore_v1.pipeline_expressions import Field + query = client.collection("cities") \ .order_by("state") \ .order_by("population", direction=Query.DESCENDING) @@ -169,13 +179,14 @@ def sort_comparison(): def functions_example(): # [START functions_example] + from google.cloud.firestore_v1.pipeline_expressions import Field + # Type 1: Scalar (for use in non-aggregation stages) # Example: Return the min store price for each book. results = client.pipeline().collection("books") \ .select( Field.of("current").logical_minimum(Field.of("updated")).as_("price_min") - ) \ - .execute() + ).execute() # Type 2: Aggregation (for use in aggregate stages) # Example: Return the min price of all books. 
@@ -188,6 +199,8 @@ def functions_example(): def creating_indexes(): # [START query_example] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("books") \ .where(Field.of("published").less_than(1900)) \ @@ -201,6 +214,8 @@ def creating_indexes(): def sparse_indexes(): # [START sparse_index_example] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("books") \ .where(Field.of("category").like("%fantasy%")) \ @@ -211,6 +226,8 @@ def sparse_indexes(): def sparse_indexes2(): # [START sparse_index_example_2] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("books") \ .sort(Field.of("release_date").ascending()) \ @@ -221,6 +238,8 @@ def sparse_indexes2(): def covered_query(): # [START covered_query] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("books") \ .where(Field.of("category").like("%fantasy%")) \ @@ -234,15 +253,17 @@ def covered_query(): def pagination(): # [START pagination_not_supported_preview] + from google.cloud.firestore_v1.pipeline_expressions import Field + # Existing pagination via `start_at()` query = client.collection("cities").order_by("population").start_at({ - "population": 1000000 + "population": 1_000_000 }) # Private preview workaround using pipelines pipeline = client.pipeline() \ .collection("cities") \ - .where(Field.of("population").greater_than_or_equal(1000000)) \ + .where(Field.of("population").greater_than_or_equal(1_000_000)) \ .sort(Field.of("population").descending()) # [END pagination_not_supported_preview] print(query) @@ -250,6 +271,8 @@ def pagination(): def collection_stage(): # [START collection_example] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("users/bob/games") \ .sort(Field.of("name").ascending()) \ @@ -260,6 +283,8 @@ def collection_stage(): def collection_group_stage(): # [START collection_group_example] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection_group("games") \ .sort(Field.of("name").ascending()) \ @@ -270,6 +295,8 @@ def collection_group_stage(): def database_stage(): # [START database_example] + from google.cloud.firestore_v1.pipeline_expressions import Count + # Count all documents in the database results = client.pipeline() \ .database() \ @@ -294,7 +321,7 @@ def replace_with_stage(): # [START initial_data] client.collection("cities").document("SF").set({ "name": "San Francisco", - "population": 800000, + "population": 800_000, "location": { "country": "USA", "state": "California" @@ -302,7 +329,7 @@ def replace_with_stage(): }) client.collection("cities").document("TO").set({ "name": "Toronto", - "population": 3000000, + "population": 3_000_000, "province": "ON", "location": { "country": "Canada", @@ -311,6 +338,7 @@ def replace_with_stage(): }) client.collection("cities").document("NY").set({ "name": "New York", + "population": 8_500_000, "location": { "country": "USA", "state": "New York" @@ -322,6 +350,8 @@ def replace_with_stage(): # [END initial_data] # [START full_replace] + from google.cloud.firestore_v1.pipeline_expressions import Field + names = client.pipeline() \ .collection("cities") \ .replace_with(Field.of("location")) \ @@ -357,6 +387,8 @@ def sample_stage(): def sample_percent(): # [START sample_percent] + from 
google.cloud.firestore_v1.pipeline_stages import SampleOptions + # Get a sample of on average 50% of the documents in the database results = client.pipeline() \ .database() \ @@ -368,6 +400,8 @@ def sample_percent(): def union_stage(): # [START union_stage] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("cities/SF/restaurants") \ .where(Field.of("type").equal("Chinese")) \ @@ -383,6 +417,8 @@ def union_stage(): def union_stage_stable(): # [START union_stage_stable] + from google.cloud.firestore_v1.pipeline_expressions import Field + results = client.pipeline() \ .collection("cities/SF/restaurants") \ .where(Field.of("type").equal("Chinese")) \ @@ -398,6 +434,9 @@ def union_stage_stable(): def unnest_stage(): # [START unnest_stage] + from google.cloud.firestore_v1.pipeline_expressions import Field + from google.cloud.firestore_v1.pipeline_stages import UnnestOptions + results = client.pipeline() \ .database() \ .unnest(Field.of("arrayField").as_("unnestedArrayField"), \ @@ -409,6 +448,9 @@ def unnest_stage(): def unnest_stage_empty_or_non_array(): # [START unnest_edge_cases] + from google.cloud.firestore_v1.pipeline_expressions import Field + from google.cloud.firestore_v1.pipeline_stages import UnnestOptions + # Input # { "identifier" : 1, "neighbors": [ "Alice", "Cathy" ] } # { "identifier" : 2, "neighbors": [] } @@ -432,6 +474,8 @@ def unnest_stage_empty_or_non_array(): def count_function(): # [START count_function] + from google.cloud.firestore_v1.pipeline_expressions import Count + # Total number of books in the collection count_all = client.pipeline() \ .collection("books") \ @@ -451,18 +495,21 @@ def count_function(): def count_if_function(): # [START count_if] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .aggregate( Field.of("rating").greater_than(4).count_if().as_("filteredCount") - ) \ - .execute() + ).execute() # [END count_if] for res in result: print(res) def count_distinct_function(): # [START count_distinct] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .aggregate(Field.of("author").count_distinct().as_("unique_authors")) \ @@ -473,6 +520,8 @@ def count_distinct_function(): def sum_function(): # [START sum_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("cities") \ .aggregate(Field.of("population").sum().as_("totalPopulation")) \ @@ -483,6 +532,8 @@ def sum_function(): def avg_function(): # [START avg_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("cities") \ .aggregate(Field.of("population").average().as_("averagePopulation")) \ @@ -493,6 +544,8 @@ def avg_function(): def min_function(): # [START min_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .aggregate(Field.of("price").minimum().as_("minimumPrice")) \ @@ -503,6 +556,8 @@ def min_function(): def max_function(): # [START max_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .aggregate(Field.of("price").maximum().as_("maximumPrice")) \ @@ -513,6 +568,8 @@ def max_function(): def add_function(): # [START add_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = 
client.pipeline() \ .collection("books") \ .select(Field.of("soldBooks").add(Field.of("unsoldBooks")).as_("totalBooks")) \ @@ -523,6 +580,8 @@ def add_function(): def subtract_function(): # [START subtract_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + store_credit = 7 result = client.pipeline() \ .collection("books") \ @@ -534,6 +593,8 @@ def subtract_function(): def multiply_function(): # [START multiply_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("price").multiply(Field.of("soldBooks")).as_("revenue")) \ @@ -544,6 +605,8 @@ def multiply_function(): def divide_function(): # [START divide_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("ratings").divide(Field.of("soldBooks")).as_("reviewRate")) \ @@ -554,6 +617,8 @@ def divide_function(): def mod_function(): # [START mod_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + display_capacity = 1000 result = client.pipeline() \ .collection("books") \ @@ -565,31 +630,35 @@ def mod_function(): def ceil_function(): # [START ceil_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + books_per_shelf = 100 result = client.pipeline() \ .collection("books") \ .select( Field.of("unsoldBooks").divide(books_per_shelf).ceil().as_("requiredShelves") - ) \ - .execute() + ).execute() # [END ceil_function] for res in result: print(res) def floor_function(): # [START floor_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .add_fields( Field.of("wordCount").divide(Field.of("pages")).floor().as_("wordsPerPage") - ) \ - .execute() + ).execute() # [END floor_function] for res in result: print(res) def round_function(): # [START round_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("soldBooks").multiply(Field.of("price")).round().as_("partialRevenue")) \ @@ -601,6 +670,8 @@ def round_function(): def pow_function(): # [START pow_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + googleplexLat = 37.4221 googleplexLng = -122.0853 result = client.pipeline() \ @@ -619,14 +690,15 @@ def pow_function(): Field.of("latitudeDifference").add(Field.of("longitudeDifference")).sqrt() # Inaccurate for large distances or close to poles .as_("approximateDistanceToGoogle") - ) \ - .execute() + ).execute() # [END pow_function] for res in result: print(res) def sqrt_function(): # [START sqrt_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + googleplexLat = 37.4221 googleplexLng = -122.0853 result = client.pipeline() \ @@ -645,14 +717,15 @@ def sqrt_function(): Field.of("latitudeDifference").add(Field.of("longitudeDifference")).sqrt() # Inaccurate for large distances or close to poles .as_("approximateDistanceToGoogle") - ) \ - .execute() + ).execute() # [END sqrt_function] for res in result: print(res) def exp_function(): # [START exp_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("rating").exp().as_("expRating")) \ @@ -663,6 +736,8 @@ def exp_function(): def ln_function(): # [START ln_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + 
result = client.pipeline() \ .collection("books") \ .select(Field.of("rating").ln().as_("lnRating")) \ @@ -673,12 +748,20 @@ def ln_function(): def log_function(): # [START log_function] - # Not supported on Python + from google.cloud.firestore_v1.pipeline_expressions import Field + + result = client.pipeline() \ + .collection("books") \ + .select(Field.of("rating").log(2).as_("log2Rating")) \ + .execute() # [END log_function] - pass + for res in result: + print(res) def array_concat(): # [START array_concat] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("genre").array_concat(Field.of("subGenre")).as_("allGenres")) \ @@ -689,6 +772,8 @@ def array_concat(): def array_contains(): # [START array_contains] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("genre").array_contains("mystery").as_("isMystery")) \ @@ -699,34 +784,38 @@ def array_contains(): def array_contains_all(): # [START array_contains_all] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("genre") .array_contains_all(["fantasy", "adventure"]) .as_("isFantasyAdventure") - ) \ - .execute() + ).execute() # [END array_contains_all] for res in result: print(res) def array_contains_any(): # [START array_contains_any] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("genre") .array_contains_any(["fantasy", "nonfiction"]) .as_("isMysteryOrFantasy") - ) \ - .execute() + ).execute() # [END array_contains_any] for res in result: print(res) def array_length(): # [START array_length] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("genre").array_length().as_("genreCount")) \ @@ -737,6 +826,8 @@ def array_length(): def array_reverse(): # [START array_reverse] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("genre").array_reverse().as_("reversedGenres")) \ @@ -747,6 +838,8 @@ def array_reverse(): def equal_function(): # [START equal_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("rating").equal(5).as_("hasPerfectRating")) \ @@ -757,6 +850,8 @@ def equal_function(): def greater_than_function(): # [START greater_than] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("rating").greater_than(4).as_("hasHighRating")) \ @@ -767,6 +862,8 @@ def greater_than_function(): def greater_than_or_equal_to_function(): # [START greater_or_equal] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("published").greater_than_or_equal(1900).as_("publishedIn20thCentury")) \ @@ -777,6 +874,8 @@ def greater_than_or_equal_to_function(): def less_than_function(): # [START less_than] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("published").less_than(1923).as_("isPublicDomainProbably")) \ @@ -787,6 +886,8 @@ def less_than_function(): def less_than_or_equal_to_function(): # 
[START less_or_equal] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("rating").less_than_or_equal(2).as_("hasBadRating")) \ @@ -797,6 +898,8 @@ def less_than_or_equal_to_function(): def not_equal_function(): # [START not_equal] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("title").not_equal("1984").as_("not1984")) \ @@ -807,6 +910,8 @@ def not_equal_function(): def exists_function(): # [START exists_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select(Field.of("rating").exists().as_("hasRating")) \ @@ -817,6 +922,8 @@ def exists_function(): def and_function(): # [START and_function] + from google.cloud.firestore_v1.pipeline_expressions import (Field, And) + result = client.pipeline() \ .collection("books") \ .select( @@ -824,14 +931,15 @@ def and_function(): Field.of("rating").greater_than(4), Field.of("price").less_than(10) ).as_("under10Recommendation") - ) \ - .execute() + ).execute() # [END and_function] for res in result: print(res) def or_function(): # [START or_function] + from google.cloud.firestore_v1.pipeline_expressions import (Field, And) + result = client.pipeline() \ .collection("books") \ .select( @@ -839,14 +947,15 @@ def or_function(): Field.of("genre").equal("Fantasy"), Field.of("tags").array_contains("adventure") ).as_("matchesSearchFilters") - ) \ - .execute() + ).execute() # [END or_function] for res in result: print(res) def xor_function(): # [START xor_function] + from google.cloud.firestore_v1.pipeline_expressions import (Field, Xor) + result = client.pipeline() \ .collection("books") \ .select( @@ -854,281 +963,301 @@ def xor_function(): Field.of("tags").array_contains("magic"), Field.of("tags").array_contains("nonfiction") ]).as_("matchesSearchFilters") - ) \ - .execute() + ).execute() # [END xor_function] for res in result: print(res) def not_function(): # [START not_function] + from google.cloud.firestore_v1.pipeline_expressions import (Field, Not) + result = client.pipeline() \ .collection("books") \ .select( Not( Field.of("tags").array_contains("nonfiction") ).as_("isFiction") - ) \ - .execute() + ).execute() # [END not_function] for res in result: print(res) def cond_function(): # [START cond_function] + from google.cloud.firestore_v1.pipeline_expressions import (Field, Constant, Conditional) + result = client.pipeline() \ .collection("books") \ .select( Field.of("tags").array_concat( Conditional( Field.of("pages").greater_than(100), - Constant("longRead"), - Constant("shortRead") + Constant.of("longRead"), + Constant.of("shortRead") ) ).as_("extendedTags") - ) \ - .execute() + ).execute() # [END cond_function] for res in result: print(res) def equal_any_function(): # [START eq_any] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("genre").equal_any(["Science Fiction", "Psychological Thriller"]) .as_("matchesGenreFilters") - ) \ - .execute() + ).execute() # [END eq_any] for res in result: print(res) def not_equal_any_function(): # [START not_eq_any] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("author").not_equal_any(["George Orwell", "F. 
Scott Fitzgerald"]) .as_("byExcludedAuthors") - ) \ - .execute() + ).execute() # [END not_eq_any] for res in result: print(res) def max_logical_function(): # [START max_logical_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("rating").logical_maximum(1).as_("flooredRating") - ) \ - .execute() + ).execute() # [END max_logical_function] for res in result: print(res) def min_logical_function(): # [START min_logical_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("rating").logical_minimum(5).as_("cappedRating") - ) \ - .execute() + ).execute() # [END min_logical_function] for res in result: print(res) def map_get_function(): # [START map_get] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("awards").map_get("pulitzer").as_("hasPulitzerAward") - ) \ - .execute() + ).execute() # [END map_get] for res in result: print(res) def byte_length_function(): # [START byte_length] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("title").byte_length().as_("titleByteLength") - ) \ - .execute() + ).execute() # [END byte_length] for res in result: print(res) def char_length_function(): # [START char_length] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("title").char_length().as_("titleCharLength") - ) \ - .execute() + ).execute() # [END char_length] for res in result: print(res) def starts_with_function(): # [START starts_with] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("title").starts_with("The") .as_("needsSpecialAlphabeticalSort") - ) \ - .execute() + ).execute() # [END starts_with] for res in result: print(res) def ends_with_function(): # [START ends_with] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("inventory/devices/laptops") \ .select( Field.of("name").ends_with("16 inch") .as_("16InLaptops") - ) \ - .execute() + ).execute() # [END ends_with] for res in result: print(res) def like_function(): # [START like] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("genre").like("%Fiction") .as_("anyFiction") - ) \ - .execute() + ).execute() # [END like] for res in result: print(res) def regex_contains_function(): # [START regex_contains] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("title").regex_contains("Firestore (Enterprise|Standard)") .as_("isFirestoreRelated") - ) \ - .execute() + ).execute() # [END regex_contains] for res in result: print(res) def regex_match_function(): # [START regex_match] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("title").regex_match("Firestore (Enterprise|Standard)") .as_("isFirestoreExactly") - ) \ - .execute() + ).execute() # [END regex_match] for res in result: print(res) def str_concat_function(): # [START str_concat] + from 
google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("title").concat(" by ", Field.of("author")) .as_("fullyQualifiedTitle") - ) \ - .execute() + ).execute() # [END str_concat] for res in result: print(res) def str_contains_function(): # [START string_contains] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("articles") \ .select( Field.of("body").string_contains("Firestore") .as_("isFirestoreRelated") - ) \ - .execute() + ).execute() # [END string_contains] for res in result: print(res) def to_upper_function(): # [START to_upper] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("authors") \ .select( Field.of("name").to_upper() .as_("uppercaseName") - ) \ - .execute() + ).execute() # [END to_upper] for res in result: print(res) def to_lower_function(): # [START to_lower] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("authors") \ .select( Field.of("genre").to_lower().equal("fantasy") .as_("isFantasy") - ) \ - .execute() + ).execute() # [END to_lower] for res in result: print(res) def substr_function(): # [START substr_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .where(Field.of("title").starts_with("The ")) \ .select( Field.of("title").substring(4) .as_("titleWithoutLeadingThe") - ) \ - .execute() + ).execute() # [END substr_function] for res in result: print(res) def str_reverse_function(): # [START str_reverse] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("name").string_reverse().as_("reversedName") - ) \ - .execute() + ).execute() # [END str_reverse] for res in result: print(res) def str_trim_function(): # [START trim_function] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("name").trim().as_("whitespaceTrimmedName") - ) \ - .execute() + ).execute() # [END trim_function] for res in result: print(res) @@ -1143,147 +1272,159 @@ def str_split_function(): def unix_micros_to_timestamp_function(): # [START unix_micros_timestamp] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("createdAtMicros").unix_micros_to_timestamp().as_("createdAtString") - ) \ - .execute() + ).execute() # [END unix_micros_timestamp] for res in result: print(res) def unix_millis_to_timestamp_function(): # [START unix_millis_timestamp] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("createdAtMillis").unix_millis_to_timestamp().as_("createdAtString") - ) \ - .execute() + ).execute() # [END unix_millis_timestamp] for res in result: print(res) def unix_seconds_to_timestamp_function(): # [START unix_seconds_timestamp] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("createdAtSeconds").unix_seconds_to_timestamp().as_("createdAtString") - ) \ - .execute() + ).execute() # [END unix_seconds_timestamp] for res in result: print(res) def timestamp_add_function(): # [START timestamp_add] + from 
google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("createdAt").timestamp_add("day", 3653).as_("expiresAt") - ) \ - .execute() + ).execute() # [END timestamp_add] for res in result: print(res) def timestamp_sub_function(): # [START timestamp_sub] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("expiresAt").timestamp_subtract("day", 14).as_("sendWarningTimestamp") - ) \ - .execute() + ).execute() # [END timestamp_sub] for res in result: print(res) def timestamp_to_unix_micros_function(): # [START timestamp_unix_micros] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("dateString").timestamp_to_unix_micros().as_("unixMicros") - ) \ - .execute() + ).execute() # [END timestamp_unix_micros] for res in result: print(res) def timestamp_to_unix_millis_function(): # [START timestamp_unix_millis] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("dateString").timestamp_to_unix_millis().as_("unixMillis") - ) \ - .execute() + ).execute() # [END timestamp_unix_millis] for res in result: print(res) def timestamp_to_unix_seconds_function(): # [START timestamp_unix_seconds] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("documents") \ .select( Field.of("dateString").timestamp_to_unix_seconds().as_("unixSeconds") - ) \ - .execute() + ).execute() # [END timestamp_unix_seconds] for res in result: print(res) def cosine_distance_function(): # [START cosine_distance] - sample_vector = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] + from google.cloud.firestore_v1.pipeline_expressions import Field + + sample_vector = Vector([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) result = client.pipeline() \ .collection("books") \ .select( Field.of("embedding").cosine_distance(sample_vector).as_("cosineDistance") - ) \ - .execute() + ).execute() # [END cosine_distance] for res in result: print(res) def dot_product_function(): # [START dot_product] - sample_vector = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] + from google.cloud.firestore_v1.pipeline_expressions import Field + + sample_vector = Vector([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) result = client.pipeline() \ .collection("books") \ .select( Field.of("embedding").dot_product(sample_vector).as_("dotProduct") - ) \ - .execute() + ).execute() # [END dot_product] for res in result: print(res) def euclidean_distance_function(): # [START euclidean_distance] - sample_vector = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] + from google.cloud.firestore_v1.pipeline_expressions import Field + + sample_vector = Vector([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) result = client.pipeline() \ .collection("books") \ .select( Field.of("embedding").euclidean_distance(sample_vector).as_("euclideanDistance") - ) \ - .execute() + ).execute() # [END euclidean_distance] for res in result: print(res) def vector_length_function(): # [START vector_length] + from google.cloud.firestore_v1.pipeline_expressions import Field + result = client.pipeline() \ .collection("books") \ .select( Field.of("embedding").vector_length().as_("vectorLength") - ) \ - .execute() + ).execute() # [END vector_length] for res in result: print(res) From 45302f89cb7a2d85cdfcf916942bf32947b282cd Mon Sep 17 00:00:00 2001 From: Morgan Chen Date: Thu, 6 Nov 2025 09:58:18 
-0800 Subject: [PATCH 4/4] run format --- snippets/firestore/firestore_pipelines.py | 1283 +++++++++++++-------- 1 file changed, 805 insertions(+), 478 deletions(-) diff --git a/snippets/firestore/firestore_pipelines.py b/snippets/firestore/firestore_pipelines.py index ac94cf9e5..639140959 100644 --- a/snippets/firestore/firestore_pipelines.py +++ b/snippets/firestore/firestore_pipelines.py @@ -23,7 +23,11 @@ Count, ) from google.cloud.firestore_v1.pipeline_expressions import ( - And, Conditional, Or, Not, Xor + And, + Conditional, + Or, + Not, + Xor, ) from google.cloud.firestore_v1.pipeline_stages import ( Aggregate, @@ -41,17 +45,21 @@ default_app = firebase_admin.initialize_app() client = firestore.client(default_app, "your-new-enterprise-database") + # pylint: disable=invalid-name def pipeline_concepts(): # [START pipeline_concepts] - pipeline = client.pipeline() \ - .collection("cities") \ - .where(Field.of("population").greater_than(100_000)) \ - .sort(Field.of("name").ascending()) \ + pipeline = ( + client.pipeline() + .collection("cities") + .where(Field.of("population").greater_than(100_000)) + .sort(Field.of("name").ascending()) .limit(10) + ) # [END pipeline_concepts] print(pipeline) + def basic_read(): # [START basic_read] pipeline = client.pipeline().collection("users") @@ -63,6 +71,7 @@ def basic_read(): print(f"{result.id} => {result.data()}") # [END basic_read] + def pipeline_initialization(): # [START pipeline_initialization] firestore_client = firestore.client(default_app, "your-new-enterprise-database") @@ -70,14 +79,18 @@ def pipeline_initialization(): # [END pipeline_initialization] print(pipeline) + def field_vs_constants(): # [START field_or_constant] - pipeline = client.pipeline() \ - .collection("cities") \ + pipeline = ( + client.pipeline() + .collection("cities") .where(Field.of("name").equal(Constant.of("Toronto"))) + ) # [END field_or_constant] print(pipeline) + def input_stages(): # [START input_stages] # Return all restaurants in San Francisco @@ -90,272 +103,324 @@ def input_stages(): results = client.pipeline().database().execute() # Batch read of 3 documents - results = client.pipeline().documents( - client.collection("cities").document("SF"), - client.collection("cities").document("DC"), - client.collection("cities").document("NY") - ).execute() + results = ( + client.pipeline() + .documents( + client.collection("cities").document("SF"), + client.collection("cities").document("DC"), + client.collection("cities").document("NY"), + ) + .execute() + ) # [END input_stages] for result in results: print(result) + def where_pipeline(): # [START pipeline_where] - from google.cloud.firestore_v1.pipeline_expressions import (And, Field) + from google.cloud.firestore_v1.pipeline_expressions import And, Field - results = client.pipeline().collection("books") \ - .where(Field.of("rating").equal(5)) \ - .where(Field.of("published").less_than(1900)) \ + results = ( + client.pipeline() + .collection("books") + .where(Field.of("rating").equal(5)) + .where(Field.of("published").less_than(1900)) .execute() + ) - results = client.pipeline().collection("books") \ - .where(And( - Field.of("rating").equal(5), - Field.of("published").less_than(1900) - )).execute() + results = ( + client.pipeline() + .collection("books") + .where(And(Field.of("rating").equal(5), Field.of("published").less_than(1900))) + .execute() + ) # [END pipeline_where] for result in results: print(result) + def aggregate_groups(): # [START aggregate_groups] from google.cloud.firestore_v1.pipeline_expressions 
import Field - results = client.pipeline() \ - .collection("books") \ + results = ( + client.pipeline() + .collection("books") .aggregate( - Field.of("rating").average().as_("avg_rating"), - groups=[Field.of("genre")] - ).execute() + Field.of("rating").average().as_("avg_rating"), groups=[Field.of("genre")] + ) + .execute() + ) # [END aggregate_groups] for result in results: print(result) + def aggregate_distinct(): # [START aggregate_distinct] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("books") \ - .distinct( - Field.of("author").to_upper().as_("author"), - "genre" - ).execute() + results = ( + client.pipeline() + .collection("books") + .distinct(Field.of("author").to_upper().as_("author"), "genre") + .execute() + ) # [END aggregate_distinct] for result in results: print(result) + def sort(): # [START sort] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("books") \ - .sort( - Field.of("release_date").descending(), - Field.of("author").ascending() - ).execute() + results = ( + client.pipeline() + .collection("books") + .sort(Field.of("release_date").descending(), Field.of("author").ascending()) + .execute() + ) # [END sort] for result in results: print(result) + def sort_comparison(): # [START sort_comparison] from google.cloud.firestore_v1.pipeline_expressions import Field - query = client.collection("cities") \ - .order_by("state") \ + query = ( + client.collection("cities") + .order_by("state") .order_by("population", direction=Query.DESCENDING) + ) - pipeline = client.pipeline() \ - .collection("books") \ - .sort( - Field.of("release_date").descending(), - Field.of("author").ascending() - ) + pipeline = ( + client.pipeline() + .collection("books") + .sort(Field.of("release_date").descending(), Field.of("author").ascending()) + ) # [END sort_comparison] print(query) print(pipeline) + def functions_example(): # [START functions_example] from google.cloud.firestore_v1.pipeline_expressions import Field # Type 1: Scalar (for use in non-aggregation stages) # Example: Return the min store price for each book. - results = client.pipeline().collection("books") \ + results = ( + client.pipeline() + .collection("books") .select( Field.of("current").logical_minimum(Field.of("updated")).as_("price_min") - ).execute() + ) + .execute() + ) # Type 2: Aggregation (for use in aggregate stages) # Example: Return the min price of all books. 
- results = client.pipeline().collection("books") \ - .aggregate(Field.of("price").minimum().as_("min_price")) \ + results = ( + client.pipeline() + .collection("books") + .aggregate(Field.of("price").minimum().as_("min_price")) .execute() + ) # [END functions_example] for result in results: print(result) + def creating_indexes(): # [START query_example] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("books") \ - .where(Field.of("published").less_than(1900)) \ - .where(Field.of("genre").equal("Science Fiction")) \ - .where(Field.of("rating").greater_than(4.3)) \ - .sort(Field.of("published").descending()) \ + results = ( + client.pipeline() + .collection("books") + .where(Field.of("published").less_than(1900)) + .where(Field.of("genre").equal("Science Fiction")) + .where(Field.of("rating").greater_than(4.3)) + .sort(Field.of("published").descending()) .execute() + ) # [END query_example] for result in results: print(result) + def sparse_indexes(): # [START sparse_index_example] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("books") \ - .where(Field.of("category").like("%fantasy%")) \ + results = ( + client.pipeline() + .collection("books") + .where(Field.of("category").like("%fantasy%")) .execute() + ) # [END sparse_index_example] for result in results: print(result) + def sparse_indexes2(): # [START sparse_index_example_2] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("books") \ - .sort(Field.of("release_date").ascending()) \ + results = ( + client.pipeline() + .collection("books") + .sort(Field.of("release_date").ascending()) .execute() + ) # [END sparse_index_example_2] for result in results: print(result) + def covered_query(): # [START covered_query] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("books") \ - .where(Field.of("category").like("%fantasy%")) \ - .where(Field.of("title").exists()) \ - .where(Field.of("author").exists()) \ - .select("title", "author") \ + results = ( + client.pipeline() + .collection("books") + .where(Field.of("category").like("%fantasy%")) + .where(Field.of("title").exists()) + .where(Field.of("author").exists()) + .select("title", "author") .execute() + ) # [END covered_query] for result in results: print(result) + def pagination(): # [START pagination_not_supported_preview] from google.cloud.firestore_v1.pipeline_expressions import Field # Existing pagination via `start_at()` - query = client.collection("cities").order_by("population").start_at({ - "population": 1_000_000 - }) + query = ( + client.collection("cities") + .order_by("population") + .start_at({"population": 1_000_000}) + ) # Private preview workaround using pipelines - pipeline = client.pipeline() \ - .collection("cities") \ - .where(Field.of("population").greater_than_or_equal(1_000_000)) \ + pipeline = ( + client.pipeline() + .collection("cities") + .where(Field.of("population").greater_than_or_equal(1_000_000)) .sort(Field.of("population").descending()) + ) # [END pagination_not_supported_preview] print(query) print(pipeline) + def collection_stage(): # [START collection_example] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("users/bob/games") \ - .sort(Field.of("name").ascending()) \ + results = ( + client.pipeline() + .collection("users/bob/games") + 
.sort(Field.of("name").ascending()) .execute() + ) # [END collection_example] for result in results: print(result) + def collection_group_stage(): # [START collection_group_example] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection_group("games") \ - .sort(Field.of("name").ascending()) \ + results = ( + client.pipeline() + .collection_group("games") + .sort(Field.of("name").ascending()) .execute() + ) # [END collection_group_example] for result in results: print(result) + def database_stage(): # [START database_example] from google.cloud.firestore_v1.pipeline_expressions import Count # Count all documents in the database - results = client.pipeline() \ - .database() \ - .aggregate(Count().as_("total")) \ - .execute() + results = client.pipeline().database().aggregate(Count().as_("total")).execute() # [END database_example] for result in results: print(result) + def documents_stage(): # [START documents_example] - results = client.pipeline().documents( - client.collection("cities").document("SF"), - client.collection("cities").document("DC"), - client.collection("cities").document("NY") - ).execute() + results = ( + client.pipeline() + .documents( + client.collection("cities").document("SF"), + client.collection("cities").document("DC"), + client.collection("cities").document("NY"), + ) + .execute() + ) # [END documents_example] for result in results: print(result) + def replace_with_stage(): # [START initial_data] - client.collection("cities").document("SF").set({ - "name": "San Francisco", - "population": 800_000, - "location": { - "country": "USA", - "state": "California" + client.collection("cities").document("SF").set( + { + "name": "San Francisco", + "population": 800_000, + "location": {"country": "USA", "state": "California"}, + } + ) + client.collection("cities").document("TO").set( + { + "name": "Toronto", + "population": 3_000_000, + "province": "ON", + "location": {"country": "Canada", "province": "Ontario"}, } - }) - client.collection("cities").document("TO").set({ - "name": "Toronto", - "population": 3_000_000, - "province": "ON", - "location": { - "country": "Canada", - "province": "Ontario" + ) + client.collection("cities").document("NY").set( + { + "name": "New York", + "population": 8_500_000, + "location": {"country": "USA", "state": "New York"}, } - }) - client.collection("cities").document("NY").set({ - "name": "New York", - "population": 8_500_000, - "location": { - "country": "USA", - "state": "New York" + ) + client.collection("cities").document("AT").set( + { + "name": "Atlantis", } - }) - client.collection("cities").document("AT").set({ - "name": "Atlantis", - }) + ) # [END initial_data] # [START full_replace] from google.cloud.firestore_v1.pipeline_expressions import Field - names = client.pipeline() \ - .collection("cities") \ - .replace_with(Field.of("location")) \ + names = ( + client.pipeline() + .collection("cities") + .replace_with(Field.of("location")) .execute() + ) # [END full_replace] # [START map_merge_overwrite] @@ -364,88 +429,104 @@ def replace_with_stage(): for name in names: print(name) + def sample_stage(): # [START sample_example] # Get a sample of 100 documents in a database - results = client.pipeline() \ - .database() \ - .sample(100) \ - .execute() + results = client.pipeline().database().sample(100).execute() # Randomly shuffle a list of 3 documents - results = client.pipeline() \ + results = ( + client.pipeline() .documents( client.collection("cities").document("SF"), 
client.collection("cities").document("NY"), - client.collection("cities").document("DC") - ) \ - .sample(3) \ + client.collection("cities").document("DC"), + ) + .sample(3) .execute() + ) # [END sample_example] for result in results: print(result) + def sample_percent(): # [START sample_percent] from google.cloud.firestore_v1.pipeline_stages import SampleOptions # Get a sample of on average 50% of the documents in the database - results = client.pipeline() \ - .database() \ - .sample(SampleOptions.percentage(0.5)) \ - .execute() + results = ( + client.pipeline().database().sample(SampleOptions.percentage(0.5)).execute() + ) # [END sample_percent] for result in results: print(result) + def union_stage(): # [START union_stage] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("cities/SF/restaurants") \ - .where(Field.of("type").equal("Chinese")) \ - .union(client.pipeline() \ - .collection("cities/NY/restaurants") \ - .where(Field.of("type").equal("Italian"))) \ - .where(Field.of("rating").greater_than_or_equal(4.5)) \ - .sort(Field.of("__name__").descending()) \ + results = ( + client.pipeline() + .collection("cities/SF/restaurants") + .where(Field.of("type").equal("Chinese")) + .union( + client.pipeline() + .collection("cities/NY/restaurants") + .where(Field.of("type").equal("Italian")) + ) + .where(Field.of("rating").greater_than_or_equal(4.5)) + .sort(Field.of("__name__").descending()) .execute() + ) # [END union_stage] for result in results: print(result) + def union_stage_stable(): # [START union_stage_stable] from google.cloud.firestore_v1.pipeline_expressions import Field - results = client.pipeline() \ - .collection("cities/SF/restaurants") \ - .where(Field.of("type").equal("Chinese")) \ - .union(client.pipeline() \ - .collection("cities/NY/restaurants") \ - .where(Field.of("type").equal("Italian"))) \ - .where(Field.of("rating").greater_than_or_equal(4.5)) \ - .sort(Field.of("__name__").descending()) \ + results = ( + client.pipeline() + .collection("cities/SF/restaurants") + .where(Field.of("type").equal("Chinese")) + .union( + client.pipeline() + .collection("cities/NY/restaurants") + .where(Field.of("type").equal("Italian")) + ) + .where(Field.of("rating").greater_than_or_equal(4.5)) + .sort(Field.of("__name__").descending()) .execute() + ) # [END union_stage_stable] for result in results: print(result) + def unnest_stage(): # [START unnest_stage] from google.cloud.firestore_v1.pipeline_expressions import Field from google.cloud.firestore_v1.pipeline_stages import UnnestOptions - results = client.pipeline() \ - .database() \ - .unnest(Field.of("arrayField").as_("unnestedArrayField"), \ - options=UnnestOptions(index_field="index")) \ + results = ( + client.pipeline() + .database() + .unnest( + Field.of("arrayField").as_("unnestedArrayField"), + options=UnnestOptions(index_field="index"), + ) .execute() + ) # [END unnest_stage] for result in results: print(result) + def unnest_stage_empty_or_non_array(): # [START unnest_edge_cases] from google.cloud.firestore_v1.pipeline_expressions import Field @@ -456,11 +537,15 @@ def unnest_stage_empty_or_non_array(): # { "identifier" : 2, "neighbors": [] } # { "identifier" : 3, "neighbors": "Bob" } - results = client.pipeline() \ - .database() \ - .unnest(Field.of("neighbors").as_("unnestedNeighbors"), \ - options=UnnestOptions(index_field="index")) \ + results = ( + client.pipeline() + .database() + .unnest( + Field.of("neighbors").as_("unnestedNeighbors"), + 
options=UnnestOptions(index_field="index"), + ) .execute() + ) # Output # { "identifier": 1, "neighbors": [ "Alice", "Cathy" ], @@ -472,959 +557,1201 @@ def unnest_stage_empty_or_non_array(): for result in results: print(result) + def count_function(): # [START count_function] from google.cloud.firestore_v1.pipeline_expressions import Count # Total number of books in the collection - count_all = client.pipeline() \ - .collection("books") \ - .aggregate(Count().as_("count")) \ - .execute() + count_all = ( + client.pipeline().collection("books").aggregate(Count().as_("count")).execute() + ) # Number of books with nonnull `ratings` field - count_field = client.pipeline() \ - .collection("books") \ - .aggregate(Count("ratings").as_("count")) \ + count_field = ( + client.pipeline() + .collection("books") + .aggregate(Count("ratings").as_("count")) .execute() + ) # [END count_function] for result in count_all: print(result) for result in count_field: print(result) + def count_if_function(): # [START count_if] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .aggregate( - Field.of("rating").greater_than(4).count_if().as_("filteredCount") - ).execute() + result = ( + client.pipeline() + .collection("books") + .aggregate(Field.of("rating").greater_than(4).count_if().as_("filteredCount")) + .execute() + ) # [END count_if] for res in result: print(res) + def count_distinct_function(): # [START count_distinct] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .aggregate(Field.of("author").count_distinct().as_("unique_authors")) \ + result = ( + client.pipeline() + .collection("books") + .aggregate(Field.of("author").count_distinct().as_("unique_authors")) .execute() + ) # [END count_distinct] for res in result: print(res) + def sum_function(): # [START sum_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("cities") \ - .aggregate(Field.of("population").sum().as_("totalPopulation")) \ + result = ( + client.pipeline() + .collection("cities") + .aggregate(Field.of("population").sum().as_("totalPopulation")) .execute() + ) # [END sum_function] for res in result: print(res) + def avg_function(): # [START avg_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("cities") \ - .aggregate(Field.of("population").average().as_("averagePopulation")) \ + result = ( + client.pipeline() + .collection("cities") + .aggregate(Field.of("population").average().as_("averagePopulation")) .execute() + ) # [END avg_function] for res in result: print(res) + def min_function(): # [START min_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .aggregate(Field.of("price").minimum().as_("minimumPrice")) \ + result = ( + client.pipeline() + .collection("books") + .aggregate(Field.of("price").minimum().as_("minimumPrice")) .execute() + ) # [END min_function] for res in result: print(res) + def max_function(): # [START max_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .aggregate(Field.of("price").maximum().as_("maximumPrice")) \ + result = ( + client.pipeline() + .collection("books") + .aggregate(Field.of("price").maximum().as_("maximumPrice")) .execute() + ) # [END max_function] for res 
in result: print(res) + def add_function(): # [START add_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("soldBooks").add(Field.of("unsoldBooks")).as_("totalBooks")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("soldBooks").add(Field.of("unsoldBooks")).as_("totalBooks")) .execute() + ) # [END add_function] for res in result: print(res) + def subtract_function(): # [START subtract_function] from google.cloud.firestore_v1.pipeline_expressions import Field store_credit = 7 - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("price").subtract(store_credit).as_("totalCost")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("price").subtract(store_credit).as_("totalCost")) .execute() + ) # [END subtract_function] for res in result: print(res) + def multiply_function(): # [START multiply_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("price").multiply(Field.of("soldBooks")).as_("revenue")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("price").multiply(Field.of("soldBooks")).as_("revenue")) .execute() + ) # [END multiply_function] for res in result: print(res) + def divide_function(): # [START divide_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("ratings").divide(Field.of("soldBooks")).as_("reviewRate")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("ratings").divide(Field.of("soldBooks")).as_("reviewRate")) .execute() + ) # [END divide_function] for res in result: print(res) + def mod_function(): # [START mod_function] from google.cloud.firestore_v1.pipeline_expressions import Field display_capacity = 1000 - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("unsoldBooks").mod(display_capacity).as_("warehousedBooks")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("unsoldBooks").mod(display_capacity).as_("warehousedBooks")) .execute() + ) # [END mod_function] for res in result: print(res) + def ceil_function(): # [START ceil_function] from google.cloud.firestore_v1.pipeline_expressions import Field books_per_shelf = 100 - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( - Field.of("unsoldBooks").divide(books_per_shelf).ceil().as_("requiredShelves") - ).execute() + Field.of("unsoldBooks") + .divide(books_per_shelf) + .ceil() + .as_("requiredShelves") + ) + .execute() + ) # [END ceil_function] for res in result: print(res) + def floor_function(): # [START floor_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .add_fields( Field.of("wordCount").divide(Field.of("pages")).floor().as_("wordsPerPage") - ).execute() + ) + .execute() + ) # [END floor_function] for res in result: print(res) + def round_function(): # [START round_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("soldBooks").multiply(Field.of("price")).round().as_("partialRevenue")) \ - 
.aggregate(Field.of("partialRevenue").sum().as_("totalRevenue")) \ + result = ( + client.pipeline() + .collection("books") + .select( + Field.of("soldBooks") + .multiply(Field.of("price")) + .round() + .as_("partialRevenue") + ) + .aggregate(Field.of("partialRevenue").sum().as_("totalRevenue")) .execute() + ) # [END round_function] for res in result: print(res) + def pow_function(): # [START pow_function] from google.cloud.firestore_v1.pipeline_expressions import Field googleplexLat = 37.4221 googleplexLng = -122.0853 - result = client.pipeline() \ - .collection("cities") \ + result = ( + client.pipeline() + .collection("cities") .add_fields( - Field.of("lat").subtract(googleplexLat) - .multiply(111) # km per degree - .pow(2) - .as_("latitudeDifference"), - Field.of("lng").subtract(googleplexLng) - .multiply(111) # km per degree - .pow(2) - .as_("longitudeDifference") - ) \ + Field.of("lat") + .subtract(googleplexLat) + .multiply(111) # km per degree + .pow(2) + .as_("latitudeDifference"), + Field.of("lng") + .subtract(googleplexLng) + .multiply(111) # km per degree + .pow(2) + .as_("longitudeDifference"), + ) .select( - Field.of("latitudeDifference").add(Field.of("longitudeDifference")).sqrt() - # Inaccurate for large distances or close to poles - .as_("approximateDistanceToGoogle") - ).execute() + Field.of("latitudeDifference") + .add(Field.of("longitudeDifference")) + .sqrt() + # Inaccurate for large distances or close to poles + .as_("approximateDistanceToGoogle") + ) + .execute() + ) # [END pow_function] for res in result: print(res) + def sqrt_function(): # [START sqrt_function] from google.cloud.firestore_v1.pipeline_expressions import Field googleplexLat = 37.4221 googleplexLng = -122.0853 - result = client.pipeline() \ - .collection("cities") \ + result = ( + client.pipeline() + .collection("cities") .add_fields( - Field.of("lat").subtract(googleplexLat) - .multiply(111) # km per degree - .pow(2) - .as_("latitudeDifference"), - Field.of("lng").subtract(googleplexLng) - .multiply(111) # km per degree - .pow(2) - .as_("longitudeDifference") - ) \ + Field.of("lat") + .subtract(googleplexLat) + .multiply(111) # km per degree + .pow(2) + .as_("latitudeDifference"), + Field.of("lng") + .subtract(googleplexLng) + .multiply(111) # km per degree + .pow(2) + .as_("longitudeDifference"), + ) .select( - Field.of("latitudeDifference").add(Field.of("longitudeDifference")).sqrt() - # Inaccurate for large distances or close to poles - .as_("approximateDistanceToGoogle") - ).execute() + Field.of("latitudeDifference") + .add(Field.of("longitudeDifference")) + .sqrt() + # Inaccurate for large distances or close to poles + .as_("approximateDistanceToGoogle") + ) + .execute() + ) # [END sqrt_function] for res in result: print(res) + def exp_function(): # [START exp_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("rating").exp().as_("expRating")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").exp().as_("expRating")) .execute() + ) # [END exp_function] for res in result: print(res) + def ln_function(): # [START ln_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("rating").ln().as_("lnRating")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").ln().as_("lnRating")) .execute() + ) # [END ln_function] for res in result: print(res) 
+ def log_function(): # [START log_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("rating").log(2).as_("log2Rating")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").log(2).as_("log2Rating")) .execute() + ) # [END log_function] for res in result: print(res) + def array_concat(): # [START array_concat] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("genre").array_concat(Field.of("subGenre")).as_("allGenres")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("genre").array_concat(Field.of("subGenre")).as_("allGenres")) .execute() + ) # [END array_concat] for res in result: print(res) + def array_contains(): # [START array_contains] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("genre").array_contains("mystery").as_("isMystery")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("genre").array_contains("mystery").as_("isMystery")) .execute() + ) # [END array_contains] for res in result: print(res) + def array_contains_all(): # [START array_contains_all] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( Field.of("genre") - .array_contains_all(["fantasy", "adventure"]) - .as_("isFantasyAdventure") - ).execute() + .array_contains_all(["fantasy", "adventure"]) + .as_("isFantasyAdventure") + ) + .execute() + ) # [END array_contains_all] for res in result: print(res) + def array_contains_any(): # [START array_contains_any] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( Field.of("genre") - .array_contains_any(["fantasy", "nonfiction"]) - .as_("isMysteryOrFantasy") - ).execute() + .array_contains_any(["fantasy", "nonfiction"]) + .as_("isMysteryOrFantasy") + ) + .execute() + ) # [END array_contains_any] for res in result: print(res) + def array_length(): # [START array_length] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("genre").array_length().as_("genreCount")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("genre").array_length().as_("genreCount")) .execute() + ) # [END array_length] for res in result: print(res) + def array_reverse(): # [START array_reverse] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("genre").array_reverse().as_("reversedGenres")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("genre").array_reverse().as_("reversedGenres")) .execute() + ) # [END array_reverse] for res in result: print(res) + def equal_function(): # [START equal_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("rating").equal(5).as_("hasPerfectRating")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").equal(5).as_("hasPerfectRating")) .execute() + ) # [END equal_function] for res in 
result: print(res) + def greater_than_function(): # [START greater_than] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("rating").greater_than(4).as_("hasHighRating")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").greater_than(4).as_("hasHighRating")) .execute() + ) # [END greater_than] for res in result: print(res) + def greater_than_or_equal_to_function(): # [START greater_or_equal] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("published").greater_than_or_equal(1900).as_("publishedIn20thCentury")) \ + result = ( + client.pipeline() + .collection("books") + .select( + Field.of("published") + .greater_than_or_equal(1900) + .as_("publishedIn20thCentury") + ) .execute() + ) # [END greater_or_equal] for res in result: print(res) + def less_than_function(): # [START less_than] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("published").less_than(1923).as_("isPublicDomainProbably")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("published").less_than(1923).as_("isPublicDomainProbably")) .execute() + ) # [END less_than] for res in result: print(res) + def less_than_or_equal_to_function(): # [START less_or_equal] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("rating").less_than_or_equal(2).as_("hasBadRating")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").less_than_or_equal(2).as_("hasBadRating")) .execute() + ) # [END less_or_equal] for res in result: print(res) + def not_equal_function(): # [START not_equal] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("title").not_equal("1984").as_("not1984")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("title").not_equal("1984").as_("not1984")) .execute() + ) # [END not_equal] for res in result: print(res) + def exists_function(): # [START exists_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select(Field.of("rating").exists().as_("hasRating")) \ + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").exists().as_("hasRating")) .execute() + ) # [END exists_function] for res in result: print(res) + def and_function(): # [START and_function] - from google.cloud.firestore_v1.pipeline_expressions import (Field, And) + from google.cloud.firestore_v1.pipeline_expressions import Field, And - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( And( - Field.of("rating").greater_than(4), - Field.of("price").less_than(10) + Field.of("rating").greater_than(4), Field.of("price").less_than(10) ).as_("under10Recommendation") - ).execute() + ) + .execute() + ) # [END and_function] for res in result: print(res) + def or_function(): # [START or_function] - from google.cloud.firestore_v1.pipeline_expressions import (Field, And) + from google.cloud.firestore_v1.pipeline_expressions import Field, Or - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() +
.collection("books") .select( Or( Field.of("genre").equal("Fantasy"), - Field.of("tags").array_contains("adventure") + Field.of("tags").array_contains("adventure"), ).as_("matchesSearchFilters") - ).execute() + ) + .execute() + ) # [END or_function] for res in result: print(res) + def xor_function(): # [START xor_function] - from google.cloud.firestore_v1.pipeline_expressions import (Field, Xor) + from google.cloud.firestore_v1.pipeline_expressions import Field, Xor - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( - Xor([ - Field.of("tags").array_contains("magic"), - Field.of("tags").array_contains("nonfiction") - ]).as_("matchesSearchFilters") - ).execute() + Xor( + [ + Field.of("tags").array_contains("magic"), + Field.of("tags").array_contains("nonfiction"), + ] + ).as_("matchesSearchFilters") + ) + .execute() + ) # [END xor_function] for res in result: print(res) + def not_function(): # [START not_function] - from google.cloud.firestore_v1.pipeline_expressions import (Field, Not) + from google.cloud.firestore_v1.pipeline_expressions import Field, Not - result = client.pipeline() \ - .collection("books") \ - .select( - Not( - Field.of("tags").array_contains("nonfiction") - ).as_("isFiction") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Not(Field.of("tags").array_contains("nonfiction")).as_("isFiction")) + .execute() + ) # [END not_function] for res in result: print(res) + def cond_function(): # [START cond_function] - from google.cloud.firestore_v1.pipeline_expressions import (Field, Constant, Conditional) - - result = client.pipeline() \ - .collection("books") \ + from google.cloud.firestore_v1.pipeline_expressions import ( + Field, + Constant, + Conditional, + ) + + result = ( + client.pipeline() + .collection("books") .select( - Field.of("tags").array_concat( + Field.of("tags") + .array_concat( Conditional( Field.of("pages").greater_than(100), Constant.of("longRead"), - Constant.of("shortRead") + Constant.of("shortRead"), ) - ).as_("extendedTags") - ).execute() + ) + .as_("extendedTags") + ) + .execute() + ) # [END cond_function] for res in result: print(res) + def equal_any_function(): # [START eq_any] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( - Field.of("genre").equal_any(["Science Fiction", "Psychological Thriller"]) - .as_("matchesGenreFilters") - ).execute() + Field.of("genre") + .equal_any(["Science Fiction", "Psychological Thriller"]) + .as_("matchesGenreFilters") + ) + .execute() + ) # [END eq_any] for res in result: print(res) + def not_equal_any_function(): # [START not_eq_any] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( - Field.of("author").not_equal_any(["George Orwell", "F. Scott Fitzgerald"]) - .as_("byExcludedAuthors") - ).execute() + Field.of("author") + .not_equal_any(["George Orwell", "F. 
Scott Fitzgerald"]) + .as_("byExcludedAuthors") + ) + .execute() + ) # [END not_eq_any] for res in result: print(res) + def max_logical_function(): # [START max_logical_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("rating").logical_maximum(1).as_("flooredRating") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").logical_maximum(1).as_("flooredRating")) + .execute() + ) # [END max_logical_function] for res in result: print(res) + def min_logical_function(): # [START min_logical_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("rating").logical_minimum(5).as_("cappedRating") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("rating").logical_minimum(5).as_("cappedRating")) + .execute() + ) # [END min_logical_function] for res in result: print(res) + def map_get_function(): # [START map_get] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("awards").map_get("pulitzer").as_("hasPulitzerAward") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("awards").map_get("pulitzer").as_("hasPulitzerAward")) + .execute() + ) # [END map_get] for res in result: print(res) + def byte_length_function(): # [START byte_length] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("title").byte_length().as_("titleByteLength") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("title").byte_length().as_("titleByteLength")) + .execute() + ) # [END byte_length] for res in result: print(res) + def char_length_function(): # [START char_length] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("title").char_length().as_("titleCharLength") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("title").char_length().as_("titleCharLength")) + .execute() + ) # [END char_length] for res in result: print(res) + def starts_with_function(): # [START starts_with] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( - Field.of("title").starts_with("The") - .as_("needsSpecialAlphabeticalSort") - ).execute() + Field.of("title").starts_with("The").as_("needsSpecialAlphabeticalSort") + ) + .execute() + ) # [END starts_with] for res in result: print(res) + def ends_with_function(): # [START ends_with] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("inventory/devices/laptops") \ - .select( - Field.of("name").ends_with("16 inch") - .as_("16InLaptops") - ).execute() + result = ( + client.pipeline() + .collection("inventory/devices/laptops") + .select(Field.of("name").ends_with("16 inch").as_("16InLaptops")) + .execute() + ) # [END ends_with] for res in result: print(res) + def like_function(): # [START like] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - 
Field.of("genre").like("%Fiction") - .as_("anyFiction") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("genre").like("%Fiction").as_("anyFiction")) + .execute() + ) # [END like] for res in result: print(res) + def regex_contains_function(): # [START regex_contains] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ + result = ( + client.pipeline() + .collection("documents") .select( - Field.of("title").regex_contains("Firestore (Enterprise|Standard)") - .as_("isFirestoreRelated") - ).execute() + Field.of("title") + .regex_contains("Firestore (Enterprise|Standard)") + .as_("isFirestoreRelated") + ) + .execute() + ) # [END regex_contains] for res in result: print(res) + def regex_match_function(): # [START regex_match] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ + result = ( + client.pipeline() + .collection("documents") .select( - Field.of("title").regex_match("Firestore (Enterprise|Standard)") - .as_("isFirestoreExactly") - ).execute() + Field.of("title") + .regex_match("Firestore (Enterprise|Standard)") + .as_("isFirestoreExactly") + ) + .execute() + ) # [END regex_match] for res in result: print(res) + def str_concat_function(): # [START str_concat] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( - Field.of("title").concat(" by ", Field.of("author")) - .as_("fullyQualifiedTitle") - ).execute() + Field.of("title") + .concat(" by ", Field.of("author")) + .as_("fullyQualifiedTitle") + ) + .execute() + ) # [END str_concat] for res in result: print(res) + def str_contains_function(): # [START string_contains] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("articles") \ - .select( - Field.of("body").string_contains("Firestore") - .as_("isFirestoreRelated") - ).execute() + result = ( + client.pipeline() + .collection("articles") + .select(Field.of("body").string_contains("Firestore").as_("isFirestoreRelated")) + .execute() + ) # [END string_contains] for res in result: print(res) + def to_upper_function(): # [START to_upper] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("authors") \ - .select( - Field.of("name").to_upper() - .as_("uppercaseName") - ).execute() + result = ( + client.pipeline() + .collection("authors") + .select(Field.of("name").to_upper().as_("uppercaseName")) + .execute() + ) # [END to_upper] for res in result: print(res) + def to_lower_function(): # [START to_lower] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("authors") \ - .select( - Field.of("genre").to_lower().equal("fantasy") - .as_("isFantasy") - ).execute() + result = ( + client.pipeline() + .collection("authors") + .select(Field.of("genre").to_lower().equal("fantasy").as_("isFantasy")) + .execute() + ) # [END to_lower] for res in result: print(res) + def substr_function(): # [START substr_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .where(Field.of("title").starts_with("The ")) \ - .select( - Field.of("title").substring(4) - .as_("titleWithoutLeadingThe") - ).execute() + result = ( + client.pipeline() + 
.collection("books") + .where(Field.of("title").starts_with("The ")) + .select(Field.of("title").substring(4).as_("titleWithoutLeadingThe")) + .execute() + ) # [END substr_function] for res in result: print(res) + def str_reverse_function(): # [START str_reverse] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("name").string_reverse().as_("reversedName") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("name").string_reverse().as_("reversedName")) + .execute() + ) # [END str_reverse] for res in result: print(res) + def str_trim_function(): # [START trim_function] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("name").trim().as_("whitespaceTrimmedName") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("name").trim().as_("whitespaceTrimmedName")) + .execute() + ) # [END trim_function] for res in result: print(res) + def str_replace_function(): # not yet supported until GA pass + def str_split_function(): # not yet supported until GA pass + def unix_micros_to_timestamp_function(): # [START unix_micros_timestamp] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ + result = ( + client.pipeline() + .collection("documents") .select( - Field.of("createdAtMicros").unix_micros_to_timestamp().as_("createdAtString") - ).execute() + Field.of("createdAtMicros") + .unix_micros_to_timestamp() + .as_("createdAtString") + ) + .execute() + ) # [END unix_micros_timestamp] for res in result: print(res) + def unix_millis_to_timestamp_function(): # [START unix_millis_timestamp] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ + result = ( + client.pipeline() + .collection("documents") .select( - Field.of("createdAtMillis").unix_millis_to_timestamp().as_("createdAtString") - ).execute() + Field.of("createdAtMillis") + .unix_millis_to_timestamp() + .as_("createdAtString") + ) + .execute() + ) # [END unix_millis_timestamp] for res in result: print(res) + def unix_seconds_to_timestamp_function(): # [START unix_seconds_timestamp] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ + result = ( + client.pipeline() + .collection("documents") .select( - Field.of("createdAtSeconds").unix_seconds_to_timestamp().as_("createdAtString") - ).execute() + Field.of("createdAtSeconds") + .unix_seconds_to_timestamp() + .as_("createdAtString") + ) + .execute() + ) # [END unix_seconds_timestamp] for res in result: print(res) + def timestamp_add_function(): # [START timestamp_add] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ - .select( - Field.of("createdAt").timestamp_add("day", 3653).as_("expiresAt") - ).execute() + result = ( + client.pipeline() + .collection("documents") + .select(Field.of("createdAt").timestamp_add("day", 3653).as_("expiresAt")) + .execute() + ) # [END timestamp_add] for res in result: print(res) + def timestamp_sub_function(): # [START timestamp_sub] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ + result = ( + client.pipeline() + .collection("documents") .select( - 
Field.of("expiresAt").timestamp_subtract("day", 14).as_("sendWarningTimestamp") - ).execute() + Field.of("expiresAt") + .timestamp_subtract("day", 14) + .as_("sendWarningTimestamp") + ) + .execute() + ) # [END timestamp_sub] for res in result: print(res) + def timestamp_to_unix_micros_function(): # [START timestamp_unix_micros] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ - .select( - Field.of("dateString").timestamp_to_unix_micros().as_("unixMicros") - ).execute() + result = ( + client.pipeline() + .collection("documents") + .select(Field.of("dateString").timestamp_to_unix_micros().as_("unixMicros")) + .execute() + ) # [END timestamp_unix_micros] for res in result: print(res) + def timestamp_to_unix_millis_function(): # [START timestamp_unix_millis] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ - .select( - Field.of("dateString").timestamp_to_unix_millis().as_("unixMillis") - ).execute() + result = ( + client.pipeline() + .collection("documents") + .select(Field.of("dateString").timestamp_to_unix_millis().as_("unixMillis")) + .execute() + ) # [END timestamp_unix_millis] for res in result: print(res) + def timestamp_to_unix_seconds_function(): # [START timestamp_unix_seconds] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("documents") \ - .select( - Field.of("dateString").timestamp_to_unix_seconds().as_("unixSeconds") - ).execute() + result = ( + client.pipeline() + .collection("documents") + .select(Field.of("dateString").timestamp_to_unix_seconds().as_("unixSeconds")) + .execute() + ) # [END timestamp_unix_seconds] for res in result: print(res) + def cosine_distance_function(): # [START cosine_distance] from google.cloud.firestore_v1.pipeline_expressions import Field sample_vector = Vector([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( Field.of("embedding").cosine_distance(sample_vector).as_("cosineDistance") - ).execute() + ) + .execute() + ) # [END cosine_distance] for res in result: print(res) + def dot_product_function(): # [START dot_product] from google.cloud.firestore_v1.pipeline_expressions import Field sample_vector = Vector([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) - result = client.pipeline() \ - .collection("books") \ - .select( - Field.of("embedding").dot_product(sample_vector).as_("dotProduct") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("embedding").dot_product(sample_vector).as_("dotProduct")) + .execute() + ) # [END dot_product] for res in result: print(res) + def euclidean_distance_function(): # [START euclidean_distance] from google.cloud.firestore_v1.pipeline_expressions import Field sample_vector = Vector([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]) - result = client.pipeline() \ - .collection("books") \ + result = ( + client.pipeline() + .collection("books") .select( - Field.of("embedding").euclidean_distance(sample_vector).as_("euclideanDistance") - ).execute() + Field.of("embedding") + .euclidean_distance(sample_vector) + .as_("euclideanDistance") + ) + .execute() + ) # [END euclidean_distance] for res in result: print(res) + def vector_length_function(): # [START vector_length] from google.cloud.firestore_v1.pipeline_expressions import Field - result = client.pipeline() \ - .collection("books") \ - .select( - 
Field.of("embedding").vector_length().as_("vectorLength") - ).execute() + result = ( + client.pipeline() + .collection("books") + .select(Field.of("embedding").vector_length().as_("vectorLength")) + .execute() + ) # [END vector_length] for res in result: print(res)