Commit 6c53378

chore: disable semantic sys tests and doc tests (#1730)
1 parent de9efdb commit 6c53378

2 files changed: +19 -14 lines changed
bigframes/operations/semantics.py

Lines changed: 14 additions & 14 deletions
@@ -57,7 +57,7 @@ def agg(
     >>> bpd.options.compute.semantic_ops_confirmation_threshold = 25

     >>> import bigframes.ml.llm as llm
-    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001")
+    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001") # doctest: +SKIP

     >>> df = bpd.DataFrame(
     ...     {
@@ -68,7 +68,7 @@ def agg(
     ...     ],
     ...     "Year": [1997, 2013, 2010],
     ... })
-    >>> df.semantics.agg(
+    >>> df.semantics.agg( # doctest: +SKIP
     ...     "Find the first name shared by all actors in {Movies}. One word answer.",
     ...     model=model,
     ... )
@@ -326,10 +326,10 @@ def filter(self, instruction: str, model, ground_with_google_search: bool = Fals
     >>> bpd.options.compute.semantic_ops_confirmation_threshold = 25

     >>> import bigframes.ml.llm as llm
-    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001")
+    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001") # doctest: +SKIP

     >>> df = bpd.DataFrame({"country": ["USA", "Germany"], "city": ["Seattle", "Berlin"]})
-    >>> df.semantics.filter("{city} is the capital of {country}", model)
+    >>> df.semantics.filter("{city} is the capital of {country}", model) # doctest: +SKIP
       country    city
     1 Germany  Berlin
     <BLANKLINE>
@@ -440,10 +440,10 @@ def map(
     >>> bpd.options.compute.semantic_ops_confirmation_threshold = 25

     >>> import bigframes.ml.llm as llm
-    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001")
+    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001") # doctest: +SKIP

     >>> df = bpd.DataFrame({"ingredient_1": ["Burger Bun", "Soy Bean"], "ingredient_2": ["Beef Patty", "Bittern"]})
-    >>> df.semantics.map("What is the food made from {ingredient_1} and {ingredient_2}? One word only.", output_column="food", model=model)
+    >>> df.semantics.map("What is the food made from {ingredient_1} and {ingredient_2}? One word only.", output_column="food", model=model) # doctest: +SKIP
       ingredient_1 ingredient_2    food
     0   Burger Bun   Beef Patty  Burger
     <BLANKLINE>
@@ -563,12 +563,12 @@ def join(
     >>> bpd.options.compute.semantic_ops_confirmation_threshold = 25

     >>> import bigframes.ml.llm as llm
-    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001")
+    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001") # doctest: +SKIP

     >>> cities = bpd.DataFrame({'city': ['Seattle', 'Ottawa', 'Berlin', 'Shanghai', 'New Delhi']})
     >>> continents = bpd.DataFrame({'continent': ['North America', 'Africa', 'Asia']})

-    >>> cities.semantics.join(continents, "{city} is in {continent}", model)
+    >>> cities.semantics.join(continents, "{city} is in {continent}", model) # doctest: +SKIP
          city      continent
     0  Seattle  North America
     1   Ottawa  North America
@@ -704,10 +704,10 @@ def search(
     >>> bpd.options.compute.semantic_ops_confirmation_threshold = 25

     >>> import bigframes.ml.llm as llm
-    >>> model = llm.TextEmbeddingGenerator(model_name="text-embedding-005")
+    >>> model = llm.TextEmbeddingGenerator(model_name="text-embedding-005") # doctest: +SKIP

     >>> df = bpd.DataFrame({"creatures": ["salmon", "sea urchin", "frog", "chimpanzee"]})
-    >>> df.semantics.search("creatures", "monkey", top_k=1, model=model, score_column='distance')
+    >>> df.semantics.search("creatures", "monkey", top_k=1, model=model, score_column='distance') # doctest: +SKIP
        creatures  distance
     3 chimpanzee  0.635844
     <BLANKLINE>
@@ -805,14 +805,14 @@ def top_k(
     >>> bpd.options.compute.semantic_ops_confirmation_threshold = 25

     >>> import bigframes.ml.llm as llm
-    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001")
+    >>> model = llm.GeminiTextGenerator(model_name="gemini-2.0-flash-001") # doctest: +SKIP

     >>> df = bpd.DataFrame(
     ...     {
     ...     "Animals": ["Dog", "Bird", "Cat", "Horse"],
     ...     "Sounds": ["Woof", "Chirp", "Meow", "Neigh"],
     ... })
-    >>> df.semantics.top_k("{Animals} are more popular as pets", model=model, k=2)
+    >>> df.semantics.top_k("{Animals} are more popular as pets", model=model, k=2) # doctest: +SKIP
       Animals Sounds
     0     Dog   Woof
     2     Cat   Meow
@@ -1006,12 +1006,12 @@ def sim_join(
     >>> bpd.options.compute.semantic_ops_confirmation_threshold = 25

     >>> import bigframes.ml.llm as llm
-    >>> model = llm.TextEmbeddingGenerator(model_name="text-embedding-005")
+    >>> model = llm.TextEmbeddingGenerator(model_name="text-embedding-005") # doctest: +SKIP

     >>> df1 = bpd.DataFrame({'animal': ['monkey', 'spider']})
     >>> df2 = bpd.DataFrame({'animal': ['scorpion', 'baboon']})

-    >>> df1.semantics.sim_join(df2, left_on='animal', right_on='animal', model=model, top_k=1)
+    >>> df1.semantics.sim_join(df2, left_on='animal', right_on='animal', model=model, top_k=1) # doctest: +SKIP
       animal  animal_1
     0 monkey    baboon
     1 spider  scorpion
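
For reference, a minimal, self-contained sketch of what the # doctest: +SKIP directive does. It is not taken from the bigframes codebase; flaky_call is a hypothetical stand-in for the GeminiTextGenerator and TextEmbeddingGenerator calls skipped above. doctest still parses a skipped example, but never executes it, so examples that would need a live model endpoint no longer fail the doc tests.

    # Hypothetical illustration of doctest's +SKIP directive; not part of bigframes.
    import doctest


    def flaky_call():
        """Pretend this needs a live LLM endpoint.

        >>> flaky_call()  # doctest: +SKIP
        'answer from a remote model'
        >>> 1 + 1  # this example still runs normally
        2
        """
        raise RuntimeError("would need network access")


    if __name__ == "__main__":
        # Only the unskipped `1 + 1` example is attempted; the skipped call
        # above is parsed but never executed.
        print(doctest.testmod())

Run as a script, this should report a single attempted example and no failures, because the skipped line never runs.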

tests/system/large/operations/test_semantics.py

Lines changed: 5 additions & 0 deletions
@@ -22,6 +22,11 @@
 import bigframes
 from bigframes import dataframe, dtypes, exceptions, series

+pytest.skip(
+    "Semantics namespace is deprecated. ",
+    allow_module_level=True,
+)
+
 SEM_OP_EXP_OPTION = "experiments.semantic_operators"
 BLOB_EXP_OPTION = "experiments.blob"
 THRESHOLD_OPTION = "compute.semantic_ops_confirmation_threshold"
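
The test-file change relies on pytest's module-level skip. As a point of reference, here is a minimal sketch of that pattern (the file name and test body below are made up, not part of the bigframes suite): calling pytest.skip with allow_module_level=True at import time marks the whole module as skipped during collection, so none of the tests in the file run.

    # test_module_skip.py -- hypothetical standalone example of a module-level skip.
    import pytest

    # Without allow_module_level=True, calling pytest.skip() outside of a test
    # function is an error; with it, pytest skips the entire module at
    # collection time and reports the given reason.
    pytest.skip(
        "Semantics namespace is deprecated. ",
        allow_module_level=True,
    )


    def test_never_runs():
        # Collection stops at the skip above, so this assertion is never evaluated.
        assert False

Running pytest with -rs on such a file should list the module as skipped with the reason shown above.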
