Hi All,
I am still running into this same error, even though I have all the dependencies installed in a Jupyter notebook on AWS SageMaker, including:

- proto-plus = 1.18.0
- protobuf = 3.13.0
- google-cloud-bigquery = 2.12.0
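For reference, a quick sanity check like the following (just a sketch, assuming a standard pip/conda setup on SageMaker; it is not part of the original traceback) shows which versions the notebook kernel actually resolves, since these can differ from what `pip list` reports in another environment:

```python
# Sanity-check sketch: list the distribution versions the kernel resolves,
# and show which protobuf copy is actually imported.
import pkg_resources
import google.protobuf

for dist in ("protobuf", "proto-plus", "google-cloud-bigquery", "pandas-gbq"):
    print(dist, pkg_resources.get_distribution(dist).version)

# The file path reveals which installed copy of protobuf is first on sys.path.
print("imported protobuf:", google.protobuf.__version__, google.protobuf.__file__)
```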
But I get the following error:
```
ValueError Traceback (most recent call last)
<ipython-input-3-017a44a94d83> in <module>
----> 1 df = gbq.read_gbq('SELECT * FROM `cmg-consumer-analytics.122142903.ga_sessions_20210314` LIMIT 10', project_id='cmg-consumer-analytics')
~/anaconda3/envs/python3/lib/python3.6/site-packages/pandas/io/gbq.py in read_gbq(query, project_id, index_col, col_order, reauth, auth_local_webserver, dialect, location, configuration, credentials, use_bqstorage_api, private_key, verbose, progress_bar_type)
161 DataFrame.to_gbq : Write a DataFrame to Google BigQuery.
162 """
--> 163 pandas_gbq = _try_import()
164
165 kwargs: Dict[str, Union[str, bool]] = {}
~/anaconda3/envs/python3/lib/python3.6/site-packages/pandas/io/gbq.py in _try_import()
15 "See the docs: https://pandas-gbq.readthedocs.io."
16 )
---> 17 pandas_gbq = import_optional_dependency("pandas_gbq", extra=msg)
18 return pandas_gbq
19
~/anaconda3/envs/python3/lib/python3.6/site-packages/pandas/compat/_optional.py in import_optional_dependency(name, extra, raise_on_missing, on_version)
87 )
88 try:
---> 89 module = importlib.import_module(name)
90 except ImportError:
91 if raise_on_missing:
~/anaconda3/envs/python3/lib/python3.6/importlib/__init__.py in import_module(name, package)
124 break
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)
127
128
~/anaconda3/envs/python3/lib/python3.6/importlib/_bootstrap.py in _gcd_import(name, package, level)
~/anaconda3/envs/python3/lib/python3.6/importlib/_bootstrap.py in _find_and_load(name, import_)
~/anaconda3/envs/python3/lib/python3.6/importlib/_bootstrap.py in _find_and_load_unlocked(name, import_)
~/anaconda3/envs/python3/lib/python3.6/importlib/_bootstrap.py in _load_unlocked(spec)
~/anaconda3/envs/python3/lib/python3.6/importlib/_bootstrap_external.py in exec_module(self, module)
~/anaconda3/envs/python3/lib/python3.6/importlib/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)
~/anaconda3/envs/python3/lib/python3.6/site-packages/pandas_gbq/__init__.py in <module>
----> 1 from .gbq import to_gbq, read_gbq, Context, context # noqa
2
3 from ._version import get_versions
4
5 versions = get_versions()
~/anaconda3/envs/python3/lib/python3.6/site-packages/pandas_gbq/gbq.py in <module>
10 try:
11 from google.api_core import exceptions as google_exceptions
---> 12 from google.cloud import bigquery
13 except ImportError: # pragma: NO COVER
14 bigquery = None
~/anaconda3/envs/python3/lib/python3.6/site-packages/google/cloud/bigquery/__init__.py in <module>
33 __version__ = bigquery_version.__version__
34
---> 35 from google.cloud.bigquery.client import Client
36 from google.cloud.bigquery.dataset import AccessEntry
37 from google.cloud.bigquery.dataset import Dataset
~/anaconda3/envs/python3/lib/python3.6/site-packages/google/cloud/bigquery/client.py in <module>
54 from google.cloud.bigquery._helpers import _verify_job_config_type
55 from google.cloud.bigquery._http import Connection
---> 56 from google.cloud.bigquery import _pandas_helpers
57 from google.cloud.bigquery.dataset import Dataset
58 from google.cloud.bigquery.dataset import DatasetListItem
~/anaconda3/envs/python3/lib/python3.6/site-packages/google/cloud/bigquery/_pandas_helpers.py in <module>
34 pyarrow = None
35
---> 36 from google.cloud.bigquery import schema
37
38
~/anaconda3/envs/python3/lib/python3.6/site-packages/google/cloud/bigquery/schema.py in <module>
17 import collections
18
---> 19 from google.cloud.bigquery_v2 import types
20
21
~/anaconda3/envs/python3/lib/python3.6/site-packages/google/cloud/bigquery_v2/__init__.py in <module>
17
18
---> 19 from .types.encryption_config import EncryptionConfiguration
20 from .types.model import DeleteModelRequest
21 from .types.model import GetModelRequest
~/anaconda3/envs/python3/lib/python3.6/site-packages/google/cloud/bigquery_v2/types/__init__.py in <module>
16 #
17
---> 18 from .encryption_config import EncryptionConfiguration
19 from .model import (
20 DeleteModelRequest,
~/anaconda3/envs/python3/lib/python3.6/site-packages/google/cloud/bigquery_v2/types/encryption_config.py in <module>
27
28
---> 29 class EncryptionConfiguration(proto.Message):
30 r"""
31
~/anaconda3/envs/python3/lib/python3.6/site-packages/proto/message.py in __new__(mcls, name, bases, attrs)
212 desc = descriptor_pb2.DescriptorProto(
213 name=name,
--> 214 field=[i.descriptor for i in fields],
215 oneof_decl=[
216 descriptor_pb2.OneofDescriptorProto(name=i) for i in oneofs.keys()
~/anaconda3/envs/python3/lib/python3.6/site-packages/proto/message.py in <listcomp>(.0)
212 desc = descriptor_pb2.DescriptorProto(
213 name=name,
--> 214 field=[i.descriptor for i in fields],
215 oneof_decl=[
216 descriptor_pb2.OneofDescriptorProto(name=i) for i in oneofs.keys()
~/anaconda3/envs/python3/lib/python3.6/site-packages/proto/fields.py in descriptor(self)
109 type_name=type_name,
110 json_name=self.json_name,
--> 111 proto3_optional=self.optional,
112 )
113
ValueError: Protocol message FieldDescriptorProto has no "proto3_optional" field.
```
Why does the error still persist? Am I missing something else? Feedback appreciated.
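In case it helps with diagnosis, a check like the one below (a sketch only; my assumption is that this error means the protobuf runtime actually being imported predates the `proto3_optional` field) would show whether an older protobuf copy is shadowing the installed 3.13.0:

```python
# Diagnostic sketch (assumption: the ValueError indicates the imported protobuf
# runtime does not know about proto3_optional). Inspect the copy actually loaded.
import google.protobuf
from google.protobuf import descriptor_pb2

print("protobuf", google.protobuf.__version__, "loaded from", google.protobuf.__file__)
print("FieldDescriptorProto has proto3_optional:",
      "proto3_optional" in descriptor_pb2.FieldDescriptorProto.DESCRIPTOR.fields_by_name)
```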
_Originally posted by @Tsakunelson in https://github.com/googleapis/python-bigquery/issues/305#issuecomment-801102273_