Run TFMA for Keras models without compiling - tensorflow

I am training a Keras model in TensorFlow with a custom training loop, where the weights are updated via tf.GradientTape rather than model.fit(). As such, the model is never compiled before training.
After exporting the SavedModel, I can load it for inference without any issues:
model = tf.saved_model.load("path/to/saved_model")
pred_fn = model.signatures["serving_default"]
results = pred_fn(tf.constant(examples))
However, when I try loading it with TFMA using run_model_analysis:
eval_shared_model = tfma.default_eval_shared_model("path/to/saved_model", eval_config=eval_config)
eval_results = tfma.run_model_analysis(
    eval_shared_model=eval_shared_model,
    data_location=test_tfrecords_path,
    file_format="tfrecords"
)
I get the following error:
WARNING:tensorflow:No training configuration found in save file, so the model was *not* compiled. Compile it manually.
-----------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-107-19f51f42014a> in <module>
2 eval_shared_model=eval_shared_model,
3 data_location=test_tfrecords_path,
----> 4 file_format="tfrecords"
5 )
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/tensorflow_model_analysis/api/model_eval_lib.py in run_model_analysis(eval_shared_model, eval_config, data_location, file_format, output_path, extractors, evaluators, writers, pipeline_options, slice_spec, write_config, compute_confidence_intervals, min_slice_size, random_seed_for_testing, schema)
1200 random_seed_for_testing=random_seed_for_testing,
1201 tensor_adapter_config=tensor_adapter_config,
-> 1202 schema=schema))
1203 # pylint: enable=no-value-for-parameter
1204
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pvalue.py in __or__(self, ptransform)
138
139 def __or__(self, ptransform):
--> 140 return self.pipeline.apply(ptransform, self)
141
142
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
575 if isinstance(transform, ptransform._NamedPTransform):
576 return self.apply(
--> 577 transform.transform, pvalueish, label or transform.label)
578
579 if not isinstance(transform, ptransform.PTransform):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
585 try:
586 old_label, transform.label = transform.label, label
--> 587 return self.apply(transform, pvalueish)
588 finally:
589 transform.label = old_label
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
628 transform.type_check_inputs(pvalueish)
629
--> 630 pvalueish_result = self.runner.apply(transform, pvalueish, self._options)
631
632 if type_options is not None and type_options.pipeline_type_check:
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply(self, transform, input, options)
196 m = getattr(self, 'apply_%s' % cls.__name__, None)
197 if m:
--> 198 return m(transform, input, options)
199 raise NotImplementedError(
200 'Execution of [%s] not implemented in runner %s.' % (transform, self))
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply_PTransform(self, transform, input, options)
226 def apply_PTransform(self, transform, input, options):
227 # The base case of apply is to call the transform's expand.
--> 228 return transform.expand(input)
229
230 def run_transform(self,
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/transforms/ptransform.py in expand(self, pcoll)
921 # Might not be a function.
922 pass
--> 923 return self._fn(pcoll, *args, **kwargs)
924
925 def default_label(self):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/tensorflow_model_analysis/api/model_eval_lib.py in ExtractEvaluateAndWriteResults(examples, eval_shared_model, eval_config, extractors, evaluators, writers, output_path, display_only_data_location, display_only_file_format, slice_spec, write_config, compute_confidence_intervals, min_slice_size, random_seed_for_testing, tensor_adapter_config, schema)
1079 | 'ExtractAndEvaluate' >> ExtractAndEvaluate(
1080 extractors=extractors, evaluators=evaluators)
-> 1081 | 'WriteResults' >> WriteResults(writers=writers))
1082
1083 return beam.pvalue.PDone(examples.pipeline)
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pvalue.py in __or__(self, ptransform)
138
139 def __or__(self, ptransform):
--> 140 return self.pipeline.apply(ptransform, self)
141
142
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
575 if isinstance(transform, ptransform._NamedPTransform):
576 return self.apply(
--> 577 transform.transform, pvalueish, label or transform.label)
578
579 if not isinstance(transform, ptransform.PTransform):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
585 try:
586 old_label, transform.label = transform.label, label
--> 587 return self.apply(transform, pvalueish)
588 finally:
589 transform.label = old_label
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
628 transform.type_check_inputs(pvalueish)
629
--> 630 pvalueish_result = self.runner.apply(transform, pvalueish, self._options)
631
632 if type_options is not None and type_options.pipeline_type_check:
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply(self, transform, input, options)
196 m = getattr(self, 'apply_%s' % cls.__name__, None)
197 if m:
--> 198 return m(transform, input, options)
199 raise NotImplementedError(
200 'Execution of [%s] not implemented in runner %s.' % (transform, self))
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply_PTransform(self, transform, input, options)
226 def apply_PTransform(self, transform, input, options):
227 # The base case of apply is to call the transform's expand.
--> 228 return transform.expand(input)
229
230 def run_transform(self,
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/transforms/ptransform.py in expand(self, pcoll)
921 # Might not be a function.
922 pass
--> 923 return self._fn(pcoll, *args, **kwargs)
924
925 def default_label(self):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/tensorflow_model_analysis/api/model_eval_lib.py in ExtractAndEvaluate(extracts, extractors, evaluators)
818 for v in evaluators:
819 if v.run_after == x.stage_name:
--> 820 update(evaluation, extracts | v.stage_name >> v.ptransform)
821 for v in evaluators:
822 if v.run_after == extractor.LAST_EXTRACTOR_STAGE_NAME:
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pvalue.py in __or__(self, ptransform)
138
139 def __or__(self, ptransform):
--> 140 return self.pipeline.apply(ptransform, self)
141
142
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
575 if isinstance(transform, ptransform._NamedPTransform):
576 return self.apply(
--> 577 transform.transform, pvalueish, label or transform.label)
578
579 if not isinstance(transform, ptransform.PTransform):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
585 try:
586 old_label, transform.label = transform.label, label
--> 587 return self.apply(transform, pvalueish)
588 finally:
589 transform.label = old_label
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
628 transform.type_check_inputs(pvalueish)
629
--> 630 pvalueish_result = self.runner.apply(transform, pvalueish, self._options)
631
632 if type_options is not None and type_options.pipeline_type_check:
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply(self, transform, input, options)
196 m = getattr(self, 'apply_%s' % cls.__name__, None)
197 if m:
--> 198 return m(transform, input, options)
199 raise NotImplementedError(
200 'Execution of [%s] not implemented in runner %s.' % (transform, self))
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply_PTransform(self, transform, input, options)
226 def apply_PTransform(self, transform, input, options):
227 # The base case of apply is to call the transform's expand.
--> 228 return transform.expand(input)
229
230 def run_transform(self,
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/transforms/ptransform.py in expand(self, pcoll)
921 # Might not be a function.
922 pass
--> 923 return self._fn(pcoll, *args, **kwargs)
924
925 def default_label(self):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/tensorflow_model_analysis/evaluators/metrics_and_plots_evaluator_v2.py in _EvaluateMetricsAndPlots(extracts, eval_config, eval_shared_models, metrics_key, plots_key, validations_key, schema, random_seed_for_testing)
757 plots_key=plots_key,
758 schema=schema,
--> 759 random_seed_for_testing=random_seed_for_testing))
760
761 for k, v in evaluation.items():
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pvalue.py in __or__(self, ptransform)
138
139 def __or__(self, ptransform):
--> 140 return self.pipeline.apply(ptransform, self)
141
142
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
575 if isinstance(transform, ptransform._NamedPTransform):
576 return self.apply(
--> 577 transform.transform, pvalueish, label or transform.label)
578
579 if not isinstance(transform, ptransform.PTransform):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
585 try:
586 old_label, transform.label = transform.label, label
--> 587 return self.apply(transform, pvalueish)
588 finally:
589 transform.label = old_label
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/pipeline.py in apply(self, transform, pvalueish, label)
628 transform.type_check_inputs(pvalueish)
629
--> 630 pvalueish_result = self.runner.apply(transform, pvalueish, self._options)
631
632 if type_options is not None and type_options.pipeline_type_check:
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply(self, transform, input, options)
196 m = getattr(self, 'apply_%s' % cls.__name__, None)
197 if m:
--> 198 return m(transform, input, options)
199 raise NotImplementedError(
200 'Execution of [%s] not implemented in runner %s.' % (transform, self))
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/runners/runner.py in apply_PTransform(self, transform, input, options)
226 def apply_PTransform(self, transform, input, options):
227 # The base case of apply is to call the transform's expand.
--> 228 return transform.expand(input)
229
230 def run_transform(self,
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/apache_beam/transforms/ptransform.py in expand(self, pcoll)
921 # Might not be a function.
922 pass
--> 923 return self._fn(pcoll, *args, **kwargs)
924
925 def default_label(self):
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/tensorflow_model_analysis/evaluators/metrics_and_plots_evaluator_v2.py in _ComputeMetricsAndPlots(extracts, eval_config, metrics_specs, eval_shared_models, metrics_key, plots_key, schema, random_seed_for_testing)
582 if eval_shared_model.model_type == constants.TF_KERAS:
583 keras_specs = keras_util.metrics_specs_from_keras(
--> 584 model_name, eval_shared_model.model_loader)
585 metrics_specs = keras_specs + metrics_specs[:]
586 # TODO(mdreves): Add support for calling keras.evaluate().
~/.pyenv/versions/miniconda3-4.3.30/envs/tensorflow/lib/python3.7/site-packages/tensorflow_model_analysis/evaluators/keras_util.py in metrics_specs_from_keras(model_name, model_loader)
60 # y_true, y_pred as inputs so it can't be calculated via standard inputs so
61 # we remove it.
---> 62 metrics.extend(model.compiled_loss.metrics[1:])
63 metrics.extend(model.compiled_metrics.metrics)
64 metric_names = [m.name for m in metrics]
AttributeError: 'NoneType' object has no attribute 'metrics'
I suspect this might be because I am not compiling the Keras model before exporting it. Does TFMA only support compiled models?
I am using tensorflow==2.3.0 and tensorflow-model-analysis==0.22.1.

Yes, your understanding is correct: the error occurs because the model is not compiled and therefore has no metrics attached to it.
This is also evident from the TensorFlow Model Analysis documentation, which states:
Note: Only training time metrics added via model.compile (not model.add_metric) are currently supported for keras.
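In practice this means attaching the metrics via model.compile() right before exporting, even though training itself uses a custom gradient-tape loop; compiling only registers the loss/metric objects and does not change the trained weights. A minimal sketch (the loss and metric choices below are placeholders for whatever fits your task):
import tensorflow as tf

# Assumption: `model` is the trained Keras model from the custom training loop.
# Compiling here does not retrain anything; it only attaches the metric objects
# that TFMA later reads from model.compiled_loss / model.compiled_metrics.
model.compile(
    loss=tf.keras.losses.BinaryCrossentropy(),
    metrics=[tf.keras.metrics.AUC(name="auc"),
             tf.keras.metrics.BinaryAccuracy(name="accuracy")],
)
model.save("path/to/saved_model")  # then point tfma.default_eval_shared_model here
Alternatively, metrics can be declared on the TFMA side via metrics_specs in the EvalConfig (for example with tfma.metrics.specs_from_metrics([...])), but on tensorflow-model-analysis==0.22.1 an uncompiled Keras SavedModel may still hit the same keras_util code path, so compiling before export is the more reliable route.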

Related

How to understand read_excel in Pandas

I'm trying to import some public data from the web but can't understand the error.
My code:
import pandas as pd
df2022 = pd.read_excel("https://ofslivefs.blob.core.windows.net/files/NSS%20data%202022/September/NSS2022_summary_data.xlsx")
It returns this:
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
/var/folders/v_/yq26pm194xj5ckqy8p_njwc00000gn/T/ipykernel_89117/2424267382.py in <module>
----> 1 df2022 = pd.read_excel("https://ofslivefs.blob.core.windows.net/files/NSS%20data%202022/September/NSS2022_summary_data.xlsx")
~/opt/anaconda3/lib/python3.9/site-packages/pandas/util/_decorators.py in wrapper(*args, **kwargs)
209 else:
210 kwargs[new_arg_name] = new_arg_value
--> 211 return func(*args, **kwargs)
212
213 return cast(F, wrapper)
~/opt/anaconda3/lib/python3.9/site-packages/pandas/util/_decorators.py in wrapper(*args, **kwargs)
329 stacklevel=find_stack_level(),
330 )
--> 331 return func(*args, **kwargs)
332
333 # error: "Callable[[VarArg(Any), KwArg(Any)], Any]" has no
~/opt/anaconda3/lib/python3.9/site-packages/pandas/io/excel/_base.py in read_excel(io, sheet_name, header, names, index_col, usecols, squeeze, dtype, engine, converters, true_values, false_values, skiprows, nrows, na_values, keep_default_na, na_filter, verbose, parse_dates, date_parser, thousands, decimal, comment, skipfooter, convert_float, mangle_dupe_cols, storage_options)
480 if not isinstance(io, ExcelFile):
481 should_close = True
--> 482 io = ExcelFile(io, storage_options=storage_options, engine=engine)
483 elif engine and engine != io.engine:
484 raise ValueError(
~/opt/anaconda3/lib/python3.9/site-packages/pandas/io/excel/_base.py in __init__(self, path_or_buffer, engine, storage_options)
1693 self.storage_options = storage_options
1694
-> 1695 self._reader = self._engines[engine](self._io, storage_options=storage_options)
1696
1697 def __fspath__(self):
~/opt/anaconda3/lib/python3.9/site-packages/pandas/io/excel/_openpyxl.py in __init__(self, filepath_or_buffer, storage_options)
555 """
556 import_optional_dependency("openpyxl")
--> 557 super().__init__(filepath_or_buffer, storage_options=storage_options)
558
559 @property
~/opt/anaconda3/lib/python3.9/site-packages/pandas/io/excel/_base.py in __init__(self, filepath_or_buffer, storage_options)
543 self.handles.handle.seek(0)
544 try:
--> 545 self.book = self.load_workbook(self.handles.handle)
546 except Exception:
547 self.close()
~/opt/anaconda3/lib/python3.9/site-packages/pandas/io/excel/_openpyxl.py in load_workbook(self, filepath_or_buffer)
566 from openpyxl import load_workbook
567
--> 568 return load_workbook(
569 filepath_or_buffer, read_only=True, data_only=True, keep_links=False
570 )
~/opt/anaconda3/lib/python3.9/site-packages/openpyxl/reader/excel.py in load_workbook(filename, read_only, keep_vba, data_only, keep_links)
315 reader = ExcelReader(filename, read_only, keep_vba,
316 data_only, keep_links)
--> 317 reader.read()
318 return reader.wb
~/opt/anaconda3/lib/python3.9/site-packages/openpyxl/reader/excel.py in read(self)
281 apply_stylesheet(self.archive, self.wb)
282 self.read_worksheets()
--> 283 self.parser.assign_names()
284 if not self.read_only:
285 self.archive.close()
~/opt/anaconda3/lib/python3.9/site-packages/openpyxl/reader/workbook.py in assign_names(self)
100 reserved = defn.is_reserved
101 if reserved in ("Print_Titles", "Print_Area"):
--> 102 sheet = self.wb._sheets[defn.localSheetId]
103 if reserved == "Print_Titles":
104 rows, cols = _unpack_print_titles(defn)
IndexError: list index out of range
At this point I would traditionally download the file and convert it to CSV, but I want to access it straight from the web.
Specifying the sheet (which I guess I could access with sheet_name="Q27 Providers (benchmarked)") doesn't work either.
It looks like the xlsx file is broken, so it can't be read directly. Did you try to open that xlsx file yourself?
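One way to narrow it down (a sketch; the URL and sheet name are simply copied from the question) is to download the raw bytes yourself and hand them to pandas, which separates a download problem from a parsing problem inside openpyxl:
import io
import requests
import pandas as pd

url = ("https://ofslivefs.blob.core.windows.net/files/"
       "NSS%20data%202022/September/NSS2022_summary_data.xlsx")

# Fetch the workbook explicitly so the bytes can be inspected if parsing fails.
resp = requests.get(url)
resp.raise_for_status()

# Note the parameter is sheet_name (with an underscore), not sheetname.
df2022 = pd.read_excel(io.BytesIO(resp.content),
                       sheet_name="Q27 Providers (benchmarked)")
If openpyxl raises the same IndexError on the downloaded bytes, the workbook's defined names really are inconsistent with its sheet list, and re-saving it once in Excel/LibreOffice (or exporting to CSV) is the practical workaround.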

How to load a dataframe to postgresql 14

I have tried to load my dataframe into PostgreSQL, specifically into the PostgreSQL 14 server (I have two PostgreSQL servers: 9.3 running on port 5434 and 14 running on port 5433), with this command:
from sqlalchemy import create_engine
engine = create_engine('postgresql://postgres:password@localhost:5433/MYDATABASE')
df.to_sql('My_Table', engine)
This is the error I get. I've also tried other approaches, but it's always the same error; I guess it's related to the two servers I'm running:
---------------------------------------------------------------------------
OperationalError Traceback (most recent call last)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\base.py:3280, in Engine._wrap_pool_connect(self, fn, connection)
3279 try:
-> 3280 return fn()
3281 except dialect.dbapi.Error as e:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:310, in Pool.connect(self)
303 """Return a DBAPI connection from the pool.
304
305 The connection is instrumented such that when its
(...)
308
309 """
--> 310 return _ConnectionFairy._checkout(self)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:868, in _ConnectionFairy._checkout(cls, pool, threadconns, fairy)
867 if not fairy:
--> 868 fairy = _ConnectionRecord.checkout(pool)
870 fairy._pool = pool
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:476, in _ConnectionRecord.checkout(cls, pool)
474 @classmethod
475 def checkout(cls, pool):
--> 476 rec = pool._do_get()
477 try:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\impl.py:145, in QueuePool._do_get(self)
144 except:
--> 145 with util.safe_reraise():
146 self._dec_overflow()
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\langhelpers.py:70, in safe_reraise.__exit__(self, type_, value, traceback)
69 if not self.warn_only:
---> 70 compat.raise_(
71 exc_value,
72 with_traceback=exc_tb,
73 )
74 else:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\compat.py:207, in raise_(***failed resolving arguments***)
206 try:
--> 207 raise exception
208 finally:
209 # credit to
210 # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
211 # as the __traceback__ object creates a cycle
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\impl.py:143, in QueuePool._do_get(self)
142 try:
--> 143 return self._create_connection()
144 except:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:256, in Pool._create_connection(self)
254 """Called by subclasses to create a new ConnectionRecord."""
--> 256 return _ConnectionRecord(self)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:371, in _ConnectionRecord.__init__(self, pool, connect)
370 if connect:
--> 371 self.__connect()
372 self.finalize_callback = deque()
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:665, in _ConnectionRecord.__connect(self)
664 except Exception as e:
--> 665 with util.safe_reraise():
666 pool.logger.debug("Error on connect(): %s", e)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\langhelpers.py:70, in safe_reraise.__exit__(self, type_, value, traceback)
69 if not self.warn_only:
---> 70 compat.raise_(
71 exc_value,
72 with_traceback=exc_tb,
73 )
74 else:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\compat.py:207, in raise_(***failed resolving arguments***)
206 try:
--> 207 raise exception
208 finally:
209 # credit to
210 # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
211 # as the __traceback__ object creates a cycle
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:661, in _ConnectionRecord.__connect(self)
660 self.starttime = time.time()
--> 661 self.dbapi_connection = connection = pool._invoke_creator(self)
662 pool.logger.debug("Created new connection %r", connection)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\create.py:590, in create_engine.<locals>.connect(connection_record)
589 return connection
--> 590 return dialect.connect(*cargs, **cparams)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\default.py:597, in DefaultDialect.connect(self, *cargs, **cparams)
595 def connect(self, *cargs, **cparams):
596 # inherits the docstring from interfaces.Dialect.connect
--> 597 return self.dbapi.connect(*cargs, **cparams)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\psycopg2\__init__.py:122, in connect(dsn, connection_factory, cursor_factory, **kwargs)
121 dsn = _ext.make_dsn(dsn, **kwargs)
--> 122 conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
123 if cursor_factory is not None:
OperationalError:
The above exception was the direct cause of the following exception:
OperationalError Traceback (most recent call last)
Input In [105], in <cell line: 3>()
1 from sqlalchemy import create_engine
2 engine = create_engine('postgresql://postgres:password@localhost:5433/MYDATABASE')
----> 3 df.to_sql('My_Table', engine)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\core\generic.py:2951, in NDFrame.to_sql(self, name, con, schema, if_exists, index, index_label, chunksize, dtype, method)
2794 """
2795 Write records stored in a DataFrame to a SQL database.
2796
(...)
2947 [(1,), (None,), (2,)]
2948 """ # noqa:E501
2949 from pandas.io import sql
-> 2951 return sql.to_sql(
2952 self,
2953 name,
2954 con,
2955 schema=schema,
2956 if_exists=if_exists,
2957 index=index,
2958 index_label=index_label,
2959 chunksize=chunksize,
2960 dtype=dtype,
2961 method=method,
2962 )
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\sql.py:697, in to_sql(frame, name, con, schema, if_exists, index, index_label, chunksize, dtype, method, engine, **engine_kwargs)
692 elif not isinstance(frame, DataFrame):
693 raise NotImplementedError(
694 "'frame' argument should be either a Series or a DataFrame"
695 )
--> 697 return pandas_sql.to_sql(
698 frame,
699 name,
700 if_exists=if_exists,
701 index=index,
702 index_label=index_label,
703 schema=schema,
704 chunksize=chunksize,
705 dtype=dtype,
706 method=method,
707 engine=engine,
708 **engine_kwargs,
709 )
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\sql.py:1729, in SQLDatabase.to_sql(self, frame, name, if_exists, index, index_label, schema, chunksize, dtype, method, engine, **engine_kwargs)
1679 """
1680 Write records stored in a DataFrame to a SQL database.
1681
(...)
1725 Any additional kwargs are passed to the engine.
1726 """
1727 sql_engine = get_engine(engine)
-> 1729 table = self.prep_table(
1730 frame=frame,
1731 name=name,
1732 if_exists=if_exists,
1733 index=index,
1734 index_label=index_label,
1735 schema=schema,
1736 dtype=dtype,
1737 )
1739 total_inserted = sql_engine.insert_records(
1740 table=table,
1741 con=self.connectable,
(...)
1748 **engine_kwargs,
1749 )
1751 self.check_case_sensitive(name=name, schema=schema)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\sql.py:1628, in SQLDatabase.prep_table(self, frame, name, if_exists, index, index_label, schema, dtype)
1616 raise ValueError(f"The type of {col} is not a SQLAlchemy type")
1618 table = SQLTable(
1619 name,
1620 self,
(...)
1626 dtype=dtype,
1627 )
-> 1628 table.create()
1629 return table
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\sql.py:831, in SQLTable.create(self)
830 def create(self):
--> 831 if self.exists():
832 if self.if_exists == "fail":
833 raise ValueError(f"Table '{self.name}' already exists.")
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\sql.py:815, in SQLTable.exists(self)
814 def exists(self):
--> 815 return self.pd_sql.has_table(self.name, self.schema)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\pandas\io\sql.py:1762, in SQLDatabase.has_table(self, name, schema)
1759 if _gt14():
1760 from sqlalchemy import inspect
-> 1762 insp = inspect(self.connectable)
1763 return insp.has_table(name, schema or self.meta.schema)
1764 else:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\inspection.py:64, in inspect(subject, raiseerr)
62 if reg is True:
63 return subject
---> 64 ret = reg(subject)
65 if ret is not None:
66 break
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\reflection.py:182, in Inspector._engine_insp(bind)
180 @inspection._inspects(Engine)
181 def _engine_insp(bind):
--> 182 return Inspector._construct(Inspector._init_engine, bind)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\reflection.py:117, in Inspector._construct(cls, init, bind)
114 cls = bind.dialect.inspector
116 self = cls.__new__(cls)
--> 117 init(self, bind)
118 return self
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\reflection.py:128, in Inspector._init_engine(self, engine)
126 def _init_engine(self, engine):
127 self.bind = self.engine = engine
--> 128 engine.connect().close()
129 self._op_context_requires_connect = True
130 self.dialect = self.engine.dialect
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\base.py:3234, in Engine.connect(self, close_with_result)
3219 def connect(self, close_with_result=False):
3220 """Return a new :class:`_engine.Connection` object.
3221
3222 The :class:`_engine.Connection` object is a facade that uses a DBAPI
(...)
3231
3232 """
-> 3234 return self._connection_cls(self, close_with_result=close_with_result)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\base.py:96, in Connection.__init__(self, engine, connection, close_with_result, _branch_from, _execution_options, _dispatch, _has_events, _allow_revalidate)
91 self._has_events = _branch_from._has_events
92 else:
93 self._dbapi_connection = (
94 connection
95 if connection is not None
---> 96 else engine.raw_connection()
97 )
99 self._transaction = self._nested_transaction = None
100 self.__savepoint_seq = 0
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\base.py:3313, in Engine.raw_connection(self, _connection)
3291 def raw_connection(self, _connection=None):
3292 """Return a "raw" DBAPI connection from the connection pool.
3293
3294 The returned object is a proxied version of the DBAPI
(...)
3311
3312 """
-> 3313 return self._wrap_pool_connect(self.pool.connect, _connection)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\base.py:3283, in Engine._wrap_pool_connect(self, fn, connection)
3281 except dialect.dbapi.Error as e:
3282 if connection is None:
-> 3283 Connection._handle_dbapi_exception_noconnection(
3284 e, dialect, self
3285 )
3286 else:
3287 util.raise_(
3288 sys.exc_info()[1], with_traceback=sys.exc_info()[2]
3289 )
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\base.py:2117, in Connection._handle_dbapi_exception_noconnection(cls, e, dialect, engine)
2115 util.raise_(newraise, with_traceback=exc_info[2], from_=e)
2116 elif should_wrap:
-> 2117 util.raise_(
2118 sqlalchemy_exception, with_traceback=exc_info[2], from_=e
2119 )
2120 else:
2121 util.raise_(exc_info[1], with_traceback=exc_info[2])
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\compat.py:207, in raise_(***failed resolving arguments***)
204 exception.__cause__ = replace_context
206 try:
--> 207 raise exception
208 finally:
209 # credit to
210 # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
211 # as the __traceback__ object creates a cycle
212 del exception, replace_context, from_, with_traceback
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\base.py:3280, in Engine._wrap_pool_connect(self, fn, connection)
3278 dialect = self.dialect
3279 try:
-> 3280 return fn()
3281 except dialect.dbapi.Error as e:
3282 if connection is None:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:310, in Pool.connect(self)
302 def connect(self):
303 """Return a DBAPI connection from the pool.
304
305 The connection is instrumented such that when its
(...)
308
309 """
--> 310 return _ConnectionFairy._checkout(self)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:868, in _ConnectionFairy._checkout(cls, pool, threadconns, fairy)
865 @classmethod
866 def _checkout(cls, pool, threadconns=None, fairy=None):
867 if not fairy:
--> 868 fairy = _ConnectionRecord.checkout(pool)
870 fairy._pool = pool
871 fairy._counter = 0
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:476, in _ConnectionRecord.checkout(cls, pool)
474 @classmethod
475 def checkout(cls, pool):
--> 476 rec = pool._do_get()
477 try:
478 dbapi_connection = rec.get_connection()
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\impl.py:145, in QueuePool._do_get(self)
143 return self._create_connection()
144 except:
--> 145 with util.safe_reraise():
146 self._dec_overflow()
147 else:
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\langhelpers.py:70, in safe_reraise.__exit__(self, type_, value, traceback)
68 self._exc_info = None # remove potential circular references
69 if not self.warn_only:
---> 70 compat.raise_(
71 exc_value,
72 with_traceback=exc_tb,
73 )
74 else:
75 if not compat.py3k and self._exc_info and self._exc_info[1]:
76 # emulate Py3K's behavior of telling us when an exception
77 # occurs in an exception handler.
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\compat.py:207, in raise_(***failed resolving arguments***)
204 exception.__cause__ = replace_context
206 try:
--> 207 raise exception
208 finally:
209 # credit to
210 # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
211 # as the __traceback__ object creates a cycle
212 del exception, replace_context, from_, with_traceback
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\impl.py:143, in QueuePool._do_get(self)
141 if self._inc_overflow():
142 try:
--> 143 return self._create_connection()
144 except:
145 with util.safe_reraise():
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:256, in Pool._create_connection(self)
253 def _create_connection(self):
254 """Called by subclasses to create a new ConnectionRecord."""
--> 256 return _ConnectionRecord(self)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:371, in _ConnectionRecord.__init__(self, pool, connect)
369 self.__pool = pool
370 if connect:
--> 371 self.__connect()
372 self.finalize_callback = deque()
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:665, in _ConnectionRecord.__connect(self)
663 self.fresh = True
664 except Exception as e:
--> 665 with util.safe_reraise():
666 pool.logger.debug("Error on connect(): %s", e)
667 else:
668 # in SQLAlchemy 1.4 the first_connect event is not used by
669 # the engine, so this will usually not be set
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\langhelpers.py:70, in safe_reraise.__exit__(self, type_, value, traceback)
68 self._exc_info = None # remove potential circular references
69 if not self.warn_only:
---> 70 compat.raise_(
71 exc_value,
72 with_traceback=exc_tb,
73 )
74 else:
75 if not compat.py3k and self._exc_info and self._exc_info[1]:
76 # emulate Py3K's behavior of telling us when an exception
77 # occurs in an exception handler.
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\util\compat.py:207, in raise_(***failed resolving arguments***)
204 exception.__cause__ = replace_context
206 try:
--> 207 raise exception
208 finally:
209 # credit to
210 # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
211 # as the __traceback__ object creates a cycle
212 del exception, replace_context, from_, with_traceback
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\pool\base.py:661, in _ConnectionRecord.__connect(self)
659 try:
660 self.starttime = time.time()
--> 661 self.dbapi_connection = connection = pool._invoke_creator(self)
662 pool.logger.debug("Created new connection %r", connection)
663 self.fresh = True
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\create.py:590, in create_engine.<locals>.connect(connection_record)
588 if connection is not None:
589 return connection
--> 590 return dialect.connect(*cargs, **cparams)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\sqlalchemy\engine\default.py:597, in DefaultDialect.connect(self, *cargs, **cparams)
595 def connect(self, *cargs, **cparams):
596 # inherits the docstring from interfaces.Dialect.connect
--> 597 return self.dbapi.connect(*cargs, **cparams)
File ~\AppData\Local\Programs\Python\Python310\lib\site-packages\psycopg2\__init__.py:122, in connect(dsn, connection_factory, cursor_factory, **kwargs)
119 kwasync['async_'] = kwargs.pop('async_')
121 dsn = _ext.make_dsn(dsn, **kwargs)
--> 122 conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
123 if cursor_factory is not None:
124 conn.cursor_factory = cursor_factory
OperationalError: (psycopg2.OperationalError)
(Background on this error at: https://sqlalche.me/e/14/e3q8)
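For what it's worth, a bare connectivity check (a sketch; the user, password and database name are just the placeholders from the question) helps separate a connection problem from anything df.to_sql() does:
from sqlalchemy import create_engine, text

# Port 5433 is the PostgreSQL 14 instance according to the question;
# adjust the credentials and database name as needed.
engine = create_engine("postgresql+psycopg2://postgres:password@localhost:5433/MYDATABASE")

# If this raises OperationalError as well, the problem is the connection itself
# (server not listening on that port, pg_hba.conf rules, wrong password),
# not the DataFrame load.
with engine.connect() as conn:
    print(conn.execute(text("SELECT version()")).scalar())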

tight_layout KeyError default, matplotlib widget

Using JupyterLab, I receive a KeyError: 'default' when using plt.tight_layout() in combination with %matplotlib widget. The following code reproduces the issue:
import matplotlib.pyplot as plt
import numpy as np
%matplotlib widget
x=np.linspace(0,10)
y=x**2
plt.plot(x,y)
plt.tight_layout()
The complete error message is the following:
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/anaconda3/lib/python3.8/site-packages/matplotlib/backend_bases.py in _wait_cursor_for_draw_cm(self)
3024 try:
-> 3025 self.canvas.set_cursor(tools.Cursors.WAIT)
3026 yield
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_webagg_core.py in set_cursor(self, cursor)
209 }, cursor=cursor)
--> 210 self.send_event('cursor', cursor=cursor)
211
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_webagg_core.py in send_event(self, event_type, **kwargs)
391 if self.manager:
--> 392 self.manager._send_event(event_type, **kwargs)
393
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_webagg_core.py in _send_event(self, event_type, **kwargs)
540 for s in self.web_sockets:
--> 541 s.send_json(payload)
542
~/anaconda3/lib/python3.8/site-packages/ipympl/backend_nbagg.py in send_json(self, content)
180 if content['type'] == 'cursor':
--> 181 self._cursor = cursors_str[content['cursor']]
182
KeyError: 'wait'
During handling of the above exception, another exception occurred:
KeyError Traceback (most recent call last)
/tmp/ipykernel_119035/3466922198.py in <module>
7 y=x**2
8 plt.plot(x,y)
----> 9 plt.tight_layout()
~/anaconda3/lib/python3.8/site-packages/matplotlib/pyplot.py in tight_layout(pad, h_pad, w_pad, rect)
2300 @_copy_docstring_and_deprecators(Figure.tight_layout)
2301 def tight_layout(*, pad=1.08, h_pad=None, w_pad=None, rect=None):
-> 2302 return gcf().tight_layout(pad=pad, h_pad=h_pad, w_pad=w_pad, rect=rect)
2303
2304
~/anaconda3/lib/python3.8/site-packages/matplotlib/figure.py in tight_layout(self, pad, h_pad, w_pad, rect)
3186 "compatible with tight_layout, so results "
3187 "might be incorrect.")
-> 3188 renderer = _get_renderer(self)
3189 with getattr(renderer, "_draw_disabled", nullcontext)():
3190 kwargs = get_tight_layout_figure(
~/anaconda3/lib/python3.8/site-packages/matplotlib/backend_bases.py in _get_renderer(figure, print_method)
1542 figure.canvas._get_output_canvas(None, fmt), f"print_{fmt}")
1543 try:
-> 1544 print_method(io.BytesIO())
1545 except Done as exc:
1546 renderer, = figure._cachedRenderer, = exc.args
~/anaconda3/lib/python3.8/site-packages/matplotlib/backend_bases.py in wrapper(*args, **kwargs)
1641 kwargs.pop(arg)
1642
-> 1643 return func(*args, **kwargs)
1644
1645 return wrapper
~/anaconda3/lib/python3.8/site-packages/matplotlib/_api/deprecation.py in wrapper(*inner_args, **inner_kwargs)
410 else deprecation_addendum,
411 **kwargs)
--> 412 return func(*inner_args, **inner_kwargs)
413
414 DECORATORS[wrapper] = decorator
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_agg.py in print_png(self, filename_or_obj, metadata, pil_kwargs, *args)
538 *metadata*, including the default 'Software' key.
539 """
--> 540 FigureCanvasAgg.draw(self)
541 mpl.image.imsave(
542 filename_or_obj, self.buffer_rgba(), format="png", origin="upper",
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_agg.py in draw(self)
431 self.renderer = self.get_renderer(cleared=True)
432 # Acquire a lock on the shared font cache.
--> 433 with RendererAgg.lock, \
434 (self.toolbar._wait_cursor_for_draw_cm() if self.toolbar
435 else nullcontext()):
~/anaconda3/lib/python3.8/contextlib.py in __enter__(self)
111 del self.args, self.kwds, self.func
112 try:
--> 113 return next(self.gen)
114 except StopIteration:
115 raise RuntimeError("generator didn't yield") from None
~/anaconda3/lib/python3.8/site-packages/matplotlib/backend_bases.py in _wait_cursor_for_draw_cm(self)
3026 yield
3027 finally:
-> 3028 self.canvas.set_cursor(self._lastCursor)
3029 else:
3030 yield
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_webagg_core.py in set_cursor(self, cursor)
208 backend_tools.Cursors.RESIZE_VERTICAL: 'ns-resize',
209 }, cursor=cursor)
--> 210 self.send_event('cursor', cursor=cursor)
211
212 def set_image_mode(self, mode):
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_webagg_core.py in send_event(self, event_type, **kwargs)
390 def send_event(self, event_type, **kwargs):
391 if self.manager:
--> 392 self.manager._send_event(event_type, **kwargs)
393
394
~/anaconda3/lib/python3.8/site-packages/matplotlib/backends/backend_webagg_core.py in _send_event(self, event_type, **kwargs)
539 payload = {'type': event_type, **kwargs}
540 for s in self.web_sockets:
--> 541 s.send_json(payload)
542
543
~/anaconda3/lib/python3.8/site-packages/ipympl/backend_nbagg.py in send_json(self, content)
179 # Change in the widget state?
180 if content['type'] == 'cursor':
--> 181 self._cursor = cursors_str[content['cursor']]
182
183 elif content['type'] == 'message':
KeyError: 'default'
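Since the KeyError comes from ipympl's cursor lookup table (cursors_str in backend_nbagg.py) rather than from matplotlib itself, a version mismatch between matplotlib and ipympl is one plausible cause. A quick check of the installed versions (sketch below, nothing in it is specific to this environment) is a reasonable first step before digging deeper:
import matplotlib
import ipympl

# A mismatch here (for example a newer matplotlib with an older ipympl) can
# leave cursor names such as 'wait' or 'default' missing from ipympl's mapping.
print("matplotlib:", matplotlib.__version__)
print("ipympl:", ipympl.__version__)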

How to render Matplotlib plots with TEX code in Cyrillic

I'm trying to understand how to plot figures with Cyrillic text in the legend.
It seems that the axes display TeX characters, but the text in the legend uses the default font. Here is the code:
import numpy as np
import matplotlib
matplotlib.use("pgf")
import matplotlib.pyplot as plt
matplotlib.rcParams['text.usetex'] = True
%matplotlib inline
x = np.linspace(-np.pi, np.pi, 256)
y = []
for i in range(-100, 100):
    y += [np.cos(x + i)]
plt.xlim(-10, 10)
plt.ylim(-2, 2)
plt.xticks([-10, 0, 10])
plt.yticks()
plt.plot(x, y[0], color = 'red', linewidth = 2.5, linestyle = '-', label = 'style1')
plt.plot(x, y[1], color = 'blue', linewidth = 3, linestyle = '-', label = 'style2')
plt.legend()
Changing the label of the first (red) line to something in Cyrillic:
plt.plot(x, y[0], color = 'red', linewidth = 2.5, linestyle = '-', label = 'стил1')
gives the following error:
Error in callback <function post_execute at 0x00000202EDCA3AF8> (for post_execute):
---------------------------------------------------------------------------
CalledProcessError Traceback (most recent call last)
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in _run_checked_subprocess(self, command, tex)
303 cwd=self.texcache,
--> 304 stderr=subprocess.STDOUT)
305 except FileNotFoundError as exc:
~\anaconda3\lib\subprocess.py in check_output(timeout, *popenargs, **kwargs)
410 return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
--> 411 **kwargs).stdout
412
~\anaconda3\lib\subprocess.py in run(input, capture_output, timeout, check, *popenargs, **kwargs)
511 raise CalledProcessError(retcode, process.args,
--> 512 output=stdout, stderr=stderr)
513 return CompletedProcess(process.args, retcode, stdout, stderr)
CalledProcessError: Command '['latex', '-interaction=nonstopmode', '--halt-on-error', 'C:\\Users\\Victor\\.matplotlib\\tex.cache\\b6f49bb15be8eba85bb671cfafccfb49.tex']' returned non-zero exit status 1.
The above exception was the direct cause of the following exception:
RuntimeError Traceback (most recent call last)
~\anaconda3\lib\site-packages\matplotlib\pyplot.py in post_execute()
107 def post_execute():
108 if matplotlib.is_interactive():
--> 109 draw_all()
110
111 # IPython >= 2
~\anaconda3\lib\site-packages\matplotlib\_pylab_helpers.py in draw_all(cls, force)
126 for f_mgr in cls.get_all_fig_managers():
127 if force or f_mgr.canvas.figure.stale:
--> 128 f_mgr.canvas.draw_idle()
129
130 atexit.register(Gcf.destroy_all)
~\anaconda3\lib\site-packages\matplotlib\backend_bases.py in draw_idle(self, *args, **kwargs)
1914 if not self._is_idle_drawing:
1915 with self._idle_draw_cntx():
-> 1916 self.draw(*args, **kwargs)
1917
1918 def draw_cursor(self, event):
~\anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in draw(self)
386 self.renderer = self.get_renderer(cleared=True)
387 with RendererAgg.lock:
--> 388 self.figure.draw(self.renderer)
389 # A GUI class may be need to update a window using this draw, so
390 # don't forget to call the superclass.
~\anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
36 renderer.start_filter()
37
---> 38 return draw(artist, renderer, *args, **kwargs)
39 finally:
40 if artist.get_agg_filter() is not None:
~\anaconda3\lib\site-packages\matplotlib\figure.py in draw(self, renderer)
1707 self.patch.draw(renderer)
1708 mimage._draw_list_compositing_images(
-> 1709 renderer, self, artists, self.suppressComposite)
1710
1711 renderer.close_group('figure')
~\anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
133 if not_composite or not has_images:
134 for a in artists:
--> 135 a.draw(renderer)
136 else:
137 # Composite any adjacent images together
~\anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
36 renderer.start_filter()
37
---> 38 return draw(artist, renderer, *args, **kwargs)
39 finally:
40 if artist.get_agg_filter() is not None:
~\anaconda3\lib\site-packages\matplotlib\axes\_base.py in draw(self, renderer, inframe)
2645 renderer.stop_rasterizing()
2646
-> 2647 mimage._draw_list_compositing_images(renderer, self, artists)
2648
2649 renderer.close_group('axes')
~\anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
133 if not_composite or not has_images:
134 for a in artists:
--> 135 a.draw(renderer)
136 else:
137 # Composite any adjacent images together
~\anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
36 renderer.start_filter()
37
---> 38 return draw(artist, renderer, *args, **kwargs)
39 finally:
40 if artist.get_agg_filter() is not None:
~\anaconda3\lib\site-packages\matplotlib\legend.py in draw(self, renderer)
656 # update the location and size of the legend. This needs to
657 # be done in any case to clip the figure right.
--> 658 bbox = self._legend_box.get_window_extent(renderer)
659 self.legendPatch.set_bounds(bbox.x0, bbox.y0,
660 bbox.width, bbox.height)
...
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent_offsets(self, renderer)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in <listcomp>(.0)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent(self, renderer)
284 def get_extent(self, renderer):
285 """Return a tuple ``width, height, xdescent, ydescent`` of the box."""
--> 286 w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
287 return w, h, xd, yd
288
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent_offsets(self, renderer)
417
418 whd_list = [c.get_extent(renderer)
--> 419 for c in self.get_visible_children()]
420 whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]
421
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in <listcomp>(.0)
417
418 whd_list = [c.get_extent(renderer)
--> 419 for c in self.get_visible_children()]
420 whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]
421
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent(self, renderer)
284 def get_extent(self, renderer):
285 """Return a tuple ``width, height, xdescent, ydescent`` of the box."""
--> 286 w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
287 return w, h, xd, yd
288
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent_offsets(self, renderer)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in <listcomp>(.0)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent(self, renderer)
875 "lp", self._text._fontproperties, ismath=False)
876
--> 877 bbox, info, d = self._text._get_layout(renderer)
878 w, h = bbox.width, bbox.height
879
~\anaconda3\lib\site-packages\matplotlib\text.py in _get_layout(self, renderer)
296 if clean_line:
297 w, h, d = renderer.get_text_width_height_descent(
--> 298 clean_line, self._fontproperties, ismath=ismath)
299 else:
300 w = h = d = 0
~\anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in get_text_width_height_descent(self, s, prop, ismath)
199 fontsize = prop.get_size_in_points()
200 w, h, d = texmanager.get_text_width_height_descent(
--> 201 s, fontsize, renderer=self)
202 return w, h, d
203
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in get_text_width_height_descent(self, tex, fontsize, renderer)
446 else:
447 # use dviread. It sometimes returns a wrong descent.
--> 448 dvifile = self.make_dvi(tex, fontsize)
449 with dviread.Dvi(dvifile, 72 * dpi_fraction) as dvi:
450 page, = dvi
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in make_dvi(self, tex, fontsize)
336 self._run_checked_subprocess(
337 ["latex", "-interaction=nonstopmode", "--halt-on-error",
--> 338 texfile], tex)
339 for fname in glob.glob(basefile + '*'):
340 if not fname.endswith(('dvi', 'tex')):
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in _run_checked_subprocess(self, command, tex)
315 prog=command[0],
316 tex=tex.encode('unicode_escape'),
--> 317 exc=exc.output.decode('utf-8'))) from exc
318 _log.debug(report)
319 return report
RuntimeError: latex was not able to process the following string:
b'\\u0441\\u0442\\u0438\\u043b1'
Here is the full report generated by latex:
This is pdfTeX, Version 3.14159265-2.6-1.40.20 (TeX Live 2019/W32TeX) (preloaded format=latex)
restricted \write18 enabled.
entering extended mode
(c:/Users/Victor/.matplotlib/tex.cache/b6f49bb15be8eba85bb671cfafccfb49.tex
LaTeX2e <2020-02-02> patch level 5
L3 programming layer <2020-02-25>
(c:/texlive/2019/texmf-dist/tex/latex/base/article.cls
Document Class: article 2019/12/20 v1.4l Standard LaTeX document class
(c:/texlive/2019/texmf-dist/tex/latex/base/size10.clo))
(c:/texlive/2019/texmf-dist/tex/latex/type1cm/type1cm.sty)
(c:/texlive/2019/texmf-dist/tex/latex/base/textcomp.sty)
(c:/texlive/2019/texmf-dist/tex/latex/base/inputenc.sty)
(c:/texlive/2019/texmf-dist/tex/latex/geometry/geometry.sty
(c:/texlive/2019/texmf-dist/tex/latex/graphics/keyval.sty)
(c:/texlive/2019/texmf-dist/tex/generic/iftex/ifvtex.sty
(c:/texlive/2019/texmf-dist/tex/generic/iftex/iftex.sty))
Package geometry Warning: Over-specification in `h'-direction.
`width' (5058.9pt) is ignored.
Package geometry Warning: Over-specification in `v'-direction.
`height' (5058.9pt) is ignored.
) (c:/texlive/2019/texmf-dist/tex/latex/l3backend/l3backend-dvips.def)
(./b6f49bb15be8eba85bb671cfafccfb49.aux)
*geometry* driver: auto-detecting
*geometry* detected driver: dvips
! Package inputenc Error: Unicode character с (U+0441)
(inputenc) not set up for use with LaTeX.
See the inputenc package documentation for explanation.
Type H <return> for immediate help.
...
l.14 \fontsize{10.000000}{12.500000}{\sffamily с
тил1}
No pages of output.
Transcript written on b6f49bb15be8eba85bb671cfafccfb49.log.
---------------------------------------------------------------------------
CalledProcessError Traceback (most recent call last)
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in _run_checked_subprocess(self, command, tex)
303 cwd=self.texcache,
--> 304 stderr=subprocess.STDOUT)
305 except FileNotFoundError as exc:
~\anaconda3\lib\subprocess.py in check_output(timeout, *popenargs, **kwargs)
410 return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
--> 411 **kwargs).stdout
412
~\anaconda3\lib\subprocess.py in run(input, capture_output, timeout, check, *popenargs, **kwargs)
511 raise CalledProcessError(retcode, process.args,
--> 512 output=stdout, stderr=stderr)
513 return CompletedProcess(process.args, retcode, stdout, stderr)
CalledProcessError: Command '['latex', '-interaction=nonstopmode', '--halt-on-error', 'C:\\Users\\Victor\\.matplotlib\\tex.cache\\b6f49bb15be8eba85bb671cfafccfb49.tex']' returned non-zero exit status 1.
The above exception was the direct cause of the following exception:
RuntimeError Traceback (most recent call last)
~\anaconda3\lib\site-packages\IPython\core\formatters.py in __call__(self, obj)
339 pass
340 else:
--> 341 return printer(obj)
342 # Finally look for special method names
343 method = get_real_method(obj, self.print_method)
~\anaconda3\lib\site-packages\IPython\core\pylabtools.py in <lambda>(fig)
246
247 if 'png' in formats:
--> 248 png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs))
249 if 'retina' in formats or 'png2x' in formats:
250 png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs))
~\anaconda3\lib\site-packages\IPython\core\pylabtools.py in print_figure(fig, fmt, bbox_inches, **kwargs)
130 FigureCanvasBase(fig)
131
--> 132 fig.canvas.print_figure(bytes_io, **kw)
133 data = bytes_io.getvalue()
134 if fmt == 'svg':
~\anaconda3\lib\site-packages\matplotlib\backend_bases.py in print_figure(self, filename, dpi, facecolor, edgecolor, orientation, format, bbox_inches, **kwargs)
2063 orientation=orientation,
2064 dryrun=True,
-> 2065 **kwargs)
2066 renderer = self.figure._cachedRenderer
2067 bbox_artists = kwargs.pop("bbox_extra_artists", None)
~\anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in print_png(self, filename_or_obj, metadata, pil_kwargs, *args, **kwargs)
525
526 else:
--> 527 FigureCanvasAgg.draw(self)
528 renderer = self.get_renderer()
529 with cbook._setattr_cm(renderer, dpi=self.figure.dpi), \
~\anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in draw(self)
386 self.renderer = self.get_renderer(cleared=True)
387 with RendererAgg.lock:
--> 388 self.figure.draw(self.renderer)
389 # A GUI class may be need to update a window using this draw, so
390 # don't forget to call the superclass.
~\anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
36 renderer.start_filter()
37
---> 38 return draw(artist, renderer, *args, **kwargs)
39 finally:
40 if artist.get_agg_filter() is not None:
~\anaconda3\lib\site-packages\matplotlib\figure.py in draw(self, renderer)
1707 self.patch.draw(renderer)
1708 mimage._draw_list_compositing_images(
-> 1709 renderer, self, artists, self.suppressComposite)
1710
1711 renderer.close_group('figure')
~\anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
133 if not_composite or not has_images:
134 for a in artists:
--> 135 a.draw(renderer)
136 else:
137 # Composite any adjacent images together
~\anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
36 renderer.start_filter()
37
---> 38 return draw(artist, renderer, *args, **kwargs)
39 finally:
40 if artist.get_agg_filter() is not None:
~\anaconda3\lib\site-packages\matplotlib\axes\_base.py in draw(self, renderer, inframe)
2645 renderer.stop_rasterizing()
2646
-> 2647 mimage._draw_list_compositing_images(renderer, self, artists)
2648
2649 renderer.close_group('axes')
~\anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
133 if not_composite or not has_images:
134 for a in artists:
--> 135 a.draw(renderer)
136 else:
137 # Composite any adjacent images together
~\anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
36 renderer.start_filter()
37
---> 38 return draw(artist, renderer, *args, **kwargs)
39 finally:
40 if artist.get_agg_filter() is not None:
~\anaconda3\lib\site-packages\matplotlib\legend.py in draw(self, renderer)
656 # update the location and size of the legend. This needs to
657 # be done in any case to clip the figure right.
--> 658 bbox = self._legend_box.get_window_extent(renderer)
659 self.legendPatch.set_bounds(bbox.x0, bbox.y0,
660 bbox.width, bbox.height)
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_window_extent(self, renderer)
289 def get_window_extent(self, renderer):
290 """Return the bounding box (`.Bbox`) in display space."""
--> 291 w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
292 px, py = self.get_offset(w, h, xd, yd, renderer)
293 return mtransforms.Bbox.from_bounds(px - xd, py - yd, w, h)
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent_offsets(self, renderer)
417
418 whd_list = [c.get_extent(renderer)
--> 419 for c in self.get_visible_children()]
420 whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]
421
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in <listcomp>(.0)
417
418 whd_list = [c.get_extent(renderer)
--> 419 for c in self.get_visible_children()]
420 whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]
421
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent(self, renderer)
284 def get_extent(self, renderer):
285 """Return a tuple ``width, height, xdescent, ydescent`` of the box."""
--> 286 w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
287 return w, h, xd, yd
288
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent_offsets(self, renderer)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in <listcomp>(.0)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent(self, renderer)
284 def get_extent(self, renderer):
285 """Return a tuple ``width, height, xdescent, ydescent`` of the box."""
--> 286 w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
287 return w, h, xd, yd
288
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent_offsets(self, renderer)
417
418 whd_list = [c.get_extent(renderer)
--> 419 for c in self.get_visible_children()]
420 whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]
421
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in <listcomp>(.0)
417
418 whd_list = [c.get_extent(renderer)
--> 419 for c in self.get_visible_children()]
420 whd_list = [(w, h, xd, (h - yd)) for w, h, xd, yd in whd_list]
421
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent(self, renderer)
284 def get_extent(self, renderer):
285 """Return a tuple ``width, height, xdescent, ydescent`` of the box."""
--> 286 w, h, xd, yd, offsets = self.get_extent_offsets(renderer)
287 return w, h, xd, yd
288
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent_offsets(self, renderer)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in <listcomp>(.0)
491
492 whd_list = [c.get_extent(renderer)
--> 493 for c in self.get_visible_children()]
494
495 if not whd_list:
~\anaconda3\lib\site-packages\matplotlib\offsetbox.py in get_extent(self, renderer)
875 "lp", self._text._fontproperties, ismath=False)
876
--> 877 bbox, info, d = self._text._get_layout(renderer)
878 w, h = bbox.width, bbox.height
879
~\anaconda3\lib\site-packages\matplotlib\text.py in _get_layout(self, renderer)
296 if clean_line:
297 w, h, d = renderer.get_text_width_height_descent(
--> 298 clean_line, self._fontproperties, ismath=ismath)
299 else:
300 w = h = d = 0
~\anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in get_text_width_height_descent(self, s, prop, ismath)
199 fontsize = prop.get_size_in_points()
200 w, h, d = texmanager.get_text_width_height_descent(
--> 201 s, fontsize, renderer=self)
202 return w, h, d
203
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in get_text_width_height_descent(self, tex, fontsize, renderer)
446 else:
447 # use dviread. It sometimes returns a wrong descent.
--> 448 dvifile = self.make_dvi(tex, fontsize)
449 with dviread.Dvi(dvifile, 72 * dpi_fraction) as dvi:
450 page, = dvi
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in make_dvi(self, tex, fontsize)
336 self._run_checked_subprocess(
337 ["latex", "-interaction=nonstopmode", "--halt-on-error",
--> 338 texfile], tex)
339 for fname in glob.glob(basefile + '*'):
340 if not fname.endswith(('dvi', 'tex')):
~\anaconda3\lib\site-packages\matplotlib\texmanager.py in _run_checked_subprocess(self, command, tex)
315 prog=command[0],
316 tex=tex.encode('unicode_escape'),
--> 317 exc=exc.output.decode('utf-8'))) from exc
318 _log.debug(report)
319 return report
RuntimeError: latex was not able to process the following string:
b'\\u0441\\u0442\\u0438\\u043b1'
Here is the full report generated by latex:
This is pdfTeX, Version 3.14159265-2.6-1.40.20 (TeX Live 2019/W32TeX) (preloaded format=latex)
restricted \write18 enabled.
entering extended mode
(c:/Users/Victor/.matplotlib/tex.cache/b6f49bb15be8eba85bb671cfafccfb49.tex
LaTeX2e <2020-02-02> patch level 5
L3 programming layer <2020-02-25>
(c:/texlive/2019/texmf-dist/tex/latex/base/article.cls
Document Class: article 2019/12/20 v1.4l Standard LaTeX document class
(c:/texlive/2019/texmf-dist/tex/latex/base/size10.clo))
(c:/texlive/2019/texmf-dist/tex/latex/type1cm/type1cm.sty)
(c:/texlive/2019/texmf-dist/tex/latex/base/textcomp.sty)
(c:/texlive/2019/texmf-dist/tex/latex/base/inputenc.sty)
(c:/texlive/2019/texmf-dist/tex/latex/geometry/geometry.sty
(c:/texlive/2019/texmf-dist/tex/latex/graphics/keyval.sty)
(c:/texlive/2019/texmf-dist/tex/generic/iftex/ifvtex.sty
(c:/texlive/2019/texmf-dist/tex/generic/iftex/iftex.sty))
Package geometry Warning: Over-specification in `h'-direction.
`width' (5058.9pt) is ignored.
Package geometry Warning: Over-specification in `v'-direction.
`height' (5058.9pt) is ignored.
) (c:/texlive/2019/texmf-dist/tex/latex/l3backend/l3backend-dvips.def)
(./b6f49bb15be8eba85bb671cfafccfb49.aux)
*geometry* driver: auto-detecting
*geometry* detected driver: dvips
! Package inputenc Error: Unicode character с (U+0441)
(inputenc) not set up for use with LaTeX.
See the inputenc package documentation for explanation.
Type H <return> for immediate help.
...
l.14 \fontsize{10.000000}{12.500000}{\sffamily с
тил1}
No pages of output.
Transcript written on b6f49bb15be8eba85bb671cfafccfb49.log.
Solving this is important for me because I plan to port an entire project (a book) that is currently written in R to Python.
You need to tell TeX that you want to typeset some Russian text.
# coding: utf-8
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rc

rc('text', usetex=True)
# Note: the 'text.latex.unicode' rcParam was removed in Matplotlib 3.0 and is no longer needed.
# Both packages have to go into a single preamble string; calling
# rc('text.latex', preamble=...) twice overwrites the first preamble with the second.
rc('text.latex', preamble='\n'.join([
    r'\usepackage[utf8]{inputenc}',
    r'\usepackage[russian]{babel}',
    # If Cyrillic glyphs still come out wrong, adding r'\usepackage[T2A]{fontenc}' here may help.
]))

x = np.linspace(-np.pi, np.pi, 256)
y = []
for i in range(-100, 100):
    y += [np.cos(x + i)]

plt.xlim(-10, 10)
plt.ylim(-2, 2)
plt.xticks([-10, 0, 10])
plt.yticks()
plt.plot(x, y[0], color='red', linewidth=2.5, linestyle='-', label='стил1')
plt.plot(x, y[1], color='blue', linewidth=3, linestyle='-', label='style2')
plt.legend()
plt.show()
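As an aside to the answer above (not part of the original answer): if you do not specifically need LaTeX typesetting, Matplotlib's default DejaVu Sans font already includes Cyrillic glyphs, so leaving usetex at its default of False renders Russian labels with no TeX configuration at all. A minimal sketch of that alternative:
import numpy as np
import matplotlib.pyplot as plt
plt.rcParams['text.usetex'] = False  # the default; set explicitly only for contrast with the usetex approach
x = np.linspace(-np.pi, np.pi, 256)
plt.plot(x, np.cos(x), color='red', linewidth=2.5, label='стил1')  # Cyrillic label, no LaTeX involved
plt.legend()
plt.show()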

KeyError: 'logtostderr' (python, tensorflow)

I am trying to use InceptionV3 to classify images (Python 3.6.5 / TensorFlow 1.14.0).
When I run the code, I get this KeyError: "logtostderr".
I don't know why this KeyError happens. How can I solve this problem?
Last week this code ran smoothly, but the problem suddenly appeared yesterday.
with tf.train.MonitoredSession(session_creator=session_creator) as sess:
    for filenames, images in load_images(FLAGS.input_dir, batch_shape):
        labels = sess.run(predicted_labels, feed_dict={x_input: images})
        for filename, label in zip(filenames, labels):
            true_label = image_labels.merge(pd.DataFrame({"ImageId": [filename[:-4]]}), on="ImageId")["TrueLabel"][0]
            predictions.append([filename[:-4], true_label, label])
KeyError Traceback (most recent call last)
<ipython-input-19-307b2daa19e5> in <module>
88
89
---> 90 with tf.train.MonitoredSession(session_creator=session_creator) as sess:
91 for filenames, images in load_images(FLAGS.input_dir, batch_shape):
92 labels = sess.run(predicted_labels, feed_dict={x_input: images})
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\training\monitored_session.py in __init__(self, session_creator, hooks, stop_grace_period_secs)
1005 hooks,
1006 should_recover=True,
-> 1007 stop_grace_period_secs=stop_grace_period_secs)
1008
1009
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\training\monitored_session.py in __init__(self, session_creator, hooks, should_recover, stop_grace_period_secs)
723 stop_grace_period_secs=stop_grace_period_secs)
724 if should_recover:
--> 725 self._sess = _RecoverableSession(self._coordinated_creator)
726 else:
727 self._sess = self._coordinated_creator.create_session()
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\training\monitored_session.py in __init__(self, sess_creator)
1198 """
1199 self._sess_creator = sess_creator
-> 1200 _WrappedSession.__init__(self, self._create_session())
1201
1202 def _create_session(self):
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\training\monitored_session.py in _create_session(self)
1203 while True:
1204 try:
-> 1205 return self._sess_creator.create_session()
1206 except _PREEMPTION_ERRORS as e:
1207 logging.info(
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\training\monitored_session.py in create_session(self)
869 """Creates a coordinated session."""
870 # Keep the tf_sess for unit testing.
--> 871 self.tf_sess = self._session_creator.create_session()
872 # We don't want coordinator to suppress any exception.
873 self.coord = coordinator.Coordinator(clean_stop_exception_types=[])
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\training\monitored_session.py in create_session(self)
636
637 def create_session(self):
--> 638 self._scaffold.finalize()
639 return self._get_session_manager().prepare_session(
640 self._master,
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\training\monitored_session.py in finalize(self)
238
239 ops.get_default_graph().finalize()
--> 240 logging.info('Graph was finalized.')
241 return self
242
C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\platform\tf_logging.py in info(msg, *args, **kwargs)
154 #tf_export(v1=['logging.info'])
155 def info(msg, *args, **kwargs):
--> 156 get_logger().info(msg, *args, **kwargs)
157
158
C:\ProgramData\Anaconda3\lib\logging\__init__.py in info(self, msg, *args, **kwargs)
1306 """
1307 if self.isEnabledFor(INFO):
-> 1308 self._log(INFO, msg, args, **kwargs)
1309
1310 def warning(self, msg, *args, **kwargs):
C:\ProgramData\Anaconda3\lib\logging\__init__.py in _log(self, level, msg, args, exc_info, extra, stack_info)
1442 record = self.makeRecord(self.name, level, fn, lno, msg, args,
1443 exc_info, func, extra, sinfo)
-> 1444 self.handle(record)
1445
1446 def handle(self, record):
C:\ProgramData\Anaconda3\lib\logging\__init__.py in handle(self, record)
1452 """
1453 if (not self.disabled) and self.filter(record):
-> 1454 self.callHandlers(record)
1455
1456 def addHandler(self, hdlr):
C:\ProgramData\Anaconda3\lib\logging\__init__.py in callHandlers(self, record)
1514 found = found + 1
1515 if record.levelno >= hdlr.level:
-> 1516 hdlr.handle(record)
1517 if not c.propagate:
1518 c = None #break out
C:\ProgramData\Anaconda3\lib\site-packages\absl\logging\__init__.py in handle(self, record)
889 rv = self.filter(record)
890 if rv:
--> 891 return self._current_handler.handle(record)
892 return rv
893
C:\ProgramData\Anaconda3\lib\logging\__init__.py in handle(self, record)
863 self.acquire()
864 try:
--> 865 self.emit(record)
866 finally:
867 self.release()
C:\ProgramData\Anaconda3\lib\site-packages\absl\logging\__init__.py in emit(self, record)
824 _warn_preinit_stderr = False
825 self._log_to_stderr(record)
--> 826 elif FLAGS['logtostderr'].value:
827 self._log_to_stderr(record)
828 else:
C:\ProgramData\Anaconda3\lib\site-packages\absl\flags\_flagvalues.py in __getitem__(self, name)
461 def __getitem__(self, name):
462 """Returns the Flag object for the flag --name."""
--> 463 return self._flags()[name]
464
465 def _hide_flag(self, name):
KeyError: 'logtostderr'
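The traceback shows that absl's logging handler looks up FLAGS['logtostderr'] while emitting a log record, and that this flag is not present in the FlagValues object it consults; the flag is normally registered when absl.logging is imported, so its absence suggests the flags object was cleared or replaced at some point in the notebook session. A minimal diagnostic sketch (a check under that assumption, not an official fix) to confirm whether the flag is registered:
from absl import flags
# False here means the 'logtostderr' flag that absl.logging normally registers at import
# time is missing from the global FlagValues object, which is exactly the condition that
# produces the KeyError in the traceback above.
print('logtostderr' in flags.FLAGS)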