So I'm getting the following error when I import matplotlib.pyplot:
ImportError Traceback (most recent call last)
<ipython-input-18-413b0dcce8d2> in <module>()
1 import pandas as pd
----> 2 import matplotlib.pyplot as plt
3 data=pd.read_csv("fifa_countries_audience.csv")
4 del data['country']
5 print(data)
~\Anaconda3\lib\site-packages\matplotlib\pyplot.py in <module>()
30 from cycler import cycler
31 import matplotlib
---> 32 import matplotlib.colorbar
33 from matplotlib import style
34 from matplotlib import _pylab_helpers, interactive
~\Anaconda3\lib\site-packages\matplotlib\colorbar.py in <module>()
30
31 import matplotlib as mpl
---> 32 import matplotlib.artist as martist
33 import matplotlib.cbook as cbook
34 import matplotlib.collections as collections
~\Anaconda3\lib\site-packages\matplotlib\artist.py in <module>()
14 import matplotlib
15 from . import cbook, docstring, rcParams
---> 16 from .path import Path
17 from .transforms import (Bbox, IdentityTransform, Transform, TransformedBbox,
18 TransformedPatchPath, TransformedPath)
~\Anaconda3\lib\site-packages\matplotlib\path.py in <module>()
24
25 from . import _path, rcParams
---> 26 from .cbook import (_to_unmasked_float_array, simple_linear_interpolation,
27 maxdict)
28
ImportError: cannot import name '_to_unmasked_float_array'
Does anyone have an idea what might be the cause? I tried reinstalling matplotlib, updating it, and updating conda, but that didn't solve the problem.
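In case it is relevant, here is a minimal check (my assumption: the bare "import matplotlib" still works, since the traceback above gets past it) to confirm which matplotlib installation and version the notebook actually picks up:

# Which matplotlib does this kernel import, and from where?
# Assumes "import matplotlib" itself still succeeds, as the traceback suggests.
import matplotlib
print(matplotlib.__version__)  # reported package version
print(matplotlib.__file__)     # file the package was loaded from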
Related
When I import TensorFlow GPU, I get this error after I type "import tensorflow as tf":
AttributeError: partially initialized module 'charset_normalizer' has no attribute 'md__mypyc' (most likely due to a circular import)
The full traceback is below:
AttributeError Traceback (most recent call last)
Cell In[22], line 1
----> 1 import tensorflow as tf
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\__init__.py:51
49 from ._api.v2 import autograph
50 from ._api.v2 import bitwise
---> 51 from ._api.v2 import compat
52 from ._api.v2 import config
53 from ._api.v2 import data
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\__init__.py:37
3 """Compatibility functions.
4
5 The `tf.compat` module contains two sets of compatibility functions.
(...)
32
33 """
35 import sys as _sys
---> 37 from . import v1
38 from . import v2
39 from tensorflow.python.compat.compat import forward_compatibility_horizon
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v1\__init__.py:30
28 from . import autograph
29 from . import bitwise
---> 30 from . import compat
31 from . import config
32 from . import data
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v1\compat\__init__.py:38
35 import sys as _sys
37 from . import v1
---> 38 from . import v2
39 from tensorflow.python.compat.compat import forward_compatibility_horizon
40 from tensorflow.python.compat.compat import forward_compatible
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v1\compat\v2\__init__.py:28
25 # pylint: disable=g-bad-import-order
27 from . import compat
---> 28 from tensorflow._api.v2.compat.v2 import __internal__
29 from tensorflow._api.v2.compat.v2 import __operators__
30 from tensorflow._api.v2.compat.v2 import audio
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\__init__.py:33
31 from . import autograph
32 from . import bitwise
---> 33 from . import compat
34 from . import config
35 from . import data
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\compat\__init__.py:38
35 import sys as _sys
37 from . import v1
---> 38 from . import v2
39 from tensorflow.python.compat.compat import forward_compatibility_horizon
40 from tensorflow.python.compat.compat import forward_compatible
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\compat\v2\__init__.py:37
35 from tensorflow._api.v2.compat.v2 import data
36 from tensorflow._api.v2.compat.v2 import debugging
---> 37 from tensorflow._api.v2.compat.v2 import distribute
38 from tensorflow._api.v2.compat.v2 import dtypes
39 from tensorflow._api.v2.compat.v2 import errors
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\distribute\__init__.py:182
180 from . import cluster_resolver
181 from . import coordinator
--> 182 from . import experimental
183 from tensorflow.python.distribute.collective_all_reduce_strategy import CollectiveAllReduceStrategy as MultiWorkerMirroredStrategy
184 from tensorflow.python.distribute.cross_device_ops import CrossDeviceOps
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\distribute\experimental\__init__.py:10
8 from . import coordinator
9 from . import partitioners
---> 10 from . import rpc
11 from tensorflow.python.distribute.central_storage_strategy import CentralStorageStrategy
12 from tensorflow.python.distribute.collective_all_reduce_strategy import _CollectiveAllReduceStrategyExperimental as MultiWorkerMirroredStrategy
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\distribute\experimental\rpc\__init__.py:8
3 """Public API for tf.distribute.experimental.rpc namespace.
4 """
6 import sys as _sys
----> 8 from tensorflow.python.distribute.experimental.rpc.rpc_ops import Client
9 from tensorflow.python.distribute.experimental.rpc.rpc_ops import Server
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\python\distribute\experimental\__init__.py:22
20 from tensorflow.python.distribute import parameter_server_strategy
21 from tensorflow.python.distribute import tpu_strategy
---> 22 from tensorflow.python.distribute.failure_handling import failure_handling
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\python\distribute\failure_handling\failure_handling.py:33
31 from tensorflow.python.checkpoint import checkpoint_management
32 from tensorflow.python.distribute import multi_worker_util
---> 33 from tensorflow.python.distribute.failure_handling import gce_util
34 from tensorflow.python.eager import context
35 from tensorflow.python.framework import constant_op
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\python\distribute\failure_handling\gce_util.py:20
17 import os
18 import sys
---> 20 import requests
22 from six.moves.urllib import request
23 from tensorflow.python.eager import context
File ~\anaconda3\envs\tf_gpu\lib\site-packages\requests\__init__.py:48
45 from .exceptions import RequestsDependencyWarning
47 try:
---> 48 from charset_normalizer import __version__ as charset_normalizer_version
49 except ImportError:
50 charset_normalizer_version = None
File ~\anaconda3\envs\tf_gpu\lib\site-packages\charset_normalizer\__init__.py:23
1 """
2 Charset-Normalizer
3 ~~~~~~~~~~~~~~
(...)
21 :license: MIT, see LICENSE for more details.
22 """
---> 23 from charset_normalizer.api import from_fp, from_path, from_bytes, normalize
24 from charset_normalizer.legacy import detect
25 from charset_normalizer.version import __version__, VERSION
File ~\anaconda3\envs\tf_gpu\lib\site-packages\charset_normalizer\api.py:10
7 PathLike = Union[str, 'os.PathLike[str]'] # type: ignore
9 from charset_normalizer.constant import TOO_SMALL_SEQUENCE, TOO_BIG_SEQUENCE, IANA_SUPPORTED
---> 10 from charset_normalizer.md import mess_ratio
11 from charset_normalizer.models import CharsetMatches, CharsetMatch
12 from warnings import warn
AttributeError: partially initialized module 'charset_normalizer' has no attribute 'md__mypyc' (most likely due to a circular import)
I install "requests" , "chardet" ,"openpyxl" but nothing change .
When doing my imports:
import cv2
from matplotlib import pyplot as plt
import numpy as np
import opencv_wrapper as cvw
I get:
ImportError Traceback (most recent call last)
/var/folders/bw/fb7g3vhj2ln41zrg3v5ynnn40000gn/T/ipykernel_96862/883562467.py in <module>
1 # import the necessary packages
2 import cv2
----> 3 from matplotlib import pyplot as plt
4 import numpy as np
5 import opencv_wrapper as cvw
/opt/anaconda3/envs/py37/lib/python3.7/site-packages/matplotlib/__init__.py in <module>
114 # Get the version from the _version.py versioneer file. For a git checkout,
115 # this is computed based on the number of commits since the last tag.
--> 116 from ._version import get_versions
117 __version__ = str(get_versions()['version'])
118 del get_versions
ImportError: cannot import name 'get_versions' from 'matplotlib._version' (/opt/anaconda3/envs/py37/lib/python3.7/site-packages/matplotlib/_version.py)
I'm using:
matplotlib 3.4.3
numpy 1.16.2
opencv-python 4.0.0.21
opencv-wrapper 0.2.3
python 3.7.13
Due to the opencv-wrapper package, these are the versions I need to use.
I've tried uninstalling and reinstalling, but the error persists. Any help would be appreciated.
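In case it helps pin down the mismatch, a minimal sketch (my own check; it deliberately avoids importing matplotlib, since that import is what fails) that compares the version pip reports with what _version.py in the installed package actually defines:

# Locate the installed matplotlib without importing it, then inspect _version.py.
import importlib.util
import pathlib
import pkg_resources

print(pkg_resources.get_distribution("matplotlib").version)  # version pip thinks is installed
spec = importlib.util.find_spec("matplotlib")
version_file = pathlib.Path(spec.origin).parent / "_version.py"
print(version_file.read_text()[:300])  # does it actually define get_versions()?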
I have problems installing basemap.
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
I get the following error:
---------------------------------------------------------------------------
FileNotFoundError Traceback (most recent call last)
<ipython-input-1-db2649dcf0a1> in <module>
2 import numpy as np
3 import matplotlib.pyplot as plt
----> 4 from mpl_toolkits.basemap import Basemap
~/opt/anaconda3/lib/python3.7/site-packages/mpl_toolkits/basemap/__init__.py in <module>
154 # create dictionary that maps epsg codes to Basemap kwargs.
155 pyproj_datadir = os.environ['PROJ_LIB']
--> 156 epsgf = open(os.path.join(pyproj_datadir,'epsg'))
157 epsg_dict={}
158 for line in epsgf:
FileNotFoundError: [Errno 2] No such file or directory: '/Users/andreamathis/opt/anaconda3/share/proj/epsg'
It looks like the file 'epsg' is missing. Has anybody encountered this error before and found a way to solve it?
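For context, a minimal sketch (my own check, mirroring what the traceback shows Basemap doing at import time) that prints where PROJ_LIB points and whether the 'epsg' file is actually there:

# Basemap reads the PROJ_LIB environment variable and opens the 'epsg' file inside it.
import os
proj_dir = os.environ.get("PROJ_LIB")
print("PROJ_LIB =", proj_dir)
if proj_dir:
    print("directory exists:", os.path.isdir(proj_dir))
    print("'epsg' file exists:", os.path.exists(os.path.join(proj_dir, "epsg")))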
I successfully updated the statsmodels package in the Kaggle kernel using:
!pip install statsmodels --upgrade
This gave me version 0.10.0. However, when I try to import statsmodels.api, it gives me an error.
import statsmodels.api as sm
The error I am getting is:
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-7-3b8b7e2c2e57> in <module>
8 import matplotlib.pyplot as plt
9 from sklearn.preprocessing import Normalizer
---> 10 import statsmodels.api as sm
11
12 # Input data files are available in the "../input/" directory.
/opt/conda/lib/python3.6/site-packages/statsmodels/api.py in <module>
16 from . import robust
17 from .robust.robust_linear_model import RLM
---> 18 from .discrete.discrete_model import (Poisson, Logit, Probit,
19 MNLogit, NegativeBinomial,
20 GeneralizedPoisson,
/opt/conda/lib/python3.6/site-packages/statsmodels/discrete/discrete_model.py in <module>
43
44 from statsmodels.base.l1_slsqp import fit_l1_slsqp
---> 45 from statsmodels.distributions import genpoisson_p
46
47 try:
/opt/conda/lib/python3.6/site-packages/statsmodels/distributions/__init__.py in <module>
1 from statsmodels.tools._testing import PytestTester
2 from .empirical_distribution import ECDF, monotone_fn_inverter, StepFunction
----> 3 from .edgeworth import ExpandedNormal
4 from .discrete import genpoisson_p, zipoisson, zigenpoisson, zinegbin
5
/opt/conda/lib/python3.6/site-packages/statsmodels/distributions/edgeworth.py in <module>
5 import numpy as np
6 from numpy.polynomial.hermite_e import HermiteE
----> 7 from statsmodels.compat.scipy import factorial
8 from scipy.stats import rv_continuous
9 import scipy.special as special
ImportError: cannot import name 'factorial'
I've upgraded the scipy package, and I am still getting the same error. I am new to Kaggle and Python, and I need the OLS function to do regression analysis. How can I fix this problem? If it can't be fixed, is there any other function I can use to get a regression summary?
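For reference, a minimal sketch (my assumption: the bare "import statsmodels" still succeeds, since only the statsmodels.api import fails) to confirm which scipy and statsmodels versions the running kernel actually sees after the upgrade:

# Confirm the versions the kernel sees; assumes "import statsmodels" alone still works.
import scipy
import statsmodels
print("scipy:", scipy.__version__)
print("statsmodels:", statsmodels.__version__)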
I am new to IPython/Jupyter. My Python skills are limited, but I'm learning. I am trying to import numpy as np and get the following:
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-1-4ee716103900> in <module>()
----> 1 import numpy as np
/Users/jmmiii/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/numpy/__init__.py in <module>()
166 return loader(*packages, **options)
167
--> 168 from . import add_newdocs
169 __all__ = ['add_newdocs', 'ModuleDeprecationWarning']
170
/Users/jmmiii/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/numpy/add_newdocs.py in <module>()
11 from __future__ import division, absolute_import, print_function
12
---> 13 from numpy.lib import add_newdoc
14
15 ###############################################################################
/Users/jmmiii/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/numpy/lib/__init__.py in <module>()
6 from numpy.version import version as __version__
7
----> 8 from .type_check import *
9 from .index_tricks import *
10 from .function_base import *
/Users/jmmiii/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/numpy/lib/type_check.py in <module>()
9 'common_type']
10
---> 11 import numpy.core.numeric as _nx
12 from numpy.core.numeric import asarray, asanyarray, array, isnan, \
13 obj2sctype, zeros
/Users/jmmiii/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/numpy/core/__init__.py in <module>()
4 from numpy.version import version as __version__
5
----> 6 from . import multiarray
7 from . import umath
8 from . import _internal # for freeze programs
ImportError: dlopen(/Users/jmmiii/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/numpy/core/multiarray.so, 2): no suitable image found. Did find:
/Users/jmmiii/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/numpy/core/multiarray.so: mach-o, but wrong architecture
I have several Python installs on my Mac (running Yosemite), including Canopy and Anaconda. I want my Jupyter notebook to use the Anaconda install, including all the modules, libraries, etc. associated with it. It seems, however, that Jupyter is targeting Canopy instead. Thus, I think my problem might stem from the wrong linkage.
QUESTION 1: Does my conclusion hold water? If not, what might I be missing?
QUESTION 2: How can I direct/link Jupyter to Anaconda and not to Canopy, so that I import everything from Anaconda only?
Thanks for everyone's help!
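Regarding QUESTION 1, a minimal sketch (standard sys attributes, nothing specific to Canopy or Anaconda) that can be run in a notebook cell to see which interpreter the kernel is actually using:

# If these paths point at Canopy rather than Anaconda, the notebook is using the Canopy install.
import sys
print(sys.executable)  # the Python binary running this kernel
print(sys.prefix)      # the environment it belongs to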
You can either set the PATH so that python commands are executed from the ~/anaconda/bin directory, by adding the following line to your ~/.bash_profile (it prepends that directory to your PATH):
export PATH="/Users/jmmiii/anaconda/bin:$PATH"
OR, you can create an alias for the command by editing your ~/.bash_profile and adding:
alias jupyter-notebook="/Users/jmmiii/anaconda/bin/jupyter-notebook"
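After editing ~/.bash_profile, open a new terminal session (or run source ~/.bash_profile) so the change takes effect, then launch jupyter-notebook again.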