I am just trying to import plotly.plotly as py per this tutorial: https://plot.ly/~Dreamshot/9199/import-plotly-plotly-version-/#/ but I am getting the following error and I am not sure why:
30 from plotly import exceptions, files, session, tools, utils
---> 31 from plotly.api import v1, v2
32 from plotly.basedatatypes import BaseTraceType, BaseFigure, BaseLayoutType
33 from plotly.plotly import chunked_requests
ImportError: cannot import name 'v1'
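A note in case the install is mismatched: in Plotly 4.0 the plotly.plotly module was removed entirely and the online plotting API moved to the separate chart-studio package, so on a current install the tutorial's import no longer exists. A minimal sketch of the replacement import, assuming Plotly 4 or newer:
# pip install chart-studio
import chart_studio.plotly as py   # replaces "import plotly.plotly as py" from the tutorial
import plotly.graph_objects as go  # figure building stays in the plotly package itself
On Plotly 3.x the original import still works, so pinning plotly to a 3.x release is the other way to follow the tutorial as written.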
Related
When I import TensorFlow GPU, i.e. after I type "import tensorflow as tf", I get this error:
AttributeError: partially initialized module 'charset_normalizer' has no attribute 'md__mypyc' (most likely due to a circular import)
The full traceback is below:
AttributeError Traceback (most recent call last)
Cell In[22], line 1
----> 1 import tensorflow as tf
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\__init__.py:51
49 from ._api.v2 import autograph
50 from ._api.v2 import bitwise
---> 51 from ._api.v2 import compat
52 from ._api.v2 import config
53 from ._api.v2 import data
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\__init__.py:37
3 """Compatibility functions.
4
5 The `tf.compat` module contains two sets of compatibility functions.
(...)
32
33 """
35 import sys as _sys
---> 37 from . import v1
38 from . import v2
39 from tensorflow.python.compat.compat import forward_compatibility_horizon
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v1\__init__.py:30
28 from . import autograph
29 from . import bitwise
---> 30 from . import compat
31 from . import config
32 from . import data
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v1\compat\__init__.py:38
35 import sys as _sys
37 from . import v1
---> 38 from . import v2
39 from tensorflow.python.compat.compat import forward_compatibility_horizon
40 from tensorflow.python.compat.compat import forward_compatible
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v1\compat\v2\__init__.py:28
25 # pylint: disable=g-bad-import-order
27 from . import compat
---> 28 from tensorflow._api.v2.compat.v2 import __internal__
29 from tensorflow._api.v2.compat.v2 import __operators__
30 from tensorflow._api.v2.compat.v2 import audio
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\__init__.py:33
31 from . import autograph
32 from . import bitwise
---> 33 from . import compat
34 from . import config
35 from . import data
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\compat\__init__.py:38
35 import sys as _sys
37 from . import v1
---> 38 from . import v2
39 from tensorflow.python.compat.compat import forward_compatibility_horizon
40 from tensorflow.python.compat.compat import forward_compatible
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\compat\v2\__init__.py:37
35 from tensorflow._api.v2.compat.v2 import data
36 from tensorflow._api.v2.compat.v2 import debugging
---> 37 from tensorflow._api.v2.compat.v2 import distribute
38 from tensorflow._api.v2.compat.v2 import dtypes
39 from tensorflow._api.v2.compat.v2 import errors
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\distribute\__init__.py:182
180 from . import cluster_resolver
181 from . import coordinator
--> 182 from . import experimental
183 from tensorflow.python.distribute.collective_all_reduce_strategy import CollectiveAllReduceStrategy as MultiWorkerMirroredStrategy
184 from tensorflow.python.distribute.cross_device_ops import CrossDeviceOps
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\distribute\experimental\__init__.py:10
8 from . import coordinator
9 from . import partitioners
---> 10 from . import rpc
11 from tensorflow.python.distribute.central_storage_strategy import CentralStorageStrategy
12 from tensorflow.python.distribute.collective_all_reduce_strategy import _CollectiveAllReduceStrategyExperimental as MultiWorkerMirroredStrategy
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\_api\v2\compat\v2\distribute\experimental\rpc\__init__.py:8
3 """Public API for tf.distribute.experimental.rpc namespace.
4 """
6 import sys as _sys
----> 8 from tensorflow.python.distribute.experimental.rpc.rpc_ops import Client
9 from tensorflow.python.distribute.experimental.rpc.rpc_ops import Server
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\python\distribute\experimental\__init__.py:22
20 from tensorflow.python.distribute import parameter_server_strategy
21 from tensorflow.python.distribute import tpu_strategy
---> 22 from tensorflow.python.distribute.failure_handling import failure_handling
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\python\distribute\failure_handling\failure_handling.py:33
31 from tensorflow.python.checkpoint import checkpoint_management
32 from tensorflow.python.distribute import multi_worker_util
---> 33 from tensorflow.python.distribute.failure_handling import gce_util
34 from tensorflow.python.eager import context
35 from tensorflow.python.framework import constant_op
File ~\anaconda3\envs\tf_gpu\lib\site-packages\tensorflow\python\distribute\failure_handling\gce_util.py:20
17 import os
18 import sys
---> 20 import requests
22 from six.moves.urllib import request
23 from tensorflow.python.eager import context
File ~\anaconda3\envs\tf_gpu\lib\site-packages\requests\__init__.py:48
45 from .exceptions import RequestsDependencyWarning
47 try:
---> 48 from charset_normalizer import __version__ as charset_normalizer_version
49 except ImportError:
50 charset_normalizer_version = None
File ~\anaconda3\envs\tf_gpu\lib\site-packages\charset_normalizer\__init__.py:23
1 """
2 Charset-Normalizer
3 ~~~~~~~~~~~~~~
(...)
21 :license: MIT, see LICENSE for more details.
22 """
---> 23 from charset_normalizer.api import from_fp, from_path, from_bytes, normalize
24 from charset_normalizer.legacy import detect
25 from charset_normalizer.version import __version__, VERSION
File ~\anaconda3\envs\tf_gpu\lib\site-packages\charset_normalizer\api.py:10
7 PathLike = Union[str, 'os.PathLike[str]'] # type: ignore
9 from charset_normalizer.constant import TOO_SMALL_SEQUENCE, TOO_BIG_SEQUENCE, IANA_SUPPORTED
---> 10 from charset_normalizer.md import mess_ratio
11 from charset_normalizer.models import CharsetMatches, CharsetMatch
12 from warnings import warn
AttributeError: partially initialized module 'charset_normalizer' has no attribute 'md__mypyc' (most likely due to a circular import)
I installed "requests", "chardet", and "openpyxl", but nothing changed.
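For what it's worth, this particular AttributeError usually comes from a broken or stale charset-normalizer install (its compiled md__mypyc extension no longer matches the Python sources) rather than from TensorFlow itself. A minimal sketch of the usual repair, assuming the tf_gpu conda environment from the traceback is the active one; restart the notebook kernel afterwards:
# force pip to lay down a clean copy of charset-normalizer
pip install --force-reinstall charset-normalizer
# if the error persists, upgrading requests together with it is a common follow-up
pip install --upgrade requests charset-normalizer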
When doing my imports:
import cv2
from matplotlib import pyplot as plt
import numpy as np
import opencv_wrapper as cvw
I get:
ImportError Traceback (most recent call last)
/var/folders/bw/fb7g3vhj2ln41zrg3v5ynnn40000gn/T/ipykernel_96862/883562467.py in <module>
1 # import the necessary packages
2 import cv2
----> 3 from matplotlib import pyplot as plt
4 import numpy as np
5 import opencv_wrapper as cvw
/opt/anaconda3/envs/py37/lib/python3.7/site-packages/matplotlib/__init__.py in <module>
114 # Get the version from the _version.py versioneer file. For a git checkout,
115 # this is computed based on the number of commits since the last tag.
--> 116 from ._version import get_versions
117 __version__ = str(get_versions()['version'])
118 del get_versions
ImportError: cannot import name 'get_versions' from 'matplotlib._version' (/opt/anaconda3/envs/py37/lib/python3.7/site-packages/matplotlib/_version.py)
I'm using:
matplotlib 3.4.3
numpy 1.16.2
opencv-python 4.0.0.21
opencv-wrapper 0.2.3
python 3.7.13
Because of opencv-wrapper, these are the versions I need to use.
I've tried to uninstall and reinstall but the error persists. Any help will be appreciated.
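For context, "cannot import name 'get_versions' from 'matplotlib._version'" is typical of a mixed install: the matplotlib __init__.py on disk still expects the old versioneer-style _version.py, while _version.py itself has been overwritten by a different release. A minimal sketch of a clean reinstall, assuming the py37 conda environment from the traceback is active and that the matplotlib 3.4.3 pin you listed is the one you want back:
# remove every copy pip can see (repeat until pip reports nothing left to uninstall)
pip uninstall -y matplotlib
pip uninstall -y matplotlib
# then reinstall the single pinned version
pip install matplotlib==3.4.3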
I updated the statsmodels package in the Kaggle kernel successfully using the code:
!pip install statsmodels --upgrade
This gave me version 0.10.0. However, when I try to import statsmodels.api, it gives me an error.
import statsmodels.api as sm
The error I am getting is:
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-7-3b8b7e2c2e57> in <module>
8 import matplotlib.pyplot as plt
9 from sklearn.preprocessing import Normalizer
---> 10 import statsmodels.api as sm
11
12 # Input data files are available in the "../input/" directory.
/opt/conda/lib/python3.6/site-packages/statsmodels/api.py in <module>
16 from . import robust
17 from .robust.robust_linear_model import RLM
---> 18 from .discrete.discrete_model import (Poisson, Logit, Probit,
19 MNLogit, NegativeBinomial,
20 GeneralizedPoisson,
/opt/conda/lib/python3.6/site-packages/statsmodels/discrete/discrete_model.py in <module>
43
44 from statsmodels.base.l1_slsqp import fit_l1_slsqp
---> 45 from statsmodels.distributions import genpoisson_p
46
47 try:
/opt/conda/lib/python3.6/site-packages/statsmodels/distributions/__init__.py in <module>
1 from statsmodels.tools._testing import PytestTester
2 from .empirical_distribution import ECDF, monotone_fn_inverter, StepFunction
----> 3 from .edgeworth import ExpandedNormal
4 from .discrete import genpoisson_p, zipoisson, zigenpoisson, zinegbin
5
/opt/conda/lib/python3.6/site-packages/statsmodels/distributions/edgeworth.py in <module>
5 import numpy as np
6 from numpy.polynomial.hermite_e import HermiteE
----> 7 from statsmodels.compat.scipy import factorial
8 from scipy.stats import rv_continuous
9 import scipy.special as special
ImportError: cannot import name 'factorial'
I've upgraded the scipy package, and I am still getting the same error. I am new to Kaggle and Python, and I need the OLS function to do regression analysis. How can I fix this problem? If not, is there another function I can use to get a regression summary?
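For background, scipy.misc.factorial was removed in SciPy 1.3 and only statsmodels 0.10+ copes with that, so a traceback like this usually means the kernel is still running the statsmodels that was imported before the upgrade. A minimal sketch of the two usual ways out in a Kaggle notebook; restart the kernel after either install before importing again:
# option 1: keep the upgraded statsmodels (0.10+), which no longer needs scipy.misc.factorial
!pip install --upgrade statsmodels
# option 2: pin SciPy to a release that still ships scipy.misc.factorial
!pip install "scipy<1.3"
After the restart, import statsmodels.api as sm should work, and sm.OLS(y, X).fit().summary() gives the regression summary you are after.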
So I'm getting the following error when I import matplotlib.pyplot:
ImportError Traceback (most recent call last)
<ipython-input-18-413b0dcce8d2> in <module>()
1 import pandas as pd
----> 2 import matplotlib.pyplot as plt
3 data=pd.read_csv("fifa_countries_audience.csv")
4 del data['country']
5 print(data)
~\Anaconda3\lib\site-packages\matplotlib\pyplot.py in <module>()
30 from cycler import cycler
31 import matplotlib
---> 32 import matplotlib.colorbar
33 from matplotlib import style
34 from matplotlib import _pylab_helpers, interactive
~\Anaconda3\lib\site-packages\matplotlib\colorbar.py in <module>()
30
31 import matplotlib as mpl
---> 32 import matplotlib.artist as martist
33 import matplotlib.cbook as cbook
34 import matplotlib.collections as collections
~\Anaconda3\lib\site-packages\matplotlib\artist.py in <module>()
14 import matplotlib
15 from . import cbook, docstring, rcParams
---> 16 from .path import Path
17 from .transforms import (Bbox, IdentityTransform, Transform, TransformedBbox,
18 TransformedPatchPath, TransformedPath)
~\Anaconda3\lib\site-packages\matplotlib\path.py in <module>()
24
25 from . import _path, rcParams
---> 26 from .cbook import (_to_unmasked_float_array, simple_linear_interpolation,
27 maxdict)
28
ImportError: cannot import name '_to_unmasked_float_array'
Does anyone have an idea what the cause might be? I tried reinstalling matplotlib, updating it, and updating conda, but it didn't solve the problem.
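For what it's worth, _to_unmasked_float_array only exists in newer matplotlib releases, so path.py and cbook.py on disk almost certainly come from two different versions (a stale file left behind by an interrupted upgrade). A minimal sketch of getting back to one consistent copy in the base Anaconda environment; which package manager originally installed matplotlib is an assumption, so the uninstall is run through both:
# remove whatever copies exist, from both package managers, then install one consistent copy
pip uninstall -y matplotlib
conda remove -y matplotlib
conda install -y matplotlib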
I have successfully installed scipy, numpy, and pillow; however, I get the error below:
ImportError: cannot import name 'imread'
Are you following the same steps?
import scipy.misc
img = scipy.misc.imread('my_image_path')
# To verify image is read properly.
import matplotlib.pyplot as plt
print(img.shape)
plt.imshow(img)
plt.show()
imread and imsave are deprecated in scipy.misc (and have been removed in recent SciPy releases), which is why the import fails.
Use imageio.imread instead, after import imageio.
For saving, use imageio.imsave or imageio.imwrite instead.
For resizing, use skimage.transform.resize instead, after importing skimage.
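If it helps, here is the snippet from the question rewritten with those replacements; a minimal sketch, where the 128x128 size and the output filename are made up for the example and 'my_image_path' is the placeholder from the question:
import imageio
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import resize

img = imageio.imread('my_image_path')   # replaces scipy.misc.imread

# To verify image is read properly.
print(img.shape)
plt.imshow(img)
plt.show()

small = resize(img, (128, 128))   # replaces scipy.misc.imresize; returns floats in [0, 1]
imageio.imwrite('my_image_small.png', (small * 255).astype(np.uint8))   # replaces scipy.misc.imsave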