I'm a new user of Scrapy, crawling my websites. I want to store the crawled data in a MySQL database.
myspider.py:
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from myproject.items import Torrent  # hypothetical: wherever the Torrent item is defined

class MininovaSpider(CrawlSpider):
    name = 'myspider'
    allowed_domains = ['example.com']
    start_urls = ['http://www.example.com']
    rules = [Rule(SgmlLinkExtractor(allow=('/categorie/.*'), restrict_xpaths=('//div[@id="contLeftNavig"]',)), 'parse_t')]

    def parse_t(self, response):
        x = HtmlXPathSelector(response)
        torrent = Torrent()
        torrent['url'] = response.url
        torrent['title'] = x.select("//h1[@class='infoAneTitre']/text()").extract()
        torrent['wilaya'] = x.select("//span[@class='ville_t']/text()").extract()
        #torrent['prix'] = x.select("//div[@id='datail_ann']/ul[1]/li[4]/span/text()").extract()
        #torrent['surface'] = x.select("//div[@id='datail_ann']/ul[3]/li[1]/span/text()").extract()
        torrent['description'] = x.select("//div[@class='box_pad']/text()").extract()
        return torrent
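For reference, the Torrent item used above would live in items.py along these lines (a minimal sketch; the field names are inferred from parse_t(), so treat this as hypothetical):

from scrapy.item import Item, Field

class Torrent(Item):
    # Fields inferred from the assignments in parse_t() (hypothetical sketch)
    url = Field()
    title = Field()
    wilaya = Field()
    prix = Field()
    surface = Field()
    description = Field()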
For pipelines.py, I modified and used the googledir example. When I run the crawl I get these errors:
exceptions.AttributeError: 'MininovaSpider' object has no attribute 'iterkeys'
exceptions.TypeError: 'MininovaSpider' object is not subscriptable
pipelines.py:
from scrapy import log
from twisted.enterprise import adbapi
import time
import MySQLdb.cursors

class Pipeline(object):
    def __init__(self):
        self.dbpool = adbapi.ConnectionPool('MySQLdb',
            db='test',
            user='root',
            passwd='',
            cursorclass=MySQLdb.cursors.DictCursor,
            charset='utf8',
            use_unicode=True
        )

    def process_item(self, spider, item):
        query = self.dbpool.runInteraction(self._conditional_insert, item)
        query.addErrback(self.handle_error)
        return item

    def _conditional_insert(self, tx, item):
        tx.execute("select * from database where url = %s", (item['url'] ))
        result = tx.fetchone()
        if result:
            log.msg("Item already stored in db: %s" % item, level=log.DEBUG)
        else:
            tx.execute(
                "insert into database (wilaya,titre, site, lien,resume,timestamp) "
                "values (%s, %s, %s, %s,%s,%s)",
                (item['wilaya'],
                 item['title'],
                 'example.com', item['url'], item['description'],
                 time.time())
            )
            log.msg("Item stored in db: %s" % item, level=log.DEBUG)

    def handle_error(self, e):
        log.err(e)
And the traceback:
Traceback (most recent call last):
File "/usr/lib/python2.7/twisted/internet/defer.py", line 287, in addCallbacks
self._runCallbacks()
File "/usr/lib/python2.7/twisted/internet/defer.py", line 545, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/usr/lib/python2.7/site-packages/scrapy/core/scraper.py", line 208, in _itemproc_finished
item=output, response=response, spider=spider)
File "/usr/lib/python2.7/site-packages/scrapy/utils/signal.py", line 53, in send_catch_log_deferred
*arguments, **named)
--- <exception caught here> ---
File "/usr/lib/python2.7/twisted/internet/defer.py", line 134, in maybeDeferred
result = f(*args, **kw)
File "/usr/lib/python2.7/site-packages/scrapy/xlib/pydispatch/robustapply.py", line 47, in robustApply
return receiver(*arguments, **named)
File "/usr/lib/python2.7/site-packages/scrapy/contrib/feedexport.py", line 177, in item_scraped
slot.exporter.export_item(item)
File "/usr/lib/python2.7/site-packages/scrapy/contrib/exporter/__init__.py", line 109, in export_item
itemdict = dict(self._get_serialized_fields(item))
File "/usr/lib/python2.7/site-packages/scrapy/contrib/exporter/__init__.py", line 60, in _get_serialized_fields
field_iter = item.iterkeys()
exceptions.AttributeError: 'MininovaSpider' object has no attribute 'iterkeys'
2012-01-18 16:00:43-0600 [scrapy] Unhandled Error
Traceback (most recent call last):
File "/usr/lib/python2.7/threading.py", line 503, in __bootstrap
self.__bootstrap_inner()
File "/usr/lib/python2.7/threading.py", line 530, in __bootstrap_inner
self.run()
File "/usr/lib/python2.7/threading.py", line 483, in run
self.__target(*self.__args, **self.__kwargs)
--- <exception caught here> ---
File "/usr/lib/python2.7/twisted/python/threadpool.py", line 207, in _worker
result = context.call(ctx, function, *args, **kwargs)
File "/usr/lib/python2.7/twisted/python/context.py", line 118, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/usr/lib/python2.7/twisted/python/context.py", line 81, in callWithContext
return func(*args,**kw)
File "/usr/lib/python2.7/twisted/enterprise/adbapi.py", line 448, in _runInteraction
result = interaction(trans, *args, **kw)
File "/opt/scrapy/test/pipelines.py", line 33, in _conditional_insert
tx.execute("select * from database where url = %s", (item['url'] ))
exceptions.TypeError: 'MininovaSpider' object is not subscriptable
It looks like you have yielded a spider (MininovaSpider) instance somewhere instead of an item. I think there is more code you haven't shown.
In Pipeline.process_item(), add this to confirm:
def process_item(self, spider, item):
    assert isinstance(item, Torrent), 'Here should be Torrent instance!'
    query = self.dbpool.runInteraction(self._conditional_insert, item)
    query.addErrback(self.handle_error)
    return item
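One more thing worth checking, since the traceback shows the spider landing where the item should be: Scrapy's documentation defines the pipeline hook as process_item(self, item, spider), with the item first. If your Scrapy version expects that order, the (self, spider, item) signature used above binds the spider instance to the item parameter, which would produce exactly these errors. A sketch of the documented order:

def process_item(self, item, spider):
    # 'item' is the scraped item, 'spider' is the spider that produced it
    query = self.dbpool.runInteraction(self._conditional_insert, item)
    query.addErrback(self.handle_error)
    return item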
Unhandled error in Deferred:
2020-07-24 09:12:40 [twisted] CRITICAL: Unhandled error in Deferred:
Traceback (most recent call last):
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 192, in crawl
    return self._crawl(crawler, *args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 196, in _crawl
    d = crawler.crawl(*args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1613, in unwindGenerator
    return _cancellableInlineCallbacks(gen)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1529, in _cancellableInlineCallbacks
    _inlineCallbacks(None, g, status)
--- <exception caught here> ---
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
    result = g.send(result)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 87, in crawl
    self.engine = self._create_engine()
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/crawler.py", line 101, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/core/engine.py", line 69, in __init__
    self.downloader = downloader_cls(crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/core/downloader/__init__.py", line 83, in __init__
    self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/middleware.py", line 53, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/middleware.py", line 35, in from_settings
    mw = create_instance(mwcls, settings, crawler)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy/utils/misc.py", line 150, in create_instance
    instance = objcls.from_crawler(crawler, *args, **kwargs)
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy_selenium/middlewares.py", line 67, in from_crawler
    middleware = cls(
  File "/home/baku/Dev/workspace/moje-python/scrape_linkedin/venv/lib/python3.8/site-packages/scrapy_selenium/middlewares.py", line 43, in __init__
    for argument in driver_arguments:
builtins.TypeError: 'NoneType' object is not iterable
2020-07-24 09:12:40 [twisted] CRITICAL:
(the same traceback is then logged a second time, ending in TypeError: 'NoneType' object is not iterable)
My settings.py:
from shutil import which
SELENIUM_DRIVER_NAME = 'firefox'
SELENIUM_DRIVER_EXECUTABLE_PATH = which('geckodriver')
SELENIUM_BROWSER_EXECUTABLE_PATH = which('firefox')
...
'scrapy_selenium.SeleniumMiddleware': 800,
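One detail the traceback points at: scrapy_selenium's SeleniumMiddleware iterates over the SELENIUM_DRIVER_ARGUMENTS setting in its constructor (the failing line is for argument in driver_arguments), so settings.py would normally define it as well, even if only as an empty list. A sketch, assuming the firefox driver configured above:

# Required by scrapy-selenium: iterated at middleware startup,
# so leaving it unset makes driver_arguments None -> 'NoneType' is not iterable.
SELENIUM_DRIVER_ARGUMENTS = ['-headless']  # or [] to pass no extra flags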
Permissions for the driver look good:
:/usr/local/bin$ ll | grep gecko
-rwxrwxrwx 1 baku baku 7008696 lip 24 09:09 geckodriver*
crawler code:
import scrapy
from scrapy_selenium import SeleniumRequest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC

class LinkedInProfileSeleniumSpider(scrapy.Spider):
    name = 'lips'
    allowed_domains = ['www.linkedin.com']

    def start_requests(self):
        yield SeleniumRequest(
            url="https://www.linkedin.com/login/",
            callback=self.proceed_login,
            wait_until=(
                EC.presence_of_element_located(
                    (By.CSS_SELECTOR, "#username")
                )
            ),
            script='window.scrollTo(0, document.body.scrollHeight);',
            wait_time=30
        )

    def proceed_login(self, response):
        # AFTER LOGIN
        driver = response.request.meta['driver']
        ...
Can you please help me figure out why it's failing? Thanks!
(By the way, it works with Chrome drivers but fails with gecko.)
I had the same problem on Mac; now I am trying on an Ubuntu machine.
I'm not sure what the issue could be or where to start debugging.
It does not even land in self.proceed_login; it fails on the first request.
I am trying to get the job ID of a Scrapy 2.1.x job in the close_spider method:
import os

class mysql_pipeline(object):
    def close_spider(self, spider):
        print(os.environ['SCRAPY_JOB'])
Unfortunately this results in a KeyError:
ERROR: Scraper close failure
Traceback (most recent call last):
File "/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/Users/andy/spider2/crawler/pipelines.py", line 137, in close_spider
os.environ['SCRAPY_JOB'],
File "/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/os.py", line 675, in __getitem__
raise KeyError(key) from None
KeyError: 'SCRAPY_JOB'
2020-05-16 17:24:52 [scrapy
How can I pull the job id within the method?
In the spider constructor (inside __init__),
add the line:
self.jobId = kwargs.get('_job')
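Put together, the constructor might look like this (a minimal sketch; scrapyd passes the job ID to the spider as the _job argument when the job is scheduled):

import scrapy

class MySpider(scrapy.Spider):
    name = 'myspider'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # '_job' is supplied by scrapyd at schedule time; it is absent
        # when the spider is run directly with "scrapy crawl".
        self.jobId = kwargs.get('_job')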
Then in the parse function, pass this in the item:
def parse(self, response):
    data = {}
    ...
    data['jobId'] = self.jobId
    yield data
In the pipeline, add this:
def process_item(self, item, spider):
    self.jobId = item['jobId']
    ...

def close_spider(self, spider):
    print(self.jobId)
    ...
I get an error when I try to read data from a column.
Model is:
class int_filial_phone(models.Model):
    _name = 'pr_filials.int_filial_phone'

    name = fields.Char(string="Partner-number")  # , compute='_get_name_field')
    number = fields.Char(string="Phone")
    active = fields.Boolean(string="Active")
    filial_addr_ids = fields.One2many('pr_filials.filial_addr', 'int_filial_phone_id', string='Address')
    filial_id = fields.Many2one('res.company', string='Filial')
    advert_phone_ids = fields.One2many('pr_filials.advert_phone', 'int_filial_phone_id', 'Advert phone')

    _sql_constraints = [
        ('number_unique',
         'UNIQUE(number)',
         "The parameter number must be unique"),
    ]
Methods:
def find_client_in_filial_phone(self, phone, table):
    cr, uid, context, registry = request.cr, request.uid, request.context, request.registry
    result = None
    table = request.env[table]
    phone = self.format_symbol_phone(phone)
    _logger.error('find_client_in_filial_phone phone: %r ', phone)
    ids = table.sudo().search([['number', '=', phone]], limit=1)
    if len(ids) > 0:
        result = table.sudo().browse(ids)[0]
    _logger.error('find_client_in_filial_phone result: %r ', result)
    return result
I try to get the record ID:
int_phone = self.find_client_in_filial_phone(data[3], 'pr_filials.int_filial_phone')
int_phone_id = int(int_phone.id)
All works fine.
But when I try to read another field of the record:
_logger.error("PHONE NAME: %r", int_phone[0].name)
I get this error:
Traceback (most recent call last):
  File "/home/skif/odoo/openerp/http.py", line 648, in _handle_exception
    return super(JsonRequest, self)._handle_exception(exception)
  File "/home/skif/odoo/openerp/http.py", line 685, in dispatch
    result = self._call_function(**self.params)
  File "/home/skif/odoo/openerp/http.py", line 321, in _call_function
    return checked_call(self.db, *args, **kwargs)
  File "/home/skif/odoo/openerp/service/model.py", line 118, in wrapper
    return f(dbname, *args, **kwargs)
  File "/home/skif/odoo/openerp/http.py", line 314, in checked_call
    result = self.endpoint(*a, **kw)
  File "/home/skif/odoo/openerp/http.py", line 964, in __call__
    return self.method(*args, **kw)
  File "/home/skif/odoo/openerp/http.py", line 514, in response_wrap
    response = f(*args, **kw)
  File "/home/skif/odoo/openerp/my-addons/pr_finance/controllers/controllers.py", line 151, in upload_file
    _logger.error("PHONE INT : %r", int_phone[0].name)
  File "/home/skif/odoo/openerp/fields.py", line 830, in __get__
    self.determine_value(record)
  File "/home/skif/odoo/openerp/fields.py", line 930, in determine_value
    record._prefetch_field(self)
  File "/home/skif/odoo/openerp/api.py", line 248, in wrapper
    return new_api(self, *args, **kwargs)
  File "/home/skif/odoo/openerp/models.py", line 3308, in _prefetch_field
    result = records.read([f.name for f in fs], load='_classic_write')
  File "/home/skif/odoo/openerp/api.py", line 248, in wrapper
    return new_api(self, *args, **kwargs)
  File "/home/skif/odoo/openerp/models.py", line 3238, in read
    self._read_from_database(stored, inherited)
  File "/home/skif/odoo/openerp/api.py", line 248, in wrapper
    return new_api(self, *args, **kwargs)
  File "/home/skif/odoo/openerp/models.py", line 3376, in _read_from_database
    cr.execute(query_str, params)
  File "/home/skif/odoo/openerp/sql_db.py", line 141, in wrapper
    return f(self, *args, **kwargs)
  File "/home/skif/odoo/openerp/sql_db.py", line 220, in execute
    res = self._obj.execute(query, params)
  File "/home/skif/.local/lib/python2.7/site-packages/psycopg2/extensions.py", line 129, in getquoted
    pobjs = [adapt(o) for o in self._seq]
ProgrammingError: can't adapt type 'pr_filials.int_filial_phone'
How can I read data from the record? Why do I get the ID, but cannot read data from the other fields of the record?
In the new version of the API, when you use the method "search", the returned value is a recordset.
Example:
Old API version:
record_ids = self.pool.get('model.name').search([('name', '=', 'Jon doe')])
# The value of record_ids is like [1, 2, 3, 4]
records = self.pool.get('model.name').browse(record_ids)
# The value of records is like model.name(1, 2, 3, 4)
New API version:
records = self.env['model.name'].search([('name', '=', 'Jon doe')])
# The value of records is like model.name(1, 2, 3, 4)
In your code you try to browse with a recordset:
def find_client_in_filial_phone(self, phone, table):
    ...
    ids = table.sudo().search([['number', '=', phone]], limit=1)
    # Here ids is a recordset
    if len(ids) > 0:
        result = table.sudo().browse(ids)[0]
    _logger.error('find_client_in_filial_phone result: %r ', result)
    return result
You must do it like this:
def find_client_in_filial_phone(self, phone, table):
    cr, uid, context, registry = request.cr, request.uid, request.context, request.registry
    table = request.env[table]
    phone = self.format_symbol_phone(phone)
    _logger.error('find_client_in_filial_phone phone: %r ', phone)
    result = table.sudo().search([['number', '=', phone]], limit=1)
    _logger.error('find_client_in_filial_phone result: %r ', result)
    return result
If your search doesn't find anything, an empty recordset is returned.
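Since an empty recordset is falsy, the calling code from the question can then be written like this (a sketch reusing the names from the snippets above):

int_phone = self.find_client_in_filial_phone(data[3], 'pr_filials.int_filial_phone')
if int_phone:  # empty recordset -> falsy, no len() check needed
    int_phone_id = int_phone.id
    _logger.error("PHONE NAME: %r", int_phone.name)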
When I create a goal definition with:
Computation mode: Automatic (sum on a field)
Model: Sale Order Line
Field to Sum: SubTotal
and then add this goal definition to a challenge and try to start it, I get this traceback:
Client Traceback (most recent call last):
File "/home/openerp/server/openerp/addons/web/http.py", line 204, in dispatch
response["result"] = method(self, **self.params)
File "/home/openerp/server/openerp/addons/web/controllers/main.py", line 1132, in call_button
action = self._call_kw(req, model, method, args, {})
File "/home/openerp/server/openerp/addons/web/controllers/main.py", line 1120, in _call_kw
return getattr(req.session.model(model), method)(*args, **kwargs)
File "/home/openerp/server/openerp/addons/web/session.py", line 42, in proxy
result = self.proxy.execute_kw(self.session._db, self.session._uid, self.session._password, self.model, method, args, kw)
File "/home/openerp/server/openerp/addons/web/session.py", line 30, in proxy_method
result = self.session.send(self.service_name, method, *args)
File "/home/openerp/server/openerp/addons/web/session.py", line 103, in send
raise xmlrpclib.Fault(openerp.tools.ustr(e), formatted_info)
Server Traceback (most recent call last):
File "/home/openerp/server/openerp/addons/web/session.py", line 89, in send
return openerp.netsvc.dispatch_rpc(service_name, method, args)
File "/home/openerp/server/openerp/netsvc.py", line 296, in dispatch_rpc
result = ExportService.getService(service_name).dispatch(method, params)
File "/home/openerp/server/openerp/service/web_services.py", line 626, in dispatch
res = fn(db, uid, *params)
File "/home/openerp/server/openerp/osv/osv.py", line 190, in execute_kw
return self.execute(db, uid, obj, method, *args, **kw or {})
File "/home/openerp/server/openerp/osv/osv.py", line 132, in wrapper
return f(self, dbname, *args, **kwargs)
File "/home/openerp/server/openerp/osv/osv.py", line 199, in execute
res = self.execute_cr(cr, uid, obj, method, *args, **kw)
File "/home/openerp/server/openerp/osv/osv.py", line 187, in execute_cr
return getattr(object, method)(cr, uid, *args, **kw)
File "/home/openerp/server/openerp/addons/gamification/plan.py", line 342, in action_start
return self.generate_goals_from_plan(cr, uid, ids, context=context)
File "/home/openerp/server/openerp/addons/gamification/plan.py", line 439, in generate_goals_from_plan
goal_obj.update(cr, uid, [new_goal_id], context=context)
File "/home/openerp/server/openerp/addons/gamification/goal.py", line 278, in update
new_value = res and res[0][field_name] or 0.0
KeyError: u'price_subtotal'
This is a video that shows the scenario I followed: http://youtu.be/b546xTc1ArY
The error is generated while I am upgrading my module in OpenERP: a "res_model not found" error.
Error:
Client Traceback (most recent call last):
File "/opt/openerp/server/openerp/addons/web/http.py", line 204, in dispatch
response["result"] = method(self, **self.params)
File "/opt/openerp/server/openerp/addons/web/controllers/main.py", line 993, in load
menu_root_ids = self.get_user_roots(req)
File "/opt/openerp/server/openerp/addons/web/controllers/main.py", line 979, in get_user_roots
return Menus.search(menu_domain, 0, False, False, req.context)
File "/opt/openerp/server/openerp/addons/web/session.py", line 42, in proxy
result = self.proxy.execute_kw(self.session._db, self.session._uid, self.session._password, self.model, method, args, kw)
File "/opt/openerp/server/openerp/addons/web/session.py", line 30, in proxy_method
result = self.session.send(self.service_name, method, *args)
File "/opt/openerp/server/openerp/addons/web/session.py", line 103, in send
raise xmlrpclib.Fault(openerp.tools.ustr(e), formatted_info)
Server Traceback (most recent call last):
File "/opt/openerp/server/openerp/addons/web/session.py", line 89, in send
return openerp.netsvc.dispatch_rpc(service_name, method, args)
File "/opt/openerp/server/openerp/netsvc.py", line 292, in dispatch_rpc
result = ExportService.getService(service_name).dispatch(method, params)
File "/opt/openerp/server/openerp/service/web_services.py", line 626, in dispatch
res = fn(db, uid, *params)
File "/opt/openerp/server/openerp/osv/osv.py", line 190, in execute_kw
return self.execute(db, uid, obj, method, *args, **kw or {})
File "/opt/openerp/server/openerp/osv/osv.py", line 132, in wrapper
return f(self, dbname, *args, **kwargs)
File "/opt/openerp/server/openerp/osv/osv.py", line 199, in execute
res = self.execute_cr(cr, uid, obj, method, *args, **kw)
File "/opt/openerp/server/openerp/osv/osv.py", line 187, in execute_cr
return getattr(object, method)(cr, uid, *args, **kw)
File "/opt/openerp/server/openerp/addons/mail/mail_group_menu.py", line 44, in search
ids = super(ir_ui_menu, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=False)
File "/opt/openerp/server/openerp/addons/base/ir/ir_ui_menu.py", line 127, in search
result = self._filter_visible_menus(cr, uid, ids, context=context)
File "/opt/openerp/server/openerp/addons/base/ir/ir_ui_menu.py", line 103, in _filter_visible_menus
if not menu.child_id:
File "/opt/openerp/server/openerp/osv/orm.py", line 497, in __getattr__
return self[name]
File "/opt/openerp/server/openerp/osv/orm.py", line 405, in __getitem__
field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
File "/opt/openerp/server/openerp/osv/orm.py", line 3621, in read
result = self._read_flat(cr, user, select, fields, context, load)
File "/opt/openerp/server/openerp/osv/orm.py", line 3742, in _read_flat
res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
File "/opt/openerp/server/openerp/osv/fields.py", line 538, in get
ids2 = obj.pool.get(self._obj).search(cr, user, domain + [(self._fields_id, 'in', ids)], limit=self._limit, context=context)
File "/opt/openerp/server/openerp/addons/mail/mail_group_menu.py", line 44, in search
ids = super(ir_ui_menu, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=False)
File "/opt/openerp/server/openerp/addons/base/ir/ir_ui_menu.py", line 127, in search
result = self._filter_visible_menus(cr, uid, ids, context=context)
File "/opt/openerp/server/openerp/addons/base/ir/ir_ui_menu.py", line 103, in _filter_visible_menus
if not menu.child_id:
File "/opt/openerp/server/openerp/osv/orm.py", line 499, in __getattr__
raise AttributeError(e)
AttributeError: 'Field res_model not found in browse_record(ir.actions.act_window, 494)'
This is a rather ambiguous question.
What version of OpenERP is involved here?
In OE7, only ir_act_window has res_model; I don't even see the model that you provided (ir.actions.act_window). Same in OE8.
If you could specify what is going on, and whether this is your module/extension, it would help a lot.
In my case, I had deleted an XML file for which a menu item had been created. Because the file was deleted, the menu item kept looking for it and threw the error. I finally resolved the issue by deleting the menu item.