How to speed up scrapy - scrapy

I need to collect a lot(really a lot) of data for statistics, all the necessary information is in <script type="application/ld+json"></script>
and I wrote scrapy parser (script inside html) under it, but parsing is very slow (about 3 pages per second). Is there any way to speed up the process? Ideally I would like to see 10+ pages per second
#spider.py:
import scrapy
import json
class Spider(scrapy.Spider):
name = 'scrape'
start_urls = [
about 10000 urls
]
def parse(self, response):
data = json.loads(response.css('script[type="application/ld+json"]::text').extract_first())
name = data['name']
image = data['image']
path = response.css('span[itemprop="name"]::text').extract()
yield {
'name': name,
'image': image,
'path': path
}
return
#settings.py:
USER_AGENT = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0"
ROBOTSTXT_OBEY = False
CONCURRENT_REQUESTS = 32
DOWNLOAD_DELAY = 0.33
DEFAULT_REQUEST_HEADERS = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Language': 'en',
}
EXTENSIONS = {
'scrapy.extensions.telnet.TelnetConsole': None,
}
AUTOTHROTTLE_DEBUG = False
LOG_ENABLED = False
My PC specs:
16GB ram, i5 2400, ssd, 1gb ethernet
#Edited

settings.py
ROBOTSTXT_OBEY = False
DOWNLOAD_DELAY = 0
DOWNLOAD_TIMEOUT = 30
RANDOMIZE_DOWNLOAD_DELAY = True
REACTOR_THREADPOOL_MAXSIZE = 128
CONCURRENT_REQUESTS = 256
CONCURRENT_REQUESTS_PER_DOMAIN = 256
CONCURRENT_REQUESTS_PER_IP = 256
AUTOTHROTTLE_ENABLED = True
AUTOTHROTTLE_START_DELAY = 1
AUTOTHROTTLE_MAX_DELAY = 0.25
AUTOTHROTTLE_TARGET_CONCURRENCY = 128
AUTOTHROTTLE_DEBUG = True
RETRY_ENABLED = True
RETRY_TIMES = 3
RETRY_HTTP_CODES = [500, 502, 503, 504, 400, 401, 403, 404, 405, 406, 407, 408, 409, 410, 429]
DOWNLOADER_MIDDLEWARES = {
'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
'scrapy.spidermiddlewares.referer.RefererMiddleware': 80,
'scrapy.downloadermiddlewares.retry.RetryMiddleware': 90,
'scrapy_fake_useragent.middleware.RandomUserAgentMiddleware': 120,
'scrapy.downloadermiddlewares.cookies.CookiesMiddleware': 130,
'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': 810,
'scrapy.downloadermiddlewares.redirect.RedirectMiddleware': 900,
'scraper.middlewares.ScraperDownloaderMiddleware': 1000
}

Related

How to store all scraped stats moment before spider closes?

I want to store all the stats collected from the spider into a single output file stored as json format. However, I get this error:
'MemoryStatsCollector' object has no attribute 'get_all'
: The documentation mentions that stats.get_all is how you get all the stores. What is the correct method of implementation for this?
import scrapy
from scrapy import signals
from scrapy import crawler
import jsonlines
class TestSpider(scrapy.Spider):
name = 'stats'
start_urls = ['http://quotes.toscrape.com']
def __init__(self, stats):
self.stats = stats
#classmethod
def from_crawler(cls, crawler, *args, **kwargs):
#spider = super(TestSpider, cls).from_crawler(crawler, *args, **kwargs)
stat = cls(crawler.stats)
crawler.signals.connect(stat.spider_closed, signals.spider_closed)
return stat
def spider_closed(self):
#self.stats = stat
txt_file = 'some_text.jl'
with jsonlines.open(txt_file, 'w') as f:
f.write(self.stats.get_all())
def start_requests(self):
for url in self.start_urls:
yield scrapy.Request(
url=url,
callback=self.parse
)
def parse(self, response):
content = response.xpath('//div[#class = "row"]')
for items in content:
yield {
'some_items_links':items.xpath(".//a//#href").get()
}
Turns out there is no get_all for the method and instead I had to input get_stats(), the documentation provides a few examples of some:
stats.get_value()
stats.get_stats()
stats.max_value()/stats.min_value()
stats.inc_value()
stats.set_value()
Some further information provided in the documentation for stats.
The working part:
def spider_closed(self):
#self.stats = stat
txt_file = 'some_text.jl'
with jsonlines.open(txt_file, 'w') as f:
# f.write(f'{self.stats.get_all()}') --- Changed
f.write(f'{self.stats.get_stats()}')
Output:
{
"log_count/INFO": 10,
"log_count/DEBUG": 3,
"start_time": datetime.datetime(2022, 7, 6, 16, 16, 30, 553373),
"memusage/startup": 59895808,
"memusage/max": 59895808,
"scheduler/enqueued/memory": 1,
"scheduler/enqueued": 1,
"scheduler/dequeued/memory": 1,
"scheduler/dequeued": 1,
"downloader/request_count": 1,
"downloader/request_method_count/GET": 1,
"downloader/request_bytes": 223,
"downloader/response_count": 1,
"downloader/response_status_count/200": 1,
"downloader/response_bytes": 2086,
"httpcompression/response_bytes": 11053,
"httpcompression/response_count": 1,
"response_received_count": 1,
"item_scraped_count": 1,
"elapsed_time_seconds": 0.34008,
"finish_time": datetime.datetime(2022, 7, 6, 16, 16, 30, 893453),
"finish_reason": "finished",
}

How can I generate a WM_SEC.AUTH_SIGNATURE in Elixir?

I'm trying to generate Walmart's API WM_SEC.AUTH_SIGNATURE header.
I found a lot of examples in Ruby and Python, but no examples in Elixir. I've been trying to create one and try it out.
Here's what I attempted.
# mix.exs
{:ex_crypto, git: "https://github.com/ntrepid8/ex_crypto.git", branch: "master"}
def scrape_store_item(store_item) do
consumer_id = "my-consumer-id"
private_key_version = "1"
private_key_password = "my-private-key-password"
private_key =
Application.app_dir(:my_app, "priv/keys/walmart/WM_IO_private_key.pem")
|> ExPublicKey.load!(private_key_password)
timestamp = DateTime.utc_now() |> DateTime.to_unix()
url = "https://developer.api.walmart.com/api-proxy/service/affil/product/v2/taxonomy"
message = "#{consumer_id}\n#{timestamp}\n#{private_key_version}\n"
encoded_message = Base.encode64(message)
{:ok, auth_signature} = ExPublicKey.sign(encoded_message, private_key)
auth_signature = Base.encode64(auth_signature)
middleware = [
{Tesla.Middleware.BaseUrl,
"https://developer.api.walmart.com/api-proxy/service/affil/product/v2"},
{Tesla.Middleware.Headers,
[
{"WM_CONSUMER.ID", consumer_id},
{"WM_CONSUMER.INTIMESTAMP", timestamp},
{"WM_SEC.KEY_VERSION", private_key_version},
{"WM_SEC.AUTH_SIGNATURE", auth_signature},
{"WM_SHOW_REASON_CODES", "ALL"},
{"Content-Type", "application/json"}
]}
]
client = Tesla.client(middleware)
{:ok, response} = Tesla.get(client, "/taxonomy") |> IO.inspect()
IO.inspect(response.body)
end
I get a 401 response with this code:
{:ok,
%Tesla.Env{
__client__: %Tesla.Client{
adapter: nil,
fun: nil,
post: [],
pre: [
{Tesla.Middleware.BaseUrl, :call,
["https://developer.api.walmart.com/api-proxy/service/affil/product/v2"]},
{Tesla.Middleware.Headers, :call,
[
[
{"WM_CONSUMER.ID", "my-consumer-id"},
{"WM_CONSUMER.INTIMESTAMP", 1654443660},
{"WM_SEC.KEY_VERSION", "1"},
{"WM_SEC.AUTH_SIGNATURE",
"kdXG+e6R/n+8pH1ha1WKnzLrAHbUqmJsZfN9nOIyOzp6gsHAH7/VrX0K477cdzAq/v7YLpNJXZug3Yt6WTZoP17sZhz6Dig1BK1gg+EZqVqRaF3VJdRwBKlVgBO31s634xL7M8kPhXK11CsMxG8/9xjTGn2cDKEZ9aLeq15ECIfYa5tVtCdTcjNS4u6a7npByU9PIFp9a7n3h1KbW9C/9EA05kTuC1N0oS8nBlnKbA2+C0UW9EAvN4MaIkG0SqOqf/uEHn9BteAv8hI0Ayyny9RpJQmfZEpZ0G3htA7t1pWTzwxUsIJrF/5D1gV+IIYR7OiwHUg2RsIrnPohbznPQw=="}
]
]}
]
},
__module__: Tesla,
body: <<31, 139, 8, 0, 0, 9, 110, 136, 0, 255, 68, 204, 65, 138, 131, 64, 16,
70, 225, 171, 20, 255, 122, 70, 156, 25, 199, 69, 175, 115, 132, 236, 67,
97, 151, 177, 192, 110, 155, 174, 210, 4, 196, 187, 135, ...>>,
headers: [
{"content-encoding", "gzip"},
{"content-type", "application/json;charset=utf-8"},
{"last-modified", "Sun, 05 Jun 2022 15:41:00 GMT"},
{"strict-transport-security", "max-age=86400"},
{"wm_svc.env", "prod"},
{"wm_svc.name", "affil-product"},
{"wm_svc.version", "2.0.0"},
{"x-lua-strict-transport-security", "max-age=86400"},
{"x-tb", "1"},
{"x-tb-optimization-total-bytes-saved", "0"},
{"date", "Sun, 05 Jun 2022 15:41:00 GMT"},
{"connection", "close"},
{"set-cookie",
"TS01a35e2a=01c5a4e2f95f0b472a3a7606aa7c7c33653874c13d636655443ecbca84d23369b19bc1de1973ac24c93ff1f24512e7af49264d46c6; Path=/; Secure"}
],
method: :get,
opts: [],
query: [],
status: 401,
url: "https://developer.api.walmart.com/api-proxy/service/affil/product/v2/taxonomy"
}}
Here's the example code someone shared for a working Ruby version.
version = 'YOUR VERSION'
consumer_id = "YOUR CONSUMER ID"
time_stamp = (Time.now.to_i * 1000).to_s
p_key = "YOUR PRIVATE KEY"
digest = OpenSSL::Digest.new('sha256')
data = consumer_id + "\n" + time_stamp + "\n" + version + "\n"
k = OpenSSL::PKey::RSA.new(p_key.to_s)
digest = OpenSSL::Digest::SHA256.new
signature = k.sign(digest,data)
signature = Base64.strict_encode64(signature)
headers = {
"WM_SEC.KEY_VERSION": version,
"WM_CONSUMER.ID": consumer_id,
"WM_CONSUMER.INTIMESTAMP": time_stamp,
"WM_SEC.AUTH_SIGNATURE": signature
}
puts HTTParty.get("https://developer.api.walmart.com/api-proxy/service/affil/product/v2/taxonomy", headers: headers).parsed_response
Here's a script that does it without ex_crypto. Nice challenge, those Wallmart docs are terrible.
Mix.install([:tesla])
key_version = System.fetch_env!("WALLMART_KEY_VERSION")
consumer_id = System.fetch_env!("WALLMART_CONSUMER_ID")
private_key_pem = File.read!("WM_IO_private_key.pem")
[pem_entry] = :public_key.pem_decode(private_key_pem)
private_key = :public_key.pem_entry_decode(pem_entry)
timestamp = System.os_time(:millisecond)
auth_signature =
"#{consumer_id}\n#{timestamp}\n#{key_version}\n"
|> :public_key.sign(:sha256, private_key)
|> Base.encode64()
url = "https://developer.api.walmart.com/api-proxy/service/affil/product/v2/taxonomy"
headers = [
{"WM_CONSUMER.ID", consumer_id},
{"WM_CONSUMER.INTIMESTAMP", timestamp},
{"WM_SEC.KEY_VERSION", key_version},
{"WM_SEC.AUTH_SIGNATURE", auth_signature}
]
{:ok, %{body: body}} = Tesla.get(url, headers: headers)
IO.puts(body)
Output:
{"categories":[{"id":"0","name":"Home Page","path":"Home Page",...
Here's how you generate WM_SEC.AUTH_SIGNATURE in Elixir:
Make sure you have ex_crypto package installed from master branch since the latest version has necessary changes but is not published.
{:ex_crypto, git: "https://github.com/ntrepid8/ex_crypto.git", branch: "master"}
Then here's the solution:
version = "1"
consumer_id = "my-consumer-id"
timestamp = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
data = "#{consumer_id}\n#{timestamp}\n#{version}\n"
private_key =
Application.app_dir(:my_app, "priv/keys/walmart/WM_IO_private_key.pem")
|> ExPublicKey.load!()
{:ok, auth_signature} = ExPublicKey.sign(data, private_key)
auth_signature = Base.encode64(auth_signature)
middleware = [
{Tesla.Middleware.BaseUrl,
"https://developer.api.walmart.com/api-proxy/service/affil/product/v2"},
{Tesla.Middleware.Headers,
[
{"WM_CONSUMER.ID", consumer_id},
{"WM_CONSUMER.INTIMESTAMP", timestamp},
{"WM_SEC.KEY_VERSION", version},
{"WM_SEC.AUTH_SIGNATURE", auth_signature}
]}
]
client = Tesla.client(middleware)
Tesla.get(client, "/taxonomy")

Trying to place a binance futures trailing stop order using python?

I have got so far,
#changing the margin
client.futures_change_margin_type(symbol = symbol_buy, marginType = 'ISOLATED')
#changing the leverage
client.futures_change_leverage(symbol = symbol_buy, leverage = 1)
shared.client.futures_create_order( symbol=symbol_buy, side = 'SELL', type='TRAILING_STOP_LOSS', quantity = 100)
and i'm thus getting this error below
BinanceAPIException Traceback (most recent call last)
in
3
4 #orderdetails_buy = shared.client_future.futures_create_order( symbol=symbol_buy, side = 'BUY', type='MARKET', quantity = 100)
----> 5 orderdetails_sell = shared.client_future.futures_create_order( symbol=symbol_buy, side = 'SELL', type='TRAILING_STOP_LOSS', quantity = 100)
6
C:\ProgramData\Anaconda3\lib\site-packages\binance\client.py in futures_create_order(self, **params)
5289
5290 """
-> 5291 return self._request_futures_api('post', 'order', True, data=params)
5292
5293 def futures_place_batch_order(self, **params):
C:\ProgramData\Anaconda3\lib\site-packages\binance\client.py in _request_futures_api(self, method, path, signed, **kwargs)
331 uri = self._create_futures_api_uri(path)
332
--> 333 return self._request(method, uri, signed, True, **kwargs)
334
335 def _request_futures_data_api(self, method, path, signed=False, **kwargs) -> Dict:
C:\ProgramData\Anaconda3\lib\site-packages\binance\client.py in _request(self, method, uri, signed, force_params, **kwargs)
307
308 self.response = getattr(self.session, method)(uri, **kwargs)
--> 309 return self._handle_response(self.response)
310
311 #staticmethod
C:\ProgramData\Anaconda3\lib\site-packages\binance\client.py in _handle_response(response)
316 """
317 if not (200 <= response.status_code < 300):
--> 318 raise BinanceAPIException(response, response.status_code, response.text)
319 try:
320 return response.json()
BinanceAPIException: APIError(code=-1116): Invalid orderType.
you've probably solved this by now but with binance futures, I don't think there is a TRAILING_STOP_LOSS trade type via API just yet, available order types are (from the docs):
Order types (orderTypes, type):
LIMIT
MARKET
STOP
STOP_MARKET
TAKE_PROFIT
TAKE_PROFIT_MARKET
TRAILING_STOP_MARKET
[https://binance-docs.github.io/apidocs/futures/en/#public-endpoints-info][1]

How can I download an invoice from Odoo (v13) via xml rpc in Python?

I struggle currently a bit to download an invoice as PDF from Odoo 13 with xml rpc.
The closest that I could get is this:
model_name = 'ir.actions.report'
model_method = 'render_qweb_pdf'
report_id = 282
invoice_id = 4
args = [[report_id]]
kwargs = {'res_ids': [invoice_id]}
models = ServerProxy('{}/xmlrpc/2/object'.format(url))
return models.execute_kw(db, uid, password,
model_name, method_name,
args, kwargs)
Yet I always end up with this error:
...py", line 46, in execute_kw
args, kwargs)
File "/usr/lib/python3.6/xmlrpc/client.py", line 1112, in __call__
return self.__send(self.__name, args)
File "/usr/lib/python3.6/xmlrpc/client.py", line 1452, in __request
verbose=self.__verbose
File "/usr/lib/python3.6/xmlrpc/client.py", line 1154, in request
return self.single_request(host, handler, request_body, verbose)
File "/usr/lib/python3.6/xmlrpc/client.py", line 1170, in single_request
return self.parse_response(resp)
File "/usr/lib/python3.6/xmlrpc/client.py", line 1336, in parse_response
p.feed(data)
File "/usr/lib/python3.6/xmlrpc/client.py", line 439, in feed
self._parser.Parse(data, 0)
xml.parsers.expat.ExpatError: not well-formed (invalid token): line 64, column 9
The data that its trying to parse there in this self._parser.Parse(data, 0)-line is
b"<?xml version='1.0'?>\n<methodResponse>\n<params>\n<param>\n<value><array><data>\n<value><string>%PDF-1.3\n1 0 obj\n<<\n/Type /Pages\n/Count 0\n/Kids [ ]\n>>\nendobj\n2 0 obj\n<<\n/Producer (PyPDF2)\n>>\nendobj\n3 0 obj\n<<\n/Type /Catalog\n/Pages 4 0 R\n/Outlines 23 0 R\n/PageMode /UseOutlines\n/Dests 25 0 R\n/Names <<\n/EmbeddedFiles <<\n/Names [ (factur\\055x\\056xml) <<\n/Type /Filespec\n/F (factur\\055x\\056xml)\n/EF <<\n/F 27 0 R\n>>\n>> ]\n>>\n>>\n>>\nendobj\n4 0 obj\n<<\n/Type /Pages\n/Kids [ 5 0 R ]\n/Count 1\n/ProcSet [ /PDF /Text /ImageB /ImageC ]\n>>\nendobj\n5 0 obj\n<<\n/Type /Page\n/Parent 4 0 R\n/Contents 6 0 R\n/Resources 7 0 R\n/Annots 22 0 R\n/MediaBox [ 0 0 595 842 ]\n>>\nendobj\n6 0 obj\n<<\n/Filter /FlateDecode\n/Length 2705\n>>\nstream\nx\xc2\x9c\xc3\xad]K\xc2\x8f\xc3\xa4\xc2\xb8\r\xc2\xbe\xc3\x97\xc2\xaf\xc3\xb09#\xc2\xbb\xc2\xad\xc2\xb7\x0c\x04\x0bL\xc2\xbf\xc2\x82\xc3\xa4\x10`0\r\xc3\xac!\xc3\x88!\xc2\x98\xc3\x9dM\xc2\xb0\xc2\x98\xc3\x9ed\xc2\xb2\xc2\x87\xc3\xbc\xc3\xbdH\xc2\xb2\xc3\xbc\xc2\x92\xc3\xab\xc2\x93m\xc2\xb5\xc3\xad\xc2\xb2\xc2\xabk\x1a\xc2\x98z\xc2\xb0$Q\x14I\xc2\x91\x14)\xc3\x9f\xc3\xbf\xc3\xa9\xc3\x8b?\xc2\xb2\x7f\xc3\xbe\xc2\x9e\xc3\x9d?~\xc3\xb9O\xc3\xb6\xc3\x95\xc2\xbf>~9\x15\xc2\xb9.\xc3\xbc\xc2\xbf\xc3\x8c\xc3\xbe\xc3\x9d\xc3\xb5\xc2\xbfP\xc2\x84\xc3\xa7\xc2\xaa\xc2\xb4\xc3\xbf\xc2\xb2\xc2\xafo\xc2\xa7\xc3\xaf\xc3\x99\xc3\xb7\xc3\x93\xc3\xa7\xc3\x93g\xc3\xb3\xc2\xbf}\xc3\xbd~\xc2\xaa;"
So it actually looks quite good and so promising ... :(
Is there a better approach in Odoo 13 now? I checked and all the info for Odoo 12 and so on seems outdated as models / reports / functions / xrpc calls ... all don't exist anymore...
After discovering jsonrpc I was finally able to download the invoice ... Hope this helps someone in the absence of a solution with xmlrpc. (I am still looking for an xml rpc solution.)
import urllib.request
import json
import random
model_name = 'ir.actions.report'
method_name = 'render_qweb_pdf'
report_id = 282
invoice_id = 4
method = "call"
params = {
"service": "object",
"method": "execute",
"args": [db, uid, password, model_name, method_name, report_id, invoice_id],
}
data = {
"jsonrpc": "2.0",
"method": method,
"params": params,
"id": random.randint(0, 1000000000),
}
req = urllib.request.Request(url=f"{self.url}/jsonrpc", data=json.dumps(data).encode(), headers={
"Content-Type": "application/json",
})
reply = json.loads(urllib.request.urlopen(req).read().decode('UTF-8'))
if reply.get("error"):
raise Exception(reply["error"])
return reply["result"]

how does method updateCredentialsForItem1 work

How does the method ItemManagementService.updateCredentialsForItem1 work?
what happens if I update the account using wrong credential? will it throw exception?
I added one account into yodlee, then I changed the password of that account. It threw error 402 when I tried to get the transactions. After this, I called this method ItemManagementService.updateCredentialsForItem1(userContext, itemId,true, fieldInfoList.ToArray(),true) to update this account. But this method doesn't threw any exception or error code even I updated it using wrong password.
What should I do to make sure yodlee will tell me the credential is wrong if I update the account using wrong credential?
Thanks a lot.
Yuj
I believe research and experience has told/taught me that you need to pass false as the "start refresh on addition/update" and start the refresh manually
The Workflow is exactly the same whether you add or update.
You add/update:
AddItemForContentService1 (if New) or updateCredentialsForItem1 (if updating credentials).
Then You check if the item can be refreshed and if/when it can Start a refresh and then begin your polling.
You need to check RefreshService.isItemRefreshing (this needs to be false)
Then start the refresh RefreshService.startRefresh7.
Then your standard polling routine which is where you will find out whether the updated credentials worked. Ala, At 10/15 second intervals you need to check the status with RefreshClient.getRefreshInfo passing the content item id.
The return value most notably the RefreshInfo.statusCode which correlates to Gatherer Error codes (see code below) lets you know if there is a problem. 0 is successful.
Below are the C# Enums that I built based upon Java SDK and filling in a few missing values with some tech support. The Values are the same regardless of what language. So it's a good reference as the names semi-make sense.
402 and 419 are the two most common Login error codes. 402 is invalid credentials, 419 means it didn't register them in time. The full list of error codes can be found on Yodlee's site here: https://developer.yodlee.com/FAQs/Error_Codes.
public enum GathererErrorsEnum
{
STATUS_OK = 0,
STATUS_INVALID_GATHERER_REQUEST = 400,
STATUS_NO_CONNECTION = 401,
STATUS_LOGIN_FAILED = 402,
STATUS_INTERNAL_ERROR = 403,
STATUS_LOST_REQUEST = 404,
STATUS_ABORT_REQUEST = 405,
STATUS_PASSWORD_EXPIRED = 406,
STATUS_ACCOUNT_LOCKED = 407,
STATUS_DATA_EXPECTED = 408,
STATUS_SITE_UNAVILABLE = 409,
STATUS_POP3_SERVER_FAILED = 410,
STATUS_SITE_OUT_OF_BUSINESS = 411,
STATUS_SITE_APPLICATION_ERROR = 412,
STATUS_REQUIRED_FIELD_UNAVAILABLE = 413,
STATUS_NO_ACCOUNT_FOUND = 414,
STATUS_SITE_TERMINATED_SESSION = 415,
STATUS_SITE_SESSION_ALREADY_ESTABLISHED = 416,
STATUS_DATA_MODEL_NO_SUPPORT = 417,
STATUS_HTTP_DNS_ERROR = 418,
STATUS_LOGIN_NOT_COMPLETED = 419,
STATUS_SITE_MERGED_ERROR = 420,
STATUS_UNSUPPORTED_LANGUAGE_ERROR = 421,
STATUS_ACCOUNT_CANCELLED = 422,
STATUS_ACCT_INFO_UNAVAILABLE = 423,
STATUS_SITE_DOWN_FOR_MAINTENANCE = 424,
STATUS_SITE_CERTIFICATE_ERROR = 425,
STATUS_SITE_BLOCKING_ERROR = 426,
STATUS_NEW_SPLASH_PAGE = 427,
STATUS_NEW_TERMS_AND_CONDITIONS = 428,
STATUS_UPDATE_INFORMATION_ERROR = 429,
STATUS_SITE_NOT_SUPPORTED = 430,
STATUS_HTTP_FILE_NOT_FOUND_ERROR = 431,
STATUS_HTTP_INTERNAL_SERVER_ERROR = 432,
STATUS_REGISTRATION_PARTIAL_SUCCESS = 433,
STATUS_REGISTRATION_FAILED_ERROR = 434,
STATUS_REGISTRATION_INVALID_DATA = 435,
STATUS_REGISTRATION_ACCOUNT_ALREADY_REGISTERED = 436,
STATUS_REGISTRATION_TIMEOUT = 404,
UNIQUEID_FROM_DATA_SOURCE_ERROR = 475,
ACCOUNT_REQUIRED_FIELDS_NOT_SET = 476,
BILL_REQUIRED_FIELDS_NOT_SET = 477,
STATUS_DUPLICATE_BILL = 478,
STATUS_COULD_NOT_GENERATE_AUTOREGISTER_CREDENTIALS = 479,
STATUS_MAX_REGISTRATION_ATTEMPTS_EXCEEDED = 481,
STATUS_ACCOUNT_REGISTERED_ELSE_WHERE = 484,
STATUS_REGISTRATION_BOT_SUPPORTED_FOR_REGION = 485,
STATUS_REGISTRATION_NOT_SUPPORTED_FOR_REGION = 485,
STATUS_UNSUPPORTED_REGISTRATION_ACCOUNT_TYPE = 486,
REWARDS_PROGRAM_REQUIRED_FIELDS_NOT_SET = 491,
REWARDS_ACTIVITY_REQUIRED_FIELDS_NOT_SET = 492,
TAX_LOT_REQUIRED_FIELDS_NOT_SET = 493,
INVESTMENT_TRANSACTION_REQUIRED_FIELDS_NOT_SET = 494,
LOAN_TRANSACTION_REQUIRED_FIELDS_NOT_SET = 495,
CARD_TRANSACTION_REQUIRED_FIELDS_NOT_SET = 496,
BANK_TRANSACTION_REQUIRED_FIELDS_NOT_SET = 497,
HOLDING_REQUIRED_FIELDS_NOT_SET = 498,
SITE_CURRENTLY_NOT_SUPPORTED = 505,
NEW_LOGIN_INFO_REQUIRED_FOR_SITE = 506,
BETA_SITE_WORK_IN_PROGRESS = 507,
STATUS_INSTANT_REQUEST_TIMEDOUT = 508,
TOKEN_ID_INVALID = 509,
PROPERTY_RECORD_NOT_FOUND = 510,
HOME_VALUE_NOT_FOUND = 511,
NO_PAYEE_FOUND = 512,
NO_PAYEE_RETRIEVED = 513,
SOME_PAYEE_NOT_RETRIEVED = 514,
NO_PAYMENT_ACCOUNT_FOUND = 515,
NO_PAYMENT_ACCOUNT_SELECTED = 516,
GENERAL_EXCEPTION_WHILE_GATHERING_MFA_DATA = 517,
NEW_MFA_INFO_REQUIRED_FOR_AGENTS = 518,
MFA_INFO_NOT_PROVIDED_TO_YODLEE_BY_USER_FOR_AGENTS = 519,
MFA_INFO_MISMATCH_FOR_AGENTS = 520,
ENROLL_IN_MFA_AT_SITE = 521,
MFA_INFO_NOT_PROVIDED_IN_REAL_TIME_BY_USER_VIA_APP = 522,
INVALID_MFA_INFO_IN_REAL_TIME_BY_USER_VIA_APP = 523,
USER_PROVIDED_REAL_TIME_MFA_DATA_EXPIRED = 524,
MFA_INFO_NOT_PROVIDED_IN_REAL_TIME_BY_GATHERER = 525,
INVALID_MFA_INFO_OR_CREDENTIALS = 526,
STATUS_DBFILER_SUMMARY_SAVE_ERROR = 601,
STATUS_REQUEST_GENERATION_ERROR = 602,
STATUS_REQUEST_DISPATCH_ERROR = 603,
STATUS_REQUEST_GENERATION_ERROR_LOGIN_FAILURE = 604,
STATUS_REQUEST_GENERATION_ERROR_DELETED_ITEM = 605,
INPUT_INVALID_DATA = 701,
INPUT_LENGTH_ERROR = 702,
INPUT_FORMAT_ERROR = 703,
INPUT_USERNAME_ALREADY_TAKEN_ERROR = 704,
INPUT_VALUE_TOO_SMALL = 705,
INPUT_VALUE_TOO_LARGE = 706,
REFRESH_NEVER_DONE = 801,
REFRESH_NEVER_DONE_AFTER_CREDENTIALS_UPDATE = 802,
}