TensorFlow Serving + gRPC "Did not read entire message" - tensorflow

I'm trying to call my TensorFlow model, which is deployed on a Cloud Foundry server, from a Python 2.7 API using TensorFlow Serving and gRPC. The model expects a 200-dimensional vector as input, which I have hardcoded for the moment. The connection variables are stored in the virtualenv and have been double-checked.
The code:
import os
from grpc.beta import implementations
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2
from grpc._cython import cygrpc
MODEL_NAME = str(os.getenv('MODEL_NAME', ''))
MODEL_SERVER_HOST = str(os.getenv('MODEL_SERVER_HOST', ''))
MODEL_SERVER_PORT = int(os.getenv('MODEL_SERVER_PORT', ''))
ROOT_CERT = str(os.getenv('ROOT_CERT', '')).replace('\\n', '\n')
def metadata_transformer(metadata):
    additions = []
    token = 'Bearer <my access token>'
    additions.append(('authorization', token))
    return tuple(metadata) + tuple(additions)
credentials = implementations.ssl_channel_credentials(root_certificates=ROOT_CERT)
channel = implementations.secure_channel(MODEL_SERVER_HOST, MODEL_SERVER_PORT, credentials)
stub = prediction_service_pb2.beta_create_PredictionService_stub(channel, metadata_transformer=metadata_transformer)
import numpy as np
data = np.matrix([0.06222425773739815, 0.08211926370859146, -0.060986146330833435, 0.13920938968658447, 0.10515272617340088, -0.06220443174242973, -0.05927170068025589, -0.054189786314964294, -0.0986655130982399, 0.013334010727703571, -0.05667420104146004, 0.059366412460803986, -0.03483295068144798, -0.05382293462753296, 0.02721281163394451, -0.1428503543138504, 0.029297124594449997, 0.07006879895925522, 0.06501731276512146, 0.028620243072509766, 0.07128454744815826, 0.029960375279188156, 0.0710490494966507, -0.04619687795639038, -0.03106304071843624, -0.04266272485256195, 0.004348727408796549, 0.03099834732711315, 0.09248803555965424, -0.036939311772584915, 0.00017547572497278452, 0.03521900251507759, 0.10932505130767822, -0.019729139283299446, 0.12315405160188675, 0.10092845559120178, -0.12633951008319855, -0.022320391610264778, 0.0870826318860054, -0.06696301698684692, -0.016253307461738586, -0.0413096621632576, -0.040929097682237625, 0.09338817000389099, -0.08800378441810608, 0.015543102286756039, 0.018787918612360954, 0.07351260632276535, 0.038140904158353806, 0.019255049526691437, 0.0875692293047905, -0.07542476058006287, -0.04116508364677429, 0.04507743567228317, -0.06986603885889053, -0.24688798189163208, -0.035459864884614944, 0.06200174242258072, -0.06932217627763748, 0.06320516765117645, -0.023999478667974472, -0.04712359234690666, 0.03672196343541145, -0.02999514900147915, 0.04105519875884056, 0.08891177922487259, 0.15175248682498932, -0.0021488466300070286, 0.04398706927895546, -0.04429445043206215, 0.04708605632185936, 0.043234940618276596, -0.043555982410907745, 0.017381751909852028, 0.048889972269535065, -0.016929129138588905, 0.01731136068701744, -0.04694319888949394, 0.20381565392017365, 0.009074307978153229, 0.004490611143410206, -0.08525945991277695, -0.03385556861758232, 0.017475442960858345, -0.040392760187387466, 0.14970248937606812, 0.042721331119537354, -0.1257765144109726, -0.07097769528627396, -0.10943038016557693, 0.015442096628248692, -0.06519876420497894, -0.07588690519332886, -0.07620779424905777, 0.04572996124625206, -0.058589719235897064, -0.04492143541574478, -0.01922304928302765, -0.008066931739449501, 0.04317406192421913, 0.020763304084539413, -0.025430725887417793, 0.04271349683403969, 0.07393930852413177, 0.0020402593072503805, 0.0783640518784523, 0.047386448830366135, 0.010610940866172314, 0.022059153765439987, 0.034980181604623795, -0.006882485933601856, -0.08911270648241043, -0.001243607490323484, -0.06307544559240341, -0.01352659147232771, -0.24622271955013275, 0.07930449396371841, 0.03659113869071007, -0.05077377334237099, 0.08726480603218079, -0.09274136275053024, -0.05766649544239044, -0.12269984930753708, 0.056026071310043335, -0.0048304214142262936, -0.05568183213472366, -0.08890420943498611, -0.02911136858165264, -0.0944124087691307, 0.0011820291401818395, -0.08908636122941971, -0.008728212676942348, -0.014545259065926075, -0.008866528049111366, 0.02728298306465149, -0.020994992926716805, 0.031155599281191826, 0.036098793148994446, 0.06911332905292511, -0.06691643595695496, -0.00014896543871145695, -0.007080242037773132, 0.0031992685981094837, 0.043563224375247955, 0.02550852671265602, -0.015397937037050724, 0.06041031703352928, -0.08981014788150787, -0.10881254076957703, 0.03226703032851219, -0.02039985917508602, -0.05354547128081322, -0.026514282450079918, 0.09616094827651978, -0.04160488396883011, -0.06793050467967987, -0.17060619592666626, -0.08044841140508652, 0.042605575174093246, 0.08186516910791397, 0.026051705703139305, 
0.1254323273897171, 0.09807661175727844, 0.04692094400525093, 0.05536479875445366, 0.004592049401253462, 0.01953544095158577, -0.02827763929963112, 0.11051501333713531, -0.05077047273516655, -0.09987067431211472, 0.025186538696289062, -0.24119670689105988, -0.054666098207235336, 0.03561021387577057, -0.006030901800841093, 0.14740994572639465, 0.09515859931707382, 0.0628485381603241, 0.020558597519993782, -0.04458167776465416, -0.04740617796778679, 0.024550801143050194, -0.09533495455980301, 0.057229768484830856, -0.08855120837688446, 0.027864644303917885, -0.07248448580503464, 0.0647491067647934, 0.09660986065864563, 0.038834456354379654, -0.030274877324700356, -0.024261653423309326, 0.05457066744565964, -0.00860705878585577, 0.04901411384344101, 0.017157232388854027, -0.02722001262009144, 0.012187148444354534, 0.05596058815717697])
request = predict_pb2.PredictRequest()
request.model_spec.name = MODEL_NAME
request.model_spec.signature_name = 'ticketCatFeature2'
request.inputs['input'].CopyFrom(
    tf.contrib.util.make_tensor_proto(data, shape=[200]))
print stub.Classify(request, 10)
I'm getting the following error message when running the app:
Traceback (most recent call last):
File "app.py", line 36, in
print stub.Classify(request, 10)
File "/home/vagrant/Desktop/Masterarbeit/appDir/venv/local/lib/python2.7/site-packages/grpc/beta/_client_adaptations.py", line 309, in call
self._request_serializer, self._response_deserializer)
File "/home/vagrant/Desktop/Masterarbeit/appDir/venv/local/lib/python2.7/site-packages/grpc/beta/_client_adaptations.py", line 195, in _blocking_unary_unary
raise _abortion_error(rpc_error_call)
grpc.framework.interfaces.face.face.AbortionError: AbortionError(code=StatusCode.INTERNAL, details="Did not read entire message")
Log of grpc Debug: https://ufile.io/owk76
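One detail worth double-checking (an assumption on my part, not something confirmed by the log): the code builds a PredictRequest but calls the stub's Classify method, which expects a ClassificationRequest. That kind of request/serializer mismatch can surface on the server as exactly this INTERNAL "Did not read entire message" error. A minimal sketch of sending the request through the matching Predict RPC instead, reusing the stub and request from above:
# Sketch: send the PredictRequest through the Predict RPC rather than Classify.
result = stub.Predict(request, 10)  # 10 second timeout, as in the original call
print result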

Related

Make a parser on Colab for learning images

I'm trying to make a parser on Colab, but something is wrong.
Here is my code:
from google.colab import drive
drive.mount('/content/gdrive/')

from urllib.request import urlopen
import argparse
import requests as req
from bs4 import BeautifulSoup

root_dir = "/content/gdrive/My Drive/img/"

parser = argparse.ArgumentParser(description='input.')
parser.add_argument("-name", "--people", required=True)
args = parser.parse_args()
people = args.people

def main():
    url_info = "https://www.google.co.kr/search?"
    params = {
        "q": people,
        "tbm": "isch"
    }
    html_object = req.get(url_info, params)
    if html_object.status_code == 200:
        bs_object = BeautifulSoup(html_object.text, "html.parser")
        root_dir = bs_object.find_all("img")
        for i in enumerate(root_dir[1:]):
            t = urlopen(i[1].attrs['src']).read()
            filename = "byeongwoo_" + str(i[0]+1) + '.jpg'
            with open(filename, "wb") as f:
                f.write(t)
            print("Img Save Success")

if __name__ == "__main__":
    main()
And this is the error message:
usage: ipykernel_launcher.py [-h] -name PEOPLE
ipykernel_launcher.py: error: the following arguments are required: -name/--people
An exception has occurred, use %tb to see the full traceback.
SystemExit: 2
/usr/local/lib/python3.6/dist-packages/IPython/core/interactiveshell.py:2890: UserWarning: To exit: use 'exit', 'quit', or Ctrl-D.
warn("To exit: use 'exit', 'quit', or Ctrl-D.", stacklevel=1)

When testing the grid scenario, there is a ValueError

In flow tutorial_8, I tried another scenario: SimpleGridScenario.
I have already checked grid.py and followed the rules for the parameters.
My original program is:
from flow.envs import Env
from gym.spaces.box import Box
from gym.spaces.tuple_space import Tuple
from flow.controllers import IDMController, ContinuousRouter, GridRouter
from flow.core.experiment import Experiment
from flow.core.params import SumoParams, EnvParams, \
    InitialConfig, NetParams
from flow.core.params import VehicleParams
from flow.scenarios.loop import LoopScenario, ADDITIONAL_NET_PARAMS
from flow.scenarios.grid import SimpleGridScenario
from flow.scenarios import SimpleGridScenario
import numpy as np
from flow.envs import myEnv

ADDITIONAL_ENV_PARAMS = {
    "max_accel": 1,
    "max_decel": 1,
}

sumo_params = SumoParams(sim_step=0.1, render=True)

vehicles = VehicleParams()
vehicles.add(
    veh_id="idm",
    acceleration_controller=(IDMController, {}),
    routing_controller=(GridRouter, {}),
    num_vehicles=22
)

env_params = EnvParams(additional_params=ADDITIONAL_ENV_PARAMS)

additional_net_params = ADDITIONAL_NET_PARAMS.copy()
net_params = NetParams(additional_params=additional_net_params)

initial_config = InitialConfig(bunching=20)

scenario = SimpleGridScenario(
    name='grid',
    vehicles=vehicles,
    net_params=NetParams(
        additional_params={
            'grid_array': {
                'row_num': 3,
                'col_num': 2,
                'inner_length': 500,
                'short_length': 500,
                'long_length': 500,
                'cars_top': 20,
                'cars_bot': 20,
                'cars_left': 20,
                'cars_right': 20,
            },
            'horizontal_lanes': 1,
            'vertical_lanes': 1,
            'speed_limit': {
                'vertical': 35,
                'horizontal': 35
            }
        },
        no_internal_links=False
    ),
    initial_config=initial_config
)

env = myEnv(env_params, sumo_params, scenario)
exp = Experiment(env)
_ = exp.run(1, 1500)
Then I ran it and got an error; the error log is:
(flow) dnl#dnl-Iiyama:~/flow$ python Tutorial_9_1.py
Loading configuration... done.
Success.
Loading configuration... done.
Error in edge length with key bot3_2
Error in edge length with key bot3_2
Traceback (most recent call last):
File "Tutorial_9_1.py", line 72, in <module>
_ = exp.run(1, 1500)
File "/home/dnl/flow/flow/core/experiment.py", line 118, in run
state = self.env.reset()
File "/home/dnl/flow/flow/envs/base_env.py", line 483, in reset
speed=speed)
File "/home/dnl/flow/flow/core/kernel/vehicle/traci.py", line 990, in add
[i for i in range(num_routes)], size=1, p=frac)[0])
File "mtrand.pyx", line 1126, in mtrand.RandomState.choice
ValueError: a must be non-empty
I want to know why this is wrong.
I'm trying to test a scenario other than LoopScenario, but it doesn't work.
Please help.
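For reference (my own minimal reproduction, not from the flow codebase): the traceback ends in np.random.choice, which raises exactly this error when the candidate list is empty; here that list is the set of routes the vehicle kernel can choose from, so the scenario apparently ends up with no routes generated for at least one starting edge:
import numpy as np

num_routes = 0  # what the traceback implies for at least one edge
np.random.choice([i for i in range(num_routes)], size=1)
# ValueError: a must be non-empty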

Python facebook chatbook download from google

I'm trying to write a Python script that downloads Google images, but it gives the following error:
"C:\Users\marco\Desktop\Scripts Python\venv\Scripts\python.exe" "C:/Users/marco/Desktop/Scripts Python/ChatBot.py"
Traceback (most recent call last):
File "C:/Users/marco/Desktop/Scripts Python/ChatBot.py", line 4, in
from urllib import FancyURLopener
ImportError: cannot import name 'FancyURLopener' from 'urllib' (C:\Users\marco\AppData\Local\Programs\Python\Python37-32\lib\urllib__init__.py)
My code:
import os
import sys
import time
from urllib import FancyURLopener
import urllib2
import simplejson

# Define search term
searchTerm = "william shatner"

# Replace spaces ' ' in search term for '%20' in order to comply with request
searchTerm = searchTerm.replace(' ', '%20')

# Start FancyURLopener with defined version
class MyOpener(FancyURLopener):
    version = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11'

myopener = MyOpener()

# Set count to 0
count = 0

for i in range(0, 10):
    # Notice that the start changes for each iteration in order to request a new set of images for each loop
    url = ('https://ajax.googleapis.com/ajax/services/search/images?' + 'v=1.0&q=' + searchTerm + '&start=' + str(i*4) + '&userip=MyIP')
    print(url)
    request = urllib2.Request(url, None, {'Referer': 'testing'})
    response = urllib2.urlopen(request)

    # Get results using JSON
    results = simplejson.load(response)
    data = results['responseData']
    dataInfo = data['results']

    # Iterate for each result and get unescaped url
    for myUrl in dataInfo:
        count = count + 1
        print(myUrl['unescapedUrl'])
        myopener.retrieve(myUrl['unescapedUrl'], str(count) + '.jpg')

    # Sleep for one second to prevent IP blocking from Google
    time.sleep(1)
As the error message says, FancyURLopener is not where you are looking for it. This is the correct import statement:
from urllib.request import FancyURLopener
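Note that the script has further Python 2 idioms beyond that one import: urllib2 no longer exists in Python 3 (its Request and urlopen live in urllib.request), and FancyURLopener is deprecated there. A rough sketch of the Python 3 equivalents, with a placeholder URL since the old Google AJAX image-search API has been retired:
# Hedged sketch of the Python 3 equivalents for the other imports used above.
# urllib2 was merged into urllib.request; simplejson can be replaced by the
# stdlib json module.
from urllib.request import FancyURLopener, Request, urlopen
import json

url = 'https://example.com/some-endpoint'  # placeholder, not the retired API
request = Request(url, None, {'Referer': 'testing'})
# response = urlopen(request)
# results = json.load(response)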

minimal example of how to export a jupyter notebook to pdf using nbconvert and PDFExporter()

I am trying to export a pdf copy of a jupyter notebook using nbconvert from within a notebook cell. I have read the documentation, but I just cannot find some basic code to actually execute the nbconvert command and export to pdf.
I was able to get this far, but I was hoping that someone could just fill in the final gaps.
from nbconvert import PDFExporter
notebook_pdf = PDFExporter()
notebook_pdf.template_file = '../print_script/pdf_nocode.tplx'
Not sure how to get from here to actually getting the PDF created.
Any help would be appreciated.
I'm no expert, but managed to get this working. The key is that you need to preprocess the notebook which will allow you to use the PDFExporter.from_notebook_node() function. This will give you your pdf_data in byte format that can then be written to file:
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert import PDFExporter

notebook_filename = "notebook.ipynb"

with open(notebook_filename) as f:
    nb = nbformat.read(f, as_version=4)

ep = ExecutePreprocessor(timeout=600, kernel_name='python3')
ep.preprocess(nb, {'metadata': {'path': 'notebooks/'}})

pdf_exporter = PDFExporter()
pdf_data, resources = pdf_exporter.from_notebook_node(nb)

with open("notebook.pdf", "wb") as f:
    f.write(pdf_data)
It's worth noting that ExecutePreprocessor.preprocess() requires a resources dict (here just the notebook path metadata), and from_notebook_node() returns one as well, but we don't make further use of it in this example.
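As a side note (my own addition, not part of the answer above): PDFExporter converts through LaTeX, so a TeX installation and pandoc must be available. If the programmatic route is awkward, a sketch of the equivalent command-line call from a cell, assuming the same notebook.ipynb filename:
import subprocess

# Runs the same LaTeX-based conversion that PDFExporter performs internally.
subprocess.run(["jupyter", "nbconvert", "--to", "pdf", "notebook.ipynb"], check=True)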
The following is a REST API that converts a .ipynb file into .html:
POST http://URL/export/<id> converts the notebook with that id.
GET http://URL/export/<id> returns the rendered id.html.
import os
from flask import Flask, render_template, make_response
from flask_cors import CORS
from flask_restful import reqparse, abort, Api, Resource
from nbconvert.exporters import HTMLExporter

exporter = HTMLExporter()

app = Flask(__name__)
cors = CORS(app, resources={r"/export/*": {"origins": "*"}})
api = Api(app)

parser = reqparse.RequestParser()
parser.add_argument('path')

notebook_file_srv = '/path of your .ipynb file'

def notebook_doesnt_exist(nb):
    abort(404, message="Notebook {} doesn't exist".format(nb))

class Notebook(Resource):
    def get(self, id):
        headers = {'Content-Type': 'text/html'}
        return make_response(render_template(id + '.html'), 200, headers)

    def post(self, id):
        args = parser.parse_args()
        notebook_file = args['path']
        notebook_file = notebook_file_srv + id + '.ipynb'
        if not os.path.exists(notebook_file):
            return 'notebook \'.ipynb\' file not found', 404
        else:
            nb_name, _ = os.path.splitext(os.path.basename(notebook_file))
            # dirname = os.path.dirname(notebook_file)
            output_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'templates')
            output_path = os.path.join(output_path, '{}.html'.format(nb_name))
            output, resources = exporter.from_filename(notebook_file)
            f = open(output_path, 'wb')
            f.write(output.encode('utf8'))
            f.close()
            return 'done', 201

api.add_resource(Notebook, '/export/<id>')

if __name__ == '__main__':
    app.run(debug=True)
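A quick way to exercise the endpoint (a sketch only; the host, port, and the notebook id "demo" are assumptions based on Flask's defaults and the route above):
import requests

base = "http://127.0.0.1:5000/export/demo"
# POST converts demo.ipynb (resolved via notebook_file_srv) into templates/demo.html ...
print(requests.post(base).status_code)  # 201 'done' on success, 404 if the notebook is missing
# ... and GET then serves the rendered page.
print(requests.get(base).status_code)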

List index out of range error in splunk API call

I am trying to get the result of a search query from Splunk, but when I try to get the session key, I get the following error.
Traceback (most recent call last):
File "splunkenter.py", line 18, in <module>
sessionkey = minidom.parseString(servercontent).getElementsByTagName('sessionKey')[0].childNodes[0].nodeValue
IndexError: list index out of range
splunkenter.py:
import urllib
import httplib2
import time
import re
from time import localtime, strftime
from xml.dom import minidom
import json

baseurl = 'abc.haihd.com:8000'
username = 'xxxxx'
password = 'xxxxx'

myhttp = httplib2.Http()

# Step 1: Get a session key
servercontent = myhttp.request(baseurl + '/services/auth/login', 'POST',
                               headers={}, body=urllib.urlencode({'username': username, 'password': password}))[1]
sessionkey = minidom.parseString(servercontent).getElementsByTagName('sessionKey')[0].childNodes[0].nodeValue
print "====>sessionkey: %s <====" % sessionkey
Can anybody tell me where the problem lies? I am very new to APIs.
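A debugging suggestion (my own note; port 8089 is only an assumption based on Splunk's default management port, which is separate from the web UI port 8000 used in baseurl): the IndexError simply means the login response contained no <sessionKey> element, so print the raw response before parsing it:
# Reuses myhttp, urllib, username and password from splunkenter.py above.
resp, servercontent = myhttp.request(
    'https://abc.haihd.com:8089/services/auth/login', 'POST',
    headers={}, body=urllib.urlencode({'username': username, 'password': password}))
print resp.status
print servercontent  # look for an error message or a <sessionKey> element here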