delete a record in flask sql_alchemy [duplicate] - flask-sqlalchemy

I'm trying to get a server for an app working, but I'm getting an error upon login:
[!] Object '<User at 0x7f12bc185a90>' is already attached to session '2' (this is '3')
It seems the object I'm adding is already attached to another session. This is the snippet of code that is causing the problem:
@app.route('/login', methods=['POST'])
def login():
    u = User.query.filter(User.username == request.form["username"]).first()
    if not u or u.password != request.form["password"]:
        return error("E1")
    s = Session.get_by_user(u)
    if s is not None:
        db_session.delete(s)
        db_session.commit()
    print db_session.execute("SELECT * FROM sessions").fetchall()
    s = Session(u)
    db_session.add(s)
    db_session.commit()
    return jsonify(s.values)
As you can see, I'm printing the content from the sessions table before trying to add anything, and it is empty! ([])
What else could be causing this?
Here is the 'Session' implementation:
class Session(Base):
    __tablename__ = "sessions"
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'), unique=True)
    user = relationship(User)
    key = Column(String(50), unique=True)
    created = Column(DateTime)

    def __init__(self, user=None):
        self.user = user
        self.key = base64.encodestring(os.urandom(24)).strip()
        self.created = datetime.now()

    def __repr__(self):
        return '<Session %r>' % (self.key)

    @property
    def values(self):
        return {"username" : self.user.username,
                "key" : self.key,
                "created" : str(self.created),
                }

    @classmethod
    def get_by_key(cls, key):
        s = cls.query.filter(cls.key == key).first()
        #print datetime.now() - s.created
        if s and datetime.now() - s.created > settings.SESSION_LIFETIME:
            s = None
        return s

    @classmethod
    def get_by_user(cls, user):
        s = cls.query.filter(cls.user == user).first()
        if s and datetime.now() - s.created > settings.SESSION_LIFETIME:
            s.query.delete()
            db_session.commit()
            s = None
        return s

As @marcinkuzminski mentioned, you can't add an object that is already attached to another session. Just pulling in the original session from the object with object_session() is risky, though, if you aren't sure that session originated in the same thread context you're currently operating in. A thread-safe method is to use merge():
local_object = db_session.merge(original_object)
db_session.add(local_object)
db_session.commit()
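For context, here is a sketch of how that might be applied to the login view from the question (my adaptation, assuming the same db_session, User, and Session objects as above); note that merge() already attaches the returned copy to db_session, so a separate add() is only needed for brand-new objects:
u = User.query.filter(User.username == request.form["username"]).first()
# ...password check as in the original view...
u = db_session.merge(u)   # u is now bound to db_session instead of the stale session
s = Session(u)
db_session.add(s)
db_session.commit()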

The object you're trying to modify is already attached to another session.
Maybe you have the wrong imports, and db_session is a new instance.
A good workaround is to extract the session the object is currently bound to and use that:
Instead of:
db_session.add(s)
Do:
current_db_sessions = db_session.object_session(s)
current_db_sessions.add(s)

This db session issue will arise if server.py and models.py import each other:
server.py
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import os
import models as appmod  #################### importing models here in server.py <----------

app = Flask(__name__)                                    # L1
app.config.from_object(os.environ['APP_SETTINGS'])       # L2
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False     # L3
database = SQLAlchemy(app)                               # L4
db = database                                            # L5

@app.route('/item_delete/<id>', methods=['DELETE'])
def remove_method(id = None):
    data_rec = appmod.Employee.query.get(id)
    db.session.delete(data_rec)
    db.session.commit()
    return "DELETE"

if __name__ == '__main__':
    app.run(port=5000, host='0.0.0.0', debug=True, threaded=True)
models.py
from server import db  #################### importing server in models.py here <------------
from sqlalchemy.dialects.mysql import JSON

class Employee(db.Model):
    __tablename__ = 'employe_flask'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(128))
    datetime = db.Column(db.DateTime)
    designation = db.Column(db.String(128))

    def __init__(self, name, datetime, designation):
        self.name = name
        self.datetime = datetime
        self.designation = designation

    @staticmethod
    def delete_rec(data_rec):
        db.session.delete(data_rec)
        db.session.commit()

    def __repr__(self):
        record = {"name": self.name, "date": self.datetime.ctime(), "designation": self.designation}.__str__()
        return record
Remove lines L1 to L5 from server.py and place them in a common file like settings.py, then import app and db into server.py and import db into models.py, like the files below:
server.py
from flask import Flask
import os
import models as appmod
from settings import app, db

@app.route('/item_delete/<id>', methods=['DELETE'])
def remove_method(id = None):
    data_rec = appmod.Employee.query.get(id)
    db.session.delete(data_rec)
    db.session.commit()
    return "DELETE"

if __name__ == '__main__':
    app.run(port=5000, host='0.0.0.0', debug=True, threaded=True)
settings.py
import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)                                    # L1
app.config.from_object(os.environ['APP_SETTINGS'])       # L2
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False     # L3
database = SQLAlchemy(app)                               # L4
db = database                                            # L5
models.py
from settings import db
from sqlalchemy.dialects.mysql import JSON

class Employee(db.Model):
    __tablename__ = 'employe_flask'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(128))
    datetime = db.Column(db.DateTime)
    designation = db.Column(db.String(128))

    def __init__(self, name, datetime, designation):
        self.name = name
        self.datetime = datetime
        self.designation = designation

    @staticmethod
    def delete_rec(data_rec):
        db.session.delete(data_rec)
        db.session.commit()

    def __repr__(self):
        record = {"name": self.name, "date": self.datetime.ctime(), "designation": self.designation}.__str__()
        return record

This error means the record you are handling is attached to two different sessions (dbs)!
One of the reasons is that you may define your model with one db = SQLAlchemy(app) and add/insert/modify the database with another!
My solution is to UNIFY THE DB: use a single SQLAlchemy instance everywhere.
try this:
u = db.session.query(User).filter(User.username == request.form["username"]).first()
Instead of this:
u = User.query.filter(User.username == request.form["username"]).first()
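For illustration, a minimal sketch of the "one db everywhere" idea (module, model, and database names here are placeholders, not taken from the question):
# database.py -- the only SQLAlchemy instance in the project
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///example.db'
db = SQLAlchemy(app)

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True)

# every other module imports this same db and works through db.session,
# instead of calling SQLAlchemy(app) a second time
with app.app_context():
    db.create_all()
    u = db.session.query(User).filter(User.username == "mike").first()
    if u is not None:
        db.session.delete(u)
        db.session.commit()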

I had this problem too.
I created a test_file.py and added this code:
from app import app
from models import Tovar
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy(app)
tovardel = Tovar.query.filter(Tovar.region == 1 and Tovar.price == 12).first()
db.session.delete(tovardel)
tovar = Tovar.query.filter(Tovar.region == 1 and Tovar.price == 12).first()
print(tovar.description)
and when I ran the code I got this error:
Object '<Tovar at 0x7f09cbf74208>' is already attached to session '1' (this is '2')
SOLUTION:
If you have db = SQLAlchemy(app) in both files, for example in test_file.py and in app.py, you will get this problem every time. You should delete the extra db = SQLAlchemy(app) and import db from app instead: from app import db.
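A sketch of what the corrected test_file.py might look like under that advice (it assumes db lives in app.py and Tovar in models.py, as in the snippet above; note the two filter conditions are passed as separate arguments, since Python's "and" does not combine SQLAlchemy expressions):
from app import app, db          # reuse the one SQLAlchemy instance from app.py
from models import Tovar

with app.app_context():
    tovardel = Tovar.query.filter(Tovar.region == 1, Tovar.price == 12).first()
    if tovardel is not None:
        db.session.delete(tovardel)
        db.session.commit()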

I faced the same issue. I was defining models in a separate file, and I had called SQLAlchemy twice; that's why two different sessions were running.
I solved this by doing the following:
In case you are trying to remove an object from the db, just create a removeObject function inside the model.
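The answer doesn't show the function itself; a minimal sketch of what such a method could look like (the removeObject name and the model are illustrative, not from the original post, and the usual Flask-SQLAlchemy db object is assumed), deleting the instance through whichever session it is already attached to:
from sqlalchemy.orm import object_session

class MyModel(db.Model):
    id = db.Column(db.Integer, primary_key=True)

    def removeObject(self):
        # use the session this instance is bound to, falling back to
        # the application's own session if the object is detached
        session = object_session(self) or db.session
        session.delete(self)
        session.commit()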

Related

Getting an import error while importing flask_wtf in my main file, app.py

Below is the code for the registration and login forms:
from flask_wtf import Flaskform
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import DataRequired, Length, EqualTo

class RegistrationForm(Flaskform):
    username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')

class LoginForm(Flaskform):
    username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField("Remember Me")
    submit = SubmitField('LOGIN')
The main file is app.py:
from flask import Flask, render_template, request
from flask_sqlalchemy import SQLAlchemy
from login import RegistrationForm, LoginForm

# media for the project
import os
prerna = os.path.join('static', 'pic.png')
prerna2 = os.path.join('static', 'Untitled.png')
prerna3 = os.path.join('static', 'wardrobe.png')
video = os.path.join('static', 'DigitalWardrobe.mp4')
prerna4 = os.path.join('static', 'BUUTON.png')
prerna5 = os.path.join('static', 'start.png')
prerna6 = os.path.join('static', 'home.jpeg')

# create flask app
app = Flask(__name__)

# database connection
app.config["SQLALCHEMY_DATABASE_URI"] = 'sqlite:///loginForm.db'
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config['SECRET_KEY'] = 'f79e06501f31fae1c771ee83cc8c3868'

# initialize the database
db = SQLAlchemy(app)

'''class is the table created in the database'''
class loginForm(db.Model):
    id = db.Column(db.Integer(), primary_key=True)
    username = db.Column(db.String(100), unique=True, nullable=False)
    password = db.Column(db.String(8), unique=True, nullable=False)

    def __repr__(self):
        return '<Name %r>' % self.id

with app.app_context():
    db.create_all()

# paths for the pages
@app.route('/')
def index():
    return render_template("index.html", img=prerna2, img1=prerna, img2=prerna3, vid=video)

@app.route('/home')
def home():
    return render_template("home.html", title='home', button=prerna4, home=prerna6)

@app.route('/register')
def register():
    form = RegistrationForm()
    return render_template(register.html, title='Register', form=form)

@app.route('/login')
def login():
    form = loginForm()
    return render_template(login.html, title='login', form=form)

# @app.route("/login", methods=["GET", "POST"])
# def login():
#     return render_template("login.html")

@app.route("/createyourcloset")
def createyourcloset():
    return render_template("createyourcloset.html", start=prerna5)

@app.route("/opennewcloset")
def opennewcloset():
    return render_template("opennewcloset.html")

if __name__ == "__main__":
    app.run(debug=True)
Error:
Traceback (most recent call last):
  File "c:\Users\lenovo\OneDrive\Desktop\digifit\login.py", line 1, in <module>
    from flask_wtf import Flaskform
ImportError: cannot import name 'Flaskform' from 'flask_wtf' (C:\Users\lenovo\OneDrive\Desktop\digifit\env\Lib\site-packages\flask_wtf\__init__.py)
Simple typo: the class is called FlaskForm, not Flaskform:
from flask_wtf import FlaskForm
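With that single change, the form module from the question imports cleanly; for example:
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import DataRequired, Length, EqualTo

class RegistrationForm(FlaskForm):
    username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')
LoginForm changes the same way: subclass FlaskForm instead of Flaskform.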

ValueError: NaTType does not support timetuple when converting a dataframe to dictionary using to_dict('records')

I'm running this flask app
from flask import Flask, request, jsonify, render_template
from flask_cors import CORS, cross_origin
import json
import pandas as pd

# Create the app object
app = Flask(__name__)
cors = CORS(app, resources={r"/*": {'origins': "*"}})

# importing function for calculations
from Record_Matching import Matching

@app.route("/query", methods=['get'])
@cross_origin()
def query():
    # service_account_creds = request.json
    query1 = request.args.get('query1', type=str)
    query2 = request.args.get('query2', type=str)
    querycolumns = request.args.get('querycolumns')
    project_id = request.args.get('project_id', type=str)
    service_account_creds = request.args.get('service_account')
    SS = request.args.get('SS', type=float)
    TT = request.args.get('TT', type=float)
    result = Matching(query1, query2, SS, TT, service_account_creds, project_id, querycolumns)
    return result

if __name__ == "__main__":
    app.run(host="localhost", port=8080, debug=True)
and I'm importing the Matching function from this Python script:
import pandas as pd
from google.cloud import bigquery
from google.oauth2 import service_account
import recordlinkage
from recordlinkage.preprocessing import phonetic
from pandas.io.json import json_normalize
import uuid
from uuid import uuid4
import random
import string
import json
import ast

# Results to data frame function
def gcp2df(sql, client):
    query = client.query(sql)
    results = query.result()
    return results.to_dataframe()

# Exporting df to bigquery - table parameter example: "dataset.tablename"
# def insert(df, table):
#     client = bigquery.Client()
#     job_config = bigquery.LoadJobConfig(write_disposition=bigquery.job.WriteDisposition.WRITE_TRUNCATE)
#     return client.load_table_from_dataframe(df, table, job_config = job_config)

def pair(df1, df2, TT, querycolumns):
    # function to take pair from list and compare:
    L = querycolumns
    l = len(querycolumns)
    p1 = 0
    p2 = 1
    # To generate phonetics we need to make sure all names are in english.
    # thus we'll replace non-english words by random english strings
    df1[L[p1]] = df1[L[p1]].astype(str)
    df2[L[p2]] = df2[L[p2]].astype(str)
    for i in range(0, len(df1)):
        if df1[L[p1]][i].isascii() == False:
            df1[L[p1]][i] = ''.join(random.choices(string.ascii_lowercase, k=5))
    for i in range(0, len(df2)):
        if df2[L[p2]][i].isascii() == False:
            df2[L[p2]][i] = ''.join(random.choices(string.ascii_lowercase, k=5))
    compare = recordlinkage.Compare()
    df1["phonetic_given_name"] = phonetic(df1[L[p1]], "soundex")
    df2["phonetic_given_name"] = phonetic(df2[L[p2]], "soundex")
    df1["initials"] = (df1[L[p1]].str[0] + df1[L[p1]].str[-1])
    df2["initials"] = (df2[L[p2]].str[0] + df2[L[p2]].str[-1])
    indexer = recordlinkage.Index()
    indexer.block('initials')
    candidate_links = indexer.index(df1, df2)
    compare.exact('phonetic_given_name', 'phonetic_given_name', label="phonetic_given_name")
    # O(n) a function that uses two pointers to track consecutive pairs for the input list
    while p2 <= l:
        compare.string(L[p1], L[p2], method='jarowinkler', threshold=TT, label=L[p1])
        p1 += 2
        p2 += 2
    features = compare.compute(candidate_links, df1, df2)
    return features

def Matching(query1, query2, SS, TT, service_account_creds, project_id, querycolumns):
    service_account_creds = ast.literal_eval(service_account_creds)
    credentials = service_account.Credentials(service_account_creds, service_account_creds['client_email'],
                                              service_account_creds['token_uri'])
    job_config = bigquery.LoadJobConfig()
    client = bigquery.Client(project=project_id)
    SS = int(SS)
    TT = float(TT)
    df1 = gcp2df("""{}""".format(query1), client)
    df2 = gcp2df("""{}""".format(query2), client)
    querycolumns = json.loads(querycolumns)
    querycolumns = list(querycolumns.values())
    features = pair(df1, df2, TT, querycolumns)
    features['Similarity_score'] = features.sum(axis=1)
    features = features[features['Similarity_score'] >= SS].reset_index()
    final = features[['level_0', 'level_1']]
    final.rename(columns={'level_0': 'df1_index', 'level_1': 'df2_index'}, inplace=True)
    final['Unique_ID'] = [uuid.uuid4() for _ in range(len(final.index))]
    final['Unique_ID'] = final['Unique_ID'].astype(str)
    final['Similarity_Score'] = SS
    final_duplicates = final['df1_index'].value_counts().max()
    # insert(final,"test-ahmed-project.Record_Linkage.Matching_Indices")
    message = "Mission accomplished!, your highest number of duplicates is " + str(final_duplicates)
    return {'message': message, 'final': final.to_dict('records'), 'df1': df1.to_dict('records')}
I'm not sure why returning df1 as a dictionary raises a ValueError when I call the function through the Flask app, yet when I run it in a Jupyter notebook with the same dataframe pulled from BigQuery it works just fine. Why doesn't it work in the Flask app?
I tried to_dict('records') to convert the dataframe to a dictionary.
Looking online, many resources suggest the error occurs because the data contains missing values, but that shouldn't be the problem, because converting the same dataframe to a dictionary in the Jupyter notebook works just fine.
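No accepted answer is shown here, but since the usual culprit for "NaTType does not support timetuple" is NaT/NaN values reaching JSON serialization, one way to check for them, and neutralize them if present, before calling to_dict('records') could look like this. This is only a sketch under that assumption, not a confirmed fix for the code above:
import pandas as pd

# count missing values per column to confirm whether NaT/NaN is actually present
print(df1.isna().sum())

# replace NaT/NaN with None so the resulting records are JSON-serializable
df1_clean = df1.astype(object).where(pd.notnull(df1), None)
records = df1_clean.to_dict('records')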

How to write python unittest cases to mock redis connection (redis.StrictRedis) in Django

How can I mock the following function for connecting to Redis?
import redis

class RedisCache:
    redis_instance = None

    @classmethod
    def set_connect(cls):
        redis_instance = redis.StrictRedis(host='0.0.0.0', port=6379, password='xyz', charset='utf-8', decode_responses=True, socket_timeout=30)
        return redis_instance

    @classmethod
    def get_conn(cls):
        cls.redis_instance = cls.set_connect()
        return cls.redis_instance
I looked for some solutions, but they basically used the fakeredis module. I wanted a simpler way to mock these functions.
Note: the data returned by the function is Redis<ConnectionPool<Connection<host=127.0.0.1,port=6379,db=0>>>
You can use the patch() function to mock out the redis.StrictRedis class. See "where to patch".
E.g.
redis_cache.py:
import redis

class RedisCache:
    redis_instance = None

    @classmethod
    def set_connect(cls):
        redis_instance = redis.StrictRedis(host='0.0.0.0', port=6379, password='xyz',
                                           charset='utf-8', decode_responses=True, socket_timeout=30)
        return redis_instance

    @classmethod
    def get_conn(cls):
        cls.redis_instance = cls.set_connect()
        return cls.redis_instance
test_redis_cache.py:
from unittest import TestCase
import unittest
from unittest.mock import patch, Mock

from redis_cache import RedisCache

class TestRedisCache(TestCase):
    def test_set_connect(self):
        with patch('redis.StrictRedis') as mock_StrictRedis:
            mock_redis_instance = mock_StrictRedis.return_value
            actual = RedisCache.set_connect()
            self.assertEqual(actual, mock_redis_instance)
            mock_StrictRedis.assert_called_once_with(host='0.0.0.0', port=6379, password='xyz',
                                                     charset='utf-8', decode_responses=True, socket_timeout=30)

    @patch('redis.StrictRedis')
    def test_get_conn(self, mock_StrictRedis):
        mock_redis_instance = mock_StrictRedis.return_value
        RedisCache.get_conn()
        self.assertEqual(RedisCache.redis_instance, mock_redis_instance)

if __name__ == '__main__':
    unittest.main()
test result:
..
----------------------------------------------------------------------
Ran 2 tests in 0.004s
OK
Name Stmts Miss Cover Missing
------------------------------------------------------------------------------
src/stackoverflow/70016401/redis_cache.py 11 0 100%
src/stackoverflow/70016401/test_redis_cache.py 18 0 100%
------------------------------------------------------------------------------
TOTAL 29 0 100%

Flask REST API TypeError

When I send a POST with the raw body {"name" : "mike", "password" : "demo"} to "/register" in Postman, I get this error. Please help me correct it:
line 62, in signup_user
    hashed_password = generate_password_hash(data['password'], method='sha256')
TypeError: 'NoneType' object is not subscriptable
from flask import Flask, request, jsonify, make_response
from flask_sqlalchemy import SQLAlchemy
from werkzeug.security import generate_password_hash, check_password_hash
import uuid
import jwt
import datetime
from functools import wraps

app = Flask(__name__)
app.config['SECRET_KEY'] = 'Th1s1ss3cr3t'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///library.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True

db = SQLAlchemy(app)

class Users(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    public_id = db.Column(db.Integer)
    name = db.Column(db.String(50))
    password = db.Column(db.String(50))
    admin = db.Column(db.Boolean)

class Authors(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), unique=True, nullable=False)
    book = db.Column(db.String(20), unique=True, nullable=False)
    country = db.Column(db.String(50), nullable=False)
    booker_prize = db.Column(db.Boolean)
    user_id = db.Column(db.Integer)

def token_required(f):
    @wraps(f)
    def decorator(*args, **kwargs):
        token = None
        if 'x-access-tokens' in request.headers:
            token = request.headers['x-access-tokens']
        if not token:
            return jsonify({'message': 'a valid token is missing'})
        try:
            data = jwt.decode(token, app.config[SECRET_KEY])
            current_user = Users.query.filter_by(public_id=data['public_id']).first()
        except:
            return jsonify({'message': 'token is invalid'})
        return f(current_user, *args, **kwargs)
    return decorator

@app.route('/register', methods=['GET', 'POST'])
def signup_user():
    data = request.get_json()
    hashed_password = generate_password_hash(data['password'], method='sha256')
    new_user = Users(public_id=str(uuid.uuid4()), name=data['name'], password=hashed_password, admin=False)
    db.session.add(new_user)
    db.session.commit()
    return jsonify({'message': 'registered successfully'})
Most likely your request is missing the Content-Type header:
Content-Type:application/json
In Postman you can set it in Headers tab. Another option is to select "raw" and "JSON (application/json)" in Body tab just above the text area.
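If you also want the endpoint itself to fail more gracefully when the header or the body is missing, one option (a sketch of a variant of the question's signup_user view, not part of the original answer) is to let get_json() return None instead of raising and check the result:
@app.route('/register', methods=['POST'])
def signup_user():
    # silent=True returns None instead of raising when the body is not valid JSON;
    # force=True would parse the body even without a JSON Content-Type header
    data = request.get_json(silent=True)
    if not data or 'name' not in data or 'password' not in data:
        return jsonify({'message': 'expected a JSON body with name and password'}), 400
    hashed_password = generate_password_hash(data['password'], method='sha256')
    new_user = Users(public_id=str(uuid.uuid4()), name=data['name'], password=hashed_password, admin=False)
    db.session.add(new_user)
    db.session.commit()
    return jsonify({'message': 'registered successfully'})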

import and mapping csv to sqlalchemy dynamically

I am creating a database using SQLAlchemy in a Flask app and filling it from an existing CSV, taking only selected columns, so I use pandas. Here is my class creation.
I need to add Company objects and commit them in a dynamic way, but the approach below does not work. The CSV file is not small (about 20,000 records), so I cannot add them manually. Any suggestions for adding them dynamically?
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from flask import jsonify

Base = declarative_base()

class Company(Base):
    __tablename__ = 'forbesglobal2000_2016'
    id = Column(Integer, primary_key=True)
    name = Column(String(250), nullable=False)
    profits = Column(String(250), nullable=False)
    marketValue = Column(String(250), nullable=False)
    revenue = Column(String(250), nullable=False)
    industry = Column(String(250), nullable=False)

class SIC(Base):
    __tablename__ = "SIC"
    id = Column(Integer, primary_key=True)
    SIC = Column(Integer, nullable=False)
    Industry_name = Column(String(250), ForeignKey('forbesglobal2000_2016.industry'))
    Indusrty = relationship(Company)

# configuration part
engine = create_engine('sqlite:///CompainesData.db')
Base.metadata.create_all(engine)
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from database_setup import *
import pandas as pd

# opening connection with database
engine = create_engine('sqlite:///CompainesData.db')
Base.metadata.bind = engine

# Clear database
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)

DBSession = sessionmaker(bind=engine)
session = DBSession()

df = pd.read_csv("forbesglobal2000-2016.csv")
df1 = pd.read_csv("SIC.csv")

# market valuation, revenue, profits and industry
profit_column = df.profits
name_column = df.name
industry_column = df.industry
revenue_column = df.revenue
marketvalue_column = df.marketValue
industry_column_f = df1.Description
SIC_column = df1.SICCode

company = []
i = 1
while i < name_column.__len__():
    company[i] = Company(name=name_column[i], industry=industry_column[i], marketValue=marketvalue_column[i],
                         profits=profit_column[i], revenue=revenue_column[i])
    i = i + 1

for i in company:
    session.add(i)
    session.commit()

# printing test
com = session.query(Company).all()
for f in com:
    print(f.name)
    print(f.industry)
    print(f.profits)
    print(f.revenue)
    print(f.marketValue)
If you want to load data from CSV files into the database, just use the df.to_sql() function; it allows you to do that. For example:
df.to_sql(con=engine, name=airlines.__tablename__, if_exists='replace', index=False)
Pay attention to index=False; it is used to ignore the pandas index column.
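Adapted to the question's Company model, that could look roughly like this (a sketch; it assumes the CSV column names already match the model's columns, otherwise rename them first):
import pandas as pd
from database_setup import Company, engine

df = pd.read_csv("forbesglobal2000-2016.csv")

# keep only the columns the table defines, then bulk-load them
df = df[['name', 'profits', 'marketValue', 'revenue', 'industry']]
df.to_sql(con=engine, name=Company.__tablename__, if_exists='replace', index=False)
Note that if_exists='replace' drops and recreates the table with column types inferred by pandas, so the id primary key declared on the model is lost; if_exists='append' keeps the existing schema instead.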
I think the index should start at 0, not 1:
i = 1
should be
i = 0
Can you try that?
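Beyond the starting index, company[i] = ... assigns into an empty list and raises an IndexError; a sketch of the loop rewritten with append (same columns as in the question) would be:
company = []
for i in range(len(name_column)):
    company.append(Company(name=name_column[i],
                           industry=industry_column[i],
                           marketValue=marketvalue_column[i],
                           profits=profit_column[i],
                           revenue=revenue_column[i]))

session.add_all(company)
session.commit()   # one commit for the whole batch is much faster than one per row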