Why is SQLALCHEMY_DATABASE_URI set to "sqlite:///:memory:" when I set it to a path in my Config? - flask-sqlalchemy

I am learning Flask by following chapter 4 of Miguel Grinberg's Flask Mega-Tutorial. When I run any Flask command from the Anaconda command prompt I get an error that includes "Neither SQLALCHEMY_DATABASE_URI nor SQLALCHEMY_BINDS is set.", and as a result an SQLite database is created in memory.
But I have created a Config object that sets SQLALCHEMY_DATABASE_URI, SECRET_KEY and SQLALCHEMY_TRACK_MODIFICATIONS, and I have tested the Python separately, and it all works.
I have tried everything I can think of, including testing snippets of code separately, at least 8 hours of searching the web, and trawling through Grinberg's posts; nothing works. One commenter, Graham (post #29), seems to have had the same problem, but Grinberg does not give a useful answer.
Here is my app init code (__init__.py):
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from config import Config
app = Flask(__name__)
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
from app import routes, models
Here is my config (config.py); it works when run separately.
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'app.db')
    SQLALCHEMY_TRACK_MODIFICATIONS = False
For completeness, here are my routes and models.
from flask import render_template, flash, redirect, url_for
from app import app
from app.forms import LoginForm

@app.route('/')
@app.route('/index')
def index():
    user = {'username': 'Miguel'}
    posts = [
        {
            'author': {'username': 'John'},
            'body': 'Beautiful day in Portland!'
        },
        {
            'author': {'username': 'Susan'},
            'body': 'The Avengers movie was so cool!'
        }
    ]
    return render_template('index.html', title='Home', user=user, posts=posts)

@app.route('/login', methods=['GET', 'POST'])
def login():
    form = LoginForm()
    if form.validate_on_submit():
        flash('Login requested for user {}, remember_me={}'.format(
            form.username.data, form.remember_me.data))
        return redirect(url_for('index'))
    return render_template('login.html', title='Sign In', form=form)
and
from datetime import datetime
from app import db

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), index=True, unique=True)
    email = db.Column(db.String(120), index=True, unique=True)
    password_hash = db.Column(db.String(128))
    posts = db.relationship('Post', backref='author', lazy='dynamic')

    def __repr__(self):
        return '<User {}>'.format(self.username)

class Post(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    body = db.Column(db.String(140))
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))

    def __repr__(self):
        return '<Post {}>'.format(self.body)
What should happen is that when I run a command like
> flask db init
or
> flask db migrate -m "users table"
the command should complete successfully, because SQLALCHEMY_DATABASE_URI should point at app.db in the application directory and the SQLite database should be created as app.db.
Instead I get error messages stating that SQLALCHEMY_DATABASE_URI is not set and that it has therefore been set to "sqlite:///:memory:".
My app needs a persistent database! Why aren't SQLALCHEMY_DATABASE_URI and SQLALCHEMY_TRACK_MODIFICATIONS being set?

This problem has gone away by itself, but since others may experience it I decided to describe the work-around I used, to save them some frustration. I think the original problem may have been due to the sequence in which I was importing packages/modules and instantiating classes/objects in my __init__.py.
The workaround is to comment out the original config statement and set the config variables, including the SQLite database, directly in __init__.py.
# app.config.from_object(Config)
app.config["SECRET_KEY"] = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL') or \
'sqlite:///' + 'C:\\...path...\\app.db'
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
...
The workaround can probably be backed off a little, keeping the path portable, by using:
import os
basedir = os.path.abspath(os.path.dirname(__file__))
...
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'app.db')
...
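Putting the pieces together, a minimal sketch of the patched __init__.py might look like the following (this is my consolidation, not taken verbatim from the post; note that basedir here is the app/ package directory, so app.db ends up inside it rather than in the project root):

import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate

# basedir is now the app/ package directory, not the project root where config.py lives
basedir = os.path.abspath(os.path.dirname(__file__))

app = Flask(__name__)
# app.config.from_object(Config)  # the original config statement, commented out
app.config["SECRET_KEY"] = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL') or \
    'sqlite:///' + os.path.join(basedir, 'app.db')
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False

db = SQLAlchemy(app)
migrate = Migrate(app, db)

from app import routes, models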

Related

Is flask_restful compatible with virtual environments?

I am making an API with Flask-RESTful, but when I make the POST request
http://127.0.0.1:5000/bot?id_articulo=1&url_articulo=www.wiki.org
I get the message
"message": "The browser (or proxy) sent a request that this server could not understand."
My Python code is:
from flask import Flask
from flask_restful import Resource, Api, reqparse
import pandas as pd

app = Flask(__name__)
api = Api(app)

class Bot(Resource):
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument('id_articulo', required=True, type=int)
        parser.add_argument('url_articulo', required=True, type=str)
        args = parser.parse_args()
        print(args)
        data_articulo = pd.read_csv('articulos.csv')
        print(data_articulo)
        if args['url_articulo'] in list(data_articulo['url']):
            return {
                'mensage': f"El artículo '{args['url_articulo']}' ya existe."
            }, 409
        else:
            nueva_columna = pd.DataFrame({
                'id_articulo': [args['id_articulo']],
                'url': [args['url_articulo']],
            })
            data_articulo = data_articulo.append(nueva_columna, ignore_index=True)
            data_articulo.to_csv('articulos.csv', index=False)
            return {'data': data_articulo.to_dict()}, 200

api.add_resource(Bot, '/bot', methods=['POST'])

if __name__ == '__main__':
    app.run()
Now, I noticed that the error message is thrown only when I am in a virtual environment whose requirements.txt is
aniso8601==9.0.1
click==8.1.3
colorama==0.4.5
Flask==2.1.2
Flask-RESTful==0.3.9
importlib-metadata==4.12.0
itsdangerous==2.1.2
Jinja2==3.1.2
joblib==1.1.0
MarkupSafe==2.1.1
numpy==1.23.1
pandas==1.4.3
python-dateutil==2.8.2
pytz==2022.1
six==1.16.0
Werkzeug==2.1.2
zipp==3.8.0
So far I don't have a clue about what is going on, which makes me think that the flask_restful library has issues with virtual environments, and I would like to know how to make this work properly in one.
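One possible direction, and this is an assumption on my part rather than something established in the thread: with the Flask-RESTful 0.3.9 / Werkzeug 2.1.2 combination pinned above, reqparse's default behaviour of also trying to read arguments from the request body can end in exactly this generic 400 when the data only arrives in the query string. Explicitly telling each argument to read from the query string is worth trying; a minimal sketch:

from flask import Flask
from flask_restful import Resource, Api, reqparse

app = Flask(__name__)
api = Api(app)

class Bot(Resource):
    def post(self):
        parser = reqparse.RequestParser()
        # location='args' makes reqparse read the values from the query string
        # instead of trying to parse a JSON/form body
        parser.add_argument('id_articulo', required=True, type=int, location='args')
        parser.add_argument('url_articulo', required=True, type=str, location='args')
        args = parser.parse_args()
        return {'data': dict(args)}, 200

api.add_resource(Bot, '/bot')

if __name__ == '__main__':
    app.run()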

How to get the messages of a Telegram channel with the python-telegram-bot tool

I was wondering if there is a way to get the messages of a Telegram channel, given that I am logged in to the account and am an admin of the channel, so I just want to get the messages.
import feedparser
from telegram import Update, ForceReply, InlineKeyboardButton, InlineKeyboardMarkup
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, CallbackContext, CallbackQueryHandler
from bs4 import BeautifulSoup
from datetime import datetime
import json
import telegram
from time import sleep
from telegram.ext import MessageHandler, Filters

class Config:
    def __init__(self):
        with open("config.json", "r") as config:
            self.config = json.load(config)

class TelegramBotChannel:
    def __init__(self, token, start_channel_id):
        self.updater = Updater(token=token, use_context=True)
        self.dispatcher = self.updater.dispatcher
        self.start_channel_id = start_channel_id

if __name__ == '__main__':
    telegram_bot = TelegramBotChannel(Config().config["token"], Config().config["start"])
    pass
This is the minimal code to fetch the messages from a channel using a Telegram bot that is a member of the channel (only admin membership is possible for bots). Provide the correct bot API token as KEY:
from api_keys import bot_api_key as KEY
from telegram.ext import Updater, Filters, MessageHandler

updater = Updater(token=KEY, use_context=True)
dispatcher = updater.dispatcher

def forwarder(update, context):
    msg = update.channel_post
    if msg:
        print(msg)

forwardHandler = MessageHandler(Filters.text & (~Filters.command), forwarder)
dispatcher.add_handler(forwardHandler)

updater.start_polling()
updater.idle()
Bots can only get updates about channel posts if they are a member of that channel (and bots can only be added to channels as admins). If they are admins in the channel, they will receive updates just like from every other chat.
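As a small illustrative refinement of the snippet above (my addition, not part of either answer), the handler can ignore posts from any other channel the bot happens to be in by checking the chat id; the id below is a hypothetical placeholder:

TARGET_CHANNEL_ID = -1001234567890  # hypothetical id of the channel to listen to

def forwarder(update, context):
    msg = update.channel_post
    # channel_post is only set for channel updates; also check it is our channel
    if msg and msg.chat_id == TARGET_CHANNEL_ID:
        print(msg.text)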
Requirements:
Your bot should be in the channel, obviously as an admin.
So first just make a function:
def forwader(update, context):
    context.bot.copy_message("@temporary2for", "@tempmain", update.channel_post.message_id)
After that make a handler:
forwadHandler = MessageHandler(Filters.text & (~Filters.command), forwader)
Then register your handler:
dispatcher.add_handler(forwadHandler)
Then don't forget to start bot polling:
updater.start_polling()
updater.idle()
Full code:
from telegram import bot
from telegram.ext import Updater, CommandHandler, Filters, MessageHandler
from config import useless
import logging

updater = Updater(token=useless, use_context=True)
dispatcher = updater.dispatcher

import logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                    level=logging.INFO)

def forwader(update, context):
    context.bot.copy_message("@temporary2for", "@tempmain", update.channel_post.message_id)

forwadHandler = MessageHandler(Filters.text & (~Filters.command), forwader)
dispatcher.add_handler(forwadHandler)

updater.start_polling()
updater.idle()
Some of the imports are useless.

Session Auth in Django-Rest-Framework: is this really what I have to do to make it CSRF safe?

First off, this code works, it just doesn't feel as clean as it should be for something so simple.
Background:
I'm trying to make a custom login API endpoint in DRF that will be consumed by the React frontend. It seems you have to manually force a CSRF cookie to be sent in DRF, so that's what I have done.
I didn't want to send over a Django Form because it didn't seem RESTful, but this is the only method I could find to avoid that. Please let me know if this is clean code.
Serializers.py
from rest_framework import serializers
from django.contrib.auth import get_user_model # If used custom user model

UserModel = get_user_model()

class UserSerializer(serializers.ModelSerializer):
    password = serializers.CharField(write_only=True)

    def create(self, validated_data):
        user = UserModel.objects.create_user(
            username=validated_data['username'],
            password=validated_data['password'],
            email=validated_data['email'],
        )
        return user

    class Meta:
        model = UserModel
        # Tuple of serialized model fields (see link [2])
        fields = ( "id", "username", 'email', "password", )
View.py
from rest_framework import permissions
from django.contrib.auth import get_user_model # If used custom user model
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from .serializers import UserSerializer
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_protect

class CreateUserView(APIView):
    model = get_user_model()
    permission_classes = [
        permissions.AllowAny # Or anon users can't register
    ]
    serializer_class = UserSerializer

    @method_decorator(ensure_csrf_cookie)
    def get(self, request, format=None):
        return Response(status=status.HTTP_200_OK)

    @method_decorator(csrf_protect)
    def post(self, request, format=None):
        serializer = UserSerializer(data=request.data)
        if serializer.is_valid():
            serializer.create(serializer.validated_data)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
CSRF is enforced by Django, not DRF, and as documented, CSRF protection only kicks in when the user is logged in.
Login and registration actions do not need to be CSRF protected, because the password is required and cannot be guessed by the attacker in a CSRF attack scenario.
Also, per the Django spec, GET views are not protected by CSRF. However, GET actions should not change the state of your application. If that is not the case, and you are able to implement CSRF protection on your front end (which is possible for a REST app, but not with a default Django app), you can manually protect the view with your decorator.
This is mainly not a DRF issue but a Django issue.
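To illustrate the point about registration not needing the manual decorators, here is a minimal sketch of the same endpoint as a DRF generic view (my own sketch reusing the question's UserSerializer, not code from the original answer); DRF's SessionAuthentication only enforces CSRF for requests that are already session-authenticated, so an anonymous registration POST goes through without the csrf_protect wrapper:

from rest_framework import generics, permissions
from .serializers import UserSerializer  # the serializer from the question

class CreateUserView(generics.CreateAPIView):
    # AllowAny, otherwise anonymous users could not register
    permission_classes = [permissions.AllowAny]
    serializer_class = UserSerializer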

StaticLiveServerTestCase not logging in even though Client().login works

I'm trying to use selenium with StaticLiveServerTestCase in order to run the django server in the tests automatically.
Even though Client().login returns success, I still can't get past the login page (by default, if the user is logged in it redirects to the homepage).
After some debugging it turns out that request.user = AnonymousUser while User.objects.all() = <QuerySet [<User: USERNAME>]>.
It means that the user was created successfully, but is not logged in for some reason.
Here is the code:
class SeleniumTestCase(StaticLiveServerTestCase):
    def setUp(self):
        self.initialize()

    def tearDown(self):
        self.selenium.quit()

    def initialize(self):
        self.create_test_user()
        self.run_chrome()
        self.login()

    def run_chrome(self):
        chrome_options = Options()
        chrome_options.add_argument("--start-maximized")
        self.selenium = webdriver.Chrome(chrome_options=chrome_options)
        self.wait = WebDriverWait(self.selenium, 10)

    @staticmethod
    def create_test_user():
        user = User.objects.create_superuser(
            username=USERNAME,
            email=EMAIL,
            is_active=True,
            password=PASSWORD)
        user.save()

    def login(self):
        login_success = self.client.login(username=USERNAME, password=PASSWORD)
        self.assertTrue(login_success)
        self.selenium.get(self.live_server_url)

    def test1(self):
        self.assertTrue(True)
If I sign in via the GUI of my website (through selenium), I can get past the login page, but with self.client.login I can't.
Any ideas?
Thanks
I was having this problem myself and initially thought it might be an anomaly of the clicks or something in the form. Notice though that in LiveServerTestCase.setUpClass() extra threads are created, so some actions become isolated and will basically seem to have no effect on Django's back-end database.
I needed to call setUpClass() to get my fixtures, but I didn't want this thread interference just yet.
The solution for me was to make my own call to load the fixtures, then proceed with the rest of super's setup:
class TestSalesUI(StaticLiveServerTestCase, TestCase):
    hidden_fixtures = ["bevrly/fixtures/fish_dinners.json",
                       "wine/fixtures/wine_sizes.json"]

    @classmethod
    def setUpClass(cls):
        for db_name in cls._databases_names(include_mirrors=False):
            try:
                call_command('loaddata', *cls.hidden_fixtures, **{
                    'verbosity': 0,
                    'commit': False,
                    'database': db_name,
                })
            except Exception:
                cls._rollback_atomics(cls.cls_atomics)
                raise
        User = get_user_model()
        cls.carpenter = User.objects.get(username="carpenter")
        # Password needs to be set now rather than in a fixture, for new hashing.
        cls.carpenter.set_password(CARPENTER_PASSWORD)
        cls.carpenter.save()
        super(TestSalesUI, cls).setUpClass()  # New threads now
        cls.driver = webdriver.Chrome()
        cls.driver.implicitly_wait(10)  # ⏳☕🖵
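A separate point worth checking in the original question, independent of the fixture/thread issue and not covered by the answer above: self.client and the selenium browser are two independent HTTP clients, so a session created by self.client.login() is never sent by selenium. A common workaround is to copy the test client's session cookie into the browser; here is a sketch of a replacement login() for the question's SeleniumTestCase, assuming the default session cookie settings:

from django.conf import settings

def login(self):
    login_success = self.client.login(username=USERNAME, password=PASSWORD)
    self.assertTrue(login_success)
    # Open any page first so the browser is on the live server's domain,
    # then inject the session cookie that self.client.login() created.
    self.selenium.get(self.live_server_url)
    session_cookie = self.client.cookies[settings.SESSION_COOKIE_NAME]
    self.selenium.add_cookie({
        'name': settings.SESSION_COOKIE_NAME,
        'value': session_cookie.value,
        'path': '/',
    })
    self.selenium.refresh()  # subsequent requests from the browser carry the session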

OAuth2Decorator: Using developer's token to run API calls for user

For the "normal" oauth2 dance, I get to specify the user and get a corresponding token.
This allows me to make API calls masquerading as that user, i.e. on his behalf.
It can also allow the user to make calls masquerading as me.
A use case is bigquery where I don't have to grant table access to the user and I can specify my own preferred level of control.
Using the simplified OAuth2Decorator, I don't seem to have this option.
Am I right to say that?
Or is there a work-around?
In general, what is the best practice: to use the full OAuth flow (comprising Flow, Credentials and Storage), or to use OAuth2Decorator?
Thank you very much.
You can certainly use an OAuth2Decorator.
Here is an example:
main.py
import bqclient
import httplib2
import os

from django.utils import simplejson as json
from google.appengine.api import memcache
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from oauth2client.appengine import oauth2decorator_from_clientsecrets

PROJECT_ID = "xxxxxxxxxxx"
DATASET = "your_dataset"
QUERY = "select columns from dataset.table"
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')

http = httplib2.Http(memcache)
decorator = oauth2decorator_from_clientsecrets(CLIENT_SECRETS,
    'https://www.googleapis.com/auth/bigquery')
bq = bqclient.BigQueryClient(http, decorator)

class MainHandler(webapp.RequestHandler):
    @decorator.oauth_required
    def get(self):
        data = {'data': json.dumps(bq.Query(QUERY, PROJECT_ID))}
        template = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(render(template, data))

application = webapp.WSGIApplication([('/', MainHandler),], debug=True)

def main():
    run_wsgi_app(application)

if __name__ == '__main__':
    main()
bqclient.py, which gets imported in your main.py and handles the BigQuery actions:
from apiclient.discovery import build

class BigQueryClient(object):
    def __init__(self, http, decorator):
        """Creates the BigQuery client connection"""
        self.service = build('bigquery', 'v2', http=http)
        self.decorator = decorator

    def Query(self, query, project, timeout_ms=10):
        query_config = {
            'query': query,
            'timeoutMs': timeout_ms
        }
        decorated = self.decorator.http()
        queryReply = (self.service.jobs()
                      .query(projectId=project, body=query_config)
                      .execute(decorated))
        jobReference = queryReply['jobReference']
        while(not queryReply['jobComplete']):
            queryReply = self.service.jobs().getQueryResults(
                projectId=jobReference['projectId'],
                jobId=jobReference['jobId'],
                timeoutMs=timeout_ms).execute(decorated)
        return queryReply
where all your authentication details are kept in a JSON file, client_secrets.json:
{
  "web": {
    "client_id": "xxxxxxxxxxxxxxx",
    "client_secret": "xxxxxxxxxxxxxxx",
    "redirect_uris": ["http://localhost:8080/oauth2callback"],
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://accounts.google.com/o/oauth2/token"
  }
}
finally, don't forget to add these lines to your app.yaml:
- url: /oauth2callback
  script: oauth2client/appengine.py
Hope that helps.
I am not sure I completely understand the use case, but if you are creating an application for others to use without their having to authorize access based on their own credentials, I would recommend using App Engine service accounts.
An example of this type of auth flow is described in the App Engine service accounts + Prediction API article.
Also, see this part and this part of the App Engine Datastore to BigQuery codelab, which also uses this authorization method.
The code might look something like this:
import httplib2
# Available in the google-api-python-client lib
from apiclient.discovery import build
from oauth2client.appengine import AppAssertionCredentials
# BigQuery Scope
SCOPE = 'https://www.googleapis.com/auth/bigquery'
# Instantiate and authorize a BigQuery API client
credentials = AppAssertionCredentials(scope=SCOPE)
http = credentials.authorize(httplib2.Http())
bigquery_service = build("bigquery", "v2", http=http)
# Make some calls to the API
jobs = bigquery_service.jobs()
result = jobs.insert(projectId='some_project_id',body='etc, etc')